/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "regs.h" /* For reg_renumber.  */
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfgcleanup.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimple-expr.h"
#include "gimple-walk.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "stringpool.h"
#include "tree-ssa-address.h"
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#define NAME__MAIN "__main"
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple *currently_expanding_gimple_stmt;
static rtx expand_debug_expr (tree);

static bool defer_stack_allocation (tree, bool);

static void record_alignment_for_reg_var (unsigned int);
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple *stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt) && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
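
/* For example, SSAVAR applied to an SSA_NAME whose SSA_NAME_VAR is the user
   variable 'x' yields that VAR_DECL (or NULL_TREE for an anonymous SSA name),
   while applied to a plain decl it is the identity.  */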
/* Choose either CUR or NEXT as the leader DECL for a partition.
   Prefer ignored decls, to simplify debug dumps and reduce ambiguity
   out of the same user variable being in multiple partitions (this is
   less likely for compiler-introduced temps).  */

static tree
leader_merge (tree cur, tree next)
{
  if (cur == NULL || cur == next)
    return next;

  if (DECL_P (cur) && DECL_IGNORED_P (cur))
    return cur;

  if (DECL_P (next) && DECL_IGNORED_P (next))
    return next;

  return cur;
}
/* Associate declaration T with storage space X.  If T is no
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */

static inline void
set_rtl (tree t, rtx x)
{
  gcc_checking_assert (!x
                       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
                       || (use_register_for_decl (t)
                           ? (REG_P (x)
                              || (GET_CODE (x) == CONCAT
                                  && (REG_P (XEXP (x, 0))
                                      || SUBREG_P (XEXP (x, 0)))
                                  && (REG_P (XEXP (x, 1))
                                      || SUBREG_P (XEXP (x, 1))))
                              /* We need to accept PARALLELs for RESULT_DECLs
                                 because of vector types with BLKmode returned
                                 in multiple registers, but they are supposed
                                 to be uncoalesced.  */
                              || (GET_CODE (x) == PARALLEL
                                  && SSAVAR (t)
                                  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                                  && (GET_MODE (x) == BLKmode
                                      || !flag_tree_coalesce_vars)))
                           : (MEM_P (x) || x == pc_rtx
                              || (GET_CODE (x) == CONCAT
                                  && MEM_P (XEXP (x, 0))
                                  && MEM_P (XEXP (x, 1))))));
  /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
     RESULT_DECLs has the expected mode.  For memory, we accept
     unpromoted modes, since that's what we're likely to get.  For
     PARM_DECLs and RESULT_DECLs, we'll have been called by
     set_parm_rtl, which will give us the default def, so we don't
     have to compute it ourselves.  For RESULT_DECLs, we accept mode
     mismatches too, as long as we have BLKmode or are not coalescing
     across variables, so that we don't reject BLKmode PARALLELs or
     unpromoted REGs.  */
  gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
                       || (SSAVAR (t)
                           && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                           && (promote_ssa_mode (t, NULL) == BLKmode
                               || !flag_tree_coalesce_vars))
                       || !use_register_for_decl (t)
                       || GET_MODE (x) == promote_ssa_mode (t, NULL));

  if (x)
    {
      bool skip = false;
      tree cur = NULL_TREE;
      rtx xm = x;

    retry:
      if (MEM_P (xm))
        cur = MEM_EXPR (xm);
      else if (REG_P (xm))
        cur = REG_EXPR (xm);
      else if (SUBREG_P (xm))
        {
          gcc_assert (subreg_lowpart_p (xm));
          xm = SUBREG_REG (xm);
          goto retry;
        }
      else if (GET_CODE (xm) == CONCAT)
        {
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (GET_CODE (xm) == PARALLEL)
        {
          xm = XVECEXP (xm, 0, 0);
          gcc_assert (GET_CODE (xm) == EXPR_LIST);
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (xm == pc_rtx)
        skip = true;
      else
        gcc_unreachable ();

      tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);

      if (cur != next)
        {
          if (MEM_P (x))
            set_mem_attributes (x,
                                next && TREE_CODE (next) == SSA_NAME
                                ? TREE_TYPE (next)
                                : next, true);
          else
            set_reg_attrs_for_decl_rtl (next, x);
        }
    }

  if (TREE_CODE (t) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, t);
      if (part != NO_PARTITION)
        {
          if (SA.partition_to_pseudo[part])
            gcc_assert (SA.partition_to_pseudo[part] == x);
          else if (x != pc_rtx)
            SA.partition_to_pseudo[part] = x;
        }
      /* For the benefit of debug information at -O0 (where
         vartracking doesn't run) record the place also in the base
         DECL.  For PARMs and RESULTs, do so only when setting the
         default def.  */
      if (x && x != pc_rtx && SSA_NAME_VAR (t)
          && (VAR_P (SSA_NAME_VAR (t))
              || SSA_NAME_IS_DEFAULT_DEF (t)))
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  poly_uint64 size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)
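
/* Partition members are threaded through the NEXT fields: starting at a
   representative R, the members are R, stack_vars[R].next, and so on until
   EOC, and each member's REPRESENTATIVE field refers back to R.  */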
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
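
/* For example (values are target-dependent): with a preferred stack
   boundary of 16 bytes and targetm.starting_frame_offset () returning 8,
   frame_phase is 16 - 8 = 8, so offsets handed out by
   alloc_stack_frame_space are biased by 8 before being aligned.  */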
/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align;

  if (TREE_CODE (decl) == SSA_NAME)
    align = TYPE_ALIGN (TREE_TYPE (decl));
  else
    {
      align = LOCAL_DECL_ALIGNMENT (decl);
      SET_DECL_ALIGN (decl, align);
    }
  return align / BITS_PER_UNIT;
}
/* Align given offset BASE with ALIGN.  Truncate up if ALIGN_UP is true,
   down otherwise.  Return truncated BASE value.  */

static inline unsigned HOST_WIDE_INT
align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
{
  return align_up ? (base + align - 1) & -align : base & -align;
}
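
/* For example, with ALIGN == 8 (a power of two), -align is a mask that
   clears the low three bits: align_base (13, 8, true) == 16 and
   align_base (13, 8, false) == 8.  */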
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static poly_int64
alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
{
  poly_int64 offset, new_frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset
        = aligned_lower_bound (frame_offset - frame_phase - size,
                               align) + frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset
        = aligned_upper_bound (frame_offset - frame_phase,
                               align) + frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
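
/* Illustrative walk-through, assuming FRAME_GROWS_DOWNWARD and a frame_phase
   of 0: starting from frame_offset == -16, a request for 12 bytes at 8-byte
   alignment sets frame_offset to aligned_lower_bound (-16 - 12, 8) == -32
   and returns -32 as the offset of the new slot.  */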
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  tree size = TREE_CODE (decl) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
    : DECL_SIZE_UNIT (decl);
  v->size = tree_to_poly_uint64 (size);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (known_eq (v->size, 0U))
    v->size = 1;
  v->alignb = align_local_variable (decl);
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}
/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}
/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (!VAR_P (lhs))
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = decl_to_stack_part->get (lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}
/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we for instance moved an address-taken
     operation upward, without also moving a dereference to it upwards.
     But it's conservatively correct as a variable never can hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  poly_int64 sizea = stack_vars[ia].size;
  poly_int64 sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing  */
  int diff = compare_sizes_for_sort (sizeb, sizea);
  if (diff != 0)
    return diff;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
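
/* As an illustration, after qsort with this comparator the "large"
   over-aligned variables come first, followed by the remaining variables in
   order of decreasing size: e.g. a 64-byte buffer sorts ahead of an 8-byte
   scalar, and equal-sized entries are ordered by alignment and finally by
   SSA version / DECL_UID so the result is stable.  */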
struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};

typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               part_hashmap *decls_to_partitions,
                               hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = new part_hashmap;
          cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          decls_to_partitions->put (uid, part);
          cfun->gimple_df->decls_to_pointers->put (decl, name);
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      tree name;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      FOR_EACH_SSA_NAME (i, name, cfun)
        {
          struct ptr_info_def *pi;

          if (POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           &visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/
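
/* For instance (illustrative): given three mutually non-conflicting objects
   of 32, 16 and 8 bytes, the 32-byte object is visited first and becomes the
   representative, and the 16- and 8-byte objects are unioned into its
   partition, so all three end up sharing the same stack slot.  */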
static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      poly_int64 isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          poly_int64 jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if (asan_sanitize_stack_p ()
              && maybe_ne (isize, jsize)
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
      print_dec (stack_vars[i].size, dump_file);
      fprintf (dump_file, " align %u\n", stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         poly_int64 offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
                   ? TYPE_MODE (TREE_TYPE (decl))
                   : DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = known_alignment (offset);
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      SET_DECL_ALIGN (decl, align);
      DECL_USER_ALIGN (decl) = 0;
    }

  set_rtl (decl, x);
}
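
/* For example, a decl placed at offset 24 from virtual_stack_vars_rtx
   (after subtracting frame_phase) has known_alignment () == 8, so it is
   given a 64-bit DECL_ALIGN unless that exceeds BASE_ALIGN, in which case
   BASE_ALIGN is used instead.  */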
struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reversed order; highest offset pairs come first.  */
  auto_vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  auto_vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  poly_uint64 large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  bool large_allocation_done = false;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* All "large" alignment decls come before all "small" alignment
             decls, but "large" alignment decls are not sorted based on
             their alignment.  Increase large_align to track the largest
             required alignment.  */
          if ((alignb * BITS_PER_UNIT) > large_align)
            large_align = alignb * BITS_PER_UNIT;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if (TREE_CODE (decl) == SSA_NAME
              ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
              : DECL_RTL (decl) != pc_rtx)
            continue;

          large_size = aligned_upper_bound (large_size, alignb);
          large_size += stack_vars[i].size;
        }
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      poly_int64 offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if (TREE_CODE (decl) == SSA_NAME
          ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
          : DECL_RTL (decl) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          base = virtual_stack_vars_rtx;
          /* ASAN description strings don't yet have a syntax for expressing
             polynomial offsets.  */
          HOST_WIDE_INT prev_offset;
          if (asan_sanitize_stack_p ()
              && pred
              && frame_offset.is_constant (&prev_offset)
              && stack_vars[i].size.is_constant ())
            {
              prev_offset = align_base (prev_offset,
                                        MAX (alignb, ASAN_MIN_RED_ZONE_SIZE),
                                        !FRAME_GROWS_DOWNWARD);
              tree repr_decl = NULL_TREE;
              unsigned HOST_WIDE_INT size
                = asan_var_and_redzone_size (stack_vars[i].size.to_constant ());
              if (data->asan_vec.is_empty ())
                size = MAX (size, ASAN_RED_ZONE_SIZE);

              unsigned HOST_WIDE_INT alignment = MAX (alignb,
                                                      ASAN_MIN_RED_ZONE_SIZE);
              offset = alloc_stack_frame_space (size, alignment);

              data->asan_vec.safe_push (prev_offset);
              /* Allocating a constant amount of space from a constant
                 starting offset must give a constant result.  */
              data->asan_vec.safe_push ((offset + stack_vars[i].size)
                                        .to_constant ());
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);

              /* Make sure a representative is unpoisoned if another
                 variable in the partition is handled by
                 use-after-scope sanitization.  */
              if (asan_handled_variables != NULL
                  && !asan_handled_variables->contains (repr_decl))
                {
                  for (j = i; j != EOC; j = stack_vars[j].next)
                    if (asan_handled_variables->contains (stack_vars[j].decl))
                      break;
                  if (j != EOC)
                    asan_handled_variables->add (repr_decl);
                }

              data->asan_alignb = MAX (data->asan_alignb, alignb);
              if (data->asan_base == NULL)
                data->asan_base = gen_reg_rtx (Pmode);
              base = data->asan_base;

              if (!STRICT_ALIGNMENT)
                base_align = crtl->max_used_stack_slot_alignment;
              else
                base_align = MAX (crtl->max_used_stack_slot_alignment,
                                  GET_MODE_ALIGNMENT (SImode)
                                  << ASAN_SHADOW_SHIFT);
            }
          else
            {
              offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
              base_align = crtl->max_used_stack_slot_alignment;
            }
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;

          /* If there were any variables requiring "large" alignment, allocate
             space.  */
          if (maybe_ne (large_size, 0U) && ! large_allocation_done)
            {
              poly_int64 loffset;
              rtx large_allocsize;

              large_allocsize = gen_int_mode (large_size, Pmode);
              get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
              loffset = alloc_stack_frame_space
                (rtx_to_poly_int64 (large_allocsize),
                 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
              large_base = get_dynamic_stack_base (loffset, large_align);
              large_allocation_done = true;
            }
          gcc_assert (large_base != NULL);

          large_alloc = aligned_upper_bound (large_alloc, alignb);
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (known_eq (large_alloc, large_size));
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */

static poly_uint64
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  poly_uint64 size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* Record the RTL assignment X for the default def of PARM.  */

void
set_parm_rtl (tree parm, rtx x)
{
  gcc_assert (TREE_CODE (parm) == PARM_DECL
              || TREE_CODE (parm) == RESULT_DECL);

  if (x && !MEM_P (x))
    {
      unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
                                              TYPE_MODE (TREE_TYPE (parm)),
                                              TYPE_ALIGN (TREE_TYPE (parm)));

      /* If the variable alignment is very large we'll dynamically
         allocate it, which means that in-frame portion is just a
         pointer.  ??? We've got a pseudo for sure here, do we
         actually dynamically allocate its spilling area if needed?
         ??? Isn't it a problem when Pmode alignment also exceeds
         MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32?  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = GET_MODE_ALIGNMENT (Pmode);

      record_alignment_for_reg_var (align);
    }

  tree ssa = ssa_default_def (cfun, parm);
  if (!ssa)
    return set_rtl (parm, x);

  int part = var_to_partition (SA.map, ssa);
  gcc_assert (part != NO_PARTITION);

  bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
  gcc_assert (changed);

  set_rtl (ssa, x);
  gcc_assert (DECL_RTL (parm) == x);
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var_1 (tree var)
{
  poly_uint64 size;
  poly_int64 offset;
  unsigned byte_align;

  if (TREE_CODE (var) == SSA_NAME)
    {
      tree type = TREE_TYPE (var);
      size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
      byte_align = TYPE_ALIGN_UNIT (type);
    }
  else
    {
      size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
      byte_align = align_local_variable (var);
    }

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}
/* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
   already assigned some MEM.  */

static void
expand_one_stack_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (MEM_P (x));
          return;
        }
    }

  return expand_one_stack_var_1 (var);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* Record the alignment requirements of some variable assigned to a
   pseudo register.  */

static void
record_alignment_for_reg_var (unsigned int align)
{
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;
}
/* Create RTL for an SSA partition.  */

static void
expand_one_ssa_partition (tree var)
{
  int part = var_to_partition (SA.map, var);
  gcc_assert (part != NO_PARTITION);

  if (SA.partition_to_pseudo[part])
    return;

  unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                          TYPE_MODE (TREE_TYPE (var)),
                                          TYPE_ALIGN (TREE_TYPE (var)));

  /* If the variable alignment is very large we'll dynamically allocate
     it, which means that in-frame portion is just a pointer.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = GET_MODE_ALIGNMENT (Pmode);

  record_alignment_for_reg_var (align);

  if (!use_register_for_decl (var))
    {
      if (defer_stack_allocation (var, true))
        add_stack_var (var);
      else
        expand_one_stack_var_1 (var);
      return;
    }

  machine_mode reg_mode = promote_ssa_mode (var, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* For a promoted variable, X will not be used directly but wrapped in a
     SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
     will assume that its upper bits can be inferred from its lower bits.
     Therefore, if X isn't initialized on every path from the entry, then
     we must do it manually in order to fulfill the above assumption.  */
  if (reg_mode != TYPE_MODE (TREE_TYPE (var))
      && bitmap_bit_p (SA.partitions_for_undefined_values, part))
    emit_move_insn (x, CONST0_RTX (reg_mode));
}
/* Record the association between the RTL generated for partition PART
   and the underlying variable of the SSA_NAME VAR.  */

static void
adjust_one_expanded_partition_var (tree var)
{
  if (!var)
    return;

  tree decl = SSA_NAME_VAR (var);

  int part = var_to_partition (SA.map, var);
  if (part == NO_PARTITION)
    return;

  rtx x = SA.partition_to_pseudo[part];

  gcc_assert (x);

  set_rtl (var, x);

  if (!REG_P (x))
    return;

  /* Note if the object is a user variable.  */
  if (decl && !DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (REG_P (x));
          return;
        }
      gcc_unreachable ();
    }

  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  tree size_unit = TREE_CODE (var) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (var))
    : DECL_SIZE_UNIT (var);
  poly_uint64 size;

  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = (poly_int_tree_p (size_unit, &size)
       && (estimated_poly_value (size)
           < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || asan_sanitize_stack_p ())
    return true;

  unsigned int align = TREE_CODE (var) == SSA_NAME
    ? TYPE_ALIGN (TREE_TYPE (var))
    : DECL_ALIGN (var);

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  bool ignored = TREE_CODE (var) == SSA_NAME
    ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
    : DECL_IGNORED_P (var);

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && ignored && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.  */

static poly_uint64
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
    {
      if (is_global_var (var))
        return 0;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  Similarly for
         SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
          || DECL_EXTERNAL (var)
          || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = GET_MODE_ALIGNMENT (Pmode);
    }

  record_alignment_for_reg_var (align);

  poly_uint64 size;
  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (!VAR_P (var)
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (VAR_P (var) && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        {
          expand_one_hard_reg_var (var);
          if (!DECL_HARD_REGISTER (var))
            /* Invalid register specification.  */
            expand_one_error_var (var);
        }
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
           || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          if (DECL_NONLOCAL_FRAME (var))
            error_at (DECL_SOURCE_LOCATION (current_function_decl),
                      "total size of local objects is too large");
          else
            error_at (DECL_SOURCE_LOCATION (var),
                      "size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        {
          if (lookup_attribute ("naked",
                                DECL_ATTRIBUTES (current_function_decl)))
            error ("cannot allocate stack for variable %q+D, naked function.",
                   var);

          expand_one_stack_var (origvar);
        }
      return size;
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3,
  SPCT_FLAG_EXPLICIT = 4
};
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8
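
/* For example, with the default --param ssp-buffer-size=8, a local
   "char buf[64]" classifies as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY,
   "char c[4]" as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, and a struct
   containing either of them additionally sets SPCT_HAS_AGGREGATE.  */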
static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
            len = max;
          else
            len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG
      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
          && lookup_attribute ("stack_protect",
                               DECL_ATTRIBUTES (current_function_decl))))
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
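
/* For example, under -fstack-protector-strong a plain "char buf[N]"
   classifies into phase 1, an array of ints or a struct containing an array
   into phase 2, and a scalar into phase 0 (not segregated).  */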
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}
/* And helper function that checks for asan phase (with stack protector
   it is phase 3).  This is used as callback for expand_stack_vars.
   Returns true if any of the vars in the partition need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
        return true;
      i = stack_vars[i].next;
    }
  return false;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = new hash_map<tree, size_t>;

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}
/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  delete decl_to_stack_part;
  decl_to_stack_part = NULL;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  poly_int64 size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return estimated_poly_value (size);
}
/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
        tree field_type = TREE_TYPE (f);
        if (RECORD_OR_UNION_TYPE_P (field_type)
            && record_or_union_type_has_array_p (field_type))
          return 1;
        if (TREE_CODE (field_type) == ARRAY_TYPE)
          return 1;
      }
  return 0;
}
/* Check if the current function has local referenced variables that
   have their addresses taken, contain an array, or are arrays.  */

static bool
stack_protect_decl_p ()
{
  unsigned i;
  tree var;

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    if (!is_global_var (var))
      {
        tree var_type = TREE_TYPE (var);
        if (VAR_P (var)
            && (TREE_CODE (var_type) == ARRAY_TYPE
                || TREE_ADDRESSABLE (var)
                || (RECORD_OR_UNION_TYPE_P (var_type)
                    && record_or_union_type_has_array_p (var_type))))
          return true;
      }
  return false;
}
/* Check if the current function has calls that use a return slot.  */

static bool
stack_protect_return_slot_p ()
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
         !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple *stmt = gsi_stmt (gsi);
        /* This assumes that calls to internal-only functions never
           use a return slot.  */
        if (is_gimple_call (stmt)
            && !gimple_call_internal_p (stmt)
            && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
                                  gimple_call_fndecl (stmt)))
          return true;
      }
  return false;
}
/* Expand all variables used in the function.  */

static rtx_insn *
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  auto_vec<tree> maybe_local_decls;
  rtx_insn *var_end_seq = NULL;
  unsigned i;
  unsigned len;
  bool gen_stack_protect_signal = false;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = targetm.starting_frame_offset () % align;
    frame_phase = off ? align - off : 0;
  }

  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    TREE_USED (var) = 1;
  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  init_vars_expansion ();

  if (targetm.use_pseudo_pic_reg ())
    pic_offset_table_rtx = gen_reg_rtx (Pmode);

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
        continue;

      tree var = partition_to_var (SA.map, i);

      gcc_assert (!virtual_operand_p (var));

      expand_one_ssa_partition (var);
    }

  if (flag_stack_protect == SPCT_FLAG_STRONG)
    gen_stack_protect_signal
      = stack_protect_decl_p () || stack_protect_return_slot_p ();

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = vec_safe_length (cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* Expand variables not associated with any block now.  Those created by
         the optimizers could be live anywhere in the function.  Those that
         could possibly have been scoped originally and detached from their
         block will have their allocation deferred so we coalesce them with
         others when optimization is enabled.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            maybe_local_decls.safe_push (var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!vec_safe_is_empty (cfun->local_decls))
    cfun->local_decls->block_remove (0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect != 0
          && (flag_stack_protect != SPCT_FLAG_EXPLICIT
              || (flag_stack_protect == SPCT_FLAG_EXPLICIT
                  && lookup_attribute ("stack_protect",
                                       DECL_ATTRIBUTES (current_function_decl)))))
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  switch (flag_stack_protect)
    {
    case SPCT_FLAG_ALL:
      create_stack_guard ();
      break;

    case SPCT_FLAG_STRONG:
      if (gen_stack_protect_signal
          || cfun->calls_alloca || has_protected_decls
          || lookup_attribute ("stack_protect",
                               DECL_ATTRIBUTES (current_function_decl)))
        create_stack_guard ();
      break;

    case SPCT_FLAG_DEFAULT:
      if (cfun->calls_alloca || has_protected_decls
          || lookup_attribute ("stack_protect",
                               DECL_ATTRIBUTES (current_function_decl)))
        create_stack_guard ();
      break;

    case SPCT_FLAG_EXPLICIT:
      if (lookup_attribute ("stack_protect",
                            DECL_ATTRIBUTES (current_function_decl)))
        create_stack_guard ();
      break;

    default:
      break;
    }

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      struct stack_vars_data data;

      data.asan_base = NULL_RTX;
      data.asan_alignb = 0;

      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1, &data);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == SPCT_FLAG_ALL
              || flag_stack_protect == SPCT_FLAG_STRONG
              || (flag_stack_protect == SPCT_FLAG_EXPLICIT
                  && lookup_attribute ("stack_protect",
                                       DECL_ATTRIBUTES (current_function_decl))))
            expand_stack_vars (stack_protect_decl_phase_2, &data);
);
2245 if (asan_sanitize_stack_p ())
2246 /* Phase 3, any partitions that need asan protection
2247 in addition to phase 1 and 2. */
2248 expand_stack_vars (asan_decl_phase_3
, &data
);
2250 /* ASAN description strings don't yet have a syntax for expressing
2251 polynomial offsets. */
2252 HOST_WIDE_INT prev_offset
;
2253 if (!data
.asan_vec
.is_empty ()
2254 && frame_offset
.is_constant (&prev_offset
))
2256 HOST_WIDE_INT offset
, sz
, redzonesz
;
2257 redzonesz
= ASAN_RED_ZONE_SIZE
;
2258 sz
= data
.asan_vec
[0] - prev_offset
;
2259 if (data
.asan_alignb
> ASAN_RED_ZONE_SIZE
2260 && data
.asan_alignb
<= 4096
2261 && sz
+ ASAN_RED_ZONE_SIZE
>= (int) data
.asan_alignb
)
2262 redzonesz
= ((sz
+ ASAN_RED_ZONE_SIZE
+ data
.asan_alignb
- 1)
2263 & ~(data
.asan_alignb
- HOST_WIDE_INT_1
)) - sz
;
2264 /* Allocating a constant amount of space from a constant
2265 starting offset must give a constant result. */
2266 offset
= (alloc_stack_frame_space (redzonesz
, ASAN_RED_ZONE_SIZE
)
2268 data
.asan_vec
.safe_push (prev_offset
);
2269 data
.asan_vec
.safe_push (offset
);
2270 /* Leave space for alignment if STRICT_ALIGNMENT. */
2271 if (STRICT_ALIGNMENT
)
2272 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode
)
2273 << ASAN_SHADOW_SHIFT
)
2274 / BITS_PER_UNIT
, 1);
2277 = asan_emit_stack_protection (virtual_stack_vars_rtx
,
2280 data
.asan_vec
.address (),
2281 data
.asan_decl_vec
.address (),
2282 data
.asan_vec
.length ());
2285 expand_stack_vars (NULL
, &data
);
2288 if (asan_sanitize_allocas_p () && cfun
->calls_alloca
)
2289 var_end_seq
= asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx
,
2290 virtual_stack_vars_rtx
,
2293 fini_vars_expansion ();
2295 /* If there were any artificial non-ignored vars without rtl
2296 found earlier, see if deferred stack allocation hasn't assigned
2298 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls
, i
, var
)
2300 rtx rtl
= DECL_RTL_IF_SET (var
);
2302 /* Keep artificial non-ignored vars in cfun->local_decls
2303 chain until instantiate_decls. */
2304 if (rtl
&& (MEM_P (rtl
) || GET_CODE (rtl
) == CONCAT
))
2305 add_local_decl (cfun
, var
);
2308 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2309 if (STACK_ALIGNMENT_NEEDED
)
2311 HOST_WIDE_INT align
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
2312 if (FRAME_GROWS_DOWNWARD
)
2313 frame_offset
= aligned_lower_bound (frame_offset
, align
);
2315 frame_offset
= aligned_upper_bound (frame_offset
, align
);
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
			 TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx_code_label *
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);
  if (elt)
    return *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      glabel *lab_stmt;

      lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!lab_stmt)
	break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
	break;

      return jump_target_rtx (lab);
    }

  rtx_code_label *l = gen_label_rtx ();
  lab_rtx_for_bb->put (bb, l);
  return l;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx_insn *last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx_insn *insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
	 insert on the remaining edge we potentially will insert
	 it at the end of this block (if the dest block isn't feasible)
	 in order to avoid splitting the edge.  This insertion will take
	 place in front of the last jump.  But we might have emitted
	 multiple jumps (conditional and one unconditional) to the
	 same destination.  Inserting in front of the last one then
	 is a problem.  See PR 40021.  We fix this by deleting all
	 jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
	 confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
	{
	  insn = PREV_INSN (insn);
	  if (JUMP_P (NEXT_INSN (insn)))
	    {
	      if (!any_condjump_p (NEXT_INSN (insn)))
		{
		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
		}
	      delete_insn (NEXT_INSN (insn));
	    }
	}
    }
}
2434 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2435 Returns a new basic block if we've terminated the current basic
2436 block and created a new one. */
2439 expand_gimple_cond (basic_block bb
, gcond
*stmt
)
2441 basic_block new_bb
, dest
;
2444 rtx_insn
*last2
, *last
;
2445 enum tree_code code
;
2448 code
= gimple_cond_code (stmt
);
2449 op0
= gimple_cond_lhs (stmt
);
2450 op1
= gimple_cond_rhs (stmt
);
  /* We're sometimes presented with such code: a comparison whose
     boolean result is stored into an SSA name, followed by a
     GIMPLE_COND that merely tests that name against zero.
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
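  /* Illustrative sketch of the situation described above (the SSA
     names are made up for the example):

	 _1 = x < y;
	 if (_1 != 0) goto <then>; else goto <else>;

     When TER has the defining comparison available, it is recovered
     here so that a single compare-and-branch can be emitted.  */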
2460 && TREE_CODE (op0
) == SSA_NAME
2461 && TREE_CODE (TREE_TYPE (op0
)) == BOOLEAN_TYPE
2462 && TREE_CODE (op1
) == INTEGER_CST
2463 && ((gimple_cond_code (stmt
) == NE_EXPR
2464 && integer_zerop (op1
))
2465 || (gimple_cond_code (stmt
) == EQ_EXPR
2466 && integer_onep (op1
)))
2467 && bitmap_bit_p (SA
.values
, SSA_NAME_VERSION (op0
)))
2469 gimple
*second
= SSA_NAME_DEF_STMT (op0
);
2470 if (gimple_code (second
) == GIMPLE_ASSIGN
)
2472 enum tree_code code2
= gimple_assign_rhs_code (second
);
2473 if (TREE_CODE_CLASS (code2
) == tcc_comparison
)
2476 op0
= gimple_assign_rhs1 (second
);
2477 op1
= gimple_assign_rhs2 (second
);
2479 /* If jumps are cheap and the target does not support conditional
2480 compare, turn some more codes into jumpy sequences. */
2481 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2482 && targetm
.gen_ccmp_first
== NULL
)
2484 if ((code2
== BIT_AND_EXPR
2485 && TYPE_PRECISION (TREE_TYPE (op0
)) == 1
2486 && TREE_CODE (gimple_assign_rhs2 (second
)) != INTEGER_CST
)
2487 || code2
== TRUTH_AND_EXPR
)
2489 code
= TRUTH_ANDIF_EXPR
;
2490 op0
= gimple_assign_rhs1 (second
);
2491 op1
= gimple_assign_rhs2 (second
);
2493 else if (code2
== BIT_IOR_EXPR
|| code2
== TRUTH_OR_EXPR
)
2495 code
= TRUTH_ORIF_EXPR
;
2496 op0
= gimple_assign_rhs1 (second
);
2497 op1
= gimple_assign_rhs2 (second
);
  /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
     into (x - C2) * C3 < C4.  */
  if ((code == EQ_EXPR || code == NE_EXPR)
      && TREE_CODE (op0) == SSA_NAME
      && TREE_CODE (op1) == INTEGER_CST)
    code = maybe_optimize_mod_cmp (code, &op0, &op1);
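  /* Worked example of the transformation above (a sketch, assuming a
     32-bit unsigned type and an odd modulus): for "x % 3 == 0",
     maybe_optimize_mod_cmp can multiply x by 0xaaaaaaab, the inverse of
     3 modulo 2^32; the product is at most 0x55555555 (UINT_MAX / 3)
     exactly when x is a multiple of 3, so the modulo itself is never
     computed.  */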
2510 last2
= last
= get_last_insn ();
2512 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2513 set_curr_insn_location (gimple_location (stmt
));
2515 /* These flags have no purpose in RTL land. */
2516 true_edge
->flags
&= ~EDGE_TRUE_VALUE
;
2517 false_edge
->flags
&= ~EDGE_FALSE_VALUE
;
2519 /* We can either have a pure conditional jump with one fallthru edge or
2520 two-way jump that needs to be decomposed into two basic blocks. */
2521 if (false_edge
->dest
== bb
->next_bb
)
2523 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
2524 true_edge
->probability
);
2525 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
2526 if (true_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2527 set_curr_insn_location (true_edge
->goto_locus
);
2528 false_edge
->flags
|= EDGE_FALLTHRU
;
2529 maybe_cleanup_end_of_block (false_edge
, last
);
2532 if (true_edge
->dest
== bb
->next_bb
)
2534 jumpifnot_1 (code
, op0
, op1
, label_rtx_for_bb (false_edge
->dest
),
2535 false_edge
->probability
);
2536 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
2537 if (false_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2538 set_curr_insn_location (false_edge
->goto_locus
);
2539 true_edge
->flags
|= EDGE_FALLTHRU
;
2540 maybe_cleanup_end_of_block (true_edge
, last
);
2544 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
2545 true_edge
->probability
);
2546 last
= get_last_insn ();
2547 if (false_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2548 set_curr_insn_location (false_edge
->goto_locus
);
2549 emit_jump (label_rtx_for_bb (false_edge
->dest
));
2552 if (BARRIER_P (BB_END (bb
)))
2553 BB_END (bb
) = PREV_INSN (BB_END (bb
));
2554 update_bb_for_insn (bb
);
2556 new_bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
2557 dest
= false_edge
->dest
;
2558 redirect_edge_succ (false_edge
, new_bb
);
2559 false_edge
->flags
|= EDGE_FALLTHRU
;
2560 new_bb
->count
= false_edge
->count ();
2561 loop_p loop
= find_common_loop (bb
->loop_father
, dest
->loop_father
);
2562 add_bb_to_loop (new_bb
, loop
);
2563 if (loop
->latch
== bb
2564 && loop
->header
== dest
)
2565 loop
->latch
= new_bb
;
2566 make_single_succ_edge (new_bb
, dest
, 0);
2567 if (BARRIER_P (BB_END (new_bb
)))
2568 BB_END (new_bb
) = PREV_INSN (BB_END (new_bb
));
2569 update_bb_for_insn (new_bb
);
2571 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
2573 if (true_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2575 set_curr_insn_location (true_edge
->goto_locus
);
2576 true_edge
->goto_locus
= curr_insn_location ();
2582 /* Mark all calls that can have a transaction restart. */
2585 mark_transaction_restart_calls (gimple
*stmt
)
2587 struct tm_restart_node dummy
;
2588 tm_restart_node
**slot
;
2590 if (!cfun
->gimple_df
->tm_restart
)
2594 slot
= cfun
->gimple_df
->tm_restart
->find_slot (&dummy
, NO_INSERT
);
2597 struct tm_restart_node
*n
= *slot
;
2598 tree list
= n
->label_or_list
;
2601 for (insn
= next_real_insn (get_last_insn ());
2603 insn
= next_real_insn (insn
))
2606 if (TREE_CODE (list
) == LABEL_DECL
)
2607 add_reg_note (insn
, REG_TM
, label_rtx (list
));
2609 for (; list
; list
= TREE_CHAIN (list
))
2610 add_reg_note (insn
, REG_TM
, label_rtx (TREE_VALUE (list
)));
2614 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2618 expand_call_stmt (gcall
*stmt
)
2620 tree exp
, decl
, lhs
;
2624 if (gimple_call_internal_p (stmt
))
2626 expand_internal_call (stmt
);
2630 /* If this is a call to a built-in function and it has no effect other
2631 than setting the lhs, try to implement it using an internal function
2633 decl
= gimple_call_fndecl (stmt
);
2634 if (gimple_call_lhs (stmt
)
2635 && !gimple_has_side_effects (stmt
)
2636 && (optimize
|| (decl
&& called_as_built_in (decl
))))
2638 internal_fn ifn
= replacement_internal_fn (stmt
);
2639 if (ifn
!= IFN_LAST
)
2641 expand_internal_call (ifn
, stmt
);
2646 exp
= build_vl_exp (CALL_EXPR
, gimple_call_num_args (stmt
) + 3);
2648 CALL_EXPR_FN (exp
) = gimple_call_fn (stmt
);
2649 builtin_p
= decl
&& fndecl_built_in_p (decl
);
2651 /* If this is not a builtin function, the function type through which the
2652 call is made may be different from the type of the function. */
2655 = fold_convert (build_pointer_type (gimple_call_fntype (stmt
)),
2656 CALL_EXPR_FN (exp
));
2658 TREE_TYPE (exp
) = gimple_call_return_type (stmt
);
2659 CALL_EXPR_STATIC_CHAIN (exp
) = gimple_call_chain (stmt
);
2661 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
2663 tree arg
= gimple_call_arg (stmt
, i
);
2665 /* TER addresses into arguments of builtin functions so we have a
2666 chance to infer more correct alignment information. See PR39954. */
2668 && TREE_CODE (arg
) == SSA_NAME
2669 && (def
= get_gimple_for_ssa_name (arg
))
2670 && gimple_assign_rhs_code (def
) == ADDR_EXPR
)
2671 arg
= gimple_assign_rhs1 (def
);
2672 CALL_EXPR_ARG (exp
, i
) = arg
;
2675 if (gimple_has_side_effects (stmt
))
2676 TREE_SIDE_EFFECTS (exp
) = 1;
2678 if (gimple_call_nothrow_p (stmt
))
2679 TREE_NOTHROW (exp
) = 1;
2681 if (gimple_no_warning_p (stmt
))
2682 TREE_NO_WARNING (exp
) = 1;
2684 CALL_EXPR_TAILCALL (exp
) = gimple_call_tail_p (stmt
);
2685 CALL_EXPR_MUST_TAIL_CALL (exp
) = gimple_call_must_tail_p (stmt
);
2686 CALL_EXPR_RETURN_SLOT_OPT (exp
) = gimple_call_return_slot_opt_p (stmt
);
2688 && fndecl_built_in_p (decl
, BUILT_IN_NORMAL
)
2689 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl
)))
2690 CALL_ALLOCA_FOR_VAR_P (exp
) = gimple_call_alloca_for_var_p (stmt
);
2692 CALL_FROM_THUNK_P (exp
) = gimple_call_from_thunk_p (stmt
);
2693 CALL_EXPR_VA_ARG_PACK (exp
) = gimple_call_va_arg_pack_p (stmt
);
2694 CALL_EXPR_BY_DESCRIPTOR (exp
) = gimple_call_by_descriptor_p (stmt
);
2695 SET_EXPR_LOCATION (exp
, gimple_location (stmt
));
2697 /* Ensure RTL is created for debug args. */
2698 if (decl
&& DECL_HAS_DEBUG_ARGS_P (decl
))
2700 vec
<tree
, va_gc
> **debug_args
= decl_debug_args_lookup (decl
);
2705 for (ix
= 1; (*debug_args
)->iterate (ix
, &dtemp
); ix
+= 2)
2707 gcc_assert (TREE_CODE (dtemp
) == DEBUG_EXPR_DECL
);
2708 expand_debug_expr (dtemp
);
2712 rtx_insn
*before_call
= get_last_insn ();
2713 lhs
= gimple_call_lhs (stmt
);
2715 expand_assignment (lhs
, exp
, false);
2717 expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2719 /* If the gimple call is an indirect call and has 'nocf_check'
2720 attribute find a generated CALL insn to mark it as no
2721 control-flow verification is needed. */
2722 if (gimple_call_nocf_check_p (stmt
)
2723 && !gimple_call_fndecl (stmt
))
2725 rtx_insn
*last
= get_last_insn ();
2726 while (!CALL_P (last
)
2727 && last
!= before_call
)
2728 last
= PREV_INSN (last
);
2730 if (last
!= before_call
)
2731 add_reg_note (last
, REG_CALL_NOCF_CHECK
, const0_rtx
);
2734 mark_transaction_restart_calls (stmt
);
2738 /* Generate RTL for an asm statement (explicit assembler code).
2739 STRING is a STRING_CST node containing the assembler code text,
2740 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2741 insn is volatile; don't optimize it. */
2744 expand_asm_loc (tree string
, int vol
, location_t locus
)
2748 body
= gen_rtx_ASM_INPUT_loc (VOIDmode
,
2749 ggc_strdup (TREE_STRING_POINTER (string
)),
2752 MEM_VOLATILE_P (body
) = vol
;
2754 /* Non-empty basic ASM implicitly clobbers memory. */
2755 if (TREE_STRING_LENGTH (string
) != 0)
2758 unsigned i
, nclobbers
;
2759 auto_vec
<rtx
> input_rvec
, output_rvec
;
2760 auto_vec
<const char *> constraints
;
2761 auto_vec
<rtx
> clobber_rvec
;
2762 HARD_REG_SET clobbered_regs
;
2763 CLEAR_HARD_REG_SET (clobbered_regs
);
2765 clob
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
));
2766 clobber_rvec
.safe_push (clob
);
2768 if (targetm
.md_asm_adjust
)
2769 targetm
.md_asm_adjust (output_rvec
, input_rvec
,
2770 constraints
, clobber_rvec
,
2774 nclobbers
= clobber_rvec
.length ();
2775 body
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (1 + nclobbers
));
2777 XVECEXP (body
, 0, 0) = asm_op
;
2778 for (i
= 0; i
< nclobbers
; i
++)
2779 XVECEXP (body
, 0, i
+ 1) = gen_rtx_CLOBBER (VOIDmode
, clobber_rvec
[i
]);
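/* Illustrative note (not from the original source): a basic asm such
   as

     asm volatile ("nop");

   goes through expand_asm_loc above; because the template string is
   non-empty it ends up in a PARALLEL together with a
   (clobber (mem:BLK (scratch))) plus any extra clobbers requested by
   targetm.md_asm_adjust.  */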
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}

/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (const vec<const char *> &constraints)
{
  unsigned len = constraints.length();
  if (len > 0)
    {
      int nalternatives = n_occurrences (',', constraints[0]);

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in %<asm%>");
	  return false;
	}

      for (unsigned i = 1; i < len; ++i)
	if (n_occurrences (',', constraints[i]) != nalternatives)
	  {
	    error ("operand constraints for %<asm%> differ "
		   "in number of alternatives");
	    return false;
	  }
    }
  return true;
}
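/* Illustrative note (not from the original source): the constraint
   strings "r,m" and "rm" describe two alternatives and one alternative
   respectively, so an asm mixing them across its operands is rejected
   by check_operand_nalternatives with the "differ in number of
   alternatives" error above.  */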
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in T.  Emit error and return the register
   variable definition for error, NULL_TREE for ok.  */

static tree
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);

  if (overlap)
    {
      error ("asm-specifier for variable %qE conflicts with asm clobber list",
	     DECL_NAME (overlap));

      /* Reset registerness to stop multiple errors emitted for a single
	 variable.  */
      DECL_REGISTER (overlap) = 0;
      return overlap;
    }

  return NULL_TREE;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string.

   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
   should be the fallthru basic block of the asm goto.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
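/* Illustrative note (not from the original source): for an extended
   asm such as

     asm ("add %1, %0" : "+r" (x) : "r" (y));

   the gimplifier has already split the "+r" in/out operand into an
   output plus a matching-constraint input, so below gimple_asm_noutputs
   is 1, gimple_asm_ninputs is 2, and the constraint strings are
   collected in output-then-input order.  */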
2869 expand_asm_stmt (gasm
*stmt
)
2871 class save_input_location
2876 explicit save_input_location(location_t where
)
2878 old
= input_location
;
2879 input_location
= where
;
2882 ~save_input_location()
2884 input_location
= old
;
2888 location_t locus
= gimple_location (stmt
);
2890 if (gimple_asm_input_p (stmt
))
2892 const char *s
= gimple_asm_string (stmt
);
2893 tree string
= build_string (strlen (s
), s
);
2894 expand_asm_loc (string
, gimple_asm_volatile_p (stmt
), locus
);
  /* There are some legacy diagnostics in here, and this also avoids a
     sixth parameter to targetm.md_asm_adjust.  */
2900 save_input_location
s_i_l(locus
);
2902 unsigned noutputs
= gimple_asm_noutputs (stmt
);
2903 unsigned ninputs
= gimple_asm_ninputs (stmt
);
2904 unsigned nlabels
= gimple_asm_nlabels (stmt
);
2907 /* ??? Diagnose during gimplification? */
2908 if (ninputs
+ noutputs
+ nlabels
> MAX_RECOG_OPERANDS
)
2910 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS
);
2914 auto_vec
<tree
, MAX_RECOG_OPERANDS
> output_tvec
;
2915 auto_vec
<tree
, MAX_RECOG_OPERANDS
> input_tvec
;
2916 auto_vec
<const char *, MAX_RECOG_OPERANDS
> constraints
;
2918 /* Copy the gimple vectors into new vectors that we can manipulate. */
2920 output_tvec
.safe_grow (noutputs
);
2921 input_tvec
.safe_grow (ninputs
);
2922 constraints
.safe_grow (noutputs
+ ninputs
);
2924 for (i
= 0; i
< noutputs
; ++i
)
2926 tree t
= gimple_asm_output_op (stmt
, i
);
2927 output_tvec
[i
] = TREE_VALUE (t
);
2928 constraints
[i
] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
2930 for (i
= 0; i
< ninputs
; i
++)
2932 tree t
= gimple_asm_input_op (stmt
, i
);
2933 input_tvec
[i
] = TREE_VALUE (t
);
2934 constraints
[i
+ noutputs
]
2935 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
2938 /* ??? Diagnose during gimplification? */
2939 if (! check_operand_nalternatives (constraints
))
2942 /* Count the number of meaningful clobbered registers, ignoring what
2943 we would ignore later. */
2944 auto_vec
<rtx
> clobber_rvec
;
2945 HARD_REG_SET clobbered_regs
;
2946 CLEAR_HARD_REG_SET (clobbered_regs
);
2948 if (unsigned n
= gimple_asm_nclobbers (stmt
))
2950 clobber_rvec
.reserve (n
);
2951 for (i
= 0; i
< n
; i
++)
2953 tree t
= gimple_asm_clobber_op (stmt
, i
);
2954 const char *regname
= TREE_STRING_POINTER (TREE_VALUE (t
));
2957 j
= decode_reg_name_and_count (regname
, &nregs
);
2962 /* ??? Diagnose during gimplification? */
2963 error ("unknown register name %qs in %<asm%>", regname
);
2967 rtx x
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
));
2968 clobber_rvec
.safe_push (x
);
2972 /* Otherwise we should have -1 == empty string
2973 or -3 == cc, which is not a register. */
2974 gcc_assert (j
== -1 || j
== -3);
2978 for (int reg
= j
; reg
< j
+ nregs
; reg
++)
2980 /* Clobbering the PIC register is an error. */
2981 if (reg
== (int) PIC_OFFSET_TABLE_REGNUM
)
2983 /* ??? Diagnose during gimplification? */
2984 error ("PIC register clobbered by %qs in %<asm%>",
2989 SET_HARD_REG_BIT (clobbered_regs
, reg
);
2990 rtx x
= gen_rtx_REG (reg_raw_mode
[reg
], reg
);
2991 clobber_rvec
.safe_push (x
);
2995 unsigned nclobbers
= clobber_rvec
.length();
2997 /* First pass over inputs and outputs checks validity and sets
2998 mark_addressable if needed. */
2999 /* ??? Diagnose during gimplification? */
3001 for (i
= 0; i
< noutputs
; ++i
)
3003 tree val
= output_tvec
[i
];
3004 tree type
= TREE_TYPE (val
);
3005 const char *constraint
;
3010 /* Try to parse the output constraint. If that fails, there's
3011 no point in going further. */
3012 constraint
= constraints
[i
];
3013 if (!parse_output_constraint (&constraint
, i
, ninputs
, noutputs
,
3014 &allows_mem
, &allows_reg
, &is_inout
))
3017 /* If the output is a hard register, verify it doesn't conflict with
3018 any other operand's possible hard register use. */
3020 && REG_P (DECL_RTL (val
))
3021 && HARD_REGISTER_P (DECL_RTL (val
)))
3023 unsigned j
, output_hregno
= REGNO (DECL_RTL (val
));
3024 bool early_clobber_p
= strchr (constraints
[i
], '&') != NULL
;
3025 unsigned long match
;
3027 /* Verify the other outputs do not use the same hard register. */
3028 for (j
= i
+ 1; j
< noutputs
; ++j
)
3029 if (DECL_P (output_tvec
[j
])
3030 && REG_P (DECL_RTL (output_tvec
[j
]))
3031 && HARD_REGISTER_P (DECL_RTL (output_tvec
[j
]))
3032 && output_hregno
== REGNO (DECL_RTL (output_tvec
[j
])))
3033 error ("invalid hard register usage between output operands");
3035 /* Verify matching constraint operands use the same hard register
3036 and that the non-matching constraint operands do not use the same
3037 hard register if the output is an early clobber operand. */
3038 for (j
= 0; j
< ninputs
; ++j
)
3039 if (DECL_P (input_tvec
[j
])
3040 && REG_P (DECL_RTL (input_tvec
[j
]))
3041 && HARD_REGISTER_P (DECL_RTL (input_tvec
[j
])))
3043 unsigned input_hregno
= REGNO (DECL_RTL (input_tvec
[j
]));
3044 switch (*constraints
[j
+ noutputs
])
3046 case '0': case '1': case '2': case '3': case '4':
3047 case '5': case '6': case '7': case '8': case '9':
3048 match
= strtoul (constraints
[j
+ noutputs
], NULL
, 10);
3055 && output_hregno
!= input_hregno
)
3056 error ("invalid hard register usage between output operand "
3057 "and matching constraint operand");
3058 else if (early_clobber_p
3060 && output_hregno
== input_hregno
)
3061 error ("invalid hard register usage between earlyclobber "
3062 "operand and input operand");
3070 && REG_P (DECL_RTL (val
))
3071 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
))))
3072 mark_addressable (val
);
3075 for (i
= 0; i
< ninputs
; ++i
)
3077 bool allows_reg
, allows_mem
;
3078 const char *constraint
;
3080 constraint
= constraints
[i
+ noutputs
];
3081 if (! parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, 0,
3082 constraints
.address (),
3083 &allows_mem
, &allows_reg
))
3086 if (! allows_reg
&& allows_mem
)
3087 mark_addressable (input_tvec
[i
]);
3090 /* Second pass evaluates arguments. */
3092 /* Make sure stack is consistent for asm goto. */
3094 do_pending_stack_adjust ();
3095 int old_generating_concat_p
= generating_concat_p
;
3097 /* Vector of RTX's of evaluated output operands. */
3098 auto_vec
<rtx
, MAX_RECOG_OPERANDS
> output_rvec
;
3099 auto_vec
<int, MAX_RECOG_OPERANDS
> inout_opnum
;
3100 rtx_insn
*after_rtl_seq
= NULL
, *after_rtl_end
= NULL
;
3102 output_rvec
.safe_grow (noutputs
);
3104 for (i
= 0; i
< noutputs
; ++i
)
3106 tree val
= output_tvec
[i
];
3107 tree type
= TREE_TYPE (val
);
3108 bool is_inout
, allows_reg
, allows_mem
, ok
;
3111 ok
= parse_output_constraint (&constraints
[i
], i
, ninputs
,
3112 noutputs
, &allows_mem
, &allows_reg
,
3116 /* If an output operand is not a decl or indirect ref and our constraint
3117 allows a register, make a temporary to act as an intermediate.
3118 Make the asm insn write into that, then we will copy it to
3119 the real output operand. Likewise for promoted variables. */
3121 generating_concat_p
= 0;
3123 if ((TREE_CODE (val
) == INDIRECT_REF
&& allows_mem
)
3125 && (allows_mem
|| REG_P (DECL_RTL (val
)))
3126 && ! (REG_P (DECL_RTL (val
))
3127 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
)))
3130 || TREE_ADDRESSABLE (type
))
3132 op
= expand_expr (val
, NULL_RTX
, VOIDmode
,
3133 !allows_reg
? EXPAND_MEMORY
: EXPAND_WRITE
);
3135 op
= validize_mem (op
);
3137 if (! allows_reg
&& !MEM_P (op
))
3138 error ("output number %d not directly addressable", i
);
3139 if ((! allows_mem
&& MEM_P (op
) && GET_MODE (op
) != BLKmode
)
3140 || GET_CODE (op
) == CONCAT
)
3143 op
= gen_reg_rtx (GET_MODE (op
));
3145 generating_concat_p
= old_generating_concat_p
;
3148 emit_move_insn (op
, old_op
);
3150 push_to_sequence2 (after_rtl_seq
, after_rtl_end
);
3151 emit_move_insn (old_op
, op
);
3152 after_rtl_seq
= get_insns ();
3153 after_rtl_end
= get_last_insn ();
3159 op
= assign_temp (type
, 0, 1);
3160 op
= validize_mem (op
);
3161 if (!MEM_P (op
) && TREE_CODE (val
) == SSA_NAME
)
3162 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val
), op
);
3164 generating_concat_p
= old_generating_concat_p
;
3166 push_to_sequence2 (after_rtl_seq
, after_rtl_end
);
3167 expand_assignment (val
, make_tree (type
, op
), false);
3168 after_rtl_seq
= get_insns ();
3169 after_rtl_end
= get_last_insn ();
3172 output_rvec
[i
] = op
;
3175 inout_opnum
.safe_push (i
);
3178 auto_vec
<rtx
, MAX_RECOG_OPERANDS
> input_rvec
;
3179 auto_vec
<machine_mode
, MAX_RECOG_OPERANDS
> input_mode
;
3181 input_rvec
.safe_grow (ninputs
);
3182 input_mode
.safe_grow (ninputs
);
3184 generating_concat_p
= 0;
3186 for (i
= 0; i
< ninputs
; ++i
)
3188 tree val
= input_tvec
[i
];
3189 tree type
= TREE_TYPE (val
);
3190 bool allows_reg
, allows_mem
, ok
;
3191 const char *constraint
;
3194 constraint
= constraints
[i
+ noutputs
];
3195 ok
= parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, 0,
3196 constraints
.address (),
3197 &allows_mem
, &allows_reg
);
3200 /* EXPAND_INITIALIZER will not generate code for valid initializer
3201 constants, but will still generate code for other types of operand.
3202 This is the behavior we want for constant constraints. */
3203 op
= expand_expr (val
, NULL_RTX
, VOIDmode
,
3204 allows_reg
? EXPAND_NORMAL
3205 : allows_mem
? EXPAND_MEMORY
3206 : EXPAND_INITIALIZER
);
3208 /* Never pass a CONCAT to an ASM. */
3209 if (GET_CODE (op
) == CONCAT
)
3210 op
= force_reg (GET_MODE (op
), op
);
3211 else if (MEM_P (op
))
3212 op
= validize_mem (op
);
3214 if (asm_operand_ok (op
, constraint
, NULL
) <= 0)
3216 if (allows_reg
&& TYPE_MODE (type
) != BLKmode
)
3217 op
= force_reg (TYPE_MODE (type
), op
);
3218 else if (!allows_mem
)
3219 warning (0, "asm operand %d probably doesn%'t match constraints",
3221 else if (MEM_P (op
))
3223 /* We won't recognize either volatile memory or memory
3224 with a queued address as available a memory_operand
3225 at this point. Ignore it: clearly this *is* a memory. */
3231 input_mode
[i
] = TYPE_MODE (type
);
3234 /* For in-out operands, copy output rtx to input rtx. */
3235 unsigned ninout
= inout_opnum
.length();
3236 for (i
= 0; i
< ninout
; i
++)
3238 int j
= inout_opnum
[i
];
3239 rtx o
= output_rvec
[j
];
3241 input_rvec
.safe_push (o
);
3242 input_mode
.safe_push (GET_MODE (o
));
3245 sprintf (buffer
, "%d", j
);
3246 constraints
.safe_push (ggc_strdup (buffer
));
3250 /* Sometimes we wish to automatically clobber registers across an asm.
3251 Case in point is when the i386 backend moved from cc0 to a hard reg --
3252 maintaining source-level compatibility means automatically clobbering
3253 the flags register. */
3254 rtx_insn
*after_md_seq
= NULL
;
3255 if (targetm
.md_asm_adjust
)
3256 after_md_seq
= targetm
.md_asm_adjust (output_rvec
, input_rvec
,
3257 constraints
, clobber_rvec
,
3260 /* Do not allow the hook to change the output and input count,
3261 lest it mess up the operand numbering. */
3262 gcc_assert (output_rvec
.length() == noutputs
);
3263 gcc_assert (input_rvec
.length() == ninputs
);
3264 gcc_assert (constraints
.length() == noutputs
+ ninputs
);
3266 /* But it certainly can adjust the clobbers. */
3267 nclobbers
= clobber_rvec
.length();
3269 /* Third pass checks for easy conflicts. */
3270 /* ??? Why are we doing this on trees instead of rtx. */
3272 bool clobber_conflict_found
= 0;
3273 for (i
= 0; i
< noutputs
; ++i
)
3274 if (tree_conflicts_with_clobbers_p (output_tvec
[i
], &clobbered_regs
))
3275 clobber_conflict_found
= 1;
3276 for (i
= 0; i
< ninputs
- ninout
; ++i
)
3277 if (tree_conflicts_with_clobbers_p (input_tvec
[i
], &clobbered_regs
))
3278 clobber_conflict_found
= 1;
3280 /* Make vectors for the expression-rtx, constraint strings,
3281 and named operands. */
3283 rtvec argvec
= rtvec_alloc (ninputs
);
3284 rtvec constraintvec
= rtvec_alloc (ninputs
);
3285 rtvec labelvec
= rtvec_alloc (nlabels
);
3287 rtx body
= gen_rtx_ASM_OPERANDS ((noutputs
== 0 ? VOIDmode
3288 : GET_MODE (output_rvec
[0])),
3289 ggc_strdup (gimple_asm_string (stmt
)),
3290 "", 0, argvec
, constraintvec
,
3292 MEM_VOLATILE_P (body
) = gimple_asm_volatile_p (stmt
);
3294 for (i
= 0; i
< ninputs
; ++i
)
3296 ASM_OPERANDS_INPUT (body
, i
) = input_rvec
[i
];
3297 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body
, i
)
3298 = gen_rtx_ASM_INPUT_loc (input_mode
[i
],
3299 constraints
[i
+ noutputs
],
3303 /* Copy labels to the vector. */
3304 rtx_code_label
*fallthru_label
= NULL
;
3307 basic_block fallthru_bb
= NULL
;
3308 edge fallthru
= find_fallthru_edge (gimple_bb (stmt
)->succs
);
3310 fallthru_bb
= fallthru
->dest
;
3312 for (i
= 0; i
< nlabels
; ++i
)
3314 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
3316 /* If asm goto has any labels in the fallthru basic block, use
3317 a label that we emit immediately after the asm goto. Expansion
3318 may insert further instructions into the same basic block after
3319 asm goto and if we don't do this, insertion of instructions on
3320 the fallthru edge might misbehave. See PR58670. */
3321 if (fallthru_bb
&& label_to_block (cfun
, label
) == fallthru_bb
)
3323 if (fallthru_label
== NULL_RTX
)
3324 fallthru_label
= gen_label_rtx ();
3328 r
= label_rtx (label
);
3329 ASM_OPERANDS_LABEL (body
, i
) = gen_rtx_LABEL_REF (Pmode
, r
);
3333 /* Now, for each output, construct an rtx
3334 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3335 ARGVEC CONSTRAINTS OPNAMES))
3336 If there is more than one, put them inside a PARALLEL. */
3338 if (nlabels
> 0 && nclobbers
== 0)
3340 gcc_assert (noutputs
== 0);
3341 emit_jump_insn (body
);
3343 else if (noutputs
== 0 && nclobbers
== 0)
3345 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3348 else if (noutputs
== 1 && nclobbers
== 0)
3350 ASM_OPERANDS_OUTPUT_CONSTRAINT (body
) = constraints
[0];
3351 emit_insn (gen_rtx_SET (output_rvec
[0], body
));
3361 body
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (num
+ nclobbers
));
3363 /* For each output operand, store a SET. */
3364 for (i
= 0; i
< noutputs
; ++i
)
3366 rtx src
, o
= output_rvec
[i
];
3369 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody
) = constraints
[0];
3374 src
= gen_rtx_ASM_OPERANDS (GET_MODE (o
),
3375 ASM_OPERANDS_TEMPLATE (obody
),
3376 constraints
[i
], i
, argvec
,
3377 constraintvec
, labelvec
, locus
);
3378 MEM_VOLATILE_P (src
) = gimple_asm_volatile_p (stmt
);
3380 XVECEXP (body
, 0, i
) = gen_rtx_SET (o
, src
);
3383 /* If there are no outputs (but there are some clobbers)
3384 store the bare ASM_OPERANDS into the PARALLEL. */
3386 XVECEXP (body
, 0, i
++) = obody
;
3388 /* Store (clobber REG) for each clobbered register specified. */
3389 for (unsigned j
= 0; j
< nclobbers
; ++j
)
3391 rtx clobbered_reg
= clobber_rvec
[j
];
3393 /* Do sanity check for overlap between clobbers and respectively
3394 input and outputs that hasn't been handled. Such overlap
3395 should have been detected and reported above. */
3396 if (!clobber_conflict_found
&& REG_P (clobbered_reg
))
3398 /* We test the old body (obody) contents to avoid
3399 tripping over the under-construction body. */
3400 for (unsigned k
= 0; k
< noutputs
; ++k
)
3401 if (reg_overlap_mentioned_p (clobbered_reg
, output_rvec
[k
]))
3402 internal_error ("asm clobber conflict with output operand");
3404 for (unsigned k
= 0; k
< ninputs
- ninout
; ++k
)
3405 if (reg_overlap_mentioned_p (clobbered_reg
, input_rvec
[k
]))
3406 internal_error ("asm clobber conflict with input operand");
3409 XVECEXP (body
, 0, i
++) = gen_rtx_CLOBBER (VOIDmode
, clobbered_reg
);
3413 emit_jump_insn (body
);
3418 generating_concat_p
= old_generating_concat_p
;
3421 emit_label (fallthru_label
);
3424 emit_insn (after_md_seq
);
3426 emit_insn (after_rtl_seq
);
3429 crtl
->has_asm_statement
= 1;
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

static void
expand_computed_goto (tree exp)
{
  rtx x = expand_normal (exp);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

static void
expand_goto (tree label)
{
  if (flag_checking)
    {
      /* Check for a nonlocal goto to a containing function.  Should have
	 gotten translated to __builtin_nonlocal_goto.  */
      tree context = decl_function_context (label);
      gcc_assert (!context || context == current_function_decl);
    }

  emit_jump (jump_target_rtx (label));
}

/* Output a return with no value.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}

/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}

/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (rtx val)
{
  /* Copy the value to the return location unless it's already there.  */

  tree decl = DECL_RESULT (current_function_decl);
  rtx return_reg = DECL_RTL (decl);
  if (return_reg != val)
    {
      tree funtype = TREE_TYPE (current_function_decl);
      tree type = TREE_TYPE (decl);
      int unsignedp = TYPE_UNSIGNED (type);
      machine_mode old_mode = DECL_MODE (decl);
      machine_mode mode;
      if (DECL_BY_REFERENCE (decl))
	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
      else
	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);

      if (mode != old_mode)
	val = convert_modes (mode, old_mode, val, unsignedp);

      if (GET_CODE (return_reg) == PARALLEL)
	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
	emit_move_insn (return_reg, val);
    }

  expand_null_return_1 ();
}
3519 /* Generate RTL to evaluate the expression RETVAL and return it
3520 from the current function. */
3523 expand_return (tree retval
)
3529 /* If function wants no value, give it none. */
3530 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl
))) == VOID_TYPE
)
3532 expand_normal (retval
);
3533 expand_null_return ();
3537 if (retval
== error_mark_node
)
3539 /* Treat this like a return of no value from a function that
3541 expand_null_return ();
3544 else if ((TREE_CODE (retval
) == MODIFY_EXPR
3545 || TREE_CODE (retval
) == INIT_EXPR
)
3546 && TREE_CODE (TREE_OPERAND (retval
, 0)) == RESULT_DECL
)
3547 retval_rhs
= TREE_OPERAND (retval
, 1);
3549 retval_rhs
= retval
;
3551 result_rtl
= DECL_RTL (DECL_RESULT (current_function_decl
));
3553 /* If we are returning the RESULT_DECL, then the value has already
3554 been stored into it, so we don't have to do anything special. */
3555 if (TREE_CODE (retval_rhs
) == RESULT_DECL
)
3556 expand_value_return (result_rtl
);
3558 /* If the result is an aggregate that is being returned in one (or more)
3559 registers, load the registers here. */
3561 else if (retval_rhs
!= 0
3562 && TYPE_MODE (TREE_TYPE (retval_rhs
)) == BLKmode
3563 && REG_P (result_rtl
))
3565 val
= copy_blkmode_to_reg (GET_MODE (result_rtl
), retval_rhs
);
3568 /* Use the mode of the result value on the return register. */
3569 PUT_MODE (result_rtl
, GET_MODE (val
));
3570 expand_value_return (val
);
3573 expand_null_return ();
3575 else if (retval_rhs
!= 0
3576 && !VOID_TYPE_P (TREE_TYPE (retval_rhs
))
3577 && (REG_P (result_rtl
)
3578 || (GET_CODE (result_rtl
) == PARALLEL
)))
3580 /* Compute the return value into a temporary (usually a pseudo reg). */
3582 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl
)), 0, 1);
3583 val
= expand_expr (retval_rhs
, val
, GET_MODE (val
), EXPAND_NORMAL
);
3584 val
= force_not_mem (val
);
3585 expand_value_return (val
);
3589 /* No hard reg used; calculate value into hard return reg. */
3590 expand_expr (retval
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3591 expand_value_return (result_rtl
);
/* Expand a clobber of LHS.  If LHS is stored in a multi-part
   register, tell the rtl optimizers that its value is no longer
   needed.  */

static void
expand_clobber (tree lhs)
{
  if (DECL_P (lhs))
    {
      rtx decl_rtl = DECL_RTL_IF_SET (lhs);
      if (decl_rtl && REG_P (decl_rtl))
	{
	  machine_mode decl_mode = GET_MODE (decl_rtl);
	  if (maybe_gt (GET_MODE_SIZE (decl_mode),
			REGMODE_NATURAL_SIZE (decl_mode)))
	    emit_clobber (decl_rtl);
	}
    }
}
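/* Illustrative note (not from the original source): in GIMPLE dumps an
   end-of-scope clobber appears as something like "x = {CLOBBER};".
   expand_clobber only needs to emit an rtl clobber when the variable
   lives in a register wider than its natural register-mode size, so
   that the rtl optimizers do not keep the stale multi-part value
   live.  */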
3615 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3616 STMT that doesn't require special handling for outgoing edges. That
3617 is no tailcalls and no GIMPLE_COND. */
3620 expand_gimple_stmt_1 (gimple
*stmt
)
3624 set_curr_insn_location (gimple_location (stmt
));
3626 switch (gimple_code (stmt
))
3629 op0
= gimple_goto_dest (stmt
);
3630 if (TREE_CODE (op0
) == LABEL_DECL
)
3633 expand_computed_goto (op0
);
3636 expand_label (gimple_label_label (as_a
<glabel
*> (stmt
)));
3639 case GIMPLE_PREDICT
:
3643 gswitch
*swtch
= as_a
<gswitch
*> (stmt
);
3644 if (gimple_switch_num_labels (swtch
) == 1)
3645 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch
)));
3647 expand_case (swtch
);
3651 expand_asm_stmt (as_a
<gasm
*> (stmt
));
3654 expand_call_stmt (as_a
<gcall
*> (stmt
));
3659 op0
= gimple_return_retval (as_a
<greturn
*> (stmt
));
3661 if (op0
&& op0
!= error_mark_node
)
3663 tree result
= DECL_RESULT (current_function_decl
);
3665 /* If we are not returning the current function's RESULT_DECL,
3666 build an assignment to it. */
3669 /* I believe that a function's RESULT_DECL is unique. */
3670 gcc_assert (TREE_CODE (op0
) != RESULT_DECL
);
3672 /* ??? We'd like to use simply expand_assignment here,
3673 but this fails if the value is of BLKmode but the return
3674 decl is a register. expand_return has special handling
3675 for this combination, which eventually should move
3676 to common code. See comments there. Until then, let's
3677 build a modify expression :-/ */
3678 op0
= build2 (MODIFY_EXPR
, TREE_TYPE (result
),
3684 expand_null_return ();
3686 expand_return (op0
);
3692 gassign
*assign_stmt
= as_a
<gassign
*> (stmt
);
3693 tree lhs
= gimple_assign_lhs (assign_stmt
);
3695 /* Tree expand used to fiddle with |= and &= of two bitfield
3696 COMPONENT_REFs here. This can't happen with gimple, the LHS
3697 of binary assigns must be a gimple reg. */
3699 if (TREE_CODE (lhs
) != SSA_NAME
3700 || get_gimple_rhs_class (gimple_expr_code (stmt
))
3701 == GIMPLE_SINGLE_RHS
)
3703 tree rhs
= gimple_assign_rhs1 (assign_stmt
);
3704 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt
))
3705 == GIMPLE_SINGLE_RHS
);
3706 if (gimple_has_location (stmt
) && CAN_HAVE_LOCATION_P (rhs
)
3707 /* Do not put locations on possibly shared trees. */
3708 && !is_gimple_min_invariant (rhs
))
3709 SET_EXPR_LOCATION (rhs
, gimple_location (stmt
));
3710 if (TREE_CLOBBER_P (rhs
))
3711 /* This is a clobber to mark the going out of scope for
3713 expand_clobber (lhs
);
3715 expand_assignment (lhs
, rhs
,
3716 gimple_assign_nontemporal_move_p (
3722 bool nontemporal
= gimple_assign_nontemporal_move_p (assign_stmt
);
3723 struct separate_ops ops
;
3724 bool promoted
= false;
3726 target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3727 if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
3730 ops
.code
= gimple_assign_rhs_code (assign_stmt
);
3731 ops
.type
= TREE_TYPE (lhs
);
3732 switch (get_gimple_rhs_class (ops
.code
))
3734 case GIMPLE_TERNARY_RHS
:
3735 ops
.op2
= gimple_assign_rhs3 (assign_stmt
);
3737 case GIMPLE_BINARY_RHS
:
3738 ops
.op1
= gimple_assign_rhs2 (assign_stmt
);
3740 case GIMPLE_UNARY_RHS
:
3741 ops
.op0
= gimple_assign_rhs1 (assign_stmt
);
3746 ops
.location
= gimple_location (stmt
);
3748 /* If we want to use a nontemporal store, force the value to
3749 register first. If we store into a promoted register,
3750 don't directly expand to target. */
3751 temp
= nontemporal
|| promoted
? NULL_RTX
: target
;
3752 temp
= expand_expr_real_2 (&ops
, temp
, GET_MODE (target
),
3759 int unsignedp
= SUBREG_PROMOTED_SIGN (target
);
3760 /* If TEMP is a VOIDmode constant, use convert_modes to make
3761 sure that we properly convert it. */
3762 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
3764 temp
= convert_modes (GET_MODE (target
),
3765 TYPE_MODE (ops
.type
),
3767 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
3768 GET_MODE (target
), temp
, unsignedp
);
3771 convert_move (SUBREG_REG (target
), temp
, unsignedp
);
3773 else if (nontemporal
&& emit_storent_insn (target
, temp
))
3777 temp
= force_operand (temp
, target
);
3779 emit_move_insn (target
, temp
);
3790 /* Expand one gimple statement STMT and return the last RTL instruction
3791 before any of the newly generated ones.
3793 In addition to generating the necessary RTL instructions this also
3794 sets REG_EH_REGION notes if necessary and sets the current source
3795 location for diagnostics. */
3798 expand_gimple_stmt (gimple
*stmt
)
3800 location_t saved_location
= input_location
;
3801 rtx_insn
*last
= get_last_insn ();
3806 /* We need to save and restore the current source location so that errors
3807 discovered during expansion are emitted with the right location. But
3808 it would be better if the diagnostic routines used the source location
3809 embedded in the tree nodes rather than globals. */
3810 if (gimple_has_location (stmt
))
3811 input_location
= gimple_location (stmt
);
3813 expand_gimple_stmt_1 (stmt
);
3815 /* Free any temporaries used to evaluate this statement. */
3818 input_location
= saved_location
;
3820 /* Mark all insns that may trap. */
3821 lp_nr
= lookup_stmt_eh_lp (stmt
);
3825 for (insn
= next_real_insn (last
); insn
;
3826 insn
= next_real_insn (insn
))
3828 if (! find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
)
3829 /* If we want exceptions for non-call insns, any
3830 may_trap_p instruction may throw. */
3831 && GET_CODE (PATTERN (insn
)) != CLOBBER
3832 && GET_CODE (PATTERN (insn
)) != CLOBBER_HIGH
3833 && GET_CODE (PATTERN (insn
)) != USE
3834 && insn_could_throw_p (insn
))
3835 make_reg_eh_region_note (insn
, 0, lp_nr
);
3842 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3843 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3844 generated a tail call (something that might be denied by the ABI
3845 rules governing the call; see calls.c).
3847 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3848 can still reach the rest of BB. The case here is __builtin_sqrt,
3849 where the NaN result goes through the external function (with a
3850 tailcall) and the normal result happens via a sqrt instruction. */
3853 expand_gimple_tailcall (basic_block bb
, gcall
*stmt
, bool *can_fallthru
)
3855 rtx_insn
*last2
, *last
;
3858 profile_probability probability
;
3860 last2
= last
= expand_gimple_stmt (stmt
);
3862 for (last
= NEXT_INSN (last
); last
; last
= NEXT_INSN (last
))
3863 if (CALL_P (last
) && SIBLING_CALL_P (last
))
3866 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
3868 *can_fallthru
= true;
3872 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3873 Any instructions emitted here are about to be deleted. */
3874 do_pending_stack_adjust ();
3876 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3877 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3878 EH or abnormal edges, we shouldn't have created a tail call in
3879 the first place. So it seems to me we should just be removing
3880 all edges here, or redirecting the existing fallthru edge to
3883 probability
= profile_probability::never ();
3885 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
3887 if (!(e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)))
3889 if (e
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
3890 e
->dest
->count
-= e
->count ();
3891 probability
+= e
->probability
;
3898 /* This is somewhat ugly: the call_expr expander often emits instructions
3899 after the sibcall (to perform the function return). These confuse the
3900 find_many_sub_basic_blocks code, so we need to get rid of these. */
3901 last
= NEXT_INSN (last
);
3902 gcc_assert (BARRIER_P (last
));
3904 *can_fallthru
= false;
3905 while (NEXT_INSN (last
))
	  /* For instance an sqrt builtin expander expands an if with
	     a sibcall in the then arm and a label for the else arm.  */
3909 if (LABEL_P (NEXT_INSN (last
)))
3911 *can_fallthru
= true;
3914 delete_insn (NEXT_INSN (last
));
3917 e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_ABNORMAL
3919 e
->probability
= probability
;
3921 update_bb_for_insn (bb
);
3923 if (NEXT_INSN (last
))
3925 bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
3928 if (BARRIER_P (last
))
3929 BB_END (bb
) = PREV_INSN (last
);
3932 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
/* Return the difference between the floor and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_LT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      constm1_rtx, const0_rtx),
     const0_rtx);
}
/* Return the difference between the ceil and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, const0_rtx),
     const0_rtx);
}
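/* Worked example (not from the original source): for 7 / 2 the
   truncated quotient is 3 with remainder MOD = 1; OP1 / MOD = 2 is
   positive, so the adjustment is +1 and 3 + 1 = 4, the ceiling of
   3.5.  */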
/* Return the difference between the ceil and the truncated result of
   an unsigned division by OP1 with remainder MOD.  */

static rtx
ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
{
  /* (mod != 0 ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     const1_rtx, const0_rtx);
}
/* Return the difference between the rounded and the truncated result
   of a signed division by OP1 with remainder MOD.  Halfway cases are
   rounded away from zero, rather than to the nearest even number.  */

static rtx
round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (abs (mod) >= abs (op1) - abs (mod)
	? (op1 / mod > 0 ? 1 : -1)
	: 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
		       gen_rtx_MINUS (mode,
				      gen_rtx_ABS (mode, op1),
				      gen_rtx_ABS (mode, mod))),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, constm1_rtx),
     const0_rtx);
}
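/* Worked example (not from the original source): for -7 / 2 the
   truncated quotient is -3 with MOD = -1; |MOD| = 1 equals
   |OP1| - |MOD| = 1, so this is a halfway case, and since OP1 / MOD = -2
   is not positive the adjustment is -1, giving -4, which is -3.5
   rounded away from zero.  */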
/* Return the difference between the rounded and the truncated result
   of an unsigned division by OP1 with remainder MOD.  Halfway cases
   are rounded away from zero, rather than to the nearest even
   number.  */

static rtx
round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod >= op1 - mod ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, mod,
		       gen_rtx_MINUS (mode, op1, mod)),
     const1_rtx, const0_rtx);
}
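/* Worked example (not from the original source): for unsigned 7 / 2,
   MOD = 1 and OP1 - MOD = 1, so MOD >= OP1 - MOD holds and the
   adjustment is +1, giving 4, i.e. 3.5 rounded away from zero.  */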
4016 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
4020 convert_debug_memory_address (scalar_int_mode mode
, rtx x
,
4023 #ifndef POINTERS_EXTEND_UNSIGNED
4024 gcc_assert (mode
== Pmode
4025 || mode
== targetm
.addr_space
.address_mode (as
));
4026 gcc_assert (GET_MODE (x
) == mode
|| GET_MODE (x
) == VOIDmode
);
4030 gcc_assert (targetm
.addr_space
.valid_pointer_mode (mode
, as
));
4032 if (GET_MODE (x
) == mode
|| GET_MODE (x
) == VOIDmode
)
4035 /* X must have some form of address mode already. */
4036 scalar_int_mode xmode
= as_a
<scalar_int_mode
> (GET_MODE (x
));
4037 if (GET_MODE_PRECISION (mode
) < GET_MODE_PRECISION (xmode
))
4038 x
= lowpart_subreg (mode
, x
, xmode
);
4039 else if (POINTERS_EXTEND_UNSIGNED
> 0)
4040 x
= gen_rtx_ZERO_EXTEND (mode
, x
);
4041 else if (!POINTERS_EXTEND_UNSIGNED
)
4042 x
= gen_rtx_SIGN_EXTEND (mode
, x
);
4045 switch (GET_CODE (x
))
4048 if ((SUBREG_PROMOTED_VAR_P (x
)
4049 || (REG_P (SUBREG_REG (x
)) && REG_POINTER (SUBREG_REG (x
)))
4050 || (GET_CODE (SUBREG_REG (x
)) == PLUS
4051 && REG_P (XEXP (SUBREG_REG (x
), 0))
4052 && REG_POINTER (XEXP (SUBREG_REG (x
), 0))
4053 && CONST_INT_P (XEXP (SUBREG_REG (x
), 1))))
4054 && GET_MODE (SUBREG_REG (x
)) == mode
)
4055 return SUBREG_REG (x
);
4058 temp
= gen_rtx_LABEL_REF (mode
, label_ref_label (x
));
4059 LABEL_REF_NONLOCAL_P (temp
) = LABEL_REF_NONLOCAL_P (x
);
4062 temp
= shallow_copy_rtx (x
);
4063 PUT_MODE (temp
, mode
);
4066 temp
= convert_debug_memory_address (mode
, XEXP (x
, 0), as
);
4068 temp
= gen_rtx_CONST (mode
, temp
);
4072 if (CONST_INT_P (XEXP (x
, 1)))
4074 temp
= convert_debug_memory_address (mode
, XEXP (x
, 0), as
);
4076 return gen_rtx_fmt_ee (GET_CODE (x
), mode
, temp
, XEXP (x
, 1));
4082 /* Don't know how to express ptr_extend as operation in debug info. */
4085 #endif /* POINTERS_EXTEND_UNSIGNED */
4090 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4091 by avoid_deep_ter_for_debug. */
4093 static hash_map
<tree
, tree
> *deep_ter_debug_map
;
4095 /* Split too deep TER chains for debug stmts using debug temporaries. */
4098 avoid_deep_ter_for_debug (gimple
*stmt
, int depth
)
4100 use_operand_p use_p
;
4102 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
)
4104 tree use
= USE_FROM_PTR (use_p
);
4105 if (TREE_CODE (use
) != SSA_NAME
|| SSA_NAME_IS_DEFAULT_DEF (use
))
4107 gimple
*g
= get_gimple_for_ssa_name (use
);
4110 if (depth
> 6 && !stmt_ends_bb_p (g
))
4112 if (deep_ter_debug_map
== NULL
)
4113 deep_ter_debug_map
= new hash_map
<tree
, tree
>;
4115 tree
&vexpr
= deep_ter_debug_map
->get_or_insert (use
);
4118 vexpr
= make_node (DEBUG_EXPR_DECL
);
4119 gimple
*def_temp
= gimple_build_debug_bind (vexpr
, use
, g
);
4120 DECL_ARTIFICIAL (vexpr
) = 1;
4121 TREE_TYPE (vexpr
) = TREE_TYPE (use
);
4122 SET_DECL_MODE (vexpr
, TYPE_MODE (TREE_TYPE (use
)));
4123 gimple_stmt_iterator gsi
= gsi_for_stmt (g
);
4124 gsi_insert_after (&gsi
, def_temp
, GSI_NEW_STMT
);
4125 avoid_deep_ter_for_debug (def_temp
, 0);
4128 avoid_deep_ter_for_debug (g
, depth
+ 1);
/* Return an RTX equivalent to the value of the parameter DECL.  */

expand_debug_parm_decl (tree decl)
  rtx incoming = DECL_INCOMING_RTL (decl);
      && GET_MODE (incoming) != BLKmode
      && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
          || (MEM_P (incoming)
              && REG_P (XEXP (incoming, 0))
              && HARD_REGISTER_P (XEXP (incoming, 0)))))
      rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));

#ifdef HAVE_window_save
      /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
         If the target machine has an explicit window save instruction, the
         actual entry value is the corresponding OUTGOING_REGNO instead.  */
      if (REG_P (incoming)
          && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
          = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
                                OUTGOING_REGNO (REGNO (incoming)), 0);
      else if (MEM_P (incoming))
          rtx reg = XEXP (incoming, 0);
          if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
              reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
              incoming = replace_equiv_address_nv (incoming, reg);
          incoming = copy_rtx (incoming);

      ENTRY_VALUE_EXP (rtl) = incoming;

      && GET_MODE (incoming) != BLKmode
      && !TREE_ADDRESSABLE (decl)
      && (XEXP (incoming, 0) == virtual_incoming_args_rtx
          || (GET_CODE (XEXP (incoming, 0)) == PLUS
              && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
              && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
    return copy_rtx (incoming);
/* Return an RTX equivalent to the value of the tree expression EXP.  */

expand_debug_expr (tree exp)
  rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  machine_mode inner_mode = VOIDmode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
  scalar_int_mode op0_mode, op1_mode, addr_mode;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    case tcc_expression:
      switch (TREE_CODE (exp))
        case WIDEN_MULT_PLUS_EXPR:
        case WIDEN_MULT_MINUS_EXPR:
        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
        case TRUTH_AND_EXPR:
        case TRUTH_XOR_EXPR:
        case TRUTH_NOT_EXPR:
      op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
      if (mode == BLKmode)
      op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
      switch (TREE_CODE (exp))
        case WIDEN_LSHIFT_EXPR:
          /* Ensure second operand isn't wider than the first one.  */
          inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
          if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
              && (GET_MODE_UNIT_PRECISION (mode)
                  < GET_MODE_PRECISION (op1_mode)))
            op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
      if (mode == BLKmode)
      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));

    case tcc_comparison:
      unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

    case tcc_exceptional:
    case tcc_declaration:

  switch (TREE_CODE (exp))
      if (!lookup_constant_def (exp))
          if (strlen (TREE_STRING_POINTER (exp)) + 1
              != (size_t) TREE_STRING_LENGTH (exp))
          op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
          op0 = gen_rtx_MEM (BLKmode, op0);
          set_mem_attributes (op0, exp, 0);
      op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);

      return immed_wide_int_const (poly_int_cst_value (exp), mode);

      gcc_assert (COMPLEX_MODE_P (mode));
      op0 = expand_debug_expr (TREE_REALPART (exp));
      op1 = expand_debug_expr (TREE_IMAGPART (exp));
      return gen_rtx_CONCAT (mode, op0, op1);

    case DEBUG_EXPR_DECL:
      op0 = DECL_RTL_IF_SET (exp);
      op0 = gen_rtx_DEBUG_EXPR (mode);
      DEBUG_EXPR_TREE_DECL (op0) = exp;
      SET_DECL_RTL (exp, op0);
      op0 = DECL_RTL_IF_SET (exp);

      /* This decl was probably optimized away.  */
          || DECL_EXTERNAL (exp)
          || !TREE_STATIC (exp)
          || DECL_HARD_REGISTER (exp)
          || DECL_IN_CONSTANT_POOL (exp)
          || mode == VOIDmode)

          op0 = make_decl_rtl_for_debug (exp);
              || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
              || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)

        op0 = copy_rtx (op0);

      if (GET_MODE (op0) == BLKmode
          /* If op0 is not BLKmode, but mode is, adjust_mode
             below would ICE.  While it is likely a FE bug,
             try to be robust here.  See PR43166.  */
          || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
          gcc_assert (MEM_P (op0));
          op0 = adjust_address_nv (op0, mode, 0);

      inner_mode = GET_MODE (op0);

      if (mode == inner_mode)

      if (inner_mode == VOIDmode)
          if (TREE_CODE (exp) == SSA_NAME)
            inner_mode = TYPE_MODE (TREE_TYPE (exp));
            inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
          if (mode == inner_mode)

      if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
          if (GET_MODE_UNIT_BITSIZE (mode)
              == GET_MODE_UNIT_BITSIZE (inner_mode))
            op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
          else if (GET_MODE_UNIT_BITSIZE (mode)
                   < GET_MODE_UNIT_BITSIZE (inner_mode))
            op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
            op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
      else if (FLOAT_MODE_P (mode))
          gcc_assert (TREE_CODE (exp) != SSA_NAME);
          if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
            op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
            op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
      else if (FLOAT_MODE_P (inner_mode))
            op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
            op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
      else if (GET_MODE_UNIT_PRECISION (mode)
               == GET_MODE_UNIT_PRECISION (inner_mode))
        op0 = lowpart_subreg (mode, op0, inner_mode);
      else if (GET_MODE_UNIT_PRECISION (mode)
               < GET_MODE_UNIT_PRECISION (inner_mode))
        op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
      else if (UNARY_CLASS_P (exp)
               ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
        op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
        op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
          tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
                                     TREE_OPERAND (exp, 0),
                                     TREE_OPERAND (exp, 1));
            return expand_debug_expr (newexp);

      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));

      if (TREE_CODE (exp) == MEM_REF)
          if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
              || (GET_CODE (op0) == PLUS
                  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
            /* (mem (debug_implicit_ptr)) might confuse aliasing.
               Instead just use get_inner_reference.  */

          op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
          if (!op1 || !poly_int_rtx_p (op1, &offset))
          op0 = plus_constant (inner_mode, op0, offset);

      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
      if (op0 == NULL_RTX)

      op0 = gen_rtx_MEM (mode, op0);
      set_mem_attributes (op0, exp, 0);
      if (TREE_CODE (exp) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
        set_mem_expr (op0, NULL_TREE);
      set_mem_addr_space (op0, as);

    case TARGET_MEM_REF:
      if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
          && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))

      op0 = expand_debug_expr
            (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));

      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
      if (op0 == NULL_RTX)

      op0 = gen_rtx_MEM (mode, op0);
      set_mem_attributes (op0, exp, 0);
      set_mem_addr_space (op0, as);
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
        poly_int64 bitsize, bitpos;
        int reversep, volatilep = 0;
          = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &reversep, &volatilep);

        if (known_eq (bitsize, 0))

        orig_op0 = op0 = expand_debug_expr (tem);

            machine_mode addrmode, offmode;

            op0 = XEXP (op0, 0);
            addrmode = GET_MODE (op0);
            if (addrmode == VOIDmode)

            op1 = expand_debug_expr (offset);

            offmode = GET_MODE (op1);
            if (offmode == VOIDmode)
              offmode = TYPE_MODE (TREE_TYPE (offset));

            if (addrmode != offmode)
              op1 = lowpart_subreg (addrmode, op1, offmode);

            /* Don't use offset_address here, we don't need a
               recognizable address, and we don't want to generate
               code.  */
            op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,

        if (mode1 == VOIDmode)
            if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
            mode1 = smallest_int_mode_for_size (bitsize);

        poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
        if (maybe_ne (bytepos, 0))
            op0 = adjust_address_nv (op0, mode1, bytepos);
            bitpos = num_trailing_bits (bitpos);
        else if (known_eq (bitpos, 0)
                 && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
          op0 = adjust_address_nv (op0, mode, 0);
        else if (GET_MODE (op0) != mode1)
          op0 = adjust_address_nv (op0, mode1, 0);
          op0 = copy_rtx (op0);
        if (op0 == orig_op0)
          op0 = shallow_copy_rtx (op0);
        set_mem_attributes (op0, exp, 0);

        if (known_eq (bitpos, 0) && mode == GET_MODE (op0))

        if (maybe_lt (bitpos, 0))

        if (GET_MODE (op0) == BLKmode || mode == BLKmode)

        if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
            && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
            machine_mode opmode = GET_MODE (op0);

            if (opmode == VOIDmode)
              opmode = TYPE_MODE (TREE_TYPE (tem));

            /* This condition may hold if we're expanding the address
               right past the end of an array that turned out not to
               be addressable (i.e., the address was only computed in
               debug stmts).  The gen_subreg below would rightfully
               crash, and the address doesn't really exist, so just
               drop it.  */
            if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))

            if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
              return simplify_gen_subreg (mode, op0, opmode, bytepos);

        return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
                                     && TYPE_UNSIGNED (TREE_TYPE (exp))
                                     : ZERO_EXTRACT, mode,
                                     GET_MODE (op0) != VOIDmode
                                     : TYPE_MODE (TREE_TYPE (tem)),
                                     op0, gen_int_mode (bitsize, word_mode),
                                     gen_int_mode (bitpos, word_mode));
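        /* The *_EXTRACT built above encodes the bit-field as
           (zero_extract:MODE op0 (const_int bitsize) (const_int bitpos)) or
           its sign_extract analogue, i.e. a field of BITSIZE bits starting
           BITPOS bits into OP0.  */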
      return simplify_gen_unary (ABS, mode, op0, mode);

      return simplify_gen_unary (NEG, mode, op0, mode);

      return simplify_gen_unary (NOT, mode, op0, mode);

      return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
                                 ? UNSIGNED_FLOAT : FLOAT, mode, op0,

    case FIX_TRUNC_EXPR:
      return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,

    case POINTER_PLUS_EXPR:
      /* For the rare target where pointers are not the same size as
         size_t, we need to check for mis-matched modes and correct
         the addition.  */
          && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
          && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
          && op0_mode != op1_mode)
          if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
              /* If OP0 is a partial mode, then we must truncate, even
                 if it has the same bitsize as OP1 as GCC's
                 representation of partial modes is opaque.  */
              || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
                  && (GET_MODE_BITSIZE (op0_mode)
                      == GET_MODE_BITSIZE (op1_mode))))
            op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
            /* We always sign-extend, regardless of the signedness of
               the operand, because the operand is always unsigned
               here even if the original C expression is signed.  */
            op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);

      return simplify_gen_binary (PLUS, mode, op0, op1);

    case POINTER_DIFF_EXPR:
      return simplify_gen_binary (MINUS, mode, op0, op1);

      return simplify_gen_binary (MULT, mode, op0, op1);

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
        return simplify_gen_binary (UDIV, mode, op0, op1);
        return simplify_gen_binary (DIV, mode, op0, op1);

    case TRUNC_MOD_EXPR:
      return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);

    case FLOOR_DIV_EXPR:
        return simplify_gen_binary (UDIV, mode, op0, op1);
          rtx div = simplify_gen_binary (DIV, mode, op0, op1);
          rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
          rtx adj = floor_sdiv_adjust (mode, mod, op1);
          return simplify_gen_binary (PLUS, mode, div, adj);
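          /* E.g. -7 / 2 truncates to -3 but floors to -4, so when the
             division is inexact and the operands have opposite signs the
             adjustment added above is -1; otherwise it is 0.  */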
    case FLOOR_MOD_EXPR:
        return simplify_gen_binary (UMOD, mode, op0, op1);
          rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
          rtx adj = floor_sdiv_adjust (mode, mod, op1);
          adj = simplify_gen_unary (NEG, mode,
                                    simplify_gen_binary (MULT, mode, adj, op1),
          return simplify_gen_binary (PLUS, mode, mod, adj);

          rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
          rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
          rtx adj = ceil_udiv_adjust (mode, mod, op1);
          return simplify_gen_binary (PLUS, mode, div, adj);
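          /* E.g. 7 u/ 3 truncates to 2 while the ceiling is 3, so a nonzero
             unsigned remainder means the adjustment added above is 1, and 0
             for an exact division.  */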
          rtx div = simplify_gen_binary (DIV, mode, op0, op1);
          rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
          rtx adj = ceil_sdiv_adjust (mode, mod, op1);
          return simplify_gen_binary (PLUS, mode, div, adj);

          rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
          rtx adj = ceil_udiv_adjust (mode, mod, op1);
          adj = simplify_gen_unary (NEG, mode,
                                    simplify_gen_binary (MULT, mode, adj, op1),
          return simplify_gen_binary (PLUS, mode, mod, adj);

          rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
          rtx adj = ceil_sdiv_adjust (mode, mod, op1);
          adj = simplify_gen_unary (NEG, mode,
                                    simplify_gen_binary (MULT, mode, adj, op1),
          return simplify_gen_binary (PLUS, mode, mod, adj);
    case ROUND_DIV_EXPR:
          rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
          rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
          rtx adj = round_udiv_adjust (mode, mod, op1);
          return simplify_gen_binary (PLUS, mode, div, adj);

          rtx div = simplify_gen_binary (DIV, mode, op0, op1);
          rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
          rtx adj = round_sdiv_adjust (mode, mod, op1);
          return simplify_gen_binary (PLUS, mode, div, adj);

    case ROUND_MOD_EXPR:
          rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
          rtx adj = round_udiv_adjust (mode, mod, op1);
          adj = simplify_gen_unary (NEG, mode,
                                    simplify_gen_binary (MULT, mode, adj, op1),
          return simplify_gen_binary (PLUS, mode, mod, adj);

          rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
          rtx adj = round_sdiv_adjust (mode, mod, op1);
          adj = simplify_gen_unary (NEG, mode,
                                    simplify_gen_binary (MULT, mode, adj, op1),
          return simplify_gen_binary (PLUS, mode, mod, adj);

      return simplify_gen_binary (ASHIFT, mode, op0, op1);

      return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
      return simplify_gen_binary (ASHIFTRT, mode, op0, op1);

      return simplify_gen_binary (ROTATE, mode, op0, op1);

      return simplify_gen_binary (ROTATERT, mode, op0, op1);

      return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);

      return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);

    case TRUTH_AND_EXPR:
      return simplify_gen_binary (AND, mode, op0, op1);

      return simplify_gen_binary (IOR, mode, op0, op1);

    case TRUTH_XOR_EXPR:
      return simplify_gen_binary (XOR, mode, op0, op1);

    case TRUTH_ANDIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);

    case TRUTH_ORIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);

    case TRUTH_NOT_EXPR:
      return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);

      return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
      return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
      return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
      return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,

      return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
      return simplify_gen_relational (NE, mode, inner_mode, op0, op1);

    case UNORDERED_EXPR:
      return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
      return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
      return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
      return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
      return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
      return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
      return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
      return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);

      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);

      gcc_assert (COMPLEX_MODE_P (mode));
      if (GET_MODE (op0) == VOIDmode)
        op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
      if (GET_MODE (op1) == VOIDmode)
        op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
      return gen_rtx_CONCAT (mode, op0, op1);
      if (GET_CODE (op0) == CONCAT)
        return gen_rtx_CONCAT (mode, XEXP (op0, 0),
                               simplify_gen_unary (NEG, GET_MODE_INNER (mode),
                                                   GET_MODE_INNER (mode)));
          scalar_mode imode = GET_MODE_INNER (mode);

              re = adjust_address_nv (op0, imode, 0);
              im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
              scalar_int_mode ifmode;
              scalar_int_mode ihmode;
              if (!int_mode_for_mode (mode).exists (&ifmode)
                  || !int_mode_for_mode (imode).exists (&ihmode))
              halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
                re = gen_rtx_SUBREG (ifmode, re, 0);
              re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
              if (imode != ihmode)
                re = gen_rtx_SUBREG (imode, re, 0);
              im = copy_rtx (op0);
                im = gen_rtx_SUBREG (ifmode, im, 0);
              im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
              if (imode != ihmode)
                im = gen_rtx_SUBREG (imode, im, 0);
          im = gen_rtx_NEG (imode, im);
          return gen_rtx_CONCAT (mode, re, im);
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0 || !MEM_P (op0))
          if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
               || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
               || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
              && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
                  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
            return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));

          if (handled_component_p (TREE_OPERAND (exp, 0)))
              poly_int64 bitoffset, bitsize, maxsize, byteoffset;
                = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
                                           &bitsize, &maxsize, &reverse);
                   || TREE_CODE (decl) == PARM_DECL
                   || TREE_CODE (decl) == RESULT_DECL)
                  && (!TREE_ADDRESSABLE (decl)
                      || target_for_debug_bind (decl))
                  && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
                  && known_gt (bitsize, 0)
                  && known_eq (bitsize, maxsize))
                  rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
                  return plus_constant (mode, base, byteoffset);

          if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
              && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
                  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
                      || (GET_CODE (op0) == PLUS
                          && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
                          && CONST_INT_P (XEXP (op0, 1)))))
                  op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
                  if (!op1 || !poly_int_rtx_p (op1, &offset))
                  return plus_constant (mode, op0, offset);

      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
      op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);

        unsigned HOST_WIDE_INT i, nelts;

        if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))

        op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));

        for (i = 0; i < nelts; ++i)
            op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
            XVECEXP (op0, 0, i) = op1;

      if (TREE_CLOBBER_P (exp))
      else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
          unsigned HOST_WIDE_INT nelts;

          if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
            goto flag_unsupported;

          op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));

          FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
              op1 = expand_debug_expr (val);
              XVECEXP (op0, 0, i) = op1;

              op1 = expand_debug_expr
                    (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));

              for (; i < nelts; i++)
                XVECEXP (op0, 0, i) = op1;

        goto flag_unsupported;
      /* ??? Maybe handle some builtins?  */

        gimple *g = get_gimple_for_ssa_name (exp);
            if (deep_ter_debug_map)
                tree *slot = deep_ter_debug_map->get (exp);
            t = gimple_assign_rhs_to_tree (g);
            op0 = expand_debug_expr (t);
            /* If this is a reference to an incoming value of
               parameter that is never used in the code or where the
               incoming value is never used in the code, use
               PARM_DECL's DECL_RTL if set.  */
            if (SSA_NAME_IS_DEFAULT_DEF (exp)
                && SSA_NAME_VAR (exp)
                && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
                && has_zero_uses (exp))
                op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
              op0 = expand_debug_expr (SSA_NAME_VAR (exp));

            int part = var_to_partition (SA.map, exp);

            if (part == NO_PARTITION)

            gcc_assert (part >= 0 && (unsigned) part < SA.map->num_partitions);

            op0 = copy_rtx (SA.partition_to_pseudo[part]);

    /* Vector stuff.  For most of the codes we don't have rtl codes.  */
    case REALIGN_LOAD_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:

    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case WITH_SIZE_EXPR:
    case BIT_INSERT_EXPR:

      if (SCALAR_INT_MODE_P (GET_MODE (op0))
          && SCALAR_INT_MODE_P (mode))
            = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
                                  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
            = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
                                  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
          op0 = simplify_gen_binary (MULT, mode, op0, op1);
          return simplify_gen_binary (PLUS, mode, op0, op2);

    case WIDEN_MULT_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
          && SCALAR_INT_MODE_P (mode))
          inner_mode = GET_MODE (op0);
          if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
            op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
            op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
          if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
            op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
            op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
          op0 = simplify_gen_binary (MULT, mode, op0, op1);
          if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
          else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
            return simplify_gen_binary (PLUS, mode, op0, op2);
            return simplify_gen_binary (MINUS, mode, op2, op0);

    case MULT_HIGHPART_EXPR:
      /* ??? Similar to the above.  */

    case WIDEN_SUM_EXPR:
    case WIDEN_LSHIFT_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
          && SCALAR_INT_MODE_P (mode))
            = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
                                  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
          return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
                                      ? ASHIFT : PLUS, mode, op0, op1);
/* Return an RTX equivalent to the source bind value of the tree expression
   EXP.  */

expand_debug_source_expr (tree exp)
  machine_mode mode = VOIDmode, inner_mode;

  switch (TREE_CODE (exp))
      if (DECL_ABSTRACT_ORIGIN (exp))
        return expand_debug_source_expr (DECL_ABSTRACT_ORIGIN (exp));
      mode = DECL_MODE (exp);
      op0 = expand_debug_parm_decl (exp);
      /* See if this isn't an argument that has been completely
         optimized out.  */
      if (!DECL_RTL_SET_P (exp)
          && !DECL_INCOMING_RTL (exp)
          && DECL_ABSTRACT_ORIGIN (current_function_decl))
          tree aexp = DECL_ORIGIN (exp);
          if (DECL_CONTEXT (aexp)
              == DECL_ABSTRACT_ORIGIN (current_function_decl))
              vec<tree, va_gc> **debug_args;
              debug_args = decl_debug_args_lookup (current_function_decl);
              if (debug_args != NULL)
                  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
                      return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);

  if (op0 == NULL_RTX)

  inner_mode = GET_MODE (op0);
  if (mode == inner_mode)

  if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
      if (GET_MODE_UNIT_BITSIZE (mode)
          == GET_MODE_UNIT_BITSIZE (inner_mode))
        op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
      else if (GET_MODE_UNIT_BITSIZE (mode)
               < GET_MODE_UNIT_BITSIZE (inner_mode))
        op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
        op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
  else if (FLOAT_MODE_P (mode))
  else if (FLOAT_MODE_P (inner_mode))
      if (TYPE_UNSIGNED (TREE_TYPE (exp)))
        op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
        op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
  else if (GET_MODE_UNIT_PRECISION (mode)
           == GET_MODE_UNIT_PRECISION (inner_mode))
    op0 = lowpart_subreg (mode, op0, inner_mode);
  else if (GET_MODE_UNIT_PRECISION (mode)
           < GET_MODE_UNIT_PRECISION (inner_mode))
    op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
  else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
   Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
   deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */

avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
  if (exp == NULL_RTX)

  if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)

      /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
      rtx dval = make_debug_expr_from_rtl (exp);

      /* Emit a debug bind insn before INSN.  */
      rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
                                       DEBUG_EXPR_TREE_DECL (dval), exp,
                                       VAR_INIT_STATUS_INITIALIZED);

      emit_debug_insn_before (bind, insn);

  const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
    switch (*format_ptr++)
        avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
        for (j = 0; j < XVECLEN (exp, i); j++)
          avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
/* Expand the _LOCs in debug insns.  We run this after expanding all
   regular insns, so that any variables referenced in the function
   will have their DECL_RTLs set.  */

expand_debug_locations (void)
  rtx_insn *last = get_last_insn ();
  int save_strict_alias = flag_strict_aliasing;

  /* New alias sets while setting up memory attributes cause
     -fcompare-debug failures, even though it doesn't bring about any
     codegen changes.  */
  flag_strict_aliasing = 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (DEBUG_BIND_INSN_P (insn))
        tree value = (tree) INSN_VAR_LOCATION_LOC (insn);
        rtx_insn *prev_insn, *insn2;

        if (value == NULL_TREE)
        if (INSN_VAR_LOCATION_STATUS (insn)
            == VAR_INIT_STATUS_UNINITIALIZED)
          val = expand_debug_source_expr (value);
        /* The avoid_deep_ter_for_debug function inserts
           debug bind stmts after SSA_NAME definition, with the
           SSA_NAME as the whole bind location.  Disable temporarily
           expansion of that SSA_NAME into the DEBUG_EXPR_DECL
           being defined in this DEBUG_INSN.  */
        else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
            tree *slot = deep_ter_debug_map->get (value);
            if (*slot == INSN_VAR_LOCATION_DECL (insn))
            val = expand_debug_expr (value);
            *slot = INSN_VAR_LOCATION_DECL (insn);
          val = expand_debug_expr (value);
        gcc_assert (last == get_last_insn ());

          val = gen_rtx_UNKNOWN_VAR_LOC ();
            mode = GET_MODE (INSN_VAR_LOCATION (insn));

            gcc_assert (mode == GET_MODE (val)
                        || (GET_MODE (val) == VOIDmode
                            && (CONST_SCALAR_INT_P (val)
                                || GET_CODE (val) == CONST_FIXED
                                || GET_CODE (val) == LABEL_REF)));

        INSN_VAR_LOCATION_LOC (insn) = val;
        prev_insn = PREV_INSN (insn);
        for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
          avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);

  flag_strict_aliasing = save_strict_alias;
/* Performs swapping operands of commutative operations to expand
   the expensive one first.  */

reorder_operands (basic_block bb)
  unsigned int *lattice;  /* Hold cost of each statement.  */
  unsigned int i = 0, n = 0;
  gimple_stmt_iterator gsi;
  use_operand_p use_p;
  gimple *def0, *def1;

  /* Compute cost of each statement using estimate_num_insns.  */
  stmts = bb_seq (bb);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
      stmt = gsi_stmt (gsi);
      if (!is_gimple_debug (stmt))
        gimple_set_uid (stmt, n++);

  lattice = XNEWVEC (unsigned int, n);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
      stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
      cost = estimate_num_insns (stmt, &eni_size_weights);
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
          tree use = USE_FROM_PTR (use_p);
          if (TREE_CODE (use) != SSA_NAME)
          def_stmt = get_gimple_for_ssa_name (use);
          lattice[i] += lattice[gimple_uid (def_stmt)];
      if (!is_gimple_assign (stmt)
          || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
      op0 = gimple_op (stmt, 1);
      op1 = gimple_op (stmt, 2);
      if (TREE_CODE (op0) != SSA_NAME
          || TREE_CODE (op1) != SSA_NAME)
      /* Swap operands if the second one is more expensive.  */
      def0 = get_gimple_for_ssa_name (op0);
      def1 = get_gimple_for_ssa_name (op1);
      if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
          if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "Swap operands in stmt:\n");
              print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
              fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
                       def0 ? lattice[gimple_uid (def0)] : 0,
                       lattice[gimple_uid (def1)]);
          swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
                             gimple_assign_rhs2_ptr (stmt));
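/* For example, when op1 is defined by a chain of single-use statements
   whose accumulated estimate_num_insns cost exceeds that of op0's
   definition, lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)]
   holds and the operands are swapped, so the more expensive subtree is
   expanded first.  */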
/* Expand basic block BB from GIMPLE trees to RTL.  */

expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
  gimple_stmt_iterator gsi;
  gimple *stmt = NULL;
  rtx_note *note = NULL;

    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",

  /* Note that since we are now transitioning from GIMPLE to RTL, we
     cannot use the gsi_*_bb() routines because they expect the basic
     block to be in GIMPLE, instead of RTL.  Therefore, we need to
     access the BB sequence directly.  */
    reorder_operands (bb);
  stmts = bb_seq (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
  rtl_profile_for_bb (bb);
  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
  gsi = gsi_last (stmts);
  if (!gsi_end_p (gsi)
      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
      greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));

      if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
          && !gimple_return_retval (ret_stmt))
          gsi_remove (&gsi, false);
          single_succ_edge (bb)->flags |= EDGE_FALLTHRU;

  gsi = gsi_start (stmts);
  if (!gsi_end_p (gsi))
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);

      gcc_checking_assert (!note);
      last = get_last_insn ();

          expand_gimple_stmt (stmt);

      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
        BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      gcc_assert (LABEL_P (BB_HEAD (bb)));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_gimple_stmt (stmt, last);
    BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (; !gsi_end_p (gsi); gsi_next (&gsi))
      stmt = gsi_stmt (gsi);

      /* If this statement is a non-debug one, and we generate debug
         insns, then this one might be the last real use of a TERed
         SSA_NAME, but where there are still some debug uses further
         down.  Expanding the current SSA name in such further debug
         uses by their RHS might lead to wrong debug info, as coalescing
         might make the operands of such RHS be placed into the same
         pseudo as something else.  Like so:
           a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
         As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
         If we now would expand a_1 by its RHS (a_0 + 1) in the debug use,
         the write to a_2 would actually have clobbered the place which
         So, instead of that, we recognize the situation, and generate
         debug temporaries at the last real use of TERed SSA names:  */
      if (MAY_HAVE_DEBUG_BIND_INSNS
          && !is_gimple_debug (stmt))
          location_t sloc = curr_insn_location ();

          /* Look for SSA names that have their last use here (TERed
             names always have only one real use).  */
          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
            if ((def = get_gimple_for_ssa_name (op)))
                imm_use_iterator imm_iter;
                use_operand_p use_p;
                bool have_debug_uses = false;

                FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
                    if (gimple_debug_bind_p (USE_STMT (use_p)))
                      have_debug_uses = true;

                if (have_debug_uses)
                    /* OP is a TERed SSA name, with DEF its defining
                       statement, and where OP is used in further debug
                       instructions.  Generate a debug temporary, and
                       replace all uses of OP in debug insns with that
                       temporary.  */
                    tree value = gimple_assign_rhs_to_tree (def);
                    tree vexpr = make_node (DEBUG_EXPR_DECL);

                    set_curr_insn_location (gimple_location (def));

                    DECL_ARTIFICIAL (vexpr) = 1;
                    TREE_TYPE (vexpr) = TREE_TYPE (value);
                      mode = DECL_MODE (value);
                      mode = TYPE_MODE (TREE_TYPE (value));
                    SET_DECL_MODE (vexpr, mode);

                    val = gen_rtx_VAR_LOCATION
                        (mode, vexpr, (rtx) value, VAR_INIT_STATUS_INITIALIZED);

                    emit_debug_insn (val);

                    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
                        if (!gimple_debug_bind_p (debugstmt))
                        FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
                          SET_USE (use_p, vexpr);
                        update_stmt (debugstmt);

          set_curr_insn_location (sloc);
      currently_expanding_gimple_stmt = stmt;

      /* Expand this statement, then evaluate the resulting RTL and
         fixup the CFG accordingly.  */
      if (gimple_code (stmt) == GIMPLE_COND)
          new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));

      else if (is_gimple_debug (stmt))
          location_t sloc = curr_insn_location ();
          gimple_stmt_iterator nsi = gsi;

              tree value = NULL_TREE;

              if (!gimple_debug_nonbind_marker_p (stmt))
              if (gimple_debug_bind_p (stmt))
                  var = gimple_debug_bind_get_var (stmt);

                  if (TREE_CODE (var) != DEBUG_EXPR_DECL
                      && TREE_CODE (var) != LABEL_DECL
                      && !target_for_debug_bind (var))
                    goto delink_debug_stmt;

                    mode = DECL_MODE (var);
                    mode = TYPE_MODE (TREE_TYPE (var));

                  if (gimple_debug_bind_has_value_p (stmt))
                    value = gimple_debug_bind_get_value (stmt);

                  val = gen_rtx_VAR_LOCATION
                      (mode, var, (rtx) value, VAR_INIT_STATUS_INITIALIZED);
              else if (gimple_debug_source_bind_p (stmt))
                  var = gimple_debug_source_bind_get_var (stmt);
                  value = gimple_debug_source_bind_get_value (stmt);
                    mode = DECL_MODE (var);

                  val = gen_rtx_VAR_LOCATION (mode, var, (rtx) value,
                                              VAR_INIT_STATUS_UNINITIALIZED);
              /* If this function was first compiled with markers
                 enabled, but they're now disabled (e.g. LTO), drop
                 them on the floor.  */
              else if (gimple_debug_nonbind_marker_p (stmt)
                       && !MAY_HAVE_DEBUG_MARKER_INSNS)
                goto delink_debug_stmt;
              else if (gimple_debug_begin_stmt_p (stmt))
                val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
              else if (gimple_debug_inline_entry_p (stmt))
                  tree block = gimple_block (stmt);
                    val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
                    goto delink_debug_stmt;

              last = get_last_insn ();

              set_curr_insn_location (gimple_location (stmt));

              emit_debug_insn (val);

              if (dump_file && (dump_flags & TDF_DETAILS))
                  /* We can't dump the insn with a TREE where an RTX
                     is expected.  */
                  if (GET_CODE (val) == VAR_LOCATION)
                      gcc_checking_assert (PAT_VAR_LOCATION_LOC (val)
                                           == (rtx) value);
                      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
                  maybe_dump_rtl_for_gimple_stmt (stmt, last);
                  if (GET_CODE (val) == VAR_LOCATION)
                    PAT_VAR_LOCATION_LOC (val) = (rtx) value;

            delink_debug_stmt:
              /* In order not to generate too many debug temporaries,
                 we delink all uses of debug statements we already expanded.
                 Therefore debug statements between definition and real
                 use of TERed SSA names will continue to use the SSA name,
                 and not be replaced with debug temps.  */
              delink_stmt_imm_use (stmt);

              if (gsi_end_p (nsi))
              stmt = gsi_stmt (nsi);
              if (!is_gimple_debug (stmt))

          set_curr_insn_location (sloc);
          gcall *call_stmt = dyn_cast <gcall *> (stmt);
              && gimple_call_tail_p (call_stmt)
              && disable_tail_calls)
            gimple_call_set_tail (call_stmt, false);

          if (call_stmt && gimple_call_tail_p (call_stmt))
              new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);

              def_operand_p def_p;
              def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);

                  /* Ignore this stmt if it is in the list of
                     replaceable expressions.  */
                  && bitmap_bit_p (SA.values,
                                   SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
              last = expand_gimple_stmt (stmt);
              maybe_dump_rtl_for_gimple_stmt (stmt, last);

  currently_expanding_gimple_stmt = NULL;

  /* Expand implicit goto and convert goto_locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
      if (e->goto_locus != UNKNOWN_LOCATION)
        set_curr_insn_location (e->goto_locus);
      if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
          emit_jump (label_rtx_for_bb (e->dest));
          e->flags &= ~EDGE_FALLTHRU;

  /* Expanded RTL can create a jump in the last instruction of a block.
     Such a jump might later be assumed to be a jump to the successor and
     break edge insertion.  We need to insert a dummy move to prevent this.
     PR41440.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
      && (last = get_last_insn ())
          || (DEBUG_INSN_P (last)
              && JUMP_P (prev_nondebug_insn (last)))))
      rtx dummy = gen_reg_rtx (SImode);
      emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  if (BARRIER_P (last))
    last = PREV_INSN (last);

  update_bb_for_insn (bb);
/* Create a basic block for initialization code.  */

construct_init_block (void)
  basic_block init_block, first_block;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);

  /* When the entry edge points to the first basic block, we don't need a
     jump; otherwise we have to jump to the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
      tree label = gimple_block_label (e->dest);
      emit_jump (jump_target_rtx (label));
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
                                   ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);

      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_single_succ_edge (init_block, first_block, flags);
    e = make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),

  update_bb_for_insn (init_block);
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

set_block_levels (tree block, int level)
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
/* Create a block containing landing pads and similar stuff.  */

construct_exit_block (void)
  rtx_insn *head = get_last_insn ();
  basic_block exit_block;
  basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
  rtx_insn *orig_end = BB_END (prev_bb);

  rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();

  /* While emitting the function end we could move end of the last basic
     block.  */
  BB_END (prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  /* But make sure exit_block starts with RETURN_LABEL, otherwise the
     bb count counting will be confused.  Any instructions before that
     label are emitted for the case where PREV_BB falls through into the
     exit block, so append those instructions to prev_bb in that case.  */
  if (NEXT_INSN (head) != return_label)
      while (NEXT_INSN (head) != return_label)
          if (!NOTE_P (NEXT_INSN (head)))
            BB_END (prev_bb) = NEXT_INSN (head);
          head = NEXT_INSN (head);

  exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
  exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);

  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
      e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
      if (!(e->flags & EDGE_ABNORMAL))
        redirect_edge_succ (e, exit_block);

  e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
      exit_block->count -= e2->count ();

  update_bb_for_insn (exit_block);
6035 Look for ARRAY_REF nodes with non-constant indexes and mark them
6039 discover_nonconstant_array_refs_r (tree
* tp
, int *walk_subtrees
,
6040 void *data ATTRIBUTE_UNUSED
)
6044 if (IS_TYPE_OR_DECL_P (t
))
6046 else if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
6048 while (((TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
6049 && is_gimple_min_invariant (TREE_OPERAND (t
, 1))
6050 && (!TREE_OPERAND (t
, 2)
6051 || is_gimple_min_invariant (TREE_OPERAND (t
, 2))))
6052 || (TREE_CODE (t
) == COMPONENT_REF
6053 && (!TREE_OPERAND (t
,2)
6054 || is_gimple_min_invariant (TREE_OPERAND (t
, 2))))
6055 || TREE_CODE (t
) == BIT_FIELD_REF
6056 || TREE_CODE (t
) == REALPART_EXPR
6057 || TREE_CODE (t
) == IMAGPART_EXPR
6058 || TREE_CODE (t
) == VIEW_CONVERT_EXPR
6059 || CONVERT_EXPR_P (t
))
6060 t
= TREE_OPERAND (t
, 0);
6062 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
6064 t
= get_base_address (t
);
6066 && DECL_MODE (t
) != BLKmode
)
6067 TREE_ADDRESSABLE (t
) = 1;
6076 /* RTL expansion is not able to compile array references with variable
6077 offsets for arrays stored in single register. Discover such
6078 expressions and mark variables as addressable to avoid this
6082 discover_nonconstant_array_refs (void)
6085 gimple_stmt_iterator gsi
;
6087 FOR_EACH_BB_FN (bb
, cfun
)
6088 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6090 gimple
*stmt
= gsi_stmt (gsi
);
6091 if (!is_gimple_debug (stmt
))
6092 walk_gimple_op (stmt
, discover_nonconstant_array_refs_r
, NULL
);
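/* For instance, for a small array that could otherwise live in a single
   register, a reference a[i] with a non-constant index i is caught by the
   walk above and a is marked TREE_ADDRESSABLE, forcing it into memory
   where the variable offset can be applied.  */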
/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  Local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

expand_stack_alignment (void)
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (cfun->can_throw_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
              <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     realignment with DRAP.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (drap_rtx != NULL)
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
         needed.  */
      fixup_tail_calls ();
expand_main_function (void)
#if (defined(INVOKE__main) \
     || (!defined(HAS_INIT_SECTION) \
         && !defined(INIT_SECTION_ASM_OP) \
         && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

stack_protect_prologue (void)
  tree guard_decl = targetm.stack_protect_guard ();

  x = expand_normal (crtl->stack_protect_guard);

  if (targetm.have_stack_protect_combined_set () && guard_decl)
      gcc_assert (DECL_P (guard_decl));
      y = DECL_RTL (guard_decl);

      /* Allow the target to compute the address of Y and copy it to X
         without leaking Y into a register.  This combined address + copy
         pattern allows the target to prevent spilling of any intermediate
         results by splitting it after register allocation.  */
      if (rtx_insn *insn = targetm.gen_stack_protect_combined_set (x, y))

    y = expand_normal (guard_decl);

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (targetm.have_stack_protect_set ())
    if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate CFG during
   the expansion.  */

const pass_data pass_data_expand =
  RTL_PASS, /* type */
  "expand", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_EXPAND, /* tv_id */
  ( PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lva ), /* properties_required */
  PROP_rtl, /* properties_provided */
  ( PROP_ssa | PROP_trees ), /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */

class pass_expand : public rtl_opt_pass
  pass_expand (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_expand, ctxt)

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_expand
pass_expand::execute (function *fun)
  basic_block bb, init_block;
  rtx_insn *var_seq, *var_ret_seq;

  timevar_push (TV_OUT_OF_SSA);
  rewrite_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);
  SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);

  if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
      gimple_stmt_iterator gsi;
      FOR_EACH_BB_FN (bb, cfun)
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          if (gimple_debug_bind_p (gsi_stmt (gsi)))
            avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);

  /* Make sure all values used by the optimization passes have sane
     defaults.  */

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;
  /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
  free_dominance_info (CDI_DOMINATORS);

  rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));

  insn_locations_init ();
  if (!DECL_IS_BUILTIN (current_function_decl))
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
        set_curr_insn_location
          (DECL_SOURCE_LOCATION (current_function_decl));
        set_curr_insn_location (fun->function_start_locus);
    set_curr_insn_location (UNKNOWN_LOCATION);
  prologue_location = curr_insn_location ();

#ifdef INSN_SCHEDULING
  init_sched_attrs ();

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->init_stack_alignment ();
  fun->cfg->max_jumptable_ents = 0;

  /* Resolve the function section.  Some targets, like ARM EABI, rely on
     knowledge of the function section at expansion time to predict distance
     of calls.  */
  resolve_unique_section (current_function_decl, 0, flag_function_sections);

  /* Expand the variables recorded during gimple lowering.  */
  timevar_push (TV_VAR_EXPAND);

  var_ret_seq = expand_used_vars ();

  var_seq = get_insns ();
  timevar_pop (TV_VAR_EXPAND);

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
      if (fun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting local variables: "
                 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting function: "
                 "all local arrays are less than %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
         before parm_birth_insn.  We've just inserted an alloca call.
         Adjust the pointer to match.  */
      parm_birth_insn = var_seq;

  /* Now propagate the RTL assignment of each partition to the
     underlying var of each SSA_NAME.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
      /* We might have generated new SSA names in
         update_alias_info_with_stack_vars.  They will have a NULL
         defining statement, and won't be part of the partitioning,
         so we don't want to deal with them.  */
      if (!SSA_NAME_DEF_STMT (name))

      adjust_one_expanded_partition_var (name);
  /* Clean up RTL of variables that straddle across multiple
     partitions, and check that the rtl of any PARM_DECLs that are not
     cleaned up is that of their default defs.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      /* We might have generated new SSA names in
         update_alias_info_with_stack_vars.  They will have a NULL
         defining statement, and won't be part of the partitioning,
         so ignore those.  */
      if (!SSA_NAME_DEF_STMT (name))
        continue;

      int part = var_to_partition (SA.map, name);
      if (part == NO_PARTITION)
        continue;

      /* If this decl was marked as living in multiple places, reset
         this now to NULL.  */
      tree var = SSA_NAME_VAR (name);
      if (var && DECL_RTL_IF_SET (var) == pc_rtx)
        SET_DECL_RTL (var, NULL);
      /* Check that the pseudos chosen by assign_parms are those of
         the corresponding default defs.  */
      else if (SSA_NAME_IS_DEFAULT_DEF (name)
               && (TREE_CODE (var) == PARM_DECL
                   || TREE_CODE (var) == RESULT_DECL))
        {
          rtx in = DECL_RTL_IF_SET (var);
          rtx out = SA.partition_to_pseudo[part];
          gcc_assert (in == out);

          /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
             those expected by debug backends for each parm and for
             the result.  This is particularly important for stabs,
             whose register elimination from parm's DECL_RTL may cause
             -fcompare-debug differences as SET_DECL_RTL changes reg's
             attrs.  So, make sure the RTL already has the parm as the
             EXPR, so that it won't change.  */
          SET_DECL_RTL (var, NULL_RTX);
          if (MEM_P (in))
            set_mem_attributes (in, var, true);
          SET_DECL_RTL (var, in);
        }
    }
  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();
  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
    stack_protect_prologue ();
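  /* Leaving SSA form: the remaining PHI nodes are turned into copies that
     are queued on their incoming edges; the queued sequences are committed
     into real insns further below.  */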
  expand_phi_nodes (&SA);

  /* Release any stale SSA redirection data.  */
  redirect_edge_var_map_empty ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();
  init_block = construct_init_block ();
  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
    e->flags &= ~EDGE_EXECUTABLE;
  /* If the function has too many markers, drop them while expanding.  */
  if (cfun->debug_marker_count
      >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
    cfun->debug_nonbind_markers = false;
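  /* Expand every basic block's GIMPLE statements into RTL.  lab_rtx_for_bb
     maps blocks to the code labels created for them as jump targets;
     expand_gimple_basic_block may hand back a later block when expansion
     splits or extends the current one, so the loop variable is updated
     from its return value.  */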
  lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
                  next_bb)
    bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
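  /* Debug bind insns were emitted with unexpanded locations; now that all
     real insns exist, expand those deferred locations.  */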
  if (MAY_HAVE_DEBUG_BIND_INSNS)
    expand_debug_locations ();
  if (deep_ter_debug_map)
    {
      delete deep_ter_debug_map;
      deep_ter_debug_map = NULL;
    }
  /* Free stuff we no longer need after GIMPLE optimizations.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  delete_tree_cfg_annotations (fun);
  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);
  timevar_push (TV_POST_EXPAND);

  /* We are no longer in SSA form.  */
  fun->gimple_df->in_ssa_p = false;
  loops_state_clear (LOOP_CLOSED_SSA);

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  delete lab_rtx_for_bb;
  free_histograms (fun);
  construct_exit_block ();
  insn_locations_finalize ();
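  /* If expand_used_vars queued a sequence to run at the function's return
     point, emit it right after the return label, past the basic block note
     if one immediately follows the label.  */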
  if (var_ret_seq)
    {
      rtx_insn *after = return_label;
      rtx_insn *next = NEXT_INSN (after);
      if (next && NOTE_INSN_BASIC_BLOCK_P (next))
        after = next;
      emit_insn_after (var_ret_seq, after);
    }
  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (fun, NULL);
  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
     split edges which edge insertions might do.  */
  rebuild_jump_labels (get_insns ());
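  /* Commit the insn sequences that were queued on edges during expansion
     and PHI elimination.  Sequences queued on the entry edge are placed
     next to the parameter set-up code rather than on a split edge.  */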
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
                  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->insns.r)
            {
              rebuild_jump_labels_chain (e->insns.r);
              /* Put insns after parm birth, but before
                 NOTE_INSN_FUNCTION_BEG.  */
              if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
                  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
                {
                  rtx_insn *insns = e->insns.r;
                  e->insns.r = NULL;
                  if (NOTE_P (parm_birth_insn)
                      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
                    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
                  else
                    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
                }
              else
                commit_one_edge_insertion (e);
            }
          else
            ei_next (&ei);
        }
    }
  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  flush_mark_addressable_queue ();
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
                  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
          e->flags &= ~EDGE_EXECUTABLE;

          /* At the moment not all abnormal edges match the RTL
             representation.  It is safe to remove them here as
             find_many_sub_basic_blocks will rediscover them.
             In the future we should get this fixed properly.  */
          if ((e->flags & EDGE_ABNORMAL)
              && !(e->flags & EDGE_SIBCALL))
            remove_edge (e);
          else
            ei_next (&ei);
        }
    }
  auto_sbitmap blocks (last_basic_block_for_fn (fun));
  bitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();
  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (fun->eh->region_tree != NULL)
    finish_eh_generation ();
  /* Call expand_stack_alignment after finishing all
     updates to crtl->preferred_stack_boundary.  */
  expand_stack_alignment ();
  /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
     function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();
  /* BB subdivision may have created basic blocks that are only reachable
     from unlikely bbs but not marked as such in the profile.  */
  if (optimize)
    propagate_unlikely_bbs_forward ();
  /* Remove unreachable blocks, otherwise we cannot compute dominators
     which are needed for loop state verification.  As a side-effect
     this also compacts blocks.
     ???  We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);

  checking_verify_flow_info ();
  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();
  /* And finally unshare all RTL.  */
  unshare_all_rtl ();
  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;
  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;
6630 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6631 /* And the pass manager will dump RTL for us. */
  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  for (tree parent = DECL_CONTEXT (current_function_decl);
       parent != NULL_TREE;
       parent = get_containing_scope (parent))
    if (TREE_CODE (parent) == FUNCTION_DECL)
      TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  TREE_ASM_WRITTEN (current_function_decl) = 1;
  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
  /* After expanding, the tm_restart map is no longer needed.  */
  if (fun->gimple_df->tm_restart)
    fun->gimple_df->tm_restart = NULL;
  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (fun->decl), 0);
  default_rtl_profile ();
  /* For -dx discard loops now, otherwise IL verify in clean_state will
     ICE.  */
  if (rtl_dump_and_exit)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_finalize ();
    }

  timevar_pop (TV_POST_EXPAND);

  return 0;
}
rtl_opt_pass *
make_pass_expand (gcc::context *ctxt)
{
  return new pass_expand (ctxt);
}