/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "regs.h" /* For reg_renumber.  */
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfgcleanup.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimple-expr.h"
#include "gimple-walk.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "stringpool.h"
#include "tree-ssa-address.h"
#include "tree-chkp.h"
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple *currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);
static bool defer_stack_allocation (tree, bool);
static void record_alignment_for_reg_var (unsigned int);
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple *stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
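/* An illustrative sketch (not live code from the pass): for the GIMPLE
   assignment "a_1 = b_2 + c_3" the RHS class is GIMPLE_BINARY_RHS, so the
   call above amounts to
     t = build2 (PLUS_EXPR, TREE_TYPE (a_1), b_2, c_3);
   whereas for a plain copy "a_1 = b_2" (GIMPLE_SINGLE_RHS) the RHS tree is
   reused directly unless its location or block would have to change.  */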
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
/* Choose either CUR or NEXT as the leader DECL for a partition.
   Prefer ignored decls, to simplify debug dumps and reduce ambiguity
   out of the same user variable being in multiple partitions (this is
   less likely for compiler-introduced temps).  */

static tree
leader_merge (tree cur, tree next)
{
  if (cur == NULL || cur == next)
    return next;

  if (DECL_P (cur) && DECL_IGNORED_P (cur))
    return cur;

  if (DECL_P (next) && DECL_IGNORED_P (next))
    return next;

  return cur;
}
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */

static inline void
set_rtl (tree t, rtx x)
{
  gcc_checking_assert (!x
                       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
                       || (use_register_for_decl (t)
                           ? (REG_P (x)
                              || (GET_CODE (x) == CONCAT
                                  && (REG_P (XEXP (x, 0))
                                      || SUBREG_P (XEXP (x, 0)))
                                  && (REG_P (XEXP (x, 1))
                                      || SUBREG_P (XEXP (x, 1))))
                              /* We need to accept PARALLELs for RESULT_DECLs
                                 because of vector types with BLKmode returned
                                 in multiple registers, but they are supposed
                                 to be uncoalesced.  */
                              || (GET_CODE (x) == PARALLEL
                                  && SSAVAR (t)
                                  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                                  && (GET_MODE (x) == BLKmode
                                      || !flag_tree_coalesce_vars)))
                           : (MEM_P (x) || x == pc_rtx
                              || (GET_CODE (x) == CONCAT
                                  && MEM_P (XEXP (x, 0))
                                  && MEM_P (XEXP (x, 1))))));
  /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
     RESULT_DECLs has the expected mode.  For memory, we accept
     unpromoted modes, since that's what we're likely to get.  For
     PARM_DECLs and RESULT_DECLs, we'll have been called by
     set_parm_rtl, which will give us the default def, so we don't
     have to compute it ourselves.  For RESULT_DECLs, we accept mode
     mismatches too, as long as we have BLKmode or are not coalescing
     across variables, so that we don't reject BLKmode PARALLELs or
     unpromoted REGs.  */
  gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
                       || (SSAVAR (t)
                           && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                           && (promote_ssa_mode (t, NULL) == BLKmode
                               || !flag_tree_coalesce_vars))
                       || !use_register_for_decl (t)
                       || GET_MODE (x) == promote_ssa_mode (t, NULL));

  if (x)
    {
      bool skip = false;
      tree cur = NULL_TREE;
      rtx xm = x;

    retry:
      if (MEM_P (xm))
        cur = MEM_EXPR (xm);
      else if (REG_P (xm))
        cur = REG_EXPR (xm);
      else if (SUBREG_P (xm))
        {
          gcc_assert (subreg_lowpart_p (xm));
          xm = SUBREG_REG (xm);
          goto retry;
        }
      else if (GET_CODE (xm) == CONCAT)
        {
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (GET_CODE (xm) == PARALLEL)
        {
          xm = XVECEXP (xm, 0, 0);
          gcc_assert (GET_CODE (xm) == EXPR_LIST);
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (xm == pc_rtx)
        skip = true;
      else
        gcc_unreachable ();

      tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);

      if (cur != next)
        {
          if (MEM_P (x))
            set_mem_attributes (x,
                                next && TREE_CODE (next) == SSA_NAME
                                ? TREE_TYPE (next)
                                : next, true);
          else
            set_reg_attrs_for_decl_rtl (next, x);
        }
    }

  if (TREE_CODE (t) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, t);
      if (part != NO_PARTITION)
        {
          if (SA.partition_to_pseudo[part])
            gcc_assert (SA.partition_to_pseudo[part] == x);
          else if (x != pc_rtx)
            SA.partition_to_pseudo[part] = x;
        }
      /* For the benefit of debug information at -O0 (where
         vartracking doesn't run) record the place also in the base
         DECL.  For PARMs and RESULTs, do so only when setting the
         default def.  */
      if (x && x != pc_rtx && SSA_NAME_VAR (t)
          && (VAR_P (SSA_NAME_VAR (t))
              || SSA_NAME_IS_DEFAULT_DEF (t)))
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
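/* A note for readers: throughout this file pc_rtx serves as an out-of-band
   marker rather than a real program-counter rtx.  set_rtl (decl, pc_rtx) in
   add_stack_var means "queued for stack partitioning", while DECL_RTL ==
   pc_rtx on a base variable means "lives in multiple places" for debug
   purposes.  */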
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align;

  if (TREE_CODE (decl) == SSA_NAME)
    align = TYPE_ALIGN (TREE_TYPE (decl));
  else
    {
      align = LOCAL_DECL_ALIGNMENT (decl);
      SET_DECL_ALIGN (decl, align);
    }
  return align / BITS_PER_UNIT;
}
/* Align given offset BASE with ALIGN.  Round up if ALIGN_UP is true,
   round down otherwise.  Return the rounded BASE value.  */

static inline unsigned HOST_WIDE_INT
align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
{
  return align_up ? (base + align - 1) & -align : base & -align;
}
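/* Worked example (ALIGN is a power of two, as all callers guarantee, so
   -align in unsigned arithmetic is the mask clearing the low-order bits):
     align_base (13, 8, true)  == 16
     align_base (13, 8, false) == 8.  */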
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset
        = align_base (frame_offset - frame_phase - size,
                      align, false) + frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset
        = align_base (frame_offset - frame_phase, align, true) + frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
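/* Illustration with made-up numbers: on a FRAME_GROWS_DOWNWARD target with
   frame_phase 0 and frame_offset -20, allocating 8 bytes at alignment 8
   rounds -28 down to -32, so the new object occupies [-32, -24) and
   frame_offset becomes -32.  */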
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  tree size = TREE_CODE (decl) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
    : DECL_SIZE_UNIT (decl);
  v->size = tree_to_uhwi (size);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (decl);
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}
/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}
/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (!VAR_P (lhs))
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = decl_to_stack_part->get (lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}
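/* To restate the scheme: WORK tracks which partitions are currently live.
   Until the first real (non-debug, non-clobber) statement, partitions are
   only recorded as live via visit_op; from that statement on, visit_conflict
   makes every newly touched partition conflict with everything live.  A
   gimplifier-added clobber ends a variable's range by clearing its bit.  */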
/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we for instance moved an address-taken
     operation upward, without also moving a dereference to it upwards.
     But it's conservatively correct as a variable never can hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}
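/* This is the classical iterative dataflow scheme: visiting blocks in
   reverse post-order, each block's live-out bitmap (stored in bb->aux) is
   recomputed from its predecessors until a fixed point is reached, after
   which one final pass over all blocks actually records the conflicts.  */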
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return 1;
  if (aligna > alignb)
    return -1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
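/* Consequence of this ordering: "large"-aligned objects come first, then
   e.g. a 64-byte array sorts before a 16-byte one, so the binpacking loop
   in partition_stack_vars always tries to place smaller objects into
   partitions led by larger ones.  */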
struct part_traits : unbounded_int_hashmap_traits<size_t, bitmap> {};

typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               part_hashmap *decls_to_partitions,
                               hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = new part_hashmap;
          cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          decls_to_partitions->put (uid, part);
          cfun->gimple_df->decls_to_pointers->put (decl, name);
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      tree name;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      FOR_EACH_SSA_NAME (i, name, cfun)
        {
          struct ptr_info_def *pi;

          if (POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           &visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
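/* Chain mechanics, illustrated: if partition A already contains X
   (A -> X -> EOC), union_stack_vars (A, B) splices B in to give
   A -> B -> X -> EOC and points B's representative at A.  B's recorded
   conflicts are re-registered against A, so later stack_var_conflict_p
   queries via representatives stay correct.  */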
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          HOST_WIDE_INT jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if ((asan_sanitize_stack_p ())
              && isize != jsize
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
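/* Worked example (sizes in bytes, no conflicts anywhere): given objects of
   sizes 64, 24, 16 and 8, the outer loop starts at the 64-byte object and
   unions the other three into it.  All four then share one stack location
   (members of a partition are never simultaneously live), using 64 bytes of
   frame instead of 112.  Conflicting objects are skipped and keep their own
   partitions.  */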
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
                   ? TYPE_MODE (TREE_TYPE (decl))
                   : DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = least_bit_hwi (offset);
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      SET_DECL_ALIGN (decl, align);
      DECL_USER_ALIGN (decl) = 0;
    }

  set_rtl (decl, x);
}
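/* Illustration: least_bit_hwi extracts the largest power of two dividing
   OFFSET, so a decl placed at offset -48 from the frame base is recorded
   as (at most) 16-byte aligned (least_bit_hwi (-48) == 16), capped by
   BASE_ALIGN.  */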
struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reverse order: highest-offset pairs come first.  */
  auto_vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  auto_vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  bool large_allocation_done = false;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* All "large" alignment decls come before all "small" alignment
             decls, but "large" alignment decls are not sorted based on
             their alignment.  Increase large_align to track the largest
             required alignment.  */
          if ((alignb * BITS_PER_UNIT) > large_align)
            large_align = alignb * BITS_PER_UNIT;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if (TREE_CODE (decl) == SSA_NAME
              ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
                != NULL_RTX
              : DECL_RTL (decl) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if (TREE_CODE (decl) == SSA_NAME
          ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
            != NULL_RTX
          : DECL_RTL (decl) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          base = virtual_stack_vars_rtx;
          if ((asan_sanitize_stack_p ())
              && pred)
            {
              HOST_WIDE_INT prev_offset
                = align_base (frame_offset,
                              MAX (alignb, ASAN_RED_ZONE_SIZE),
                              !FRAME_GROWS_DOWNWARD);
              tree repr_decl = NULL_TREE;
              offset
                = alloc_stack_frame_space (stack_vars[i].size
                                           + ASAN_RED_ZONE_SIZE,
                                           MAX (alignb, ASAN_RED_ZONE_SIZE));

              data->asan_vec.safe_push (prev_offset);
              data->asan_vec.safe_push (offset + stack_vars[i].size);
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);
              data->asan_alignb = MAX (data->asan_alignb, alignb);
              if (data->asan_base == NULL)
                data->asan_base = gen_reg_rtx (Pmode);
              base = data->asan_base;

              if (!STRICT_ALIGNMENT)
                base_align = crtl->max_used_stack_slot_alignment;
              else
                base_align = MAX (crtl->max_used_stack_slot_alignment,
                                  GET_MODE_ALIGNMENT (SImode)
                                  << ASAN_SHADOW_SHIFT);
            }
          else
            {
              offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
              base_align = crtl->max_used_stack_slot_alignment;
            }
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;

          /* If there were any variables requiring "large" alignment, allocate
             space.  */
          if (large_size > 0 && ! large_allocation_done)
            {
              HOST_WIDE_INT loffset;
              rtx large_allocsize;

              large_allocsize = GEN_INT (large_size);
              get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
              loffset = alloc_stack_frame_space
                (INTVAL (large_allocsize),
                 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
              large_base = get_dynamic_stack_base (loffset, large_align);
              large_allocation_done = true;
            }
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        expand_one_stack_var_at (stack_vars[j].decl,
                                 base, base_align,
                                 offset);
    }

  gcc_assert (large_alloc == large_size);
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */

static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* Record the RTL assignment X for the default def of PARM.  */

void
set_parm_rtl (tree parm, rtx x)
{
  gcc_assert (TREE_CODE (parm) == PARM_DECL
              || TREE_CODE (parm) == RESULT_DECL);

  if (x && !MEM_P (x))
    {
      unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
                                              TYPE_MODE (TREE_TYPE (parm)),
                                              TYPE_ALIGN (TREE_TYPE (parm)));

      /* If the variable alignment is very large we'll dynamically
         allocate it, which means that in-frame portion is just a
         pointer.  ??? We've got a pseudo for sure here, do we
         actually dynamically allocate its spilling area if needed?
         ??? Isn't it a problem when POINTER_SIZE also exceeds
         MAX_SUPPORTED_STACK_ALIGNMENT, as on cris and lm32?  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;

      record_alignment_for_reg_var (align);
    }

  tree ssa = ssa_default_def (cfun, parm);
  if (!ssa)
    return set_rtl (parm, x);

  int part = var_to_partition (SA.map, ssa);
  gcc_assert (part != NO_PARTITION);

  bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
  gcc_assert (changed);

  set_rtl (ssa, x);
  gcc_assert (DECL_RTL (parm) == x);
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var_1 (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  if (TREE_CODE (var) == SSA_NAME)
    {
      tree type = TREE_TYPE (var);
      size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
      byte_align = TYPE_ALIGN_UNIT (type);
    }
  else
    {
      size = tree_to_uhwi (DECL_SIZE_UNIT (var));
      byte_align = align_local_variable (var);
    }

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}
/* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
   already assigned some MEM.  */

static void
expand_one_stack_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (MEM_P (x));
          return;
        }
    }

  return expand_one_stack_var_1 (var);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* Record the alignment requirements of some variable assigned to a
   pseudo register.  */

static void
record_alignment_for_reg_var (unsigned int align)
{
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;
}
/* Create RTL for an SSA partition.  */

static void
expand_one_ssa_partition (tree var)
{
  int part = var_to_partition (SA.map, var);
  gcc_assert (part != NO_PARTITION);

  if (SA.partition_to_pseudo[part])
    return;

  unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                          TYPE_MODE (TREE_TYPE (var)),
                                          TYPE_ALIGN (TREE_TYPE (var)));

  /* If the variable alignment is very large we'll dynamically allocate
     it, which means that in-frame portion is just a pointer.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = POINTER_SIZE;

  record_alignment_for_reg_var (align);

  if (!use_register_for_decl (var))
    {
      if (defer_stack_allocation (var, true))
        add_stack_var (var);
      else
        expand_one_stack_var_1 (var);
      return;
    }

  machine_mode reg_mode = promote_ssa_mode (var, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* For a promoted variable, X will not be used directly but wrapped in a
     SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
     will assume that its upper bits can be inferred from its lower bits.
     Therefore, if X isn't initialized on every path from the entry, then
     we must do it manually in order to fulfill the above assumption.  */
  if (reg_mode != TYPE_MODE (TREE_TYPE (var))
      && bitmap_bit_p (SA.partitions_for_undefined_values, part))
    emit_move_insn (x, CONST0_RTX (reg_mode));
}
/* Record the association between the RTL generated for partition PART
   and the underlying variable of the SSA_NAME VAR.  */

static void
adjust_one_expanded_partition_var (tree var)
{
  if (!var)
    return;

  tree decl = SSA_NAME_VAR (var);

  int part = var_to_partition (SA.map, var);
  if (part == NO_PARTITION)
    return;

  rtx x = SA.partition_to_pseudo[part];

  set_rtl (var, x);

  if (!REG_P (x))
    return;

  /* Note if the object is a user variable.  */
  if (decl && !DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (REG_P (x));
          return;
        }
      gcc_unreachable ();
    }

  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  tree size_unit = TREE_CODE (var) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (var))
    : DECL_SIZE_UNIT (var);

  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = ((HOST_WIDE_INT) tree_to_uhwi (size_unit)
       < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || asan_sanitize_stack_p ())
    return true;

  unsigned int align = TREE_CODE (var) == SSA_NAME
    ? TYPE_ALIGN (TREE_TYPE (var))
    : DECL_ALIGN (var);

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  bool ignored = TREE_CODE (var) == SSA_NAME
    ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
    : DECL_IGNORED_P (var);

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && ignored && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}
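/* For instance, at -O0 a small scalar below the min-size-for-stack-sharing
   threshold is allocated immediately, while under -fstack-protector or
   ASan every local is deferred so that vulnerable buffers can later be
   regrouped next to the guard or given red zones.  */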
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
    {
      if (is_global_var (var))
        return 0;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  Similarly for
         SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
          || DECL_EXTERNAL (var)
          || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  record_alignment_for_reg_var (align);

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (!VAR_P (var)
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (VAR_P (var) && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        {
          expand_one_hard_reg_var (var);
          if (!DECL_HARD_REGISTER (var))
            /* Invalid register specification.  */
            expand_one_error_var (var);
        }
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        {
          if (lookup_attribute ("naked",
                                DECL_ATTRIBUTES (current_function_decl)))
            error ("cannot allocate stack for variable %q+D, naked function.",
                   var);

          expand_one_stack_var (origvar);
        }
      return tree_to_uhwi (DECL_SIZE_UNIT (var));
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3,
  SPCT_FLAG_EXPLICIT = 4
};
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
            len = max;
          else
            len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
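/* Example classifications, assuming the default --param ssp-buffer-size
   of 8: "char buf[4]" yields SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY,
   while a struct containing "char buf[64]" yields SPCT_HAS_AGGREGATE
   | SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY.  */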
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG
      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
          && lookup_attribute ("stack_protect",
                               DECL_ATTRIBUTES (current_function_decl))))
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}
/* And helper function that checks for asan phase (with stack protector
   it is phase 3).  This is used as callback for expand_stack_vars.
   Returns true if any of the vars in the partition need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
        return true;
      i = stack_vars[i].next;
    }
  return false;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
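/* The intended frame picture (schematically; the guard is allocated first,
   at the top of the frame):

	guard (canary)
	phase 1: plain character arrays      <- closest to the guard
	phase 2: other arrays and aggregates
	phase 0: everything else             <- farthest from the guard

   so a linear overflow out of a char buffer reaches the guard before any
   scalars or pointers.  */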
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = new hash_map<tree, size_t>;

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}
/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  delete decl_to_stack_part;
  decl_to_stack_part = NULL;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return size;
}
/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
        tree field_type = TREE_TYPE (f);
        if (RECORD_OR_UNION_TYPE_P (field_type)
            && record_or_union_type_has_array_p (field_type))
          return 1;
        if (TREE_CODE (field_type) == ARRAY_TYPE)
          return 1;
      }
  return 0;
}
/* Check if the current function has local referenced variables that
   have their addresses taken, contain an array, or are arrays.  */

static bool
stack_protect_decl_p ()
{
  unsigned i;
  tree var;

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    if (!is_global_var (var))
      {
        tree var_type = TREE_TYPE (var);
        if (VAR_P (var)
            && (TREE_CODE (var_type) == ARRAY_TYPE
                || TREE_ADDRESSABLE (var)
                || (RECORD_OR_UNION_TYPE_P (var_type)
                    && record_or_union_type_has_array_p (var_type))))
          return true;
      }
  return false;
}
/* Check if the current function has calls that use a return slot.  */

static bool
stack_protect_return_slot_p ()
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
         !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple *stmt = gsi_stmt (gsi);
        /* This assumes that calls to internal-only functions never
           use a return slot.  */
        if (is_gimple_call (stmt)
            && !gimple_call_internal_p (stmt)
            && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
                                  gimple_call_fndecl (stmt)))
          return true;
      }
  return false;
}
2021 /* Expand all variables used in the function. */
2024 expand_used_vars (void)
2026 tree var
, outer_block
= DECL_INITIAL (current_function_decl
);
2027 auto_vec
<tree
> maybe_local_decls
;
2028 rtx_insn
*var_end_seq
= NULL
;
2031 bool gen_stack_protect_signal
= false;
2033 /* Compute the phase of the stack frame for this function. */
2035 int align
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
2036 int off
= targetm
.starting_frame_offset () % align
;
2037 frame_phase
= off
? align
- off
: 0;
2040 /* Set TREE_USED on all variables in the local_decls. */
2041 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
2042 TREE_USED (var
) = 1;
2043 /* Clear TREE_USED on all variables associated with a block scope. */
2044 clear_tree_used (DECL_INITIAL (current_function_decl
));
2046 init_vars_expansion ();
2048 if (targetm
.use_pseudo_pic_reg ())
2049 pic_offset_table_rtx
= gen_reg_rtx (Pmode
);
2051 for (i
= 0; i
< SA
.map
->num_partitions
; i
++)
2053 if (bitmap_bit_p (SA
.partitions_for_parm_default_defs
, i
))
2056 tree var
= partition_to_var (SA
.map
, i
);
2058 gcc_assert (!virtual_operand_p (var
));
2060 expand_one_ssa_partition (var
);
2063 if (flag_stack_protect
== SPCT_FLAG_STRONG
)
2064 gen_stack_protect_signal
2065 = stack_protect_decl_p () || stack_protect_return_slot_p ();
2067 /* At this point all variables on the local_decls with TREE_USED
2068 set are not associated with any block scope. Lay them out. */
2070 len
= vec_safe_length (cfun
->local_decls
);
2071 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
2073 bool expand_now
= false;
2075 /* Expanded above already. */
2076 if (is_gimple_reg (var
))
2078 TREE_USED (var
) = 0;
2081 /* We didn't set a block for static or extern because it's hard
2082 to tell the difference between a global variable (re)declared
2083 in a local scope, and one that's really declared there to
2084 begin with. And it doesn't really matter much, since we're
2085 not giving them stack space. Expand them now. */
2086 else if (TREE_STATIC (var
) || DECL_EXTERNAL (var
))
2089 /* Expand variables not associated with any block now. Those created by
2090 the optimizers could be live anywhere in the function. Those that
2091 could possibly have been scoped originally and detached from their
2092 block will have their allocation deferred so we coalesce them with
2093 others when optimization is enabled. */
2094 else if (TREE_USED (var
))
2097 /* Finally, mark all variables on the list as used. We'll use
2098 this in a moment when we expand those associated with scopes. */
2099 TREE_USED (var
) = 1;
2102 expand_one_var (var
, true, true);
2105 if (DECL_ARTIFICIAL (var
) && !DECL_IGNORED_P (var
))
2107 rtx rtl
= DECL_RTL_IF_SET (var
);
2109 /* Keep artificial non-ignored vars in cfun->local_decls
2110 chain until instantiate_decls. */
2111 if (rtl
&& (MEM_P (rtl
) || GET_CODE (rtl
) == CONCAT
))
2112 add_local_decl (cfun
, var
);
2113 else if (rtl
== NULL_RTX
)
2114 /* If rtl isn't set yet, which can happen e.g. with
2115 -fstack-protector, retry before returning from this
2117 maybe_local_decls
.safe_push (var
);
2121 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2123 +-----------------+-----------------+
2124 | ...processed... | ...duplicates...|
2125 +-----------------+-----------------+
2127 +-- LEN points here.
2129 We just want the duplicates, as those are the artificial
2130 non-ignored vars that we want to keep until instantiate_decls.
2131 Move them down and truncate the array. */
2132 if (!vec_safe_is_empty (cfun
->local_decls
))
2133 cfun
->local_decls
->block_remove (0, len
);
  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect != 0
          && (flag_stack_protect != SPCT_FLAG_EXPLICIT
              || (flag_stack_protect == SPCT_FLAG_EXPLICIT
                  && lookup_attribute ("stack_protect",
                                       DECL_ATTRIBUTES (current_function_decl)))))
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }
  switch (flag_stack_protect)
    {
    case SPCT_FLAG_ALL:
      create_stack_guard ();
      break;

    case SPCT_FLAG_STRONG:
      if (gen_stack_protect_signal
          || cfun->calls_alloca || has_protected_decls
          || lookup_attribute ("stack_protect",
                               DECL_ATTRIBUTES (current_function_decl)))
        create_stack_guard ();
      break;

    case SPCT_FLAG_DEFAULT:
      if (cfun->calls_alloca || has_protected_decls
          || lookup_attribute ("stack_protect",
                               DECL_ATTRIBUTES (current_function_decl)))
        create_stack_guard ();
      break;

    case SPCT_FLAG_EXPLICIT:
      if (lookup_attribute ("stack_protect",
                            DECL_ATTRIBUTES (current_function_decl)))
        create_stack_guard ();
      break;

    default:
      break;
    }
  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      struct stack_vars_data data;

      data.asan_base = NULL_RTX;
      data.asan_alignb = 0;

      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1, &data);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == SPCT_FLAG_ALL
              || flag_stack_protect == SPCT_FLAG_STRONG
              || (flag_stack_protect == SPCT_FLAG_EXPLICIT
                  && lookup_attribute ("stack_protect",
                                       DECL_ATTRIBUTES (current_function_decl))))
            expand_stack_vars (stack_protect_decl_phase_2, &data);
        }

      if (asan_sanitize_stack_p ())
        /* Phase 3, any partitions that need asan protection
           in addition to phase 1 and 2.  */
        expand_stack_vars (asan_decl_phase_3, &data);
      if (!data.asan_vec.is_empty ())
        {
          HOST_WIDE_INT prev_offset = frame_offset;
          HOST_WIDE_INT offset, sz, redzonesz;
          redzonesz = ASAN_RED_ZONE_SIZE;
          sz = data.asan_vec[0] - prev_offset;
          if (data.asan_alignb > ASAN_RED_ZONE_SIZE
              && data.asan_alignb <= 4096
              && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
            redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
                         & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
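          /* For example (illustrative values only), with sz = 40, an
             ASAN_RED_ZONE_SIZE of 32 and data.asan_alignb = 64 this computes
             ((40 + 32 + 63) & ~63) - 40 = 128 - 40 = 88, i.e. the red zone
             grows so that the next object starts on a 64-byte boundary.  */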
          offset
            = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
          data.asan_vec.safe_push (prev_offset);
          data.asan_vec.safe_push (offset);
          /* Leave space for alignment if STRICT_ALIGNMENT.  */
          if (STRICT_ALIGNMENT)
            alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
                                      << ASAN_SHADOW_SHIFT)
                                     / BITS_PER_UNIT, 1);

          var_end_seq
            = asan_emit_stack_protection (virtual_stack_vars_rtx,
                                          data.asan_base,
                                          data.asan_alignb,
                                          data.asan_vec.address (),
                                          data.asan_decl_vec.address (),
                                          data.asan_vec.length ());
        }

      expand_stack_vars (NULL, &data);
    }
  if (asan_sanitize_allocas_p () && cfun->calls_alloca)
    var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
                                              virtual_stack_vars_rtx,
                                              var_end_seq);

  fini_vars_expansion ();

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }

  return var_end_seq;
}
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
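/* For example (illustrative only; exact insn text varies by target), a
   detailed dump entry produced here for "a_2 = b_1 + 1;" looks roughly like:

     ;; a_2 = b_1 + 1;
     (insn 7 6 0 (set (reg:SI 91 [ a ])
             (plus:SI (reg:SI 92 [ b ])
                 (const_int 1))) -1 (nil))  */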
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx_code_label *
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);
  if (elt)
    return *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      glabel *lab_stmt;

      lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!lab_stmt)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return jump_target_rtx (lab);
    }

  rtx_code_label *l = gen_label_rtx ();
  lab_rtx_for_bb->put (bb, l);
  return l;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx_insn *last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx_insn *insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            {
              if (!any_condjump_p (NEXT_INSN (insn)))
                {
                  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
                  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
                }
              delete_insn (NEXT_INSN (insn));
            }
        }
    }
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gcond *stmt)
{
  basic_block new_bb, dest;
  edge true_edge;
  edge false_edge;
  rtx_insn *last2, *last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (SA.values
      && TREE_CODE (op0) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
      && TREE_CODE (op1) == INTEGER_CST
      && ((gimple_cond_code (stmt) == NE_EXPR
           && integer_zerop (op1))
          || (gimple_cond_code (stmt) == EQ_EXPR
              && integer_onep (op1)))
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple *second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
        {
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
            {
              code = code2;
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
            }
          /* If jumps are cheap and the target does not support conditional
             compare, turn some more codes into jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
                   && targetm.gen_ccmp_first == NULL)
            {
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                {
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                {
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
            }
        }
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  set_curr_insn_location (gimple_location (stmt));

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
                true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus != UNKNOWN_LOCATION)
        set_curr_insn_location (true_edge->goto_locus);
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus != UNKNOWN_LOCATION)
        set_curr_insn_location (false_edge->goto_locus);
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus != UNKNOWN_LOCATION)
    set_curr_insn_location (false_edge->goto_locus);
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count ();
  loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
  add_bb_to_loop (new_bb, loop);
  if (loop->latch == bb
      && loop->header == dest)
    loop->latch = new_bb;
  make_single_succ_edge (new_bb, dest, 0);
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus != UNKNOWN_LOCATION)
    {
      set_curr_insn_location (true_edge->goto_locus);
      true_edge->goto_locus = curr_insn_location ();
    }

  return new_bb;
}
/* Mark all calls that can have a transaction restart.  */

static void
mark_transaction_restart_calls (gimple *stmt)
{
  struct tm_restart_node dummy;
  tm_restart_node **slot;

  if (!cfun->gimple_df->tm_restart)
    return;

  dummy.stmt = stmt;
  slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
  if (slot)
    {
      struct tm_restart_node *n = *slot;
      tree list = n->label_or_list;
      rtx_insn *insn;

      for (insn = next_real_insn (get_last_insn ());
           !CALL_P (insn);
           insn = next_real_insn (insn))
        continue;

      if (TREE_CODE (list) == LABEL_DECL)
        add_reg_note (insn, REG_TM, label_rtx (list));
      else
        for (; list ; list = TREE_CHAIN (list))
          add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
    }
}
/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

static void
expand_call_stmt (gcall *stmt)
{
  tree exp, decl, lhs;
  bool builtin_p;
  size_t i;

  if (gimple_call_internal_p (stmt))
    {
      expand_internal_call (stmt);
      return;
    }

  /* If this is a call to a built-in function and it has no effect other
     than setting the lhs, try to implement it using an internal function
     instead.  */
  decl = gimple_call_fndecl (stmt);
  if (gimple_call_lhs (stmt)
      && !gimple_has_side_effects (stmt)
      && (optimize || (decl && called_as_built_in (decl))))
    {
      internal_fn ifn = replacement_internal_fn (stmt);
      if (ifn != IFN_LAST)
        {
          expand_internal_call (ifn, stmt);
          return;
        }
    }

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  builtin_p = decl && DECL_BUILT_IN (decl);

  /* If this is not a builtin function, the function type through which the
     call is made may be different from the type of the function.  */
  if (!builtin_p)
    CALL_EXPR_FN (exp)
      = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
                      CALL_EXPR_FN (exp));

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      gimple *def;
      /* TER addresses into arguments of builtin functions so we have a
         chance to infer more correct alignment information.  See PR39954.  */
      if (builtin_p
          && TREE_CODE (arg) == SSA_NAME
          && (def = get_gimple_for_ssa_name (arg))
          && gimple_assign_rhs_code (def) == ADDR_EXPR)
        arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;
    }

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  if (gimple_no_warning_p (stmt))
    TREE_NO_WARNING (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  if (decl
      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
    CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
  CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));
  CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);

  /* Ensure RTL is created for debug args.  */
  if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
    {
      vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
      unsigned int ix;
      tree dtemp;

      if (debug_args)
        for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
          {
            gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
            expand_debug_expr (dtemp);
          }
    }

  rtx_insn *before_call = get_last_insn ();
  lhs = gimple_call_lhs (stmt);
  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* If the gimple call is an indirect call and has 'nocf_check'
     attribute find a generated CALL insn to mark it as no
     control-flow verification is needed.  */
  if (gimple_call_nocf_check_p (stmt)
      && !gimple_call_fndecl (stmt))
    {
      rtx_insn *last = get_last_insn ();
      while (!CALL_P (last)
             && last != before_call)
        last = PREV_INSN (last);

      if (last != before_call)
        add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
    }

  mark_transaction_restart_calls (stmt);
}
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

static void
expand_asm_loc (tree string, int vol, location_t locus)
{
  rtx body;

  body = gen_rtx_ASM_INPUT_loc (VOIDmode,
                                ggc_strdup (TREE_STRING_POINTER (string)),
                                locus);

  MEM_VOLATILE_P (body) = vol;

  /* Non-empty basic ASM implicitly clobbers memory.  */
  if (TREE_STRING_LENGTH (string) != 0)
    {
      rtx asm_op, clob;
      unsigned i, nclobbers;
      auto_vec<rtx> input_rvec, output_rvec;
      auto_vec<const char *> constraints;
      auto_vec<rtx> clobber_rvec;
      HARD_REG_SET clobbered_regs;
      CLEAR_HARD_REG_SET (clobbered_regs);

      clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
      clobber_rvec.safe_push (clob);

      if (targetm.md_asm_adjust)
        targetm.md_asm_adjust (output_rvec, input_rvec,
                               constraints, clobber_rvec,
                               clobbered_regs);

      asm_op = body;
      nclobbers = clobber_rvec.length ();
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));

      XVECEXP (body, 0, 0) = asm_op;
      for (i = 0; i < nclobbers; i++)
        XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
    }

  emit_insn (body);
}
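/* For example (illustrative), a non-empty basic asm such as

     asm volatile ("nop");

   is emitted as an ASM_INPUT wrapped, together with a memory-scratch
   CLOBBER, in a PARALLEL, since non-empty basic asms are assumed to
   clobber memory.  */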
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}

/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (const vec<const char *> &constraints)
{
  unsigned len = constraints.length();
  if (len > 0)
    {
      int nalternatives = n_occurrences (',', constraints[0]);

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
        {
          error ("too many alternatives in %<asm%>");
          return false;
        }

      for (unsigned i = 1; i < len; ++i)
        if (n_occurrences (',', constraints[i]) != nalternatives)
          {
            error ("operand constraints for %<asm%> differ "
                   "in number of alternatives");
            return false;
          }
    }
  return true;
}
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in T.  Emit an error and return true for error,
   false for ok.  */

static bool
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);

  if (overlap)
    {
      error ("asm-specifier for variable %qE conflicts with asm clobber list",
             DECL_NAME (overlap));

      /* Reset registerness to stop multiple errors emitted for a single
         variable.  */
      DECL_REGISTER (overlap) = 0;
      return true;
    }

  return false;
}
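/* For example (illustrative, register name is target-specific):

     register int x asm ("r10");
     asm ("..." : "+r" (x) : : "r10");

   is rejected here because the asm-declared register variable overlaps
   the clobber list.  */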
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_VALUE.

   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
   should be the fallthru basic block of the asm goto.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

static void
expand_asm_stmt (gasm *stmt)
{
  class save_input_location
  {
    location_t old;

  public:
    explicit save_input_location(location_t where)
    {
      old = input_location;
      input_location = where;
    }

    ~save_input_location()
    {
      input_location = old;
    }
  };

  location_t locus = gimple_location (stmt);

  if (gimple_asm_input_p (stmt))
    {
      const char *s = gimple_asm_string (stmt);
      tree string = build_string (strlen (s), s);
      expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
      return;
    }

  /* There are some legacy diagnostics in here.  Saving the location this
     way also avoids a sixth parameter to targetm.md_asm_adjust.  */
  save_input_location s_i_l(locus);
  unsigned noutputs = gimple_asm_noutputs (stmt);
  unsigned ninputs = gimple_asm_ninputs (stmt);
  unsigned nlabels = gimple_asm_nlabels (stmt);
  unsigned i;

  /* ??? Diagnose during gimplification?  */
  if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
      return;
    }

  auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
  auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
  auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;

  /* Copy the gimple vectors into new vectors that we can manipulate.  */

  output_tvec.safe_grow (noutputs);
  input_tvec.safe_grow (ninputs);
  constraints.safe_grow (noutputs + ninputs);

  for (i = 0; i < noutputs; ++i)
    {
      tree t = gimple_asm_output_op (stmt, i);
      output_tvec[i] = TREE_VALUE (t);
      constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
    }
  for (i = 0; i < ninputs; i++)
    {
      tree t = gimple_asm_input_op (stmt, i);
      input_tvec[i] = TREE_VALUE (t);
      constraints[i + noutputs]
        = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
    }

  /* ??? Diagnose during gimplification?  */
  if (! check_operand_nalternatives (constraints))
    return;

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  auto_vec<rtx> clobber_rvec;
  HARD_REG_SET clobbered_regs;
  CLEAR_HARD_REG_SET (clobbered_regs);

  if (unsigned n = gimple_asm_nclobbers (stmt))
    {
      clobber_rvec.reserve (n);
      for (i = 0; i < n; i++)
        {
          tree t = gimple_asm_clobber_op (stmt, i);
          const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
          int nregs, j;

          j = decode_reg_name_and_count (regname, &nregs);
          if (j < 0)
            {
              if (j == -2)
                {
                  /* ??? Diagnose during gimplification?  */
                  error ("unknown register name %qs in %<asm%>", regname);
                }
              else if (j == -4)
                {
                  rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
                  clobber_rvec.safe_push (x);
                }
              else
                {
                  /* Otherwise we should have -1 == empty string
                     or -3 == cc, which is not a register.  */
                  gcc_assert (j == -1 || j == -3);
                }
            }
          else
            for (int reg = j; reg < j + nregs; reg++)
              {
                /* Clobbering the PIC register is an error.  */
                if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
                  {
                    /* ??? Diagnose during gimplification?  */
                    error ("PIC register clobbered by %qs in %<asm%>",
                           regname);
                    return;
                  }

                SET_HARD_REG_BIT (clobbered_regs, reg);
                rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
                clobber_rvec.safe_push (x);
              }
        }
    }
  unsigned nclobbers = clobber_rvec.length();
  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */
  /* ??? Diagnose during gimplification?  */

  for (i = 0; i < noutputs; ++i)
    {
      tree val = output_tvec[i];
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* Try to parse the output constraint.  If that fails, there's
         no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
                                    &allows_mem, &allows_reg, &is_inout))
        return;

      if (! allows_reg
          && (allows_mem
              || is_inout
              || (DECL_P (val)
                  && REG_P (DECL_RTL (val))
                  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
        mark_addressable (val);
    }

  for (i = 0; i < ninputs; ++i)
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
                                    constraints.address (),
                                    &allows_mem, &allows_reg))
        return;

      if (! allows_reg && allows_mem)
        mark_addressable (input_tvec[i]);
    }
  /* Second pass evaluates arguments.  */

  /* Make sure stack is consistent for asm goto.  */
  if (nlabels > 0)
    do_pending_stack_adjust ();
  int old_generating_concat_p = generating_concat_p;

  /* Vector of RTX's of evaluated output operands.  */
  auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
  auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
  rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;

  output_rvec.safe_grow (noutputs);

  for (i = 0; i < noutputs; ++i)
    {
      tree val = output_tvec[i];
      tree type = TREE_TYPE (val);
      bool is_inout, allows_reg, allows_mem, ok;
      rtx op;

      ok = parse_output_constraint (&constraints[i], i, ninputs,
                                    noutputs, &allows_mem, &allows_reg,
                                    &is_inout);
      gcc_assert (ok);

      /* If an output operand is not a decl or indirect ref and our constraint
         allows a register, make a temporary to act as an intermediate.
         Make the asm insn write into that, then we will copy it to
         the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      if ((TREE_CODE (val) == INDIRECT_REF
           && allows_mem)
          || (DECL_P (val)
              && (allows_mem || REG_P (DECL_RTL (val)))
              && ! (REG_P (DECL_RTL (val))
                    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
          || ! allows_reg
          || is_inout
          || TREE_ADDRESSABLE (type))
        {
          op = expand_expr (val, NULL_RTX, VOIDmode,
                            !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
          if (MEM_P (op))
            op = validize_mem (op);

          if (! allows_reg && !MEM_P (op))
            error ("output number %d not directly addressable", i);
          if ((! allows_mem && MEM_P (op))
              || GET_CODE (op) == CONCAT)
            {
              rtx old_op = op;
              op = gen_reg_rtx (GET_MODE (op));

              generating_concat_p = old_generating_concat_p;

              if (is_inout)
                emit_move_insn (op, old_op);

              push_to_sequence2 (after_rtl_seq, after_rtl_end);
              emit_move_insn (old_op, op);
              after_rtl_seq = get_insns ();
              after_rtl_end = get_last_insn ();
              end_sequence ();
            }
        }
      else
        {
          op = assign_temp (type, 0, 1);
          op = validize_mem (op);
          if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
            set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);

          generating_concat_p = old_generating_concat_p;

          push_to_sequence2 (after_rtl_seq, after_rtl_end);
          expand_assignment (val, make_tree (type, op), false);
          after_rtl_seq = get_insns ();
          after_rtl_end = get_last_insn ();
          end_sequence ();
        }
      output_rvec[i] = op;

      if (is_inout)
        inout_opnum.safe_push (i);
    }
  auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
  auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;

  input_rvec.safe_grow (ninputs);
  input_mode.safe_grow (ninputs);

  generating_concat_p = 0;

  for (i = 0; i < ninputs; ++i)
    {
      tree val = input_tvec[i];
      tree type = TREE_TYPE (val);
      bool allows_reg, allows_mem, ok;
      const char *constraint;
      rtx op;

      constraint = constraints[i + noutputs];
      ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
                                   constraints.address (),
                                   &allows_mem, &allows_reg);
      gcc_assert (ok);

      /* EXPAND_INITIALIZER will not generate code for valid initializer
         constants, but will still generate code for other types of operand.
         This is the behavior we want for constant constraints.  */
      op = expand_expr (val, NULL_RTX, VOIDmode,
                        allows_reg ? EXPAND_NORMAL
                        : allows_mem ? EXPAND_MEMORY
                        : EXPAND_INITIALIZER);

      /* Never pass a CONCAT to an ASM.  */
      if (GET_CODE (op) == CONCAT)
        op = force_reg (GET_MODE (op), op);
      else if (MEM_P (op))
        op = validize_mem (op);

      if (asm_operand_ok (op, constraint, NULL) <= 0)
        {
          if (allows_reg && TYPE_MODE (type) != BLKmode)
            op = force_reg (TYPE_MODE (type), op);
          else if (!allows_mem)
            warning (0, "asm operand %d probably doesn%'t match constraints",
                     i + noutputs);
          else if (MEM_P (op))
            {
              /* We won't recognize either volatile memory or memory
                 with a queued address as a valid memory_operand at this
                 point.  Ignore it: clearly this *is* a memory.  */
            }
          else
            gcc_unreachable ();
        }
      input_rvec[i] = op;
      input_mode[i] = TYPE_MODE (type);
    }

  /* For in-out operands, copy output rtx to input rtx.  */
  unsigned ninout = inout_opnum.length();
  for (i = 0; i < ninout; i++)
    {
      int j = inout_opnum[i];
      rtx o = output_rvec[j];

      input_rvec.safe_push (o);
      input_mode.safe_push (GET_MODE (o));

      char buffer[16];
      sprintf (buffer, "%d", j);
      constraints.safe_push (ggc_strdup (buffer));
    }
  ninputs += ninout;
  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  rtx_insn *after_md_seq = NULL;
  if (targetm.md_asm_adjust)
    after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
                                          constraints, clobber_rvec,
                                          clobbered_regs);

  /* Do not allow the hook to change the output and input count,
     lest it mess up the operand numbering.  */
  gcc_assert (output_rvec.length() == noutputs);
  gcc_assert (input_rvec.length() == ninputs);
  gcc_assert (constraints.length() == noutputs + ninputs);

  /* But it certainly can adjust the clobbers.  */
  nclobbers = clobber_rvec.length();

  /* Third pass checks for easy conflicts.  */
  /* ??? Why are we doing this on trees instead of rtx.  */

  bool clobber_conflict_found = 0;
  for (i = 0; i < noutputs; ++i)
    if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
      clobber_conflict_found = 1;
  for (i = 0; i < ninputs - ninout; ++i)
    if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
      clobber_conflict_found = 1;
  /* Make vectors for the expression-rtx, constraint strings,
     and named operands.  */

  rtvec argvec = rtvec_alloc (ninputs);
  rtvec constraintvec = rtvec_alloc (ninputs);
  rtvec labelvec = rtvec_alloc (nlabels);

  rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
                                    : GET_MODE (output_rvec[0])),
                                   ggc_strdup (gimple_asm_string (stmt)),
                                   "", 0, argvec, constraintvec,
                                   labelvec, locus);
  MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);

  for (i = 0; i < ninputs; ++i)
    {
      ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
        = gen_rtx_ASM_INPUT_loc (input_mode[i],
                                 constraints[i + noutputs],
                                 locus);
    }

  /* Copy labels to the vector.  */
  rtx_code_label *fallthru_label = NULL;
  if (nlabels > 0)
    {
      basic_block fallthru_bb = NULL;
      edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
      if (fallthru)
        fallthru_bb = fallthru->dest;

      for (i = 0; i < nlabels; ++i)
        {
          tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
          rtx_insn *r;
          /* If asm goto has any labels in the fallthru basic block, use
             a label that we emit immediately after the asm goto.  Expansion
             may insert further instructions into the same basic block after
             asm goto and if we don't do this, insertion of instructions on
             the fallthru edge might misbehave.  See PR58670.  */
          if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
            {
              if (fallthru_label == NULL_RTX)
                fallthru_label = gen_label_rtx ();
              r = fallthru_label;
            }
          else
            r = label_rtx (label);
          ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
        }
    }
  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
                               ARGVEC CONSTRAINTS OPNAMES))
     If there is more than one, put them inside a PARALLEL.  */

  if (nlabels > 0 && nclobbers == 0)
    {
      gcc_assert (noutputs == 0);
      emit_jump_insn (body);
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      emit_insn (body);
    }
  else if (noutputs == 1 && nclobbers == 0)
    {
      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
      emit_insn (gen_rtx_SET (output_rvec[0], body));
    }
  else
    {
      rtx obody = body;
      int num = noutputs;

      if (num == 0)
        num = 1;

      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */
      for (i = 0; i < noutputs; ++i)
        {
          rtx src, o = output_rvec[i];
          if (i == 0)
            {
              ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
              src = obody;
            }
          else
            {
              src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
                                          ASM_OPERANDS_TEMPLATE (obody),
                                          constraints[i], i, argvec,
                                          constraintvec, labelvec, locus);
              MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
            }
          XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
        }

      /* If there are no outputs (but there are some clobbers)
         store the bare ASM_OPERANDS into the PARALLEL.  */
      if (i == 0)
        XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */
      for (unsigned j = 0; j < nclobbers; ++j)
        {
          rtx clobbered_reg = clobber_rvec[j];

          /* Do sanity check for overlap between clobbers and respectively
             input and outputs that hasn't been handled.  Such overlap
             should have been detected and reported above.  */
          if (!clobber_conflict_found && REG_P (clobbered_reg))
            {
              /* We test the old body (obody) contents to avoid
                 tripping over the under-construction body.  */
              for (unsigned k = 0; k < noutputs; ++k)
                if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
                  internal_error ("asm clobber conflict with output operand");

              for (unsigned k = 0; k < ninputs - ninout; ++k)
                if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
                  internal_error ("asm clobber conflict with input operand");
            }

          XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
        }

      if (nlabels > 0)
        emit_jump_insn (body);
      else
        emit_insn (body);
    }

  generating_concat_p = old_generating_concat_p;

  if (fallthru_label)
    emit_label (fallthru_label);

  if (after_md_seq)
    emit_insn (after_md_seq);
  if (after_rtl_seq)
    emit_insn (after_rtl_seq);

  crtl->has_asm_statement = 1;
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

static void
expand_computed_goto (tree exp)
{
  rtx x = expand_normal (exp);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}
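/* This handles computed gotos such as (illustrative)

     void *p = &&lab;
     goto *p;

   where the jump target is a run-time address rather than a fixed
   label.  */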
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

static void
expand_goto (tree label)
{
  if (flag_checking)
    {
      /* Check for a nonlocal goto to a containing function.  Should have
         gotten translated to __builtin_nonlocal_goto.  */
      tree context = decl_function_context (label);
      gcc_assert (!context || context == current_function_decl);
    }

  emit_jump (jump_target_rtx (label));
}
/* Output a return with no value.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}

/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}
/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (rtx val)
{
  /* Copy the value to the return location unless it's already there.  */

  tree decl = DECL_RESULT (current_function_decl);
  rtx return_reg = DECL_RTL (decl);
  if (return_reg != val)
    {
      tree funtype = TREE_TYPE (current_function_decl);
      tree type = TREE_TYPE (decl);
      int unsignedp = TYPE_UNSIGNED (type);
      machine_mode old_mode = DECL_MODE (decl);
      machine_mode mode;
      if (DECL_BY_REFERENCE (decl))
        mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
      else
        mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);

      if (mode != old_mode)
        val = convert_modes (mode, old_mode, val, unsignedp);

      if (GET_CODE (return_reg) == PARALLEL)
        emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
        emit_move_insn (return_reg, val);
    }

  expand_null_return_1 ();
}
/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

static void
expand_return (tree retval, tree bounds)
{
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;
  rtx bounds_rtl;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_normal (retval);
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
         returns a value.  */
      expand_null_return ();
      return;
    }
  else if ((TREE_CODE (retval) == MODIFY_EXPR
            || TREE_CODE (retval) == INIT_EXPR)
           && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* Put returned bounds to the right place.  */
  bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
  if (bounds_rtl)
    {
      rtx addr = NULL;
      rtx bnd = NULL;

      if (bounds && bounds != error_mark_node)
        {
          bnd = expand_normal (bounds);
          targetm.calls.store_returned_bounds (bounds_rtl, bnd);
        }
      else if (REG_P (bounds_rtl))
        {
          if (bounds)
            bnd = chkp_expand_zero_bounds ();
          else
            {
              addr = expand_normal (build_fold_addr_expr (retval_rhs));
              addr = gen_rtx_MEM (Pmode, addr);
              bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
            }

          targetm.calls.store_returned_bounds (bounds_rtl, bnd);
        }
      else
        {
          int n;

          gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);

          if (bounds)
            bnd = chkp_expand_zero_bounds ();
          else
            {
              addr = expand_normal (build_fold_addr_expr (retval_rhs));
              addr = gen_rtx_MEM (Pmode, addr);
            }

          for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
            {
              rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
              if (!bounds)
                {
                  rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
                  rtx from = adjust_address (addr, Pmode, INTVAL (offs));
                  bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
                }
              targetm.calls.store_returned_bounds (slot, bnd);
            }
        }
    }
  else if (chkp_function_instrumented_p (current_function_decl)
           && !BOUNDED_P (retval_rhs)
           && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
           && TREE_CODE (retval_rhs) != RESULT_DECL)
    {
      rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
      addr = gen_rtx_MEM (Pmode, addr);

      gcc_assert (MEM_P (result_rtl));

      chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
    }

  /* If we are returning the RESULT_DECL, then the value has already
     been stored into it, so we don't have to do anything special.  */
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
    expand_value_return (result_rtl);

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  */
  else if (retval_rhs != 0
           && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
           && REG_P (result_rtl))
    {
      val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
      if (val)
        {
          /* Use the mode of the result value on the return register.  */
          PUT_MODE (result_rtl, GET_MODE (val));
          expand_value_return (val);
        }
      else
        expand_null_return ();
    }
  else if (retval_rhs != 0
           && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
           && (REG_P (result_rtl)
               || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Compute the return value into a temporary (usually a pseudo reg).  */
      val
        = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
      val = force_not_mem (val);
      expand_value_return (val);
    }
  else
    {
      /* No hard reg used; calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
      expand_value_return (result_rtl);
    }
}
/* A subroutine of expand_gimple_stmt, expanding one gimple statement
   STMT that doesn't require special handling for outgoing edges.  That
   is no tailcalls and no GIMPLE_COND.  */

static void
expand_gimple_stmt_1 (gimple *stmt)
{
  tree op0;

  set_curr_insn_location (gimple_location (stmt));

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      op0 = gimple_goto_dest (stmt);
      if (TREE_CODE (op0) == LABEL_DECL)
        expand_goto (op0);
      else
        expand_computed_goto (op0);
      break;
    case GIMPLE_LABEL:
      expand_label (gimple_label_label (as_a <glabel *> (stmt)));
      break;
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;
    case GIMPLE_SWITCH:
      {
        gswitch *swtch = as_a <gswitch *> (stmt);
        if (gimple_switch_num_labels (swtch) == 1)
          expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
        else
          expand_case (swtch);
      }
      break;
    case GIMPLE_ASM:
      expand_asm_stmt (as_a <gasm *> (stmt));
      break;
    case GIMPLE_CALL:
      expand_call_stmt (as_a <gcall *> (stmt));
      break;

    case GIMPLE_RETURN:
      {
        tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
        op0 = gimple_return_retval (as_a <greturn *> (stmt));

        if (op0 && op0 != error_mark_node)
          {
            tree result = DECL_RESULT (current_function_decl);

            /* Mark we have return statement with missing bounds.  */
            if (!bnd
                && chkp_function_instrumented_p (cfun->decl)
                && !DECL_P (op0))
              bnd = error_mark_node;

            /* If we are not returning the current function's RESULT_DECL,
               build an assignment to it.  */
            if (op0 != result)
              {
                /* I believe that a function's RESULT_DECL is unique.  */
                gcc_assert (TREE_CODE (op0) != RESULT_DECL);

                /* ??? We'd like to use simply expand_assignment here,
                   but this fails if the value is of BLKmode but the return
                   decl is a register.  expand_return has special handling
                   for this combination, which eventually should move
                   to common code.  See comments there.  Until then, let's
                   build a modify expression :-/  */
                op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
                              result, op0);
              }
          }

        if (!op0)
          expand_null_return ();
        else
          expand_return (op0, bnd);
      }
      break;

    case GIMPLE_ASSIGN:
      {
        gassign *assign_stmt = as_a <gassign *> (stmt);
        tree lhs = gimple_assign_lhs (assign_stmt);

        /* Tree expand used to fiddle with |= and &= of two bitfield
           COMPONENT_REFs here.  This can't happen with gimple, the LHS
           of binary assigns must be a gimple reg.  */

        if (TREE_CODE (lhs) != SSA_NAME
            || get_gimple_rhs_class (gimple_expr_code (stmt))
               == GIMPLE_SINGLE_RHS)
          {
            tree rhs = gimple_assign_rhs1 (assign_stmt);
            gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
                        == GIMPLE_SINGLE_RHS);
            if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
                /* Do not put locations on possibly shared trees.  */
                && !is_gimple_min_invariant (rhs))
              SET_EXPR_LOCATION (rhs, gimple_location (stmt));
            if (TREE_CLOBBER_P (rhs))
              /* This is a clobber to mark the going out of scope for
                 this LHS.  */
              ;
            else
              expand_assignment (lhs, rhs,
                                 gimple_assign_nontemporal_move_p (
                                   assign_stmt));
          }
        else
          {
            rtx target, temp;
            bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
            struct separate_ops ops;
            bool promoted = false;

            target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
            if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
              promoted = true;

            ops.code = gimple_assign_rhs_code (assign_stmt);
            ops.type = TREE_TYPE (lhs);
            switch (get_gimple_rhs_class (ops.code))
              {
                case GIMPLE_TERNARY_RHS:
                  ops.op2 = gimple_assign_rhs3 (assign_stmt);
                  /* Fallthru */
                case GIMPLE_BINARY_RHS:
                  ops.op1 = gimple_assign_rhs2 (assign_stmt);
                  /* Fallthru */
                case GIMPLE_UNARY_RHS:
                  ops.op0 = gimple_assign_rhs1 (assign_stmt);
                  break;
                default:
                  gcc_unreachable ();
              }
            ops.location = gimple_location (stmt);

            /* If we want to use a nontemporal store, force the value to
               register first.  If we store into a promoted register,
               don't directly expand to target.  */
            temp = nontemporal || promoted ? NULL_RTX : target;
            temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
                                       EXPAND_NORMAL);

            if (temp == target)
              ;
            else if (promoted)
              {
                int unsignedp = SUBREG_PROMOTED_SIGN (target);
                /* If TEMP is a VOIDmode constant, use convert_modes to make
                   sure that we properly convert it.  */
                if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
                  {
                    temp = convert_modes (GET_MODE (target),
                                          TYPE_MODE (ops.type),
                                          temp, unsignedp);
                    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                          GET_MODE (target), temp, unsignedp);
                  }

                convert_move (SUBREG_REG (target), temp, unsignedp);
              }
            else if (nontemporal && emit_storent_insn (target, temp))
              ;
            else
              {
                temp = force_operand (temp, target);
                if (temp != target)
                  emit_move_insn (target, temp);
              }
          }
      }
      break;

    default:
      gcc_unreachable ();
    }
}
/* Expand one gimple statement STMT and return the last RTL instruction
   before any of the newly generated ones.

   In addition to generating the necessary RTL instructions this also
   sets REG_EH_REGION notes if necessary and sets the current source
   location for diagnostics.  */

static rtx_insn *
expand_gimple_stmt (gimple *stmt)
{
  location_t saved_location = input_location;
  rtx_insn *last = get_last_insn ();
  int lp_nr;

  gcc_assert (cfun);

  /* We need to save and restore the current source location so that errors
     discovered during expansion are emitted with the right location.  But
     it would be better if the diagnostic routines used the source location
     embedded in the tree nodes rather than globals.  */
  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  expand_gimple_stmt_1 (stmt);

  /* Free any temporaries used to evaluate this statement.  */
  free_temp_slots ();

  input_location = saved_location;

  /* Mark all insns that may trap.  */
  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr)
    {
      rtx_insn *insn;
      for (insn = next_real_insn (last); insn;
           insn = next_real_insn (insn))
        {
          if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
              /* If we want exceptions for non-call insns, any
                 may_trap_p instruction may throw.  */
              && GET_CODE (PATTERN (insn)) != CLOBBER
              && GET_CODE (PATTERN (insn)) != USE
              && insn_could_throw_p (insn))
            make_reg_eh_region_note (insn, 0, lp_nr);
        }
    }

  return last;
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

static basic_block
expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
{
  rtx_insn *last2, *last;
  edge e;
  edge_iterator ei;
  profile_probability probability;

  last2 = last = expand_gimple_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = profile_probability::never ();

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
        {
          if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
            e->dest->count -= e->count ();
          probability += e->probability;
          remove_edge (e);
        }
      else
        ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance an sqrt builtin expander expands if with
         sibcall in the then and label for `else`.  */
      if (LABEL_P (NEXT_INSN (last)))
        {
          *can_fallthru = true;
          break;
        }
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
                 | EDGE_SIBCALL);
  e->probability = probability;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
        BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  return bb;
}
/* Return the difference between the floor and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_LT (BImode,
                        gen_rtx_DIV (mode, op1, mod),
                        const0_rtx),
      constm1_rtx, const0_rtx),
     const0_rtx);
}
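/* For example, -7 / 2 truncates to -3 with remainder -1; the floor result
   is -4, and since op1 / mod = 2 / -1 = -2 < 0 the expression above yields
   the required adjustment of -1.  */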
/* Return the difference between the ceil and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
                        gen_rtx_DIV (mode, op1, mod),
                        const0_rtx),
      const1_rtx, const0_rtx),
     const0_rtx);
}

/* Return the difference between the ceil and the truncated result of
   an unsigned division by OP1 with remainder MOD.  */

static rtx
ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
{
  /* (mod != 0 ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     const1_rtx, const0_rtx);
}
/* Return the difference between the rounded and the truncated result
   of a signed division by OP1 with remainder MOD.  Halfway cases are
   rounded away from zero, rather than to the nearest even number.  */

static rtx
round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (abs (mod) >= abs (op1) - abs (mod)
        ? (op1 / mod > 0 ? 1 : -1)
        : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
                       gen_rtx_MINUS (mode,
                                      gen_rtx_ABS (mode, op1),
                                      gen_rtx_ABS (mode, mod))),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
                        gen_rtx_DIV (mode, op1, mod),
                        const0_rtx),
      const1_rtx, constm1_rtx),
     const0_rtx);
}

/* Return the difference between the rounded and the truncated result
   of an unsigned division by OP1 with remainder MOD.  Halfway cases
   are rounded away from zero, rather than to the nearest even
   number.  */

static rtx
round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod >= op1 - mod ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, mod,
                       gen_rtx_MINUS (mode, op1, mod)),
     const1_rtx, const0_rtx);
}
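/* For example, 8 / 3 truncates to 2 with remainder 2; since
   mod = 2 >= op1 - mod = 1, the adjustment is 1 and the rounded result
   is 3.  For 7 / 3 the remainder 1 is less than op1 - mod = 2, so no
   adjustment is made.  */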
/* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
   any rtl.  */

static rtx
convert_debug_memory_address (scalar_int_mode mode, rtx x,
                              addr_space_t as)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (mode == Pmode
              || mode == targetm.addr_space.address_mode (as));
  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
#else
  rtx temp;

  gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));

  if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
    return x;

  /* X must have some form of address mode already.  */
  scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
  if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
    x = lowpart_subreg (mode, x, xmode);
  else if (POINTERS_EXTEND_UNSIGNED > 0)
    x = gen_rtx_ZERO_EXTEND (mode, x);
  else if (!POINTERS_EXTEND_UNSIGNED)
    x = gen_rtx_SIGN_EXTEND (mode, x);
  else
    {
      switch (GET_CODE (x))
        {
        case SUBREG:
          if ((SUBREG_PROMOTED_VAR_P (x)
               || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
               || (GET_CODE (SUBREG_REG (x)) == PLUS
                   && REG_P (XEXP (SUBREG_REG (x), 0))
                   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
                   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
              && GET_MODE (SUBREG_REG (x)) == mode)
            return SUBREG_REG (x);
          break;
        case LABEL_REF:
          temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
          LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
          return temp;
        case SYMBOL_REF:
          temp = shallow_copy_rtx (x);
          PUT_MODE (temp, mode);
          return temp;
        case CONST:
          temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
          if (temp)
            temp = gen_rtx_CONST (mode, temp);
          return temp;
        case PLUS:
        case MINUS:
          if (CONST_INT_P (XEXP (x, 1)))
            {
              temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
              if (temp)
                return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
            }
          break;
        default:
          break;
        }
      /* Don't know how to express ptr_extend as operation in debug info.  */
      return NULL;
    }
#endif /* POINTERS_EXTEND_UNSIGNED */
/* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
   by avoid_deep_ter_for_debug.  */

static hash_map<tree, tree> *deep_ter_debug_map;

/* Split too deep TER chains for debug stmts using debug temporaries.  */

static void
avoid_deep_ter_for_debug (gimple *stmt, int depth)
{
  use_operand_p use_p;
  ssa_op_iter iter;
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree use = USE_FROM_PTR (use_p);
      if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
        continue;
      gimple *g = get_gimple_for_ssa_name (use);
      if (g == NULL)
        continue;
      if (depth > 6 && !stmt_ends_bb_p (g))
        {
          if (deep_ter_debug_map == NULL)
            deep_ter_debug_map = new hash_map<tree, tree>;

          tree &vexpr = deep_ter_debug_map->get_or_insert (use);
          if (vexpr != NULL)
            continue;
          vexpr = make_node (DEBUG_EXPR_DECL);
          gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (use);
          SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
          gimple_stmt_iterator gsi = gsi_for_stmt (g);
          gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
          avoid_deep_ter_for_debug (def_temp, 0);
        }
      else
        avoid_deep_ter_for_debug (g, depth + 1);
    }
}
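/* For example (illustrative), given a long chain of single-use defs

     a_1 = x_0 + 1;  b_2 = a_1 + 1;  ...  z_9 = y_8 + 1;

   which TER would substitute into one deeply nested expression, a debug
   temporary such as

     # DEBUG D#1 => y_8 + 1

   is bound once the chain exceeds the depth limit, so later debug stmt
   expansion need not recurse through the whole chain.  */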
/* Return an RTX equivalent to the value of the parameter DECL.  */

static rtx
expand_debug_parm_decl (tree decl)
{
  rtx incoming = DECL_INCOMING_RTL (decl);

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
          || (MEM_P (incoming)
              && REG_P (XEXP (incoming, 0))
              && HARD_REGISTER_P (XEXP (incoming, 0)))))
    {
      rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));

#ifdef HAVE_window_save
      /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
         If the target machine has an explicit window save instruction, the
         actual entry value is the corresponding OUTGOING_REGNO instead.  */
      if (REG_P (incoming)
          && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
        incoming
          = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
                                OUTGOING_REGNO (REGNO (incoming)), 0);
      else if (MEM_P (incoming))
        {
          rtx reg = XEXP (incoming, 0);
          if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
            {
              reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
              incoming = replace_equiv_address_nv (incoming, reg);
            }
          else
            incoming = copy_rtx (incoming);
        }
#endif

      ENTRY_VALUE_EXP (rtl) = incoming;
      return rtl;
    }

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && !TREE_ADDRESSABLE (decl)
      && MEM_P (incoming)
      && (XEXP (incoming, 0) == virtual_incoming_args_rtx
          || (GET_CODE (XEXP (incoming, 0)) == PLUS
              && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
              && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
    return copy_rtx (incoming);

  return NULL_RTX;
}
/* Return an RTX equivalent to the value of the tree expression EXP.  */

static rtx
expand_debug_expr (tree exp)
{
  rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  machine_mode inner_mode = VOIDmode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
  addr_space_t as;
  scalar_int_mode op0_mode, op1_mode, addr_mode;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_expression:
      switch (TREE_CODE (exp))
	{
	case COND_EXPR:
	case DOT_PROD_EXPR:
	case SAD_EXPR:
	case WIDEN_MULT_PLUS_EXPR:
	case WIDEN_MULT_MINUS_EXPR:
	case FMA_EXPR:
	  goto ternary;

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  goto binary;

	case TRUTH_NOT_EXPR:
	  goto unary;

	default:
	  break;
	}
      break;

    ternary:
      op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
      if (!op2)
	return NULL_RTX;
      /* Fall through.  */

    binary:
    case tcc_binary:
      op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
      if (!op1)
	return NULL_RTX;
      switch (TREE_CODE (exp))
	{
	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	case LROTATE_EXPR:
	case RROTATE_EXPR:
	case WIDEN_LSHIFT_EXPR:
	  /* Ensure second operand isn't wider than the first one.  */
	  inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
	      && (GET_MODE_UNIT_PRECISION (mode)
		  < GET_MODE_PRECISION (op1_mode)))
	    op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
	  break;
	default:
	  break;
	}
      /* Fall through.  */

    unary:
    case tcc_unary:
      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0)
	return NULL_RTX;
      break;

    case tcc_comparison:
      unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
      goto binary;

    case tcc_type:
    case tcc_statement:
      gcc_unreachable ();

    case tcc_constant:
    case tcc_exceptional:
    case tcc_declaration:
    case tcc_reference:
    case tcc_vl_exp:
      break;
    }

  switch (TREE_CODE (exp))
    {
    case STRING_CST:
      if (!lookup_constant_def (exp))
	{
	  if (strlen (TREE_STRING_POINTER (exp)) + 1
	      != (size_t) TREE_STRING_LENGTH (exp))
	    return NULL_RTX;
	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
	  op0 = gen_rtx_MEM (BLKmode, op0);
	  set_mem_attributes (op0, exp, 0);
	  return op0;
	}
      /* Fall through.  */

    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
      op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
      return op0;

    case POLY_INT_CST:
      return immed_wide_int_const (poly_int_cst_value (exp), mode);

    case COMPLEX_CST:
      gcc_assert (COMPLEX_MODE_P (mode));
      op0 = expand_debug_expr (TREE_REALPART (exp));
      op1 = expand_debug_expr (TREE_IMAGPART (exp));
      return gen_rtx_CONCAT (mode, op0, op1);

    case DEBUG_EXPR_DECL:
      op0 = DECL_RTL_IF_SET (exp);

      if (op0)
	return op0;

      op0 = gen_rtx_DEBUG_EXPR (mode);
      DEBUG_EXPR_TREE_DECL (op0) = exp;
      SET_DECL_RTL (exp, op0);

      return op0;

    case VAR_DECL:
    case PARM_DECL:
    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case RESULT_DECL:
      op0 = DECL_RTL_IF_SET (exp);

      /* This decl was probably optimized away.  */
      if (!op0)
	{
	  if (!VAR_P (exp)
	      || DECL_EXTERNAL (exp)
	      || !TREE_STATIC (exp)
	      || !DECL_NAME (exp)
	      || DECL_HARD_REGISTER (exp)
	      || DECL_IN_CONSTANT_POOL (exp)
	      || mode == VOIDmode)
	    return NULL;

	  op0 = make_decl_rtl_for_debug (exp);
	  if (!MEM_P (op0)
	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
	    return NULL;
	}
      else
	op0 = copy_rtx (op0);

      if (GET_MODE (op0) == BLKmode
	  /* If op0 is not BLKmode, but mode is, adjust_mode
	     below would ICE.  While it is likely a FE bug,
	     try to be robust here.  See PR43166.  */
	  || mode == BLKmode
	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
	{
	  gcc_assert (MEM_P (op0));
	  op0 = adjust_address_nv (op0, mode, 0);
	  return op0;
	}

      /* Fall through.  */

    adjust_mode:
    case PAREN_EXPR:
    CASE_CONVERT:
      {
	inner_mode = GET_MODE (op0);

	if (mode == inner_mode)
	  return op0;

	if (inner_mode == VOIDmode)
	  {
	    if (TREE_CODE (exp) == SSA_NAME)
	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
	    else
	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
	    if (mode == inner_mode)
	      return op0;
	  }

	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
	  {
	    if (GET_MODE_UNIT_BITSIZE (mode)
		== GET_MODE_UNIT_BITSIZE (inner_mode))
	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
	    else if (GET_MODE_UNIT_BITSIZE (mode)
		     < GET_MODE_UNIT_BITSIZE (inner_mode))
	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
	    else
	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
	  }
	else if (FLOAT_MODE_P (mode))
	  {
	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
	    else
	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
	  }
	else if (FLOAT_MODE_P (inner_mode))
	  {
	    if (unsignedp)
	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
	    else
	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
	  }
	else if (GET_MODE_UNIT_PRECISION (mode)
		 == GET_MODE_UNIT_PRECISION (inner_mode))
	  op0 = lowpart_subreg (mode, op0, inner_mode);
	else if (GET_MODE_UNIT_PRECISION (mode)
		 < GET_MODE_UNIT_PRECISION (inner_mode))
	  op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
	else if (UNARY_CLASS_P (exp)
		 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
		 : unsignedp)
	  op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
	else
	  op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);

	return op0;
      }

    case MEM_REF:
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
	{
	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
				     TREE_OPERAND (exp, 0),
				     TREE_OPERAND (exp, 1));
	  if (newexp)
	    return expand_debug_expr (newexp);
	}
      /* Fall through.  */
    case INDIRECT_REF:
      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0)
	return NULL;

      if (TREE_CODE (exp) == MEM_REF)
	{
	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
	      || (GET_CODE (op0) == PLUS
		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
	       Instead just use get_inner_reference.  */
	    goto component_ref;

	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
	  if (!op1 || !CONST_INT_P (op1))
	    return NULL;

	  op0 = plus_constant (inner_mode, op0, INTVAL (op1));
	}

      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));

      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
					  op0, as);
      if (op0 == NULL_RTX)
	return NULL;

      op0 = gen_rtx_MEM (mode, op0);
      set_mem_attributes (op0, exp, 0);
      if (TREE_CODE (exp) == MEM_REF
	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
	set_mem_expr (op0, NULL_TREE);
      set_mem_addr_space (op0, as);

      return op0;

    case TARGET_MEM_REF:
      if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
	  && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
	return NULL;

      op0 = expand_debug_expr
	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
      if (!op0)
	return NULL;

      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
					  op0, as);
      if (op0 == NULL_RTX)
	return NULL;

      op0 = gen_rtx_MEM (mode, op0);

      set_mem_attributes (op0, exp, 0);
      set_mem_addr_space (op0, as);

      return op0;

    component_ref:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	machine_mode mode1;
	poly_int64 bitsize, bitpos;
	tree offset;
	int reversep, volatilep = 0;
	tree tem
	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &reversep, &volatilep);
	rtx orig_op0;

	if (known_eq (bitsize, 0))
	  return NULL;

	orig_op0 = op0 = expand_debug_expr (tem);

	if (!op0)
	  return NULL;

	if (offset)
	  {
	    machine_mode addrmode, offmode;

	    if (!MEM_P (op0))
	      return NULL;

	    op0 = XEXP (op0, 0);
	    addrmode = GET_MODE (op0);
	    if (addrmode == VOIDmode)
	      addrmode = Pmode;

	    op1 = expand_debug_expr (offset);
	    if (!op1)
	      return NULL;

	    offmode = GET_MODE (op1);
	    if (offmode == VOIDmode)
	      offmode = TYPE_MODE (TREE_TYPE (offset));

	    if (addrmode != offmode)
	      op1 = lowpart_subreg (addrmode, op1, offmode);

	    /* Don't use offset_address here, we don't need a
	       recognizable address, and we don't want to generate
	       code.  */
	    op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
							  op0, op1));
	  }

	if (MEM_P (op0))
	  {
	    if (mode1 == VOIDmode)
	      /* Bitfield.  */
	      mode1 = smallest_int_mode_for_size (bitsize);
	    poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
	    if (maybe_ne (bytepos, 0))
	      {
		op0 = adjust_address_nv (op0, mode1, bytepos);
		bitpos = num_trailing_bits (bitpos);
	      }
	    else if (known_eq (bitpos, 0)
		     && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
	      op0 = adjust_address_nv (op0, mode, 0);
	    else if (GET_MODE (op0) != mode1)
	      op0 = adjust_address_nv (op0, mode1, 0);
	    else
	      op0 = copy_rtx (op0);
	    if (op0 == orig_op0)
	      op0 = shallow_copy_rtx (op0);
	    set_mem_attributes (op0, exp, 0);
	  }

	if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
	  return op0;

	if (maybe_lt (bitpos, 0))
	  return NULL;

	if (GET_MODE (op0) == BLKmode)
	  return NULL;

	poly_int64 bytepos;
	if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
	    && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
	  {
	    machine_mode opmode = GET_MODE (op0);

	    if (opmode == VOIDmode)
	      opmode = TYPE_MODE (TREE_TYPE (tem));

	    /* This condition may hold if we're expanding the address
	       right past the end of an array that turned out not to
	       be addressable (i.e., the address was only computed in
	       debug stmts).  The gen_subreg below would rightfully
	       crash, and the address doesn't really exist, so just
	       drop it.  */
	    if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
	      return NULL;

	    if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
	      return simplify_gen_subreg (mode, op0, opmode, bytepos);
	  }

	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
				     && TYPE_UNSIGNED (TREE_TYPE (exp))
				     ? SIGN_EXTRACT
				     : ZERO_EXTRACT, mode,
				     GET_MODE (op0) != VOIDmode
				     ? GET_MODE (op0)
				     : TYPE_MODE (TREE_TYPE (tem)),
				     op0, gen_int_mode (bitsize, word_mode),
				     gen_int_mode (bitpos, word_mode));
      }

    case ABS_EXPR:
      return simplify_gen_unary (ABS, mode, op0, mode);

    case NEGATE_EXPR:
      return simplify_gen_unary (NEG, mode, op0, mode);

    case BIT_NOT_EXPR:
      return simplify_gen_unary (NOT, mode, op0, mode);

    case FLOAT_EXPR:
      return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									  0)))
				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
				 inner_mode);

    case FIX_TRUNC_EXPR:
      return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
				 inner_mode);

    case POINTER_PLUS_EXPR:
      /* For the rare target where pointers are not the same size as
	 size_t, we need to check for mis-matched modes and correct
	 the addend.  */
      if (op0 && op1
	  && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
	  && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
	  && op0_mode != op1_mode)
	{
	  if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
	      /* If OP0 is a partial mode, then we must truncate, even
		 if it has the same bitsize as OP1 as GCC's
		 representation of partial modes is opaque.  */
	      || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
		  && (GET_MODE_BITSIZE (op0_mode)
		      == GET_MODE_BITSIZE (op1_mode))))
	    op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
	  else
	    /* We always sign-extend, regardless of the signedness of
	       the operand, because the operand is always unsigned
	       here even if the original C expression is signed.  */
	    op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
	}
      /* Fall through.  */
    case PLUS_EXPR:
      return simplify_gen_binary (PLUS, mode, op0, op1);

    case MINUS_EXPR:
    case POINTER_DIFF_EXPR:
      return simplify_gen_binary (MINUS, mode, op0, op1);

    case MULT_EXPR:
      return simplify_gen_binary (MULT, mode, op0, op1);

    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (unsignedp)
	return simplify_gen_binary (UDIV, mode, op0, op1);
      else
	return simplify_gen_binary (DIV, mode, op0, op1);

    case TRUNC_MOD_EXPR:
      return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);

    case FLOOR_DIV_EXPR:
      if (unsignedp)
	return simplify_gen_binary (UDIV, mode, op0, op1);
      else
	{
	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}

    case FLOOR_MOD_EXPR:
      if (unsignedp)
	return simplify_gen_binary (UMOD, mode, op0, op1);
      else
	{
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}

    case CEIL_DIV_EXPR:
      if (unsignedp)
	{
	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}
      else
	{
	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}

    case CEIL_MOD_EXPR:
      if (unsignedp)
	{
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}
      else
	{
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}

    case ROUND_DIV_EXPR:
      if (unsignedp)
	{
	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = round_udiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}
      else
	{
	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = round_sdiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}

    case ROUND_MOD_EXPR:
      if (unsignedp)
	{
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = round_udiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}
      else
	{
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = round_sdiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}

    case LSHIFT_EXPR:
      return simplify_gen_binary (ASHIFT, mode, op0, op1);

    case RSHIFT_EXPR:
      if (unsignedp)
	return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
      else
	return simplify_gen_binary (ASHIFTRT, mode, op0, op1);

    case LROTATE_EXPR:
      return simplify_gen_binary (ROTATE, mode, op0, op1);

    case RROTATE_EXPR:
      return simplify_gen_binary (ROTATERT, mode, op0, op1);

    case MIN_EXPR:
      return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);

    case MAX_EXPR:
      return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);

    case BIT_AND_EXPR:
    case TRUTH_AND_EXPR:
      return simplify_gen_binary (AND, mode, op0, op1);

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      return simplify_gen_binary (IOR, mode, op0, op1);

    case BIT_XOR_EXPR:
    case TRUTH_XOR_EXPR:
      return simplify_gen_binary (XOR, mode, op0, op1);

    case TRUTH_ANDIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);

    case TRUTH_ORIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);

    case TRUTH_NOT_EXPR:
      return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);

    case LT_EXPR:
      return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
				      op0, op1);

    case LE_EXPR:
      return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
				      op0, op1);

    case GT_EXPR:
      return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
				      op0, op1);

    case GE_EXPR:
      return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
				      op0, op1);

    case EQ_EXPR:
      return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);

    case NE_EXPR:
      return simplify_gen_relational (NE, mode, inner_mode, op0, op1);

    case UNORDERED_EXPR:
      return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);

    case ORDERED_EXPR:
      return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);

    case UNLT_EXPR:
      return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);

    case UNLE_EXPR:
      return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);

    case UNGT_EXPR:
      return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);

    case UNGE_EXPR:
      return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);

    case UNEQ_EXPR:
      return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);

    case LTGT_EXPR:
      return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);

    case COND_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);

    case COMPLEX_EXPR:
      gcc_assert (COMPLEX_MODE_P (mode));
      if (GET_MODE (op0) == VOIDmode)
	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
      if (GET_MODE (op1) == VOIDmode)
	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
      return gen_rtx_CONCAT (mode, op0, op1);

    case CONJ_EXPR:
      if (GET_CODE (op0) == CONCAT)
	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
			       simplify_gen_unary (NEG, GET_MODE_INNER (mode),
						   XEXP (op0, 1),
						   GET_MODE_INNER (mode)));
      else
	{
	  scalar_mode imode = GET_MODE_INNER (mode);
	  rtx re, im;

	  if (MEM_P (op0))
	    {
	      re = adjust_address_nv (op0, imode, 0);
	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
	    }
	  else
	    {
	      scalar_int_mode ifmode;
	      scalar_int_mode ihmode;
	      rtx halfsize;
	      if (!int_mode_for_mode (mode).exists (&ifmode)
		  || !int_mode_for_mode (imode).exists (&ihmode))
		return NULL;
	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
	      re = op0;
	      if (mode != ifmode)
		re = gen_rtx_SUBREG (ifmode, re, 0);
	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
	      if (imode != ihmode)
		re = gen_rtx_SUBREG (imode, re, 0);
	      im = copy_rtx (op0);
	      if (mode != ifmode)
		im = gen_rtx_SUBREG (ifmode, im, 0);
	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
	      if (imode != ihmode)
		im = gen_rtx_SUBREG (imode, im, 0);
	    }
	  im = gen_rtx_NEG (imode, im);
	  return gen_rtx_CONCAT (mode, re, im);
	}

    case ADDR_EXPR:
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0 || !MEM_P (op0))
	{
	  if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
	       || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
	       || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
	      && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
		  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
	    return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));

	  if (handled_component_p (TREE_OPERAND (exp, 0)))
	    {
	      poly_int64 bitoffset, bitsize, maxsize, byteoffset;
	      bool reverse;
	      tree decl
		= get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
					   &bitsize, &maxsize, &reverse);
	      if ((VAR_P (decl)
		   || TREE_CODE (decl) == PARM_DECL
		   || TREE_CODE (decl) == RESULT_DECL)
		  && (!TREE_ADDRESSABLE (decl)
		      || target_for_debug_bind (decl))
		  && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
		  && known_gt (bitsize, 0)
		  && known_eq (bitsize, maxsize))
		{
		  rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
		  return plus_constant (mode, base, byteoffset);
		}
	    }

	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		 == ADDR_EXPR)
	    {
	      op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0));
	      if (op0 != NULL
		  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
		      || (GET_CODE (op0) == PLUS
			  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
			  && CONST_INT_P (XEXP (op0, 1)))))
		{
		  op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
							 1));
		  if (!op1 || !CONST_INT_P (op1))
		    return NULL;

		  return plus_constant (mode, op0, INTVAL (op1));
		}
	    }

	  return NULL;
	}

      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
      op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);

      return op0;

    case VECTOR_CST:
      {
	unsigned i, nelts;

	nelts = VECTOR_CST_NELTS (exp);
	op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));

	for (i = 0; i < nelts; ++i)
	  {
	    op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
	    if (!op1)
	      return NULL;
	    XVECEXP (op0, 0, i) = op1;
	  }

	return op0;
      }

    case CONSTRUCTOR:
      if (TREE_CLOBBER_P (exp))
	return NULL;
      else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
	{
	  unsigned i;
	  tree val;

	  op0 = gen_rtx_CONCATN
	    (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));

	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
	    {
	      op1 = expand_debug_expr (val);
	      if (!op1)
		return NULL;
	      XVECEXP (op0, 0, i) = op1;
	    }

	  if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
	    {
	      op1 = expand_debug_expr
		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));

	      if (!op1)
		return NULL;

	      for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
		XVECEXP (op0, 0, i) = op1;
	    }

	  return op0;
	}
      else
	goto flag_unsupported;

    case CALL_EXPR:
      /* ??? Maybe handle some builtins?  */
      return NULL;

    case SSA_NAME:
      {
	gimple *g = get_gimple_for_ssa_name (exp);
	if (g)
	  {
	    tree t = NULL_TREE;
	    if (deep_ter_debug_map)
	      {
		tree *slot = deep_ter_debug_map->get (exp);
		if (slot)
		  t = *slot;
	      }
	    if (t == NULL_TREE)
	      t = gimple_assign_rhs_to_tree (g);
	    op0 = expand_debug_expr (t);
	    if (!op0)
	      return NULL;
	  }
	else
	  {
	    /* If this is a reference to an incoming value of
	       parameter that is never used in the code or where the
	       incoming value is never used in the code, use
	       PARM_DECL's DECL_RTL if set.  */
	    if (SSA_NAME_IS_DEFAULT_DEF (exp)
		&& SSA_NAME_VAR (exp)
		&& TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
		&& has_zero_uses (exp))
	      {
		op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
		if (op0)
		  goto adjust_mode;
		op0 = expand_debug_expr (SSA_NAME_VAR (exp));
		if (op0)
		  goto adjust_mode;
	      }

	    int part = var_to_partition (SA.map, exp);

	    if (part == NO_PARTITION)
	      return NULL;

	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);

	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
	  }
	goto adjust_mode;
      }

    case ERROR_MARK:
      return NULL;

    /* Vector stuff.  For most of the codes we don't have rtl codes.  */
    case REALIGN_LOAD_EXPR:
    case VEC_COND_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
    case VEC_PERM_EXPR:
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
      return NULL;

    /* Misc codes.  */
    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case OBJ_TYPE_REF:
    case WITH_SIZE_EXPR:
    case BIT_INSERT_EXPR:
      return NULL;

    case DOT_PROD_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  op0
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									   0)))
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
				  inner_mode);
	  op1
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									   1)))
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
				  inner_mode);
	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
	  return simplify_gen_binary (PLUS, mode, op0, op2);
	}
      return NULL;

    case WIDEN_MULT_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  inner_mode = GET_MODE (op0);
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
	  else
	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
	  else
	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
	    return op0;
	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
	    return simplify_gen_binary (PLUS, mode, op0, op2);
	  else
	    return simplify_gen_binary (MINUS, mode, op2, op0);
	}
      return NULL;

    case MULT_HIGHPART_EXPR:
      /* ??? Similar to the above.  */
      return NULL;

    case WIDEN_SUM_EXPR:
    case WIDEN_LSHIFT_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  op0
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									   0)))
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
				  inner_mode);
	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
				      ? ASHIFT : PLUS, mode, op0, op1);
	}
      return NULL;

    case FMA_EXPR:
      return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);

    default:
    flag_unsupported:
      if (flag_checking)
	{
	  debug_tree (exp);
	  gcc_unreachable ();
	}
      return NULL;
    }
}
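/* As a worked (hypothetical) example of the above: for a TERed chain

     _1 = a_2 * 4;
     # DEBUG x => _1 + b_3

   expanding the bind value recurses through the SSA_NAME case, pulls
   in the RHS of _1 via gimple_assign_rhs_to_tree, and builds pure,
   side-effect-free rtl such as

     (plus:SI (mult:SI (reg:SI a) (const_int 4)) (reg:SI b))

   using only the simplify_gen_* routines, so no insns are ever
   emitted while expanding debug expressions.  */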
/* Return an RTX equivalent to the source bind value of the tree expression
   EXP.  */

static rtx
expand_debug_source_expr (tree exp)
{
  rtx op0 = NULL_RTX;
  machine_mode mode = VOIDmode, inner_mode;

  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
      {
	mode = DECL_MODE (exp);
	op0 = expand_debug_parm_decl (exp);
	if (op0)
	  break;
	/* See if this isn't an argument that has been completely
	   optimized out.  */
	if (!DECL_RTL_SET_P (exp)
	    && !DECL_INCOMING_RTL (exp)
	    && DECL_ABSTRACT_ORIGIN (current_function_decl))
	  {
	    tree aexp = DECL_ORIGIN (exp);
	    if (DECL_CONTEXT (aexp)
		== DECL_ABSTRACT_ORIGIN (current_function_decl))
	      {
		vec<tree, va_gc> **debug_args;
		unsigned int ix;
		tree ddecl;
		debug_args = decl_debug_args_lookup (current_function_decl);
		if (debug_args != NULL)
		  {
		    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
			 ix += 2)
		      if (ddecl == aexp)
			return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
		  }
	      }
	  }
	break;
      }
    default:
      break;
    }

  if (op0 == NULL_RTX)
    return NULL_RTX;

  inner_mode = GET_MODE (op0);
  if (mode == inner_mode)
    return op0;

  if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
    {
      if (GET_MODE_UNIT_BITSIZE (mode)
	  == GET_MODE_UNIT_BITSIZE (inner_mode))
	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
      else if (GET_MODE_UNIT_BITSIZE (mode)
	       < GET_MODE_UNIT_BITSIZE (inner_mode))
	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
      else
	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
    }
  else if (FLOAT_MODE_P (mode))
    gcc_unreachable ();
  else if (FLOAT_MODE_P (inner_mode))
    {
      if (TYPE_UNSIGNED (TREE_TYPE (exp)))
	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
      else
	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
    }
  else if (GET_MODE_UNIT_PRECISION (mode)
	   == GET_MODE_UNIT_PRECISION (inner_mode))
    op0 = lowpart_subreg (mode, op0, inner_mode);
  else if (GET_MODE_UNIT_PRECISION (mode)
	   < GET_MODE_UNIT_PRECISION (inner_mode))
    op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
  else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
  else
    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);

  return op0;
}
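/* E.g. (a hypothetical scenario): for an inlined function whose
   parameter was entirely optimized away, a source bind

     # DEBUG p s=> p

   can still be expanded to (debug_parameter_ref:SI p) as long as the
   abstract origin lists the parameter among its debug args, letting
   the debugger recover the value passed at the call site.  */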
/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
   Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
   deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */

static void
avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
{
  rtx exp = *exp_p;

  if (exp == NULL_RTX)
    return;

  if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
    return;

  if (depth == 4)
    {
      /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
      rtx dval = make_debug_expr_from_rtl (exp);

      /* Emit a debug bind insn before INSN.  */
      rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
				       DEBUG_EXPR_TREE_DECL (dval), exp,
				       VAR_INIT_STATUS_INITIALIZED);

      emit_debug_insn_before (bind, insn);
      *exp_p = dval;
      return;
    }

  const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
  int i, j;
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
	break;

      case 'E':
      case 'V':
	for (j = 0; j < XVECLEN (exp, i); j++)
	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
	break;

      default:
	break;
      }
}
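/* For instance (hypothetical rtl): a location such as

     (plus (mult (plus (mult (plus ...) ...) ...) ...) ...)

   is cut when the walk reaches depth 4; the too-deep subexpression is
   moved into its own debug insn binding a fresh DEBUG_EXPR D#n, and
   the original location refers to (debug_expr D#n) instead, so every
   INSN_VAR_LOCATION_LOC stays bounded in size.  */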
/* Expand the _LOCs in debug insns.  We run this after expanding all
   regular insns, so that any variables referenced in the function
   will have their DECL_RTLs set.  */

static void
expand_debug_locations (void)
{
  rtx_insn *insn;
  rtx_insn *last = get_last_insn ();
  int save_strict_alias = flag_strict_aliasing;

  /* New alias sets while setting up memory attributes cause
     -fcompare-debug failures, even though it doesn't bring about any
     codegen changes.  */
  flag_strict_aliasing = 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (DEBUG_BIND_INSN_P (insn))
      {
	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
	rtx val;
	rtx_insn *prev_insn, *insn2;
	machine_mode mode;

	if (value == NULL_TREE)
	  val = NULL_RTX;
	else
	  {
	    if (INSN_VAR_LOCATION_STATUS (insn)
		== VAR_INIT_STATUS_UNINITIALIZED)
	      val = expand_debug_source_expr (value);
	    /* The avoid_deep_ter_for_debug function inserts
	       debug bind stmts after SSA_NAME definition, with the
	       SSA_NAME as the whole bind location.  Disable temporarily
	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
	       being defined in this DEBUG_INSN.  */
	    else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
	      {
		tree *slot = deep_ter_debug_map->get (value);
		if (slot)
		  {
		    if (*slot == INSN_VAR_LOCATION_DECL (insn))
		      *slot = NULL_TREE;
		    else
		      slot = NULL;
		  }
		val = expand_debug_expr (value);
		if (slot)
		  *slot = INSN_VAR_LOCATION_DECL (insn);
	      }
	    else
	      val = expand_debug_expr (value);
	    gcc_assert (last == get_last_insn ());
	  }

	if (!val)
	  val = gen_rtx_UNKNOWN_VAR_LOC ();
	else
	  {
	    mode = GET_MODE (INSN_VAR_LOCATION (insn));

	    gcc_assert (mode == GET_MODE (val)
			|| (GET_MODE (val) == VOIDmode
			    && (CONST_SCALAR_INT_P (val)
				|| GET_CODE (val) == CONST_FIXED
				|| GET_CODE (val) == LABEL_REF)));
	  }

	INSN_VAR_LOCATION_LOC (insn) = val;
	prev_insn = PREV_INSN (insn);
	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
      }

  flag_strict_aliasing = save_strict_alias;
}
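/* After this pass, a bind like "# DEBUG x => a_1 + 1" has become a
   real debug insn, e.g. (modes and pseudo numbers hypothetical)

     (debug_insn (var_location:SI x (plus:SI (reg:SI 58) (const_int 1))))

   or binds to UNKNOWN_VAR_LOC when the value could not be
   expanded.  */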
/* Swap the operands of commutative operations so that the more
   expensive one is expanded first.  */

static void
reorder_operands (basic_block bb)
{
  unsigned int *lattice;  /* Hold cost of each statement.  */
  unsigned int i = 0, n = 0;
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple *stmt;
  bool swap;
  tree op0, op1;
  ssa_op_iter iter;
  use_operand_p use_p;
  gimple *def0, *def1;

  /* Compute cost of each statement using estimate_num_insns.  */
  stmts = bb_seq (bb);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (!is_gimple_debug (stmt))
	gimple_set_uid (stmt, n++);
    }
  lattice = XNEWVEC (unsigned int, n);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      unsigned cost;
      stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
	continue;
      cost = estimate_num_insns (stmt, &eni_size_weights);
      lattice[i] = cost;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
	{
	  tree use = USE_FROM_PTR (use_p);
	  gimple *def_stmt;
	  if (TREE_CODE (use) != SSA_NAME)
	    continue;
	  def_stmt = get_gimple_for_ssa_name (use);
	  if (!def_stmt)
	    continue;
	  lattice[i] += lattice[gimple_uid (def_stmt)];
	}
      i++;
      if (!is_gimple_assign (stmt)
	  || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
	continue;
      op0 = gimple_op (stmt, 1);
      op1 = gimple_op (stmt, 2);
      if (TREE_CODE (op0) != SSA_NAME
	  || TREE_CODE (op1) != SSA_NAME)
	continue;
      /* Swap operands if the second one is more expensive.  */
      def0 = get_gimple_for_ssa_name (op0);
      def1 = get_gimple_for_ssa_name (op1);
      if (!def1)
	continue;
      swap = false;
      if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
	swap = true;
      if (swap)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Swap operands in stmt:\n");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
		       def0 ? lattice[gimple_uid (def0)] : 0,
		       lattice[gimple_uid (def1)]);
	    }
	  swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
			     gimple_assign_rhs2_ptr (stmt));
	}
    }
  XDELETE (lattice);
}
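/* A small (hypothetical) example of the effect: in

     c_3 = cheap_1 + costly_2;

   if the TERed definition of costly_2 has a higher accumulated cost in
   the lattice than that of cheap_1, the operands are swapped to

     c_3 = costly_2 + cheap_1;

   so that expansion processes the expensive subtree first, which
   tends to reduce register pressure during expansion.  */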
/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
{
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple *stmt = NULL;
  rtx_note *note = NULL;
  rtx_insn *last;
  edge e;
  edge_iterator ei;

  if (dump_file)
    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
	     bb->index);

  /* Note that since we are now transitioning from GIMPLE to RTL, we
     cannot use the gsi_*_bb() routines because they expect the basic
     block to be in GIMPLE, instead of RTL.  Therefore, we need to
     access the BB sequence directly.  */
  if (optimize)
    reorder_operands (bb);
  stmts = bb_seq (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
  rtl_profile_for_bb (bb);
  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
  gsi = gsi_last (stmts);
  if (!gsi_end_p (gsi)
      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    {
      greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));

      if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && !gimple_return_retval (ret_stmt))
	{
	  gsi_remove (&gsi, false);
	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
	}
    }

  gsi = gsi_start (stmts);
  if (!gsi_end_p (gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	stmt = NULL;
    }

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);

  if (stmt || elt)
    {
      gcc_checking_assert (!note);
      last = get_last_insn ();

      if (stmt)
	{
	  expand_gimple_stmt (stmt);
	  gsi_next (&gsi);
	}

      if (elt)
	emit_label (*elt);

      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      gcc_assert (LABEL_P (BB_HEAD (bb)));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_gimple_stmt (stmt, last);
    }
  else
    BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);

  if (note)
    NOTE_BASIC_BLOCK (note) = bb;

  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    {
      basic_block new_bb;

      stmt = gsi_stmt (gsi);

      /* If this statement is a non-debug one, and we generate debug
	 insns, then this one might be the last real use of a TERed
	 SSA_NAME, but where there are still some debug uses further
	 down.  Expanding the current SSA name in such further debug
	 uses by their RHS might lead to wrong debug info, as coalescing
	 might make the operands of such RHS be placed into the same
	 pseudo as something else.  Like so:
	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
	   use(a_1);
	   a_2 = ...
	   #DEBUG ... => a_1
	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
	 If we now would expand a_1 by its RHS (a_0 + 1) in the debug use,
	 the write to a_2 would actually have clobbered the place which
	 would be remembered.

	 So, instead of that, we recognize the situation, and generate
	 debug temporaries at the last real use of TERed SSA names:
	   a_1 = a_0 + 1;
	   #DEBUG #D1 => a_1
	   use(a_1);
	   a_2 = ...
	   #DEBUG ... => #D1  */
      if (MAY_HAVE_DEBUG_BIND_INSNS
	  && SA.values
	  && !is_gimple_debug (stmt))
	{
	  ssa_op_iter iter;
	  tree op;
	  gimple *def;

	  location_t sloc = curr_insn_location ();

	  /* Look for SSA names that have their last use here (TERed
	     names always have only one real use).  */
	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	    if ((def = get_gimple_for_ssa_name (op)))
	      {
		imm_use_iterator imm_iter;
		use_operand_p use_p;
		bool have_debug_uses = false;

		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
		  {
		    if (gimple_debug_bind_p (USE_STMT (use_p)))
		      {
			have_debug_uses = true;
			break;
		      }
		  }

		if (have_debug_uses)
		  {
		    /* OP is a TERed SSA name, with DEF its defining
		       statement, and where OP is used in further debug
		       instructions.  Generate a debug temporary, and
		       replace all uses of OP in debug insns with that
		       temporary.  */
		    gimple *debugstmt;
		    tree value = gimple_assign_rhs_to_tree (def);
		    tree vexpr = make_node (DEBUG_EXPR_DECL);
		    rtx val;
		    machine_mode mode;

		    set_curr_insn_location (gimple_location (def));

		    DECL_ARTIFICIAL (vexpr) = 1;
		    TREE_TYPE (vexpr) = TREE_TYPE (value);
		    if (DECL_P (value))
		      mode = DECL_MODE (value);
		    else
		      mode = TYPE_MODE (TREE_TYPE (value));
		    SET_DECL_MODE (vexpr, mode);

		    val = gen_rtx_VAR_LOCATION
			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);

		    emit_debug_insn (val);

		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
		      {
			if (!gimple_debug_bind_p (debugstmt))
			  continue;

			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
			  SET_USE (use_p, vexpr);

			update_stmt (debugstmt);
		      }
		  }
	      }
	  set_curr_insn_location (sloc);
	}

      currently_expanding_gimple_stmt = stmt;

      /* Expand this statement, then evaluate the resulting RTL and
	 fixup the CFG accordingly.  */
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
	  if (new_bb)
	    return new_bb;
	}
      else if (is_gimple_debug (stmt))
	{
	  location_t sloc = curr_insn_location ();
	  gimple_stmt_iterator nsi = gsi;

	  for (;;)
	    {
	      tree var;
	      tree value = NULL_TREE;
	      rtx val = NULL_RTX;
	      machine_mode mode;

	      if (!gimple_debug_nonbind_marker_p (stmt))
		{
		  if (gimple_debug_bind_p (stmt))
		    {
		      var = gimple_debug_bind_get_var (stmt);

		      if (TREE_CODE (var) != DEBUG_EXPR_DECL
			  && TREE_CODE (var) != LABEL_DECL
			  && !target_for_debug_bind (var))
			goto delink_debug_stmt;

		      if (DECL_P (var))
			mode = DECL_MODE (var);
		      else
			mode = TYPE_MODE (TREE_TYPE (var));

		      if (gimple_debug_bind_has_value_p (stmt))
			value = gimple_debug_bind_get_value (stmt);

		      val = gen_rtx_VAR_LOCATION
			(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
		    }
		  else if (gimple_debug_source_bind_p (stmt))
		    {
		      var = gimple_debug_source_bind_get_var (stmt);

		      value = gimple_debug_source_bind_get_value (stmt);

		      mode = DECL_MODE (var);

		      val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
						  VAR_INIT_STATUS_UNINITIALIZED);
		    }
		  else
		    gcc_unreachable ();
		}
	      /* If this function was first compiled with markers
		 enabled, but they're now disabled (e.g. LTO), drop
		 them on the floor.  */
	      else if (gimple_debug_nonbind_marker_p (stmt)
		       && !MAY_HAVE_DEBUG_MARKER_INSNS)
		goto delink_debug_stmt;
	      else if (gimple_debug_begin_stmt_p (stmt))
		val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
	      else
		gcc_unreachable ();

	      last = get_last_insn ();

	      set_curr_insn_location (gimple_location (stmt));

	      emit_debug_insn (val);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  /* We can't dump the insn with a TREE where an RTX
		     is expected.  */
		  if (GET_CODE (val) == VAR_LOCATION)
		    {
		      gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
		      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
		    }
		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
		  if (GET_CODE (val) == VAR_LOCATION)
		    PAT_VAR_LOCATION_LOC (val) = (rtx)value;
		}

	    delink_debug_stmt:
	      /* In order not to generate too many debug temporaries,
		 we delink all uses of debug statements we already expanded.
		 Therefore debug statements between definition and real
		 use of TERed SSA names will continue to use the SSA name,
		 and not be replaced with debug temps.  */
	      delink_stmt_imm_use (stmt);

	      gsi = nsi;
	      gsi_next (&nsi);
	      if (gsi_end_p (nsi))
		break;
	      stmt = gsi_stmt (nsi);
	      if (!is_gimple_debug (stmt))
		break;
	    }

	  set_curr_insn_location (sloc);
	}
      else
	{
	  gcall *call_stmt = dyn_cast <gcall *> (stmt);
	  if (call_stmt
	      && gimple_call_tail_p (call_stmt)
	      && disable_tail_calls)
	    gimple_call_set_tail (call_stmt, false);

	  if (call_stmt && gimple_call_tail_p (call_stmt))
	    {
	      bool can_fallthru;
	      new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
	      if (new_bb)
		{
		  if (can_fallthru)
		    bb = new_bb;
		  else
		    return new_bb;
		}
	    }
	  else
	    {
	      def_operand_p def_p;
	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);

	      if (def_p != NULL)
		{
		  /* Ignore this stmt if it is in the list of
		     replaceable expressions.  */
		  if (SA.values
		      && bitmap_bit_p (SA.values,
				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
		    continue;
		}
	      last = expand_gimple_stmt (stmt);
	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
	    }
	}
    }

  currently_expanding_gimple_stmt = NULL;

  /* Expand implicit goto and convert goto_locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->goto_locus != UNKNOWN_LOCATION)
	set_curr_insn_location (e->goto_locus);
      if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
	{
	  emit_jump (label_rtx_for_bb (e->dest));
	  e->flags &= ~EDGE_FALLTHRU;
	}
    }

  /* Expanded RTL can create a jump in the last instruction of the block.
     This later might be assumed to be a jump to the successor and break
     edge insertion.  We need to insert a dummy move to prevent this.
     PR41440.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
      && (last = get_last_insn ())
      && (JUMP_P (last)
	  || (DEBUG_INSN_P (last)
	      && JUMP_P (prev_nondebug_insn (last)))))
    {
      rtx dummy = gen_reg_rtx (SImode);
      emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}
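/* Illustration of the PR41440 workaround above (insns hypothetical):
   if expansion left a trailing

     (jump_insn ...)

   on a fall-through edge, the appended no-op move
   (set (reg:SI 90) (reg:SI 90)) keeps later edge insertion from
   mistaking that jump for the branch to the successor block.  */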
/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);

  /* If the entry edge points to the first basic block, we don't need a
     jump; otherwise we have to jump to the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
    {
      tree label = gimple_block_label (e->dest);

      emit_jump (jump_target_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
				   get_last_insn (),
				   ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_single_succ_edge (init_block, first_block, flags);
    }
  else
    e = make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
			       EDGE_FALLTHRU);

  update_bb_for_insn (init_block);
  return init_block;
}
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
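/* E.g. for a block tree (shape hypothetical)

     BLOCK #0            -> level 0
       BLOCK #1          -> level 1
	 BLOCK #2        -> level 2
       BLOCK #3          -> level 1

   siblings share a level and each nesting step adds one.  */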
/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx_insn *head = get_last_insn ();
  rtx_insn *end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;
  basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
  rtx_insn *orig_end = BB_END (prev_bb);

  rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  /* While emitting the function end we could move end of the last basic
     block.  */
  BB_END (prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  /* But make sure exit_block starts with RETURN_LABEL, otherwise the
     bb count accounting will be confused.  Any instructions before that
     label are emitted for the case where PREV_BB falls through into the
     exit block, so append those instructions to prev_bb in that case.  */
  if (NEXT_INSN (head) != return_label)
    {
      while (NEXT_INSN (head) != return_label)
	{
	  if (!NOTE_P (NEXT_INSN (head)))
	    BB_END (prev_bb) = NEXT_INSN (head);
	  head = NEXT_INSN (head);
	}
    }
  exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
  exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
      if (!(e->flags & EDGE_ABNORMAL))
	redirect_edge_succ (e, exit_block);
      else
	ix++;
    }

  e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
			     EDGE_FALLTHRU);
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e2 != e)
      exit_block->count -= e2->count ();
  update_bb_for_insn (exit_block);
}
/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
				   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
	      && (!TREE_OPERAND (t, 2)
		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || (TREE_CODE (t) == COMPONENT_REF
		 && (!TREE_OPERAND (t, 2)
		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || TREE_CODE (t) == BIT_FIELD_REF
	     || TREE_CODE (t) == REALPART_EXPR
	     || TREE_CODE (t) == IMAGPART_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || CONVERT_EXPR_P (t))
	t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  t = get_base_address (t);
	  if (t && DECL_P (t)
	      && DECL_MODE (t) != BLKmode)
	    TREE_ADDRESSABLE (t) = 1;
	}

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}
/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple *stmt = gsi_stmt (gsi);
	if (!is_gimple_debug (stmt))
	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}
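/* For example (hypothetical source): given

     short v[2];
     ... = v[i];

   where V is small enough to live in a single register, V is marked
   TREE_ADDRESSABLE here so it gets a stack slot instead, since RTL
   cannot index into a register with a variable offset.  */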
/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  Local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (cfun->can_throw_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
	      <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (drap_rtx != NULL)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
	 needed.  */
      fixup_tail_calls ();
    }
}
static void
expand_main_function (void)
{
#if (defined(INVOKE__main)				\
     || (!defined(HAS_INIT_SECTION)			\
	 && !defined(INIT_SECTION_ASM_OP)		\
	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
#endif
}
/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

static void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  x = expand_normal (crtl->stack_protect_guard);
  if (guard_decl)
    y = expand_normal (guard_decl);
  else
    y = const0_rtx;

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (targetm.have_stack_protect_set ())
    if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
      {
	emit_insn (insn);
	return;
      }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}
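/* On a target without a stack_protect_set pattern this degenerates to
   a plain move, roughly (pseudo rtl, operands hypothetical):

     (set (mem:DI <canary slot>) (mem:DI <guard symbol>))

   whereas targets such as x86 provide a combined pattern so the guard
   value is never left live in a scratch register.  */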
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate CFG during
   the expansion.  */

namespace {

const pass_data pass_data_expand =
{
  RTL_PASS, /* type */
  "expand", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_EXPAND, /* tv_id */
  ( PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx
    | PROP_gimple_lvec
    | PROP_gimple_lva), /* properties_required */
  PROP_rtl, /* properties_provided */
  ( PROP_ssa | PROP_trees ), /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_expand : public rtl_opt_pass
{
public:
  pass_expand (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_expand, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_expand
6182 pass_expand::execute (function
*fun
)
6184 basic_block bb
, init_block
;
6187 rtx_insn
*var_seq
, *var_ret_seq
;
6190 timevar_push (TV_OUT_OF_SSA
);
6191 rewrite_out_of_ssa (&SA
);
6192 timevar_pop (TV_OUT_OF_SSA
);
6193 SA
.partition_to_pseudo
= XCNEWVEC (rtx
, SA
.map
->num_partitions
);
6195 if (MAY_HAVE_DEBUG_BIND_STMTS
&& flag_tree_ter
)
6197 gimple_stmt_iterator gsi
;
6198 FOR_EACH_BB_FN (bb
, cfun
)
6199 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6200 if (gimple_debug_bind_p (gsi_stmt (gsi
)))
6201 avoid_deep_ter_for_debug (gsi_stmt (gsi
), 0);
6204 /* Make sure all values used by the optimization passes have sane
6208 /* Some backends want to know that we are expanding to RTL. */
6209 currently_expanding_to_rtl
= 1;
6210 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6211 free_dominance_info (CDI_DOMINATORS
);
6213 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
));
6215 if (chkp_function_instrumented_p (current_function_decl
))
6216 chkp_reset_rtl_bounds ();
6218 insn_locations_init ();
6219 if (!DECL_IS_BUILTIN (current_function_decl
))
6221 /* Eventually, all FEs should explicitly set function_start_locus. */
6222 if (LOCATION_LOCUS (fun
->function_start_locus
) == UNKNOWN_LOCATION
)
6223 set_curr_insn_location
6224 (DECL_SOURCE_LOCATION (current_function_decl
));
6226 set_curr_insn_location (fun
->function_start_locus
);
6229 set_curr_insn_location (UNKNOWN_LOCATION
);
6230 prologue_location
= curr_insn_location ();
6232 #ifdef INSN_SCHEDULING
6233 init_sched_attrs ();
6236 /* Make sure first insn is a note even if we don't want linenums.
6237 This makes sure the first insn will never be deleted.
6238 Also, final expects a note to appear there. */
6239 emit_note (NOTE_INSN_DELETED
);
6241 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6242 discover_nonconstant_array_refs ();
6244 targetm
.expand_to_rtl_hook ();
6245 crtl
->init_stack_alignment ();
6246 fun
->cfg
->max_jumptable_ents
= 0;
6248 /* Resovle the function section. Some targets, like ARM EABI rely on knowledge
6249 of the function section at exapnsion time to predict distance of calls. */
6250 resolve_unique_section (current_function_decl
, 0, flag_function_sections
);
6252 /* Expand the variables recorded during gimple lowering. */
6253 timevar_push (TV_VAR_EXPAND
);
6256 var_ret_seq
= expand_used_vars ();
6258 var_seq
= get_insns ();
6260 timevar_pop (TV_VAR_EXPAND
);
6262 /* Honor stack protection warnings. */
6263 if (warn_stack_protect
)
6265 if (fun
->calls_alloca
)
6266 warning (OPT_Wstack_protector
,
6267 "stack protector not protecting local variables: "
6268 "variable length buffer");
6269 if (has_short_buffer
&& !crtl
->stack_protect_guard
)
6270 warning (OPT_Wstack_protector
,
6271 "stack protector not protecting function: "
6272 "all local arrays are less than %d bytes long",
6273 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE
));
6276 /* Set up parameters and prepare for return, for the function. */
6277 expand_function_start (current_function_decl
);
6279 /* If we emitted any instructions for setting up the variables,
6280 emit them before the FUNCTION_START note. */
6283 emit_insn_before (var_seq
, parm_birth_insn
);
6285 /* In expand_function_end we'll insert the alloca save/restore
6286 before parm_birth_insn. We've just insertted an alloca call.
6287 Adjust the pointer to match. */
6288 parm_birth_insn
= var_seq
;
6291 /* Now propagate the RTL assignment of each partition to the
6292 underlying var of each SSA_NAME. */
6295 FOR_EACH_SSA_NAME (i
, name
, cfun
)
6297 /* We might have generated new SSA names in
6298 update_alias_info_with_stack_vars. They will have a NULL
6299 defining statements, and won't be part of the partitioning,
6301 if (!SSA_NAME_DEF_STMT (name
))
6304 adjust_one_expanded_partition_var (name
);
6307 /* Clean up RTL of variables that straddle across multiple
6308 partitions, and check that the rtl of any PARM_DECLs that are not
6309 cleaned up is that of their default defs. */
6310 FOR_EACH_SSA_NAME (i
, name
, cfun
)
6314 /* We might have generated new SSA names in
6315 update_alias_info_with_stack_vars. They will have a NULL
6316 defining statements, and won't be part of the partitioning,
6318 if (!SSA_NAME_DEF_STMT (name
))
6320 part
= var_to_partition (SA
.map
, name
);
6321 if (part
== NO_PARTITION
)
6324 /* If this decl was marked as living in multiple places, reset
6325 this now to NULL. */
6326 tree var
= SSA_NAME_VAR (name
);
6327 if (var
&& DECL_RTL_IF_SET (var
) == pc_rtx
)
6328 SET_DECL_RTL (var
, NULL
);
6329 /* Check that the pseudos chosen by assign_parms are those of
6330 the corresponding default defs. */
6331 else if (SSA_NAME_IS_DEFAULT_DEF (name
)
6332 && (TREE_CODE (var
) == PARM_DECL
6333 || TREE_CODE (var
) == RESULT_DECL
))
6335 rtx in
= DECL_RTL_IF_SET (var
);
6337 rtx out
= SA
.partition_to_pseudo
[part
];
6338 gcc_assert (in
== out
);
6340 /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6341 those expected by debug backends for each parm and for
6342 the result. This is particularly important for stabs,
6343 whose register elimination from parm's DECL_RTL may cause
6344 -fcompare-debug differences as SET_DECL_RTL changes reg's
6345 attrs. So, make sure the RTL already has the parm as the
6346 EXPR, so that it won't change. */
6347 SET_DECL_RTL (var
, NULL_RTX
);
6349 set_mem_attributes (in
, var
, true);
6350 SET_DECL_RTL (var
, in
);
6354 /* If this function is `main', emit a call to `__main'
6355 to run global initializers, etc. */
6356 if (DECL_NAME (current_function_decl
)
6357 && MAIN_NAME_P (DECL_NAME (current_function_decl
))
6358 && DECL_FILE_SCOPE_P (current_function_decl
))
6359 expand_main_function ();
6361 /* Initialize the stack_protect_guard field. This must happen after the
6362 call to __main (if any) so that the external decl is initialized. */
6363 if (crtl
->stack_protect_guard
&& targetm
.stack_protect_runtime_enabled_p ())
6364 stack_protect_prologue ();
6366 expand_phi_nodes (&SA
);
6368 /* Release any stale SSA redirection data. */
6369 redirect_edge_var_map_empty ();
6371 /* Register rtl specific functions for cfg. */
6372 rtl_register_cfg_hooks ();
6374 init_block
= construct_init_block ();
6376 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
6377 remaining edges later. */
6378 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR_FOR_FN (fun
)->succs
)
6379 e
->flags
&= ~EDGE_EXECUTABLE
;
6381 /* If the function has too many markers, drop them while expanding. */
6382 if (cfun
->debug_marker_count
6383 >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT
))
6384 cfun
->debug_nonbind_markers
= false;
6386 lab_rtx_for_bb
= new hash_map
<basic_block
, rtx_code_label
*>;
6387 FOR_BB_BETWEEN (bb
, init_block
->next_bb
, EXIT_BLOCK_PTR_FOR_FN (fun
),
6389 bb
= expand_gimple_basic_block (bb
, var_ret_seq
!= NULL_RTX
);
6391 if (MAY_HAVE_DEBUG_BIND_INSNS
)
6392 expand_debug_locations ();
6394 if (deep_ter_debug_map
)
6396 delete deep_ter_debug_map
;
6397 deep_ter_debug_map
= NULL
;
6400 /* Free stuff we no longer need after GIMPLE optimizations. */
6401 free_dominance_info (CDI_DOMINATORS
);
6402 free_dominance_info (CDI_POST_DOMINATORS
);
6403 delete_tree_cfg_annotations (fun
);
6405 timevar_push (TV_OUT_OF_SSA
);
6406 finish_out_of_ssa (&SA
);
6407 timevar_pop (TV_OUT_OF_SSA
);
6409 timevar_push (TV_POST_EXPAND
);
6410 /* We are no longer in SSA form. */
6411 fun
->gimple_df
->in_ssa_p
= false;
6412 loops_state_clear (LOOP_CLOSED_SSA
);
6414 /* Expansion is used by optimization passes too, set maybe_hot_insn_p
6415 conservatively to true until they are all profile aware. */
6416 delete lab_rtx_for_bb
;
6417 free_histograms (fun
);
6419 construct_exit_block ();
6420 insn_locations_finalize ();
6424 rtx_insn
*after
= return_label
;
6425 rtx_insn
*next
= NEXT_INSN (after
);
6426 if (next
&& NOTE_INSN_BASIC_BLOCK_P (next
))
6428 emit_insn_after (var_ret_seq
, after
);
6431 /* Zap the tree EH table. */
6432 set_eh_throw_stmt_table (fun
, NULL
);
6434 /* We need JUMP_LABEL be set in order to redirect jumps, and hence
6435 split edges which edge insertions might do. */
6436 rebuild_jump_labels (get_insns ());
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->insns.r)
	    {
	      rebuild_jump_labels_chain (e->insns.r);
	      /* Put insns after parm birth, but before
		 NOTE_INSN_FUNCTION_BEG.  */
	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
		  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
		{
		  rtx_insn *insns = e->insns.r;
		  e->insns.r = NULL;
		  if (NOTE_P (parm_birth_insn)
		      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
		    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
		  else
		    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
		}
	      else
		commit_one_edge_insertion (e);
	    }
	  else
	    ei_next (&ei);
	}
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  flush_mark_addressable_queue ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the
	     backend.  */
	  e->flags &= ~EDGE_EXECUTABLE;

	  /* At the moment not all abnormal edges match the RTL
	     representation.  It is safe to remove them here as
	     find_many_sub_basic_blocks will rediscover them.
	     In the future we should get this fixed properly.  */
	  if ((e->flags & EDGE_ABNORMAL)
	      && !(e->flags & EDGE_SIBCALL))
	    remove_edge (e);
	  else
	    ei_next (&ei);
	}
    }
  auto_sbitmap blocks (last_basic_block_for_fn (fun));
  bitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();
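
  /* Finalize the function's stack alignment requirements, setting up
     dynamic stack realignment (DRAP) where the target needs it.  */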
  expand_stack_alignment ();

  /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
     function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();

  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (fun->eh->region_tree != NULL)
    finish_eh_generation ();

  /* BB subdivision may have created basic blocks that are only reachable
     from unlikely bbs but not marked as such in the profile.  */
  if (optimize)
    propagate_unlikely_bbs_forward ();

  /* Remove unreachable blocks, otherwise we cannot compute dominators
     which are needed for loop state verification.  As a side-effect
     this also compacts blocks.
     ???  We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
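
  /* Verify the rebuilt CFG; this is a no-op unless internal consistency
     checking is enabled.  */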
  checking_verify_flow_info ();

  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();

  /* And finally unshare all RTL.  */
  unshare_all_rtl ();

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (fun->gimple_df->tm_restart)
    fun->gimple_df->tm_restart = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (fun->decl), 0);
  default_rtl_profile ();

  /* For -dx discard loops now, otherwise IL verify in clean_state will
     ICE.  */
  if (rtl_dump_and_exit)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_finalize ();
    }

  timevar_pop (TV_POST_EXPAND);

  return 0;
}

} // anon namespace
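
/* Entry point for the pass manager to create the expand pass.  */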
rtl_opt_pass *
make_pass_expand (gcc::context *ctxt)
{
  return new pass_expand (ctxt);
}