/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "ssa.h"
#include "optabs.h"
#include "regs.h" /* For reg_renumber.  */
#include "emit-rtl.h"
#include "recog.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "varasm.h"
#include "stor-layout.h"
#include "stmt.h"
#include "print-tree.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-expr.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "cfgloop.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "tree-ssa-address.h"
#include "output.h"
#include "builtins.h"

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple *currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);

static bool defer_stack_allocation (tree, bool);

static void record_alignment_for_reg_var (unsigned int);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple *stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}


#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Choose either CUR or NEXT as the leader DECL for a partition.
   Prefer ignored decls, to simplify debug dumps and reduce the
   ambiguity that arises when the same user variable ends up in
   multiple partitions (this is less likely for compiler-introduced
   temps).  */

static tree
leader_merge (tree cur, tree next)
{
  if (cur == NULL || cur == next)
    return next;

  if (DECL_P (cur) && DECL_IGNORED_P (cur))
    return cur;

  if (DECL_P (next) && DECL_IGNORED_P (next))
    return next;

  return cur;
}

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  gcc_checking_assert (!x
                       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
                       || (use_register_for_decl (t)
                           ? (REG_P (x)
                              || (GET_CODE (x) == CONCAT
                                  && (REG_P (XEXP (x, 0))
                                      || SUBREG_P (XEXP (x, 0)))
                                  && (REG_P (XEXP (x, 1))
                                      || SUBREG_P (XEXP (x, 1))))
                              /* We need to accept PARALLELs for RESULT_DECLs
                                 because of vector types with BLKmode returned
                                 in multiple registers, but they are supposed
                                 to be uncoalesced.  */
                              || (GET_CODE (x) == PARALLEL
                                  && SSAVAR (t)
                                  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                                  && (GET_MODE (x) == BLKmode
                                      || !flag_tree_coalesce_vars)))
                           : (MEM_P (x) || x == pc_rtx
                              || (GET_CODE (x) == CONCAT
                                  && MEM_P (XEXP (x, 0))
                                  && MEM_P (XEXP (x, 1))))));
  /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
     RESULT_DECLs has the expected mode.  For memory, we accept
     unpromoted modes, since that's what we're likely to get.  For
     PARM_DECLs and RESULT_DECLs, we'll have been called by
     set_parm_rtl, which will give us the default def, so we don't
     have to compute it ourselves.  For RESULT_DECLs, we accept mode
     mismatches too, as long as we have BLKmode or are not coalescing
     across variables, so that we don't reject BLKmode PARALLELs or
     unpromoted REGs.  */
  gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
                       || (SSAVAR (t)
                           && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                           && (promote_ssa_mode (t, NULL) == BLKmode
                               || !flag_tree_coalesce_vars))
                       || !use_register_for_decl (t)
                       || GET_MODE (x) == promote_ssa_mode (t, NULL));

  if (x)
    {
      bool skip = false;
      tree cur = NULL_TREE;
      rtx xm = x;

    retry:
      if (MEM_P (xm))
        cur = MEM_EXPR (xm);
      else if (REG_P (xm))
        cur = REG_EXPR (xm);
      else if (SUBREG_P (xm))
        {
          gcc_assert (subreg_lowpart_p (xm));
          xm = SUBREG_REG (xm);
          goto retry;
        }
      else if (GET_CODE (xm) == CONCAT)
        {
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (GET_CODE (xm) == PARALLEL)
        {
          xm = XVECEXP (xm, 0, 0);
          gcc_assert (GET_CODE (xm) == EXPR_LIST);
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (xm == pc_rtx)
        skip = true;
      else
        gcc_unreachable ();

      tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);

      if (cur != next)
        {
          if (MEM_P (x))
            set_mem_attributes (x,
                                next && TREE_CODE (next) == SSA_NAME
                                ? TREE_TYPE (next)
                                : next, true);
          else
            set_reg_attrs_for_decl_rtl (next, x);
        }
    }

  if (TREE_CODE (t) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, t);
      if (part != NO_PARTITION)
        {
          if (SA.partition_to_pseudo[part])
            gcc_assert (SA.partition_to_pseudo[part] == x);
          else if (x != pc_rtx)
            SA.partition_to_pseudo[part] = x;
        }
      /* For the benefit of debug information at -O0 (where
         vartracking doesn't run) record the place also in the base
         DECL.  For PARMs and RESULTs, do so only when setting the
         default def.  */
      if (x && x != pc_rtx && SSA_NAME_VAR (t)
          && (VAR_P (SSA_NAME_VAR (t))
              || SSA_NAME_IS_DEFAULT_DEF (t)))
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  poly_uint64 size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align;

  if (TREE_CODE (decl) == SSA_NAME)
    align = TYPE_ALIGN (TREE_TYPE (decl));
  else
    {
      align = LOCAL_DECL_ALIGNMENT (decl);
      SET_DECL_ALIGN (decl, align);
    }
  return align / BITS_PER_UNIT;
}

/* Align given offset BASE with ALIGN.  Truncate up if ALIGN_UP is true,
   down otherwise.  Return truncated BASE value.  */

static inline unsigned HOST_WIDE_INT
align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
{
  return align_up ? (base + align - 1) & -align : base & -align;
}

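/* Illustrative example (not part of the original source): with ALIGN a
   power of two, the expression above rounds BASE to a multiple of ALIGN,
   e.g. align_base (13, 8, true) == 16 and align_base (13, 8, false) == 8,
   since (13 + 7) & -8 == 16 and 13 & -8 == 8.  */
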
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static poly_int64
alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
{
  poly_int64 offset, new_frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset
        = aligned_lower_bound (frame_offset - frame_phase - size,
                               align) + frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset
        = aligned_upper_bound (frame_offset - frame_phase,
                               align) + frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}

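/* Illustrative example (not part of the original source): on a target
   where FRAME_GROWS_DOWNWARD, with frame_phase == 0, allocating 12 bytes
   at 8-byte alignment starting from frame_offset == -16 computes
   aligned_lower_bound (-16 - 12, 8) == -32, so the returned offset is
   -32 and frame_offset becomes -32.  */
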
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  tree size = TREE_CODE (decl) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
    : DECL_SIZE_UNIT (decl);
  v->size = tree_to_poly_uint64 (size);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (known_eq (v->size, 0U))
    v->size = 1;
  v->alignb = align_local_variable (decl);
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}

/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (!VAR_P (lhs))
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = decl_to_stack_part->get (lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}

/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we for instance moved an address-taken
     operation upward, without also moving a dereference to it upwards.
     But it's conservatively correct as a variable never can hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  poly_int64 sizea = stack_vars[ia].size;
  poly_int64 sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing  */
  int diff = compare_sizes_for_sort (sizeb, sizea);
  if (diff != 0)
    return diff;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}

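/* Illustrative note (not part of the original source): under the
   ordering defined above, objects with "large" (unsupported) alignment
   sort before all "small" ones regardless of size; among the rest,
   larger sizes come first, then higher alignment, with the ID
   comparison as a final tie-break to keep qsort stable.  */
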
struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
typedef hash_map<size_t, bitmap, part_traits> part_hashmap;

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               part_hashmap *decls_to_partitions,
                               hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = new part_hashmap;
          cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          decls_to_partitions->put (uid, part);
          cfun->gimple_df->decls_to_pointers->put (decl, name);
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      tree name;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      FOR_EACH_SSA_NAME (i, name, cfun)
        {
          struct ptr_info_def *pi;

          if (POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           &visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/

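/* Illustrative example (not part of the original source): given objects
   of sizes 40, 24, 16 and 8 where only the 24- and 16-byte objects
   conflict, the loop below first merges the 24-byte object into the
   40-byte object's partition; the 16-byte object is then skipped
   because union_stack_vars transferred its conflict with the 24-byte
   object to the representative; finally the 8-byte object is merged,
   leaving partitions {40, 24, 8} and {16}.  */
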
static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      poly_int64 isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          poly_int64 jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if (asan_sanitize_stack_p ()
              && maybe_ne (isize, jsize)
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
      print_dec (stack_vars[i].size, dump_file);
      fprintf (dump_file, " align %u\n", stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}

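/* Illustrative note (not part of the original source): per the fprintf
   calls above, each partition is dumped as a line of the form
   "Partition 0: size 48 align 16", followed by one tab-indented line
   per member decl.  */
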
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         poly_int64 offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
                   ? TYPE_MODE (TREE_TYPE (decl))
                   : DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = known_alignment (offset);
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      SET_DECL_ALIGN (decl, align);
      DECL_USER_ALIGN (decl) = 0;
    }

  set_rtl (decl, x);
}

struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reverse order; the highest-offset pairs come first.  */
  auto_vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  auto_vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  poly_uint64 large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  bool large_allocation_done = false;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* All "large" alignment decls come before all "small" alignment
             decls, but "large" alignment decls are not sorted based on
             their alignment.  Increase large_align to track the largest
             required alignment.  */
          if ((alignb * BITS_PER_UNIT) > large_align)
            large_align = alignb * BITS_PER_UNIT;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if (TREE_CODE (decl) == SSA_NAME
              ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
              : DECL_RTL (decl) != pc_rtx)
            continue;

          large_size = aligned_upper_bound (large_size, alignb);
          large_size += stack_vars[i].size;
        }
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      poly_int64 offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if (TREE_CODE (decl) == SSA_NAME
          ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
          : DECL_RTL (decl) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          base = virtual_stack_vars_rtx;
          /* ASAN description strings don't yet have a syntax for expressing
             polynomial offsets.  */
          HOST_WIDE_INT prev_offset;
          if (asan_sanitize_stack_p ()
              && pred
              && frame_offset.is_constant (&prev_offset)
              && stack_vars[i].size.is_constant ())
            {
              prev_offset = align_base (prev_offset,
                                        MAX (alignb, ASAN_RED_ZONE_SIZE),
                                        !FRAME_GROWS_DOWNWARD);
              tree repr_decl = NULL_TREE;
              offset
                = alloc_stack_frame_space (stack_vars[i].size
                                           + ASAN_RED_ZONE_SIZE,
                                           MAX (alignb, ASAN_RED_ZONE_SIZE));

              data->asan_vec.safe_push (prev_offset);
              /* Allocating a constant amount of space from a constant
                 starting offset must give a constant result.  */
              data->asan_vec.safe_push ((offset + stack_vars[i].size)
                                        .to_constant ());
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);
              data->asan_alignb = MAX (data->asan_alignb, alignb);
              if (data->asan_base == NULL)
                data->asan_base = gen_reg_rtx (Pmode);
              base = data->asan_base;

              if (!STRICT_ALIGNMENT)
                base_align = crtl->max_used_stack_slot_alignment;
              else
                base_align = MAX (crtl->max_used_stack_slot_alignment,
                                  GET_MODE_ALIGNMENT (SImode)
                                  << ASAN_SHADOW_SHIFT);
            }
          else
            {
              offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
              base_align = crtl->max_used_stack_slot_alignment;
            }
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;

          /* If there were any variables requiring "large" alignment, allocate
             space.  */
          if (maybe_ne (large_size, 0U) && ! large_allocation_done)
            {
              poly_int64 loffset;
              rtx large_allocsize;

              large_allocsize = gen_int_mode (large_size, Pmode);
              get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
              loffset = alloc_stack_frame_space
                (rtx_to_poly_int64 (large_allocsize),
                 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
              large_base = get_dynamic_stack_base (loffset, large_align);
              large_allocation_done = true;
            }
          gcc_assert (large_base != NULL);

          large_alloc = aligned_upper_bound (large_alloc, alignb);
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (known_eq (large_alloc, large_size));
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static poly_uint64
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  poly_uint64 size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* Record the RTL assignment X for the default def of PARM.  */

extern void
set_parm_rtl (tree parm, rtx x)
{
  gcc_assert (TREE_CODE (parm) == PARM_DECL
              || TREE_CODE (parm) == RESULT_DECL);

  if (x && !MEM_P (x))
    {
      unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
                                              TYPE_MODE (TREE_TYPE (parm)),
                                              TYPE_ALIGN (TREE_TYPE (parm)));

      /* If the variable alignment is very large we'll dynamically
         allocate it, which means that in-frame portion is just a
         pointer.  ??? We've got a pseudo for sure here, do we
         actually dynamically allocate its spilling area if needed?
         ??? Isn't it a problem when Pmode alignment also exceeds
         MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32?  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = GET_MODE_ALIGNMENT (Pmode);

      record_alignment_for_reg_var (align);
    }

  tree ssa = ssa_default_def (cfun, parm);
  if (!ssa)
    return set_rtl (parm, x);

  int part = var_to_partition (SA.map, ssa);
  gcc_assert (part != NO_PARTITION);

  bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
  gcc_assert (changed);

  set_rtl (ssa, x);
  gcc_assert (DECL_RTL (parm) == x);
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var_1 (tree var)
{
  poly_uint64 size;
  poly_int64 offset;
  unsigned byte_align;

  if (TREE_CODE (var) == SSA_NAME)
    {
      tree type = TREE_TYPE (var);
      size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
      byte_align = TYPE_ALIGN_UNIT (type);
    }
  else
    {
      size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
      byte_align = align_local_variable (var);
    }

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
   already assigned some MEM.  */

static void
expand_one_stack_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (MEM_P (x));
          return;
        }
    }

  return expand_one_stack_var_1 (var);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* Record the alignment requirements of some variable assigned to a
   pseudo.  */

static void
record_alignment_for_reg_var (unsigned int align)
{
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;
}

/* Create RTL for an SSA partition.  */

static void
expand_one_ssa_partition (tree var)
{
  int part = var_to_partition (SA.map, var);
  gcc_assert (part != NO_PARTITION);

  if (SA.partition_to_pseudo[part])
    return;

  unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                          TYPE_MODE (TREE_TYPE (var)),
                                          TYPE_ALIGN (TREE_TYPE (var)));

  /* If the variable alignment is very large we'll dynamically allocate
     it, which means that in-frame portion is just a pointer.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = GET_MODE_ALIGNMENT (Pmode);

  record_alignment_for_reg_var (align);

  if (!use_register_for_decl (var))
    {
      if (defer_stack_allocation (var, true))
        add_stack_var (var);
      else
        expand_one_stack_var_1 (var);
      return;
    }

  machine_mode reg_mode = promote_ssa_mode (var, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* For a promoted variable, X will not be used directly but wrapped in a
     SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
     will assume that its upper bits can be inferred from its lower bits.
     Therefore, if X isn't initialized on every path from the entry, then
     we must do it manually in order to fulfill the above assumption.  */
  if (reg_mode != TYPE_MODE (TREE_TYPE (var))
      && bitmap_bit_p (SA.partitions_for_undefined_values, part))
    emit_move_insn (x, CONST0_RTX (reg_mode));
}

/* Record the association between the RTL generated for partition PART
   and the underlying variable of the SSA_NAME VAR.  */

static void
adjust_one_expanded_partition_var (tree var)
{
  if (!var)
    return;

  tree decl = SSA_NAME_VAR (var);

  int part = var_to_partition (SA.map, var);
  if (part == NO_PARTITION)
    return;

  rtx x = SA.partition_to_pseudo[part];

  gcc_assert (x);

  set_rtl (var, x);

  if (!REG_P (x))
    return;

  /* Note if the object is a user variable.  */
  if (decl && !DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (REG_P (x));
          return;
        }
      gcc_unreachable ();
    }

  tree decl = var;
  tree type = TREE_TYPE (decl);
  machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

c22cacf3 1498/* A subroutine of expand_one_var. VAR is a variable that will be
1f6d3a08
RH
1499 allocated to the local stack frame. Return true if we wish to
1500 add VAR to STACK_VARS so that it will be coalesced with other
1501 variables. Return false to allocate VAR immediately.
1502
1503 This function is used to reduce the number of variables considered
1504 for coalescing, which reduces the size of the quadratic problem. */
1505
1506static bool
1507defer_stack_allocation (tree var, bool toplevel)
1508{
1f9ceff1
AO
1509 tree size_unit = TREE_CODE (var) == SSA_NAME
1510 ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1511 : DECL_SIZE_UNIT (var);
5e48d894 1512 poly_uint64 size;
1f9ceff1 1513
ee2e8462
EB
1514 /* Whether the variable is small enough for immediate allocation not to be
1515 a problem with regard to the frame size. */
1516 bool smallish
5e48d894
RS
1517 = (poly_int_tree_p (size_unit, &size)
1518 && (estimated_poly_value (size)
1519 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)));
ee2e8462 1520
7d69de61 1521 /* If stack protection is enabled, *all* stack variables must be deferred,
f3ddd692
JJ
1522 so that we can re-order the strings to the top of the frame.
1523 Similarly for Address Sanitizer. */
c461d263 1524 if (flag_stack_protect || asan_sanitize_stack_p ())
7d69de61
RH
1525 return true;
1526
1f9ceff1
AO
1527 unsigned int align = TREE_CODE (var) == SSA_NAME
1528 ? TYPE_ALIGN (TREE_TYPE (var))
1529 : DECL_ALIGN (var);
1530
3a42502d
RH
1531 /* We handle "large" alignment via dynamic allocation. We want to handle
1532 this extra complication in only one place, so defer them. */
1f9ceff1 1533 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
3a42502d
RH
1534 return true;
1535
1f9ceff1
AO
1536 bool ignored = TREE_CODE (var) == SSA_NAME
1537 ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1538 : DECL_IGNORED_P (var);
1539
ee2e8462
EB
1540 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1541 might be detached from their block and appear at toplevel when we reach
1542 here. We want to coalesce them with variables from other blocks when
1543 the immediate contribution to the frame size would be noticeable. */
1f9ceff1 1544 if (toplevel && optimize > 0 && ignored && !smallish)
ee2e8462
EB
1545 return true;
1546
1547 /* Variables declared in the outermost scope automatically conflict
1548 with every other variable. The only reason to want to defer them
1f6d3a08
RH
1549 at all is that, after sorting, we can more efficiently pack
1550 small variables in the stack frame. Continue to defer at -O2. */
1551 if (toplevel && optimize < 2)
1552 return false;
1553
1554 /* Without optimization, *most* variables are allocated from the
1555 stack, which makes the quadratic problem large exactly when we
c22cacf3 1556 want compilation to proceed as quickly as possible. On the
1f6d3a08
RH
1557 other hand, we don't want the function's stack frame size to
1558 get completely out of hand. So we avoid adding scalars and
1559 "small" aggregates to the list at all. */
ee2e8462 1560 if (optimize == 0 && smallish)
1f6d3a08
RH
1561 return false;
1562
1563 return true;
1564}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.
*/

static poly_uint64
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
    {
      if (is_global_var (var))
	return 0;

      /* Because we don't know if VAR will be in register or on stack,
	 we conservatively assume it will be on stack even if VAR is
	 eventually put into register after RA pass.  For non-automatic
	 variables, which won't be on stack, we collect alignment of
	 type and ignore user specified alignment.  Similarly for
	 SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
	  || DECL_EXTERNAL (var)
	  || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
				   TYPE_MODE (TREE_TYPE (var)),
				   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
	/* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
	   or variables which were assigned a stack slot already by
	   expand_one_stack_var_at - in the latter case DECL_ALIGN has been
	   changed from the offset chosen to it.  */
	align = crtl->stack_alignment_estimated;
      else
	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
	 it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
	align = GET_MODE_ALIGNMENT (Pmode);
    }

  record_alignment_for_reg_var (align);

  poly_uint64 size;
  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (!VAR_P (var)
		  || (!DECL_EXTERNAL (var)
		      && !DECL_HAS_VALUE_EXPR_P (var)
		      && !TREE_STATIC (var)
		      && TREE_TYPE (var) != error_mark_node
		      && !DECL_HARD_REGISTER (var)
		      && really_expand));
    }
  if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
	expand_one_error_var (var);
    }
  else if (VAR_P (var) && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
	{
	  expand_one_hard_reg_var (var);
	  if (!DECL_HARD_REGISTER (var))
	    /* Invalid register specification.  */
	    expand_one_error_var (var);
	}
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
	expand_one_register_var (origvar);
    }
  else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
	   || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
	{
	  error ("size of variable %q+D is too large", var);
	  expand_one_error_var (var);
	}
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
	{
	  if (lookup_attribute ("naked",
				DECL_ATTRIBUTES (current_function_decl)))
	    error ("cannot allocate stack for variable %q+D, naked function.",
		   var);

	  expand_one_stack_var (origvar);
	}
      return size;
    }
  return 0;
}
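
/* Note: expand_one_var is used in two modes.  estimated_stack_frame_size
   (below) calls it with REALLY_EXPAND == false merely to accumulate sizes
   and alignments for the inline heuristics, while expand_used_vars calls
   it with REALLY_EXPAND == true to actually assign RTL.  */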

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
	&& ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
	    || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
	|| !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3,
  SPCT_FLAG_EXPLICIT = 4
};

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
	  || t == signed_char_type_node
	  || t == unsigned_char_type_node)
	{
	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
	  unsigned HOST_WIDE_INT len;

	  if (!TYPE_SIZE_UNIT (type)
	      || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
	    len = max;
	  else
	    len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

	  if (len < max)
	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
	  else
	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
	}
      else
	ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
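
/* Worked example (assuming the default --param ssp-buffer-size=8):
   "struct { char name[4]; int x; }" yields SPCT_HAS_AGGREGATE from the
   RECORD_TYPE case, and the recursion over the fields adds
   SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY for name[4] since 4 < 8,
   giving a mask of 8 | 2 | 4 == 14.  */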

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG
      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
	  && lookup_attribute ("stack_protect",
			       DECL_ATTRIBUTES (current_function_decl))))
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
	  && !(bits & SPCT_HAS_AGGREGATE))
	ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
	ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
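
/* For instance, under -fstack-protector-strong a bare "char buf[16]" is
   phase 1 (a character array not wrapped in an aggregate), "int a[10]"
   is phase 2, and a plain scalar stays phase 0; under plain
   -fstack-protector (SPCT_FLAG_DEFAULT) only a large character array
   reaches phase 1, via the else branch above.  (Assumes the default
   ssp-buffer-size of 8.)  */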

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}

/* And a helper function that checks for the asan phase (with stack
   protector it is phase 3).  This is used as a callback for
   expand_stack_vars.  Returns true if any of the vars in the partition
   need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
	return true;
      i = stack_vars[i].next;
    }
  return false;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
	if (ph_i != phase[j])
	  add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
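
/* E.g. with three stack vars in phases {0, 1, 2}, the pairwise loop above
   records conflicts (0,1), (0,2) and (1,2), so partitioning can never
   share a slot across phases; with all vars in one phase it records
   nothing.  Note the loop is quadratic in the number of stack vars.  */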

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			   VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = new hash_map<tree, size_t>;

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  delete decl_to_stack_part;
  decl_to_stack_part = NULL;
}

/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  poly_int64 size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
	stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return estimated_poly_value (size);
}

/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
	tree field_type = TREE_TYPE (f);
	if (RECORD_OR_UNION_TYPE_P (field_type)
	    && record_or_union_type_has_array_p (field_type))
	  return 1;
	if (TREE_CODE (field_type) == ARRAY_TYPE)
	  return 1;
      }
  return 0;
}

/* Check if the current function has local referenced variables that
   have their addresses taken, contain an array, or are arrays.  */

static bool
stack_protect_decl_p ()
{
  unsigned i;
  tree var;

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    if (!is_global_var (var))
      {
	tree var_type = TREE_TYPE (var);
	if (VAR_P (var)
	    && (TREE_CODE (var_type) == ARRAY_TYPE
		|| TREE_ADDRESSABLE (var)
		|| (RECORD_OR_UNION_TYPE_P (var_type)
		    && record_or_union_type_has_array_p (var_type))))
	  return true;
      }
  return false;
}

/* Check if the current function has calls that use a return slot.  */

static bool
stack_protect_return_slot_p ()
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	 !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple *stmt = gsi_stmt (gsi);
	/* This assumes that calls to internal-only functions never
	   use a return slot.  */
	if (is_gimple_call (stmt)
	    && !gimple_call_internal_p (stmt)
	    && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
				  gimple_call_fndecl (stmt)))
	  return true;
      }
  return false;
}
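
/* E.g. a call "struct big s = f ();" where aggregate_value_p is true
   (the result is returned in memory) makes this predicate return true,
   which under -fstack-protector-strong is enough to force a guard even
   if the function declares no risky locals itself.  */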

/* Expand all variables used in the function.  */

static rtx_insn *
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  auto_vec<tree> maybe_local_decls;
  rtx_insn *var_end_seq = NULL;
  unsigned i;
  unsigned len;
  bool gen_stack_protect_signal = false;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = targetm.starting_frame_offset () % align;
    frame_phase = off ? align - off : 0;
  }

  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    TREE_USED (var) = 1;
  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  init_vars_expansion ();

  if (targetm.use_pseudo_pic_reg ())
    pic_offset_table_rtx = gen_reg_rtx (Pmode);

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
	continue;

      tree var = partition_to_var (SA.map, i);

      gcc_assert (!virtual_operand_p (var));

      expand_one_ssa_partition (var);
    }

  if (flag_stack_protect == SPCT_FLAG_STRONG)
    gen_stack_protect_signal
      = stack_protect_decl_p () || stack_protect_return_slot_p ();

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = vec_safe_length (cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
	{
	  TREE_USED (var) = 0;
	  goto next;
	}
      /* We didn't set a block for static or extern because it's hard
	 to tell the difference between a global variable (re)declared
	 in a local scope, and one that's really declared there to
	 begin with.  And it doesn't really matter much, since we're
	 not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
	expand_now = true;

      /* Expand variables not associated with any block now.  Those created by
	 the optimizers could be live anywhere in the function.  Those that
	 could possibly have been scoped originally and detached from their
	 block will have their allocation deferred so we coalesce them with
	 others when optimization is enabled.  */
      else if (TREE_USED (var))
	expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
	 this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
	expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
	{
	  rtx rtl = DECL_RTL_IF_SET (var);

	  /* Keep artificial non-ignored vars in cfun->local_decls
	     chain until instantiate_decls.  */
	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
	    add_local_decl (cfun, var);
	  else if (rtl == NULL_RTX)
	    /* If rtl isn't set yet, which can happen e.g. with
	       -fstack-protector, retry before returning from this
	       function.  */
	    maybe_local_decls.safe_push (var);
	}
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
		       ^
		       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!vec_safe_is_empty (cfun->local_decls))
    cfun->local_decls->block_remove (0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();

      /* If stack protection is enabled, we don't share space between
	 vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect != 0
	  && (flag_stack_protect != SPCT_FLAG_EXPLICIT
	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
		  && lookup_attribute ("stack_protect",
				       DECL_ATTRIBUTES (current_function_decl)))))
	add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
	 minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
	dump_stack_var_partition ();
    }

  switch (flag_stack_protect)
    {
    case SPCT_FLAG_ALL:
      create_stack_guard ();
      break;

    case SPCT_FLAG_STRONG:
      if (gen_stack_protect_signal
	  || cfun->calls_alloca || has_protected_decls
	  || lookup_attribute ("stack_protect",
			       DECL_ATTRIBUTES (current_function_decl)))
	create_stack_guard ();
      break;

    case SPCT_FLAG_DEFAULT:
      if (cfun->calls_alloca || has_protected_decls
	  || lookup_attribute ("stack_protect",
			       DECL_ATTRIBUTES (current_function_decl)))
	create_stack_guard ();
      break;

    case SPCT_FLAG_EXPLICIT:
      if (lookup_attribute ("stack_protect",
			    DECL_ATTRIBUTES (current_function_decl)))
	create_stack_guard ();
      break;

    default:
      ;
    }

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      struct stack_vars_data data;

      data.asan_base = NULL_RTX;
      data.asan_alignb = 0;

      /* Reorder decls to be protected by iterating over the variables
	 array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
	 earlier, such that we naturally see these variables first,
	 and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
	{
	  /* Phase 1 contains only character arrays.  */
	  expand_stack_vars (stack_protect_decl_phase_1, &data);

	  /* Phase 2 contains other kinds of arrays.  */
	  if (flag_stack_protect == SPCT_FLAG_ALL
	      || flag_stack_protect == SPCT_FLAG_STRONG
	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
		  && lookup_attribute ("stack_protect",
				       DECL_ATTRIBUTES (current_function_decl))))
	    expand_stack_vars (stack_protect_decl_phase_2, &data);
	}

      if (asan_sanitize_stack_p ())
	/* Phase 3, any partitions that need asan protection
	   in addition to phase 1 and 2.  */
	expand_stack_vars (asan_decl_phase_3, &data);

      /* ASAN description strings don't yet have a syntax for expressing
	 polynomial offsets.  */
      HOST_WIDE_INT prev_offset;
      if (!data.asan_vec.is_empty ()
	  && frame_offset.is_constant (&prev_offset))
	{
	  HOST_WIDE_INT offset, sz, redzonesz;
	  redzonesz = ASAN_RED_ZONE_SIZE;
	  sz = data.asan_vec[0] - prev_offset;
	  if (data.asan_alignb > ASAN_RED_ZONE_SIZE
	      && data.asan_alignb <= 4096
	      && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
	    redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
			 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
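	  /* Worked example (assuming ASAN_RED_ZONE_SIZE == 32): with
	     sz == 40 and asan_alignb == 64, the expression rounds
	     40 + 32 + 63 == 135 down to 128 via the mask, so
	     redzonesz == 128 - 40 == 88 and sz + redzonesz is again a
	     multiple of the 64-byte alignment.  */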
	  /* Allocating a constant amount of space from a constant
	     starting offset must give a constant result.  */
	  offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
		    .to_constant ());
	  data.asan_vec.safe_push (prev_offset);
	  data.asan_vec.safe_push (offset);
	  /* Leave space for alignment if STRICT_ALIGNMENT.  */
	  if (STRICT_ALIGNMENT)
	    alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
				      << ASAN_SHADOW_SHIFT)
				     / BITS_PER_UNIT, 1);

	  var_end_seq
	    = asan_emit_stack_protection (virtual_stack_vars_rtx,
					  data.asan_base,
					  data.asan_alignb,
					  data.asan_vec.address (),
					  data.asan_decl_vec.address (),
					  data.asan_vec.length ());
	}

      expand_stack_vars (NULL, &data);
    }

  if (asan_sanitize_allocas_p () && cfun->calls_alloca)
    var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
					      virtual_stack_vars_rtx,
					      var_end_seq);

  fini_vars_expansion ();

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
	 chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
	add_local_decl (cfun, var);
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (FRAME_GROWS_DOWNWARD)
	frame_offset = aligned_lower_bound (frame_offset, align);
      else
	frame_offset = aligned_upper_bound (frame_offset, align);
    }

  return var_end_seq;
}

/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
			 TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx_code_label *
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);
  if (elt)
    return *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      glabel *lab_stmt;

      lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!lab_stmt)
	break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
	break;

      return jump_target_rtx (lab);
    }

  rtx_code_label *l = gen_label_rtx ();
  lab_rtx_for_bb->put (bb, l);
  return l;
}

/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx_insn *last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx_insn *insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
	 insert on the remaining edge we potentially will insert
	 it at the end of this block (if the dest block isn't feasible)
	 in order to avoid splitting the edge.  This insertion will take
	 place in front of the last jump.  But we might have emitted
	 multiple jumps (conditional and one unconditional) to the
	 same destination.  Inserting in front of the last one then
	 is a problem.  See PR 40021.  We fix this by deleting all
	 jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
	 confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
	{
	  insn = PREV_INSN (insn);
	  if (JUMP_P (NEXT_INSN (insn)))
	    {
	      if (!any_condjump_p (NEXT_INSN (insn)))
		{
		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
		}
	      delete_insn (NEXT_INSN (insn));
	    }
	}
    }
}

/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gcond *stmt)
{
  basic_block new_bb, dest;
  edge true_edge;
  edge false_edge;
  rtx_insn *last2, *last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
	 ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (SA.values
      && TREE_CODE (op0) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
      && TREE_CODE (op1) == INTEGER_CST
      && ((gimple_cond_code (stmt) == NE_EXPR
	   && integer_zerop (op1))
	  || (gimple_cond_code (stmt) == EQ_EXPR
	      && integer_onep (op1)))
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple *second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
	{
	  enum tree_code code2 = gimple_assign_rhs_code (second);
	  if (TREE_CODE_CLASS (code2) == tcc_comparison)
	    {
	      code = code2;
	      op0 = gimple_assign_rhs1 (second);
	      op1 = gimple_assign_rhs2 (second);
	    }
	  /* If jumps are cheap and the target does not support conditional
	     compare, turn some more codes into jumpy sequences.  */
	  else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
		   && targetm.gen_ccmp_first == NULL)
	    {
	      if ((code2 == BIT_AND_EXPR
		   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
		   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
		  || code2 == TRUTH_AND_EXPR)
		{
		  code = TRUTH_ANDIF_EXPR;
		  op0 = gimple_assign_rhs1 (second);
		  op1 = gimple_assign_rhs2 (second);
		}
	      else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
		{
		  code = TRUTH_ORIF_EXPR;
		  op0 = gimple_assign_rhs1 (second);
		  op1 = gimple_assign_rhs2 (second);
		}
	    }
	}
    }

  /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
     into (x - C2) * C3 < C4.  */
  if ((code == EQ_EXPR || code == NE_EXPR)
      && TREE_CODE (op0) == SSA_NAME
      && TREE_CODE (op1) == INTEGER_CST)
    code = maybe_optimize_mod_cmp (code, &op0, &op1);
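
  /* For instance (unsigned 32-bit, illustrative): "x % 3 == 0" can be
     tested as "x * 0xaaaaaaab <= 0x55555555", where 0xaaaaaaab is the
     multiplicative inverse of 3 modulo 2^32, avoiding a division.  */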

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  set_curr_insn_location (gimple_location (stmt));

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
		true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus != UNKNOWN_LOCATION)
	set_curr_insn_location (true_edge->goto_locus);
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
		   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus != UNKNOWN_LOCATION)
	set_curr_insn_location (false_edge->goto_locus);
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
	    true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus != UNKNOWN_LOCATION)
    set_curr_insn_location (false_edge->goto_locus);
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count ();
  loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
  add_bb_to_loop (new_bb, loop);
  if (loop->latch == bb
      && loop->header == dest)
    loop->latch = new_bb;
  make_single_succ_edge (new_bb, dest, 0);
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus != UNKNOWN_LOCATION)
    {
      set_curr_insn_location (true_edge->goto_locus);
      true_edge->goto_locus = curr_insn_location ();
    }

  return new_bb;
}

/* Mark all calls that can have a transaction restart.  */

static void
mark_transaction_restart_calls (gimple *stmt)
{
  struct tm_restart_node dummy;
  tm_restart_node **slot;

  if (!cfun->gimple_df->tm_restart)
    return;

  dummy.stmt = stmt;
  slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
  if (slot)
    {
      struct tm_restart_node *n = *slot;
      tree list = n->label_or_list;
      rtx_insn *insn;

      for (insn = next_real_insn (get_last_insn ());
	   !CALL_P (insn);
	   insn = next_real_insn (insn))
	continue;

      if (TREE_CODE (list) == LABEL_DECL)
	add_reg_note (insn, REG_TM, label_rtx (list));
      else
	for (; list ; list = TREE_CHAIN (list))
	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
    }
}

/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

static void
expand_call_stmt (gcall *stmt)
{
  tree exp, decl, lhs;
  bool builtin_p;
  size_t i;

  if (gimple_call_internal_p (stmt))
    {
      expand_internal_call (stmt);
      return;
    }

  /* If this is a call to a built-in function and it has no effect other
     than setting the lhs, try to implement it using an internal function
     instead.  */
  decl = gimple_call_fndecl (stmt);
  if (gimple_call_lhs (stmt)
      && !gimple_has_side_effects (stmt)
      && (optimize || (decl && called_as_built_in (decl))))
    {
      internal_fn ifn = replacement_internal_fn (stmt);
      if (ifn != IFN_LAST)
	{
	  expand_internal_call (ifn, stmt);
	  return;
	}
    }

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  builtin_p = decl && fndecl_built_in_p (decl);

  /* If this is not a builtin function, the function type through which the
     call is made may be different from the type of the function.  */
  if (!builtin_p)
    CALL_EXPR_FN (exp)
      = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
		      CALL_EXPR_FN (exp));

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      gimple *def;
      /* TER addresses into arguments of builtin functions so we have a
	 chance to infer more correct alignment information.  See PR39954.  */
      if (builtin_p
	  && TREE_CODE (arg) == SSA_NAME
	  && (def = get_gimple_for_ssa_name (arg))
	  && gimple_assign_rhs_code (def) == ADDR_EXPR)
	arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;
    }

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  if (gimple_no_warning_p (stmt))
    TREE_NO_WARNING (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  if (decl
      && fndecl_built_in_p (decl, BUILT_IN_NORMAL)
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
    CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
  else
    CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));

  /* Ensure RTL is created for debug args.  */
  if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
    {
      vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
      unsigned int ix;
      tree dtemp;

      if (debug_args)
	for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
	  {
	    gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
	    expand_debug_expr (dtemp);
	  }
    }

  rtx_insn *before_call = get_last_insn ();
  lhs = gimple_call_lhs (stmt);
  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* If the gimple call is an indirect call and has 'nocf_check'
     attribute find a generated CALL insn to mark it as no
     control-flow verification is needed.  */
  if (gimple_call_nocf_check_p (stmt)
      && !gimple_call_fndecl (stmt))
    {
      rtx_insn *last = get_last_insn ();
      while (!CALL_P (last)
	     && last != before_call)
	last = PREV_INSN (last);

      if (last != before_call)
	add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
    }

  mark_transaction_restart_calls (stmt);
}

/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

static void
expand_asm_loc (tree string, int vol, location_t locus)
{
  rtx body;

  body = gen_rtx_ASM_INPUT_loc (VOIDmode,
				ggc_strdup (TREE_STRING_POINTER (string)),
				locus);

  MEM_VOLATILE_P (body) = vol;

  /* Non-empty basic ASM implicitly clobbers memory.  */
  if (TREE_STRING_LENGTH (string) != 0)
    {
      rtx asm_op, clob;
      unsigned i, nclobbers;
      auto_vec<rtx> input_rvec, output_rvec;
      auto_vec<const char *> constraints;
      auto_vec<rtx> clobber_rvec;
      HARD_REG_SET clobbered_regs;
      CLEAR_HARD_REG_SET (clobbered_regs);

      clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
      clobber_rvec.safe_push (clob);

      if (targetm.md_asm_adjust)
	targetm.md_asm_adjust (output_rvec, input_rvec,
			       constraints, clobber_rvec,
			       clobbered_regs);

      asm_op = body;
      nclobbers = clobber_rvec.length ();
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));

      XVECEXP (body, 0, 0) = asm_op;
      for (i = 0; i < nclobbers; i++)
	XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
    }

  emit_insn (body);
}

/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
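
/* E.g. n_occurrences (',', "=r,m") == 1; the caller below uses this to
   count constraint alternatives, which must agree across all operands.  */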

/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (const vec<const char *> &constraints)
{
  unsigned len = constraints.length();
  if (len > 0)
    {
      int nalternatives = n_occurrences (',', constraints[0]);

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in %<asm%>");
	  return false;
	}

      for (unsigned i = 1; i < len; ++i)
	if (n_occurrences (',', constraints[i]) != nalternatives)
	  {
	    error ("operand constraints for %<asm%> differ "
		   "in number of alternatives");
	    return false;
	  }
    }
  return true;
}

/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in T.  Emit an error and return true if an
   overlap is found, false if ok.  */

static bool
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);

  if (overlap)
    {
      error ("asm-specifier for variable %qE conflicts with asm clobber list",
	     DECL_NAME (overlap));

      /* Reset registerness to stop multiple errors emitted for a single
	 variable.  */
      DECL_REGISTER (overlap) = 0;
      return true;
    }

  return false;
}

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
   should be the fallthru basic block of the asm goto.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

static void
expand_asm_stmt (gasm *stmt)
{
  class save_input_location
  {
    location_t old;

  public:
    explicit save_input_location(location_t where)
    {
      old = input_location;
      input_location = where;
    }

    ~save_input_location()
    {
      input_location = old;
    }
  };

  location_t locus = gimple_location (stmt);

  if (gimple_asm_input_p (stmt))
    {
      const char *s = gimple_asm_string (stmt);
      tree string = build_string (strlen (s), s);
      expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
      return;
    }

  /* There are some legacy diagnostics in here, and this also avoids a
     sixth parameter to targetm.md_asm_adjust.  */
  save_input_location s_i_l(locus);

  unsigned noutputs = gimple_asm_noutputs (stmt);
  unsigned ninputs = gimple_asm_ninputs (stmt);
  unsigned nlabels = gimple_asm_nlabels (stmt);
  unsigned i;

  /* ??? Diagnose during gimplification?  */
  if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
      return;
    }

  auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
  auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
  auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;

  /* Copy the gimple vectors into new vectors that we can manipulate.  */

  output_tvec.safe_grow (noutputs);
  input_tvec.safe_grow (ninputs);
  constraints.safe_grow (noutputs + ninputs);

  for (i = 0; i < noutputs; ++i)
    {
      tree t = gimple_asm_output_op (stmt, i);
      output_tvec[i] = TREE_VALUE (t);
      constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
    }
  for (i = 0; i < ninputs; i++)
    {
      tree t = gimple_asm_input_op (stmt, i);
      input_tvec[i] = TREE_VALUE (t);
      constraints[i + noutputs]
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
    }

  /* ??? Diagnose during gimplification?  */
  if (! check_operand_nalternatives (constraints))
    return;

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  auto_vec<rtx> clobber_rvec;
  HARD_REG_SET clobbered_regs;
  CLEAR_HARD_REG_SET (clobbered_regs);

  if (unsigned n = gimple_asm_nclobbers (stmt))
    {
      clobber_rvec.reserve (n);
      for (i = 0; i < n; i++)
	{
	  tree t = gimple_asm_clobber_op (stmt, i);
	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
	  int nregs, j;

	  j = decode_reg_name_and_count (regname, &nregs);
	  if (j < 0)
	    {
	      if (j == -2)
		{
		  /* ??? Diagnose during gimplification?  */
		  error ("unknown register name %qs in %<asm%>", regname);
		}
	      else if (j == -4)
		{
		  rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
		  clobber_rvec.safe_push (x);
		}
	      else
		{
		  /* Otherwise we should have -1 == empty string
		     or -3 == cc, which is not a register.  */
		  gcc_assert (j == -1 || j == -3);
		}
	    }
	  else
	    for (int reg = j; reg < j + nregs; reg++)
	      {
		/* Clobbering the PIC register is an error.  */
		if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
		  {
		    /* ??? Diagnose during gimplification?  */
		    error ("PIC register clobbered by %qs in %<asm%>",
			   regname);
		    return;
		  }

		SET_HARD_REG_BIT (clobbered_regs, reg);
		rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
		clobber_rvec.safe_push (x);
	      }
	}
    }
  unsigned nclobbers = clobber_rvec.length();

  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */
  /* ??? Diagnose during gimplification?  */

  for (i = 0; i < noutputs; ++i)
    {
      tree val = output_tvec[i];
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* Try to parse the output constraint.  If that fails, there's
	 no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
				    &allows_mem, &allows_reg, &is_inout))
	return;

      if (! allows_reg
	  && (allows_mem
	      || is_inout
	      || (DECL_P (val)
		  && REG_P (DECL_RTL (val))
		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
	mark_addressable (val);
    }

  for (i = 0; i < ninputs; ++i)
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
				    constraints.address (),
				    &allows_mem, &allows_reg))
	return;

      if (! allows_reg && allows_mem)
	mark_addressable (input_tvec[i]);
    }

  /* Second pass evaluates arguments.  */

  /* Make sure stack is consistent for asm goto.  */
  if (nlabels > 0)
    do_pending_stack_adjust ();
  int old_generating_concat_p = generating_concat_p;

  /* Vector of RTX's of evaluated output operands.  */
  auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
  auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
  rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;

  output_rvec.safe_grow (noutputs);

  for (i = 0; i < noutputs; ++i)
    {
      tree val = output_tvec[i];
      tree type = TREE_TYPE (val);
      bool is_inout, allows_reg, allows_mem, ok;
      rtx op;

      ok = parse_output_constraint (&constraints[i], i, ninputs,
				    noutputs, &allows_mem, &allows_reg,
				    &is_inout);
      gcc_assert (ok);

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then we will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      if ((TREE_CODE (val) == INDIRECT_REF && allows_mem)
	  || (DECL_P (val)
	      && (allows_mem || REG_P (DECL_RTL (val)))
	      && ! (REG_P (DECL_RTL (val))
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout
	  || TREE_ADDRESSABLE (type))
	{
	  op = expand_expr (val, NULL_RTX, VOIDmode,
			    !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
	  if (MEM_P (op))
	    op = validize_mem (op);

	  if (! allows_reg && !MEM_P (op))
	    error ("output number %d not directly addressable", i);
	  if ((! allows_mem && MEM_P (op) && GET_MODE (op) != BLKmode)
	      || GET_CODE (op) == CONCAT)
	    {
	      rtx old_op = op;
	      op = gen_reg_rtx (GET_MODE (op));

	      generating_concat_p = old_generating_concat_p;

	      if (is_inout)
		emit_move_insn (op, old_op);

	      push_to_sequence2 (after_rtl_seq, after_rtl_end);
	      emit_move_insn (old_op, op);
	      after_rtl_seq = get_insns ();
	      after_rtl_end = get_last_insn ();
	      end_sequence ();
	    }
	}
      else
	{
	  op = assign_temp (type, 0, 1);
	  op = validize_mem (op);
	  if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);

	  generating_concat_p = old_generating_concat_p;

	  push_to_sequence2 (after_rtl_seq, after_rtl_end);
	  expand_assignment (val, make_tree (type, op), false);
	  after_rtl_seq = get_insns ();
	  after_rtl_end = get_last_insn ();
	  end_sequence ();
	}
      output_rvec[i] = op;

      if (is_inout)
	inout_opnum.safe_push (i);
    }

  auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
  auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;

  input_rvec.safe_grow (ninputs);
  input_mode.safe_grow (ninputs);

  generating_concat_p = 0;

  for (i = 0; i < ninputs; ++i)
    {
      tree val = input_tvec[i];
      tree type = TREE_TYPE (val);
      bool allows_reg, allows_mem, ok;
      const char *constraint;
      rtx op;

      constraint = constraints[i + noutputs];
      ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
				   constraints.address (),
				   &allows_mem, &allows_reg);
      gcc_assert (ok);

      /* EXPAND_INITIALIZER will not generate code for valid initializer
	 constants, but will still generate code for other types of operand.
	 This is the behavior we want for constant constraints.  */
      op = expand_expr (val, NULL_RTX, VOIDmode,
			allows_reg ? EXPAND_NORMAL
			: allows_mem ? EXPAND_MEMORY
			: EXPAND_INITIALIZER);

      /* Never pass a CONCAT to an ASM.  */
      if (GET_CODE (op) == CONCAT)
	op = force_reg (GET_MODE (op), op);
      else if (MEM_P (op))
	op = validize_mem (op);

      if (asm_operand_ok (op, constraint, NULL) <= 0)
	{
	  if (allows_reg && TYPE_MODE (type) != BLKmode)
	    op = force_reg (TYPE_MODE (type), op);
	  else if (!allows_mem)
	    warning (0, "asm operand %d probably doesn%'t match constraints",
		     i + noutputs);
	  else if (MEM_P (op))
	    {
	      /* We won't recognize either volatile memory or memory
		 with a queued address as an available memory_operand
		 at this point.  Ignore it: clearly this *is* a memory.  */
	    }
	  else
	    gcc_unreachable ();
	}
      input_rvec[i] = op;
      input_mode[i] = TYPE_MODE (type);
    }

  /* For in-out operands, copy output rtx to input rtx.  */
  unsigned ninout = inout_opnum.length();
  for (i = 0; i < ninout; i++)
    {
      int j = inout_opnum[i];
      rtx o = output_rvec[j];

      input_rvec.safe_push (o);
      input_mode.safe_push (GET_MODE (o));

      char buffer[16];
      sprintf (buffer, "%d", j);
      constraints.safe_push (ggc_strdup (buffer));
    }
  ninputs += ninout;
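
  /* E.g. a "+r" output operand number 2 is represented here as output 2
     plus an extra input whose constraint is the digit string "2" -- the
     standard matching-constraint notation telling the later RTL passes
     that the two must be assigned the same location.  */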

  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  rtx_insn *after_md_seq = NULL;
  if (targetm.md_asm_adjust)
    after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
					  constraints, clobber_rvec,
					  clobbered_regs);

  /* Do not allow the hook to change the output and input count,
     lest it mess up the operand numbering.  */
  gcc_assert (output_rvec.length() == noutputs);
  gcc_assert (input_rvec.length() == ninputs);
  gcc_assert (constraints.length() == noutputs + ninputs);

  /* But it certainly can adjust the clobbers.  */
  nclobbers = clobber_rvec.length();

  /* Third pass checks for easy conflicts.  */
  /* ??? Why are we doing this on trees instead of rtx.  */

  bool clobber_conflict_found = 0;
  for (i = 0; i < noutputs; ++i)
    if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
      clobber_conflict_found = 1;
  for (i = 0; i < ninputs - ninout; ++i)
    if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
      clobber_conflict_found = 1;

  /* Make vectors for the expression-rtx, constraint strings,
     and named operands.  */

  rtvec argvec = rtvec_alloc (ninputs);
  rtvec constraintvec = rtvec_alloc (ninputs);
  rtvec labelvec = rtvec_alloc (nlabels);

  rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
				    : GET_MODE (output_rvec[0])),
				   ggc_strdup (gimple_asm_string (stmt)),
				   "", 0, argvec, constraintvec,
				   labelvec, locus);
  MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);

  for (i = 0; i < ninputs; ++i)
    {
      ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
	= gen_rtx_ASM_INPUT_loc (input_mode[i],
				 constraints[i + noutputs],
				 locus);
    }

  /* Copy labels to the vector.  */
  rtx_code_label *fallthru_label = NULL;
  if (nlabels > 0)
    {
      basic_block fallthru_bb = NULL;
      edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
      if (fallthru)
	fallthru_bb = fallthru->dest;

      for (i = 0; i < nlabels; ++i)
	{
	  tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
	  rtx_insn *r;
	  /* If asm goto has any labels in the fallthru basic block, use
	     a label that we emit immediately after the asm goto.  Expansion
	     may insert further instructions into the same basic block after
	     asm goto and if we don't do this, insertion of instructions on
	     the fallthru edge might misbehave.  See PR58670.  */
	  if (fallthru_bb && label_to_block (cfun, label) == fallthru_bb)
	    {
	      if (fallthru_label == NULL_RTX)
		fallthru_label = gen_label_rtx ();
	      r = fallthru_label;
	    }
	  else
	    r = label_rtx (label);
	  ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
	}
    }

862d0b35
DN
3261 /* Now, for each output, construct an rtx
3262 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3263 ARGVEC CONSTRAINTS OPNAMES))
3264 If there is more than one, put them inside a PARALLEL. */
3265
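/* Illustrative sketch of the result (simplified; the clobbered
   register is target dependent): for

     asm ("..." : "=r" (a), "=r" (b) : "r" (c) : "cc");

   the code below builds roughly

     (parallel [(set (reg a) (asm_operands "..." "=r" 0 argvec ...))
		(set (reg b) (asm_operands "..." "=r" 1 argvec ...))
		(clobber (reg flags))])

   with one ASM_OPERANDS per output sharing the same operand vectors.  */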
3266 if (nlabels > 0 && nclobbers == 0)
3267 {
3268 gcc_assert (noutputs == 0);
3269 emit_jump_insn (body);
3270 }
3271 else if (noutputs == 0 && nclobbers == 0)
3272 {
3273 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3274 emit_insn (body);
3275 }
3276 else if (noutputs == 1 && nclobbers == 0)
3277 {
7ca35180
RH
3278 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3279 emit_insn (gen_rtx_SET (output_rvec[0], body));
862d0b35
DN
3280 }
3281 else
3282 {
3283 rtx obody = body;
3284 int num = noutputs;
3285
3286 if (num == 0)
3287 num = 1;
3288
3289 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3290
3291 /* For each output operand, store a SET. */
7ca35180 3292 for (i = 0; i < noutputs; ++i)
862d0b35 3293 {
7ca35180
RH
3294 rtx src, o = output_rvec[i];
3295 if (i == 0)
3296 {
3297 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3298 src = obody;
3299 }
3300 else
3301 {
3302 src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3303 ASM_OPERANDS_TEMPLATE (obody),
3304 constraints[i], i, argvec,
3305 constraintvec, labelvec, locus);
3306 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3307 }
3308 XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
862d0b35
DN
3309 }
3310
3311 /* If there are no outputs (but there are some clobbers)
3312 store the bare ASM_OPERANDS into the PARALLEL. */
862d0b35
DN
3313 if (i == 0)
3314 XVECEXP (body, 0, i++) = obody;
3315
3316 /* Store (clobber REG) for each clobbered register specified. */
7ca35180 3317 for (unsigned j = 0; j < nclobbers; ++j)
862d0b35 3318 {
7ca35180 3319 rtx clobbered_reg = clobber_rvec[j];
862d0b35 3320
7ca35180
RH
 3321 /* Do a sanity check for overlap between clobbers and any
 3322 inputs and outputs that haven't been handled. Such overlap
3323 should have been detected and reported above. */
3324 if (!clobber_conflict_found && REG_P (clobbered_reg))
862d0b35 3325 {
7ca35180
RH
3326 /* We test the old body (obody) contents to avoid
3327 tripping over the under-construction body. */
3328 for (unsigned k = 0; k < noutputs; ++k)
3329 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3330 internal_error ("asm clobber conflict with output operand");
3331
3332 for (unsigned k = 0; k < ninputs - ninout; ++k)
3333 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3334 internal_error ("asm clobber conflict with input operand");
862d0b35
DN
3335 }
3336
7ca35180 3337 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
862d0b35
DN
3338 }
3339
3340 if (nlabels > 0)
3341 emit_jump_insn (body);
3342 else
3343 emit_insn (body);
3344 }
3345
7ca35180
RH
3346 generating_concat_p = old_generating_concat_p;
3347
862d0b35
DN
3348 if (fallthru_label)
3349 emit_label (fallthru_label);
3350
7ca35180
RH
3351 if (after_md_seq)
3352 emit_insn (after_md_seq);
3353 if (after_rtl_seq)
3354 emit_insn (after_rtl_seq);
862d0b35 3355
6476a8fd 3356 free_temp_slots ();
7ca35180 3357 crtl->has_asm_statement = 1;
862d0b35
DN
3358}
3359
3360/* Emit code to jump to the address
3361 specified by the pointer expression EXP. */
3362
3363static void
3364expand_computed_goto (tree exp)
3365{
3366 rtx x = expand_normal (exp);
3367
862d0b35
DN
3368 do_pending_stack_adjust ();
3369 emit_indirect_jump (x);
3370}
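/* For reference: this expands the GNU C computed goto extension, e.g.

     static void *tbl[] = { &&l0, &&l1 };
     goto *tbl[i];

   where the label address produced by expand_normal is jumped through
   with an indirect jump.  */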
3371
3372/* Generate RTL code for a `goto' statement with target label LABEL.
3373 LABEL should be a LABEL_DECL tree node that was or will later be
3374 defined with `expand_label'. */
3375
3376static void
3377expand_goto (tree label)
3378{
b2b29377
MM
3379 if (flag_checking)
3380 {
3381 /* Check for a nonlocal goto to a containing function. Should have
3382 gotten translated to __builtin_nonlocal_goto. */
3383 tree context = decl_function_context (label);
3384 gcc_assert (!context || context == current_function_decl);
3385 }
862d0b35 3386
1476d1bd 3387 emit_jump (jump_target_rtx (label));
862d0b35
DN
3388}
3389
3390/* Output a return with no value. */
3391
3392static void
3393expand_null_return_1 (void)
3394{
3395 clear_pending_stack_adjust ();
3396 do_pending_stack_adjust ();
3397 emit_jump (return_label);
3398}
3399
3400/* Generate RTL to return from the current function, with no value.
3401 (That is, we do not do anything about returning any value.) */
3402
3403void
3404expand_null_return (void)
3405{
3406 /* If this function was declared to return a value, but we
3407 didn't, clobber the return registers so that they are not
3408 propagated live to the rest of the function. */
3409 clobber_return_register ();
3410
3411 expand_null_return_1 ();
3412}
3413
3414/* Generate RTL to return from the current function, with value VAL. */
3415
3416static void
3417expand_value_return (rtx val)
3418{
3419 /* Copy the value to the return location unless it's already there. */
3420
3421 tree decl = DECL_RESULT (current_function_decl);
3422 rtx return_reg = DECL_RTL (decl);
3423 if (return_reg != val)
3424 {
3425 tree funtype = TREE_TYPE (current_function_decl);
3426 tree type = TREE_TYPE (decl);
3427 int unsignedp = TYPE_UNSIGNED (type);
ef4bddc2
RS
3428 machine_mode old_mode = DECL_MODE (decl);
3429 machine_mode mode;
862d0b35
DN
3430 if (DECL_BY_REFERENCE (decl))
3431 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3432 else
3433 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3434
3435 if (mode != old_mode)
3436 val = convert_modes (mode, old_mode, val, unsignedp);
3437
3438 if (GET_CODE (return_reg) == PARALLEL)
3439 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3440 else
3441 emit_move_insn (return_reg, val);
3442 }
3443
3444 expand_null_return_1 ();
3445}
3446
3447/* Generate RTL to evaluate the expression RETVAL and return it
3448 from the current function. */
3449
3450static void
31db0fe0 3451expand_return (tree retval)
862d0b35
DN
3452{
3453 rtx result_rtl;
3454 rtx val = 0;
3455 tree retval_rhs;
3456
3457 /* If function wants no value, give it none. */
3458 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3459 {
3460 expand_normal (retval);
3461 expand_null_return ();
3462 return;
3463 }
3464
3465 if (retval == error_mark_node)
3466 {
3467 /* Treat this like a return of no value from a function that
3468 returns a value. */
3469 expand_null_return ();
3470 return;
3471 }
3472 else if ((TREE_CODE (retval) == MODIFY_EXPR
3473 || TREE_CODE (retval) == INIT_EXPR)
3474 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3475 retval_rhs = TREE_OPERAND (retval, 1);
3476 else
3477 retval_rhs = retval;
3478
3479 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3480
3481 /* If we are returning the RESULT_DECL, then the value has already
3482 been stored into it, so we don't have to do anything special. */
3483 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3484 expand_value_return (result_rtl);
3485
3486 /* If the result is an aggregate that is being returned in one (or more)
3487 registers, load the registers here. */
3488
3489 else if (retval_rhs != 0
3490 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3491 && REG_P (result_rtl))
3492 {
3493 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3494 if (val)
3495 {
3496 /* Use the mode of the result value on the return register. */
3497 PUT_MODE (result_rtl, GET_MODE (val));
3498 expand_value_return (val);
3499 }
3500 else
3501 expand_null_return ();
3502 }
3503 else if (retval_rhs != 0
3504 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3505 && (REG_P (result_rtl)
3506 || (GET_CODE (result_rtl) == PARALLEL)))
3507 {
9ee5337d
EB
3508 /* Compute the return value into a temporary (usually a pseudo reg). */
3509 val
3510 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
862d0b35
DN
3511 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3512 val = force_not_mem (val);
862d0b35
DN
3513 expand_value_return (val);
3514 }
3515 else
3516 {
3517 /* No hard reg used; calculate value into hard return reg. */
3518 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3519 expand_value_return (result_rtl);
3520 }
3521}
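/* Illustrative example (hypothetical C, not from the source): a small
   aggregate such as

     struct s { char c[6]; };
     struct s f (void);

   can have BLKmode while the ABI returns it in a register, so the
   copy_blkmode_to_reg path above assembles the bytes into a register
   of the return register's mode before expand_value_return.  */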
3522
3ba4ff41
RS
 3523/* Expand a clobber of LHS. If LHS is stored in a multi-part
3524 register, tell the rtl optimizers that its value is no longer
3525 needed. */
3526
3527static void
3528expand_clobber (tree lhs)
3529{
3530 if (DECL_P (lhs))
3531 {
3532 rtx decl_rtl = DECL_RTL_IF_SET (lhs);
3533 if (decl_rtl && REG_P (decl_rtl))
3534 {
3535 machine_mode decl_mode = GET_MODE (decl_rtl);
3536 if (maybe_gt (GET_MODE_SIZE (decl_mode),
3537 REGMODE_NATURAL_SIZE (decl_mode)))
3538 emit_clobber (decl_rtl);
3539 }
3540 }
3541}
3542
28ed065e
MM
3543/* A subroutine of expand_gimple_stmt, expanding one gimple statement
3544 STMT that doesn't require special handling for outgoing edges. That
 3545 is, no tailcalls and no GIMPLE_COND. */
3546
3547static void
355fe088 3548expand_gimple_stmt_1 (gimple *stmt)
28ed065e
MM
3549{
3550 tree op0;
c82fee88 3551
5368224f 3552 set_curr_insn_location (gimple_location (stmt));
c82fee88 3553
28ed065e
MM
3554 switch (gimple_code (stmt))
3555 {
3556 case GIMPLE_GOTO:
3557 op0 = gimple_goto_dest (stmt);
3558 if (TREE_CODE (op0) == LABEL_DECL)
3559 expand_goto (op0);
3560 else
3561 expand_computed_goto (op0);
3562 break;
3563 case GIMPLE_LABEL:
538dd0b7 3564 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
28ed065e
MM
3565 break;
3566 case GIMPLE_NOP:
3567 case GIMPLE_PREDICT:
3568 break;
28ed065e 3569 case GIMPLE_SWITCH:
f66459c1
PB
3570 {
3571 gswitch *swtch = as_a <gswitch *> (stmt);
3572 if (gimple_switch_num_labels (swtch) == 1)
3573 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3574 else
3575 expand_case (swtch);
3576 }
28ed065e
MM
3577 break;
3578 case GIMPLE_ASM:
538dd0b7 3579 expand_asm_stmt (as_a <gasm *> (stmt));
28ed065e
MM
3580 break;
3581 case GIMPLE_CALL:
538dd0b7 3582 expand_call_stmt (as_a <gcall *> (stmt));
28ed065e
MM
3583 break;
3584
3585 case GIMPLE_RETURN:
855f036d 3586 {
855f036d 3587 op0 = gimple_return_retval (as_a <greturn *> (stmt));
28ed065e 3588
855f036d
IE
3589 if (op0 && op0 != error_mark_node)
3590 {
3591 tree result = DECL_RESULT (current_function_decl);
28ed065e 3592
855f036d
IE
3593 /* If we are not returning the current function's RESULT_DECL,
3594 build an assignment to it. */
3595 if (op0 != result)
3596 {
3597 /* I believe that a function's RESULT_DECL is unique. */
3598 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3599
3600 /* ??? We'd like to use simply expand_assignment here,
3601 but this fails if the value is of BLKmode but the return
3602 decl is a register. expand_return has special handling
3603 for this combination, which eventually should move
3604 to common code. See comments there. Until then, let's
3605 build a modify expression :-/ */
3606 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3607 result, op0);
3608 }
855f036d
IE
3609 }
3610
3611 if (!op0)
3612 expand_null_return ();
3613 else
31db0fe0 3614 expand_return (op0);
855f036d 3615 }
28ed065e
MM
3616 break;
3617
3618 case GIMPLE_ASSIGN:
3619 {
538dd0b7
DM
3620 gassign *assign_stmt = as_a <gassign *> (stmt);
3621 tree lhs = gimple_assign_lhs (assign_stmt);
28ed065e
MM
3622
3623 /* Tree expand used to fiddle with |= and &= of two bitfield
3624 COMPONENT_REFs here. This can't happen with gimple, the LHS
3625 of binary assigns must be a gimple reg. */
3626
3627 if (TREE_CODE (lhs) != SSA_NAME
3628 || get_gimple_rhs_class (gimple_expr_code (stmt))
3629 == GIMPLE_SINGLE_RHS)
3630 {
538dd0b7 3631 tree rhs = gimple_assign_rhs1 (assign_stmt);
28ed065e
MM
3632 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3633 == GIMPLE_SINGLE_RHS);
ae2ffe2a
RB
3634 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3635 /* Do not put locations on possibly shared trees. */
3636 && !is_gimple_min_invariant (rhs))
28ed065e 3637 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
47598145
MM
3638 if (TREE_CLOBBER_P (rhs))
3639 /* This is a clobber to mark the going out of scope for
3640 this LHS. */
3ba4ff41 3641 expand_clobber (lhs);
47598145
MM
3642 else
3643 expand_assignment (lhs, rhs,
538dd0b7
DM
3644 gimple_assign_nontemporal_move_p (
3645 assign_stmt));
28ed065e
MM
3646 }
3647 else
3648 {
3649 rtx target, temp;
538dd0b7 3650 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
28ed065e
MM
3651 struct separate_ops ops;
3652 bool promoted = false;
3653
3654 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3655 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3656 promoted = true;
3657
538dd0b7 3658 ops.code = gimple_assign_rhs_code (assign_stmt);
28ed065e 3659 ops.type = TREE_TYPE (lhs);
b0dd8c90 3660 switch (get_gimple_rhs_class (ops.code))
28ed065e 3661 {
0354c0c7 3662 case GIMPLE_TERNARY_RHS:
538dd0b7 3663 ops.op2 = gimple_assign_rhs3 (assign_stmt);
0354c0c7 3664 /* Fallthru */
28ed065e 3665 case GIMPLE_BINARY_RHS:
538dd0b7 3666 ops.op1 = gimple_assign_rhs2 (assign_stmt);
28ed065e
MM
3667 /* Fallthru */
3668 case GIMPLE_UNARY_RHS:
538dd0b7 3669 ops.op0 = gimple_assign_rhs1 (assign_stmt);
28ed065e
MM
3670 break;
3671 default:
3672 gcc_unreachable ();
3673 }
3674 ops.location = gimple_location (stmt);
3675
3676 /* If we want to use a nontemporal store, force the value to
3677 register first. If we store into a promoted register,
3678 don't directly expand to target. */
3679 temp = nontemporal || promoted ? NULL_RTX : target;
3680 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3681 EXPAND_NORMAL);
3682
3683 if (temp == target)
3684 ;
3685 else if (promoted)
3686 {
362d42dc 3687 int unsignedp = SUBREG_PROMOTED_SIGN (target);
28ed065e
MM
3688 /* If TEMP is a VOIDmode constant, use convert_modes to make
3689 sure that we properly convert it. */
3690 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3691 {
3692 temp = convert_modes (GET_MODE (target),
3693 TYPE_MODE (ops.type),
4e18a7d4 3694 temp, unsignedp);
28ed065e 3695 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4e18a7d4 3696 GET_MODE (target), temp, unsignedp);
28ed065e
MM
3697 }
3698
27be0c32 3699 convert_move (SUBREG_REG (target), temp, unsignedp);
28ed065e
MM
3700 }
3701 else if (nontemporal && emit_storent_insn (target, temp))
3702 ;
3703 else
3704 {
3705 temp = force_operand (temp, target);
3706 if (temp != target)
3707 emit_move_insn (target, temp);
3708 }
3709 }
3710 }
3711 break;
3712
3713 default:
3714 gcc_unreachable ();
3715 }
3716}
3717
3718/* Expand one gimple statement STMT and return the last RTL instruction
3719 before any of the newly generated ones.
3720
3721 In addition to generating the necessary RTL instructions this also
3722 sets REG_EH_REGION notes if necessary and sets the current source
3723 location for diagnostics. */
3724
b47aae36 3725static rtx_insn *
355fe088 3726expand_gimple_stmt (gimple *stmt)
28ed065e 3727{
28ed065e 3728 location_t saved_location = input_location;
b47aae36 3729 rtx_insn *last = get_last_insn ();
c82fee88 3730 int lp_nr;
28ed065e 3731
28ed065e
MM
3732 gcc_assert (cfun);
3733
c82fee88
EB
3734 /* We need to save and restore the current source location so that errors
3735 discovered during expansion are emitted with the right location. But
3736 it would be better if the diagnostic routines used the source location
3737 embedded in the tree nodes rather than globals. */
28ed065e 3738 if (gimple_has_location (stmt))
c82fee88 3739 input_location = gimple_location (stmt);
28ed065e
MM
3740
3741 expand_gimple_stmt_1 (stmt);
c82fee88 3742
28ed065e
MM
3743 /* Free any temporaries used to evaluate this statement. */
3744 free_temp_slots ();
3745
3746 input_location = saved_location;
3747
3748 /* Mark all insns that may trap. */
1d65f45c
RH
3749 lp_nr = lookup_stmt_eh_lp (stmt);
3750 if (lp_nr)
28ed065e 3751 {
b47aae36 3752 rtx_insn *insn;
28ed065e
MM
3753 for (insn = next_real_insn (last); insn;
3754 insn = next_real_insn (insn))
3755 {
3756 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3757 /* If we want exceptions for non-call insns, any
3758 may_trap_p instruction may throw. */
3759 && GET_CODE (PATTERN (insn)) != CLOBBER
8df47bdf 3760 && GET_CODE (PATTERN (insn)) != CLOBBER_HIGH
28ed065e 3761 && GET_CODE (PATTERN (insn)) != USE
1d65f45c
RH
3762 && insn_could_throw_p (insn))
3763 make_reg_eh_region_note (insn, 0, lp_nr);
28ed065e
MM
3764 }
3765 }
3766
3767 return last;
3768}
3769
726a989a 3770/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
224e770b
RH
3771 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3772 generated a tail call (something that might be denied by the ABI
cea49550
RH
3773 rules governing the call; see calls.c).
3774
3775 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3776 can still reach the rest of BB. The case here is __builtin_sqrt,
3777 where the NaN result goes through the external function (with a
3778 tailcall) and the normal result happens via a sqrt instruction. */
80c7a9eb
RH
3779
3780static basic_block
538dd0b7 3781expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
80c7a9eb 3782{
b47aae36 3783 rtx_insn *last2, *last;
224e770b 3784 edge e;
628f6a4e 3785 edge_iterator ei;
357067f2 3786 profile_probability probability;
80c7a9eb 3787
28ed065e 3788 last2 = last = expand_gimple_stmt (stmt);
80c7a9eb
RH
3789
3790 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
3791 if (CALL_P (last) && SIBLING_CALL_P (last))
3792 goto found;
80c7a9eb 3793
726a989a 3794 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 3795
cea49550 3796 *can_fallthru = true;
224e770b 3797 return NULL;
80c7a9eb 3798
224e770b
RH
3799 found:
3800 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3801 Any instructions emitted here are about to be deleted. */
3802 do_pending_stack_adjust ();
3803
3804 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3805 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3806 EH or abnormal edges, we shouldn't have created a tail call in
3807 the first place. So it seems to me we should just be removing
3808 all edges here, or redirecting the existing fallthru edge to
3809 the exit block. */
3810
357067f2 3811 probability = profile_probability::never ();
224e770b 3812
628f6a4e
BE
3813 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3814 {
224e770b
RH
3815 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3816 {
fefa31b5 3817 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
e7a74006 3818 e->dest->count -= e->count ();
224e770b
RH
3819 probability += e->probability;
3820 remove_edge (e);
80c7a9eb 3821 }
628f6a4e
BE
3822 else
3823 ei_next (&ei);
80c7a9eb
RH
3824 }
3825
224e770b
RH
3826 /* This is somewhat ugly: the call_expr expander often emits instructions
3827 after the sibcall (to perform the function return). These confuse the
12eff7b7 3828 find_many_sub_basic_blocks code, so we need to get rid of them. */
224e770b 3829 last = NEXT_INSN (last);
341c100f 3830 gcc_assert (BARRIER_P (last));
cea49550
RH
3831
3832 *can_fallthru = false;
224e770b
RH
3833 while (NEXT_INSN (last))
3834 {
 3835 /* For instance, an sqrt builtin expander expands an if with a
 3836 sibcall in the then arm and a label for the else arm. */
3837 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
3838 {
3839 *can_fallthru = true;
3840 break;
3841 }
224e770b
RH
3842 delete_insn (NEXT_INSN (last));
3843 }
3844
fefa31b5
DM
3845 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3846 | EDGE_SIBCALL);
aea5e79a 3847 e->probability = probability;
1130d5e3 3848 BB_END (bb) = last;
224e770b
RH
3849 update_bb_for_insn (bb);
3850
3851 if (NEXT_INSN (last))
3852 {
3853 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3854
3855 last = BB_END (bb);
3856 if (BARRIER_P (last))
1130d5e3 3857 BB_END (bb) = PREV_INSN (last);
224e770b
RH
3858 }
3859
726a989a 3860 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 3861
224e770b 3862 return bb;
80c7a9eb
RH
3863}
3864
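/* Illustrative example for the CAN_FALLTHRU case above: expanding

     double f (double x) { return sqrt (x); }

   on a target with a hardware sqrt instruction may produce a compare,
   a sibling call to the library sqrt on the error/NaN path, and a
   label starting the fallthru path that uses the sqrt instruction, so
   the block emitted for STMT can still fall through.  */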
b5b8b0ac
AO
3865/* Return the difference between the floor and the truncated result of
3866 a signed division by OP1 with remainder MOD. */
3867static rtx
ef4bddc2 3868floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
b5b8b0ac
AO
3869{
3870 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3871 return gen_rtx_IF_THEN_ELSE
3872 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3873 gen_rtx_IF_THEN_ELSE
3874 (mode, gen_rtx_LT (BImode,
3875 gen_rtx_DIV (mode, op1, mod),
3876 const0_rtx),
3877 constm1_rtx, const0_rtx),
3878 const0_rtx);
3879}
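/* Worked example (illustrative): for -7 / 2, truncating division
   gives -3 with mod = -1; op1 / mod = 2 / -1 = -2 < 0, so the
   adjustment is -1 and -3 + -1 = -4 = floor (-7 / 2).  When the
   remainder is zero the adjustment is zero and truncation already
   equals the floor.  */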
3880
3881/* Return the difference between the ceil and the truncated result of
3882 a signed division by OP1 with remainder MOD. */
3883static rtx
ef4bddc2 3884ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
b5b8b0ac
AO
3885{
3886 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3887 return gen_rtx_IF_THEN_ELSE
3888 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3889 gen_rtx_IF_THEN_ELSE
3890 (mode, gen_rtx_GT (BImode,
3891 gen_rtx_DIV (mode, op1, mod),
3892 const0_rtx),
3893 const1_rtx, const0_rtx),
3894 const0_rtx);
3895}
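/* Worked example (illustrative): for 7 / 2, truncating division
   gives 3 with mod = 1; op1 / mod = 2 / 1 = 2 > 0, so the adjustment
   is 1 and 3 + 1 = 4 = ceil (7 / 2).  */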
3896
3897/* Return the difference between the ceil and the truncated result of
3898 an unsigned division by OP1 with remainder MOD. */
3899static rtx
ef4bddc2 3900ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
b5b8b0ac
AO
3901{
3902 /* (mod != 0 ? 1 : 0) */
3903 return gen_rtx_IF_THEN_ELSE
3904 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3905 const1_rtx, const0_rtx);
3906}
3907
3908/* Return the difference between the rounded and the truncated result
3909 of a signed division by OP1 with remainder MOD. Halfway cases are
3910 rounded away from zero, rather than to the nearest even number. */
3911static rtx
ef4bddc2 3912round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
b5b8b0ac
AO
3913{
3914 /* (abs (mod) >= abs (op1) - abs (mod)
3915 ? (op1 / mod > 0 ? 1 : -1)
3916 : 0) */
3917 return gen_rtx_IF_THEN_ELSE
3918 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3919 gen_rtx_MINUS (mode,
3920 gen_rtx_ABS (mode, op1),
3921 gen_rtx_ABS (mode, mod))),
3922 gen_rtx_IF_THEN_ELSE
3923 (mode, gen_rtx_GT (BImode,
3924 gen_rtx_DIV (mode, op1, mod),
3925 const0_rtx),
3926 const1_rtx, constm1_rtx),
3927 const0_rtx);
3928}
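/* Worked example (illustrative): for 7 / 2, truncation gives 3 with
   mod = 1; abs (mod) = 1 >= abs (op1) - abs (mod) = 1, and
   op1 / mod = 2 > 0, so the adjustment is 1 and the rounded result is
   3 + 1 = 4, i.e. 3.5 rounded away from zero.  */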
3929
3930/* Return the difference between the rounded and the truncated result
 3931 of an unsigned division by OP1 with remainder MOD. Halfway cases
3932 are rounded away from zero, rather than to the nearest even
3933 number. */
3934static rtx
ef4bddc2 3935round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
b5b8b0ac
AO
3936{
3937 /* (mod >= op1 - mod ? 1 : 0) */
3938 return gen_rtx_IF_THEN_ELSE
3939 (mode, gen_rtx_GE (BImode, mod,
3940 gen_rtx_MINUS (mode, op1, mod)),
3941 const1_rtx, const0_rtx);
3942}
3943
dda2da58
AO
 3944/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3945 any rtl. */
3946
3947static rtx
095a2d76 3948convert_debug_memory_address (scalar_int_mode mode, rtx x,
f61c6f34 3949 addr_space_t as)
dda2da58 3950{
dda2da58 3951#ifndef POINTERS_EXTEND_UNSIGNED
f61c6f34
JJ
3952 gcc_assert (mode == Pmode
3953 || mode == targetm.addr_space.address_mode (as));
c7ad039d 3954 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
dda2da58 3955#else
f61c6f34 3956 rtx temp;
f61c6f34 3957
639d4bb8 3958 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
dda2da58
AO
3959
3960 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3961 return x;
3962
c7ad039d
RS
3963 /* X must have some form of address mode already. */
3964 scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
69660a70 3965 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3403a1a9 3966 x = lowpart_subreg (mode, x, xmode);
dda2da58
AO
3967 else if (POINTERS_EXTEND_UNSIGNED > 0)
3968 x = gen_rtx_ZERO_EXTEND (mode, x);
3969 else if (!POINTERS_EXTEND_UNSIGNED)
3970 x = gen_rtx_SIGN_EXTEND (mode, x);
3971 else
f61c6f34
JJ
3972 {
3973 switch (GET_CODE (x))
3974 {
3975 case SUBREG:
3976 if ((SUBREG_PROMOTED_VAR_P (x)
3977 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3978 || (GET_CODE (SUBREG_REG (x)) == PLUS
3979 && REG_P (XEXP (SUBREG_REG (x), 0))
3980 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3981 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3982 && GET_MODE (SUBREG_REG (x)) == mode)
3983 return SUBREG_REG (x);
3984 break;
3985 case LABEL_REF:
04a121a7 3986 temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
f61c6f34
JJ
3987 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3988 return temp;
3989 case SYMBOL_REF:
3990 temp = shallow_copy_rtx (x);
3991 PUT_MODE (temp, mode);
3992 return temp;
3993 case CONST:
3994 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3995 if (temp)
3996 temp = gen_rtx_CONST (mode, temp);
3997 return temp;
3998 case PLUS:
3999 case MINUS:
4000 if (CONST_INT_P (XEXP (x, 1)))
4001 {
4002 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4003 if (temp)
4004 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4005 }
4006 break;
4007 default:
4008 break;
4009 }
 4010 /* Don't know how to express ptr_extend as an operation in debug info. */
4011 return NULL;
4012 }
dda2da58
AO
4013#endif /* POINTERS_EXTEND_UNSIGNED */
4014
4015 return x;
4016}
4017
dfde35b3
JJ
4018/* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4019 by avoid_deep_ter_for_debug. */
4020
4021static hash_map<tree, tree> *deep_ter_debug_map;
4022
4023/* Split too deep TER chains for debug stmts using debug temporaries. */
4024
4025static void
355fe088 4026avoid_deep_ter_for_debug (gimple *stmt, int depth)
dfde35b3
JJ
4027{
4028 use_operand_p use_p;
4029 ssa_op_iter iter;
4030 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4031 {
4032 tree use = USE_FROM_PTR (use_p);
4033 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4034 continue;
355fe088 4035 gimple *g = get_gimple_for_ssa_name (use);
dfde35b3
JJ
4036 if (g == NULL)
4037 continue;
4038 if (depth > 6 && !stmt_ends_bb_p (g))
4039 {
4040 if (deep_ter_debug_map == NULL)
4041 deep_ter_debug_map = new hash_map<tree, tree>;
4042
4043 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4044 if (vexpr != NULL)
4045 continue;
4046 vexpr = make_node (DEBUG_EXPR_DECL);
355fe088 4047 gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
dfde35b3
JJ
4048 DECL_ARTIFICIAL (vexpr) = 1;
4049 TREE_TYPE (vexpr) = TREE_TYPE (use);
899ca90e 4050 SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
dfde35b3
JJ
4051 gimple_stmt_iterator gsi = gsi_for_stmt (g);
4052 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4053 avoid_deep_ter_for_debug (def_temp, 0);
4054 }
4055 else
4056 avoid_deep_ter_for_debug (g, depth + 1);
4057 }
4058}
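/* Illustrative sketch (hypothetical SSA names): given a single-use
   chain deeper than six statements, e.g.

     a_1 = x_0 + 1;  b_2 = a_1 * 2;  c_3 = b_2 - 3;  ...

   substituting the whole chain into a debug bind would build a huge
   tree, so a DEBUG_EXPR_DECL is bound to the intermediate SSA name
   and the deeper uses refer to that temporary instead.  */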
4059
12c5ffe5
EB
4060/* Return an RTX equivalent to the value of the parameter DECL. */
4061
4062static rtx
4063expand_debug_parm_decl (tree decl)
4064{
4065 rtx incoming = DECL_INCOMING_RTL (decl);
4066
4067 if (incoming
4068 && GET_MODE (incoming) != BLKmode
4069 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4070 || (MEM_P (incoming)
4071 && REG_P (XEXP (incoming, 0))
4072 && HARD_REGISTER_P (XEXP (incoming, 0)))))
4073 {
4074 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4075
4076#ifdef HAVE_window_save
4077 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4078 If the target machine has an explicit window save instruction, the
4079 actual entry value is the corresponding OUTGOING_REGNO instead. */
4080 if (REG_P (incoming)
4081 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4082 incoming
4083 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4084 OUTGOING_REGNO (REGNO (incoming)), 0);
4085 else if (MEM_P (incoming))
4086 {
4087 rtx reg = XEXP (incoming, 0);
4088 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4089 {
4090 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4091 incoming = replace_equiv_address_nv (incoming, reg);
4092 }
6cfa417f
JJ
4093 else
4094 incoming = copy_rtx (incoming);
12c5ffe5
EB
4095 }
4096#endif
4097
4098 ENTRY_VALUE_EXP (rtl) = incoming;
4099 return rtl;
4100 }
4101
4102 if (incoming
4103 && GET_MODE (incoming) != BLKmode
4104 && !TREE_ADDRESSABLE (decl)
4105 && MEM_P (incoming)
4106 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4107 || (GET_CODE (XEXP (incoming, 0)) == PLUS
4108 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4109 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
6cfa417f 4110 return copy_rtx (incoming);
12c5ffe5
EB
4111
4112 return NULL_RTX;
4113}
4114
4115/* Return an RTX equivalent to the value of the tree expression EXP. */
b5b8b0ac
AO
4116
4117static rtx
4118expand_debug_expr (tree exp)
4119{
4120 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
ef4bddc2
RS
4121 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4122 machine_mode inner_mode = VOIDmode;
b5b8b0ac 4123 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
09e881c9 4124 addr_space_t as;
7a504f33 4125 scalar_int_mode op0_mode, op1_mode, addr_mode;
b5b8b0ac
AO
4126
4127 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4128 {
4129 case tcc_expression:
4130 switch (TREE_CODE (exp))
4131 {
4132 case COND_EXPR:
7ece48b1 4133 case DOT_PROD_EXPR:
79d652a5 4134 case SAD_EXPR:
0354c0c7
BS
4135 case WIDEN_MULT_PLUS_EXPR:
4136 case WIDEN_MULT_MINUS_EXPR:
b5b8b0ac
AO
4137 goto ternary;
4138
4139 case TRUTH_ANDIF_EXPR:
4140 case TRUTH_ORIF_EXPR:
4141 case TRUTH_AND_EXPR:
4142 case TRUTH_OR_EXPR:
4143 case TRUTH_XOR_EXPR:
4144 goto binary;
4145
4146 case TRUTH_NOT_EXPR:
4147 goto unary;
4148
4149 default:
4150 break;
4151 }
4152 break;
4153
4154 ternary:
4155 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4156 if (!op2)
4157 return NULL_RTX;
4158 /* Fall through. */
4159
4160 binary:
4161 case tcc_binary:
e3bd1763
JJ
4162 if (mode == BLKmode)
4163 return NULL_RTX;
b5b8b0ac
AO
4164 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4165 if (!op1)
4166 return NULL_RTX;
26d83bcc
JJ
4167 switch (TREE_CODE (exp))
4168 {
4169 case LSHIFT_EXPR:
4170 case RSHIFT_EXPR:
4171 case LROTATE_EXPR:
4172 case RROTATE_EXPR:
4173 case WIDEN_LSHIFT_EXPR:
4174 /* Ensure second operand isn't wider than the first one. */
4175 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
b0567726
RS
4176 if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4177 && (GET_MODE_UNIT_PRECISION (mode)
4178 < GET_MODE_PRECISION (op1_mode)))
4179 op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
26d83bcc
JJ
4180 break;
4181 default:
4182 break;
4183 }
b5b8b0ac
AO
4184 /* Fall through. */
4185
4186 unary:
4187 case tcc_unary:
e3bd1763
JJ
4188 if (mode == BLKmode)
4189 return NULL_RTX;
2ba172e0 4190 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
4191 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4192 if (!op0)
4193 return NULL_RTX;
4194 break;
4195
871dae34
AO
4196 case tcc_comparison:
4197 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4198 goto binary;
4199
b5b8b0ac
AO
4200 case tcc_type:
4201 case tcc_statement:
4202 gcc_unreachable ();
4203
4204 case tcc_constant:
4205 case tcc_exceptional:
4206 case tcc_declaration:
4207 case tcc_reference:
4208 case tcc_vl_exp:
4209 break;
4210 }
4211
4212 switch (TREE_CODE (exp))
4213 {
4214 case STRING_CST:
4215 if (!lookup_constant_def (exp))
4216 {
e1b243a8
JJ
4217 if (strlen (TREE_STRING_POINTER (exp)) + 1
4218 != (size_t) TREE_STRING_LENGTH (exp))
4219 return NULL_RTX;
b5b8b0ac
AO
4220 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4221 op0 = gen_rtx_MEM (BLKmode, op0);
4222 set_mem_attributes (op0, exp, 0);
4223 return op0;
4224 }
191816a3 4225 /* Fall through. */
b5b8b0ac
AO
4226
4227 case INTEGER_CST:
4228 case REAL_CST:
4229 case FIXED_CST:
4230 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4231 return op0;
4232
36fd6408
RS
4233 case POLY_INT_CST:
4234 return immed_wide_int_const (poly_int_cst_value (exp), mode);
4235
b5b8b0ac
AO
4236 case COMPLEX_CST:
4237 gcc_assert (COMPLEX_MODE_P (mode));
4238 op0 = expand_debug_expr (TREE_REALPART (exp));
b5b8b0ac 4239 op1 = expand_debug_expr (TREE_IMAGPART (exp));
b5b8b0ac
AO
4240 return gen_rtx_CONCAT (mode, op0, op1);
4241
0ca5af51
AO
4242 case DEBUG_EXPR_DECL:
4243 op0 = DECL_RTL_IF_SET (exp);
4244
4245 if (op0)
4246 return op0;
4247
4248 op0 = gen_rtx_DEBUG_EXPR (mode);
e4fb38bd 4249 DEBUG_EXPR_TREE_DECL (op0) = exp;
0ca5af51
AO
4250 SET_DECL_RTL (exp, op0);
4251
4252 return op0;
4253
b5b8b0ac
AO
4254 case VAR_DECL:
4255 case PARM_DECL:
4256 case FUNCTION_DECL:
4257 case LABEL_DECL:
4258 case CONST_DECL:
4259 case RESULT_DECL:
4260 op0 = DECL_RTL_IF_SET (exp);
4261
4262 /* This decl was probably optimized away. */
4263 if (!op0)
e1b243a8 4264 {
8813a647 4265 if (!VAR_P (exp)
e1b243a8
JJ
4266 || DECL_EXTERNAL (exp)
4267 || !TREE_STATIC (exp)
4268 || !DECL_NAME (exp)
0fba566c 4269 || DECL_HARD_REGISTER (exp)
7d5fc814 4270 || DECL_IN_CONSTANT_POOL (exp)
0fba566c 4271 || mode == VOIDmode)
e1b243a8
JJ
4272 return NULL;
4273
b1aa0655 4274 op0 = make_decl_rtl_for_debug (exp);
e1b243a8
JJ
4275 if (!MEM_P (op0)
4276 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4277 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4278 return NULL;
4279 }
4280 else
4281 op0 = copy_rtx (op0);
b5b8b0ac 4282
06796564 4283 if (GET_MODE (op0) == BLKmode
871dae34 4284 /* If op0 is not BLKmode, but mode is, adjust_mode
06796564
JJ
4285 below would ICE. While it is likely a FE bug,
4286 try to be robust here. See PR43166. */
132b4e82
JJ
4287 || mode == BLKmode
4288 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
b5b8b0ac
AO
4289 {
4290 gcc_assert (MEM_P (op0));
4291 op0 = adjust_address_nv (op0, mode, 0);
4292 return op0;
4293 }
4294
4295 /* Fall through. */
4296
4297 adjust_mode:
4298 case PAREN_EXPR:
625a9766 4299 CASE_CONVERT:
b5b8b0ac 4300 {
2ba172e0 4301 inner_mode = GET_MODE (op0);
b5b8b0ac
AO
4302
4303 if (mode == inner_mode)
4304 return op0;
4305
4306 if (inner_mode == VOIDmode)
4307 {
2a8e30fb
MM
4308 if (TREE_CODE (exp) == SSA_NAME)
4309 inner_mode = TYPE_MODE (TREE_TYPE (exp));
4310 else
4311 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
4312 if (mode == inner_mode)
4313 return op0;
4314 }
4315
4316 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4317 {
250a60f3
RS
4318 if (GET_MODE_UNIT_BITSIZE (mode)
4319 == GET_MODE_UNIT_BITSIZE (inner_mode))
b5b8b0ac 4320 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
250a60f3
RS
4321 else if (GET_MODE_UNIT_BITSIZE (mode)
4322 < GET_MODE_UNIT_BITSIZE (inner_mode))
b5b8b0ac
AO
4323 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4324 else
4325 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4326 }
4327 else if (FLOAT_MODE_P (mode))
4328 {
2a8e30fb 4329 gcc_assert (TREE_CODE (exp) != SSA_NAME);
b5b8b0ac
AO
4330 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4331 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4332 else
4333 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4334 }
4335 else if (FLOAT_MODE_P (inner_mode))
4336 {
4337 if (unsignedp)
4338 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4339 else
4340 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4341 }
bb06a2d8
RS
4342 else if (GET_MODE_UNIT_PRECISION (mode)
4343 == GET_MODE_UNIT_PRECISION (inner_mode))
3403a1a9 4344 op0 = lowpart_subreg (mode, op0, inner_mode);
bb06a2d8
RS
4345 else if (GET_MODE_UNIT_PRECISION (mode)
4346 < GET_MODE_UNIT_PRECISION (inner_mode))
4347 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
cf4ef6f7 4348 else if (UNARY_CLASS_P (exp)
1b47fe3f
JJ
4349 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4350 : unsignedp)
2ba172e0 4351 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
b5b8b0ac 4352 else
2ba172e0 4353 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
b5b8b0ac
AO
4354
4355 return op0;
4356 }
4357
70f34814 4358 case MEM_REF:
71f3a3f5
JJ
4359 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4360 {
4361 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4362 TREE_OPERAND (exp, 0),
4363 TREE_OPERAND (exp, 1));
4364 if (newexp)
4365 return expand_debug_expr (newexp);
4366 }
4367 /* FALLTHROUGH */
b5b8b0ac 4368 case INDIRECT_REF:
0a81f074 4369 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
4370 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4371 if (!op0)
4372 return NULL;
4373
cb115041
JJ
4374 if (TREE_CODE (exp) == MEM_REF)
4375 {
583ac69c
JJ
4376 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4377 || (GET_CODE (op0) == PLUS
4378 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4379 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4380 Instead just use get_inner_reference. */
4381 goto component_ref;
4382
cb115041 4383 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
5284e559
RS
4384 poly_int64 offset;
4385 if (!op1 || !poly_int_rtx_p (op1, &offset))
cb115041
JJ
4386 return NULL;
4387
5284e559 4388 op0 = plus_constant (inner_mode, op0, offset);
cb115041
JJ
4389 }
4390
a148c4b2 4391 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
b5b8b0ac 4392
f61c6f34
JJ
4393 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4394 op0, as);
4395 if (op0 == NULL_RTX)
4396 return NULL;
b5b8b0ac 4397
f61c6f34 4398 op0 = gen_rtx_MEM (mode, op0);
b5b8b0ac 4399 set_mem_attributes (op0, exp, 0);
71f3a3f5
JJ
4400 if (TREE_CODE (exp) == MEM_REF
4401 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4402 set_mem_expr (op0, NULL_TREE);
09e881c9 4403 set_mem_addr_space (op0, as);
b5b8b0ac
AO
4404
4405 return op0;
4406
4407 case TARGET_MEM_REF:
4d948885
RG
4408 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4409 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
b5b8b0ac
AO
4410 return NULL;
4411
4412 op0 = expand_debug_expr
4e25ca6b 4413 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
b5b8b0ac
AO
4414 if (!op0)
4415 return NULL;
4416
c168f180 4417 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
f61c6f34
JJ
4418 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4419 op0, as);
4420 if (op0 == NULL_RTX)
4421 return NULL;
b5b8b0ac
AO
4422
4423 op0 = gen_rtx_MEM (mode, op0);
4424
4425 set_mem_attributes (op0, exp, 0);
09e881c9 4426 set_mem_addr_space (op0, as);
b5b8b0ac
AO
4427
4428 return op0;
4429
583ac69c 4430 component_ref:
b5b8b0ac
AO
4431 case ARRAY_REF:
4432 case ARRAY_RANGE_REF:
4433 case COMPONENT_REF:
4434 case BIT_FIELD_REF:
4435 case REALPART_EXPR:
4436 case IMAGPART_EXPR:
4437 case VIEW_CONVERT_EXPR:
4438 {
ef4bddc2 4439 machine_mode mode1;
06889da8 4440 poly_int64 bitsize, bitpos;
b5b8b0ac 4441 tree offset;
ee45a32d
EB
4442 int reversep, volatilep = 0;
4443 tree tem
4444 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
25b75a48 4445 &unsignedp, &reversep, &volatilep);
b5b8b0ac
AO
4446 rtx orig_op0;
4447
06889da8 4448 if (known_eq (bitsize, 0))
4f2a9af8
JJ
4449 return NULL;
4450
b5b8b0ac
AO
4451 orig_op0 = op0 = expand_debug_expr (tem);
4452
4453 if (!op0)
4454 return NULL;
4455
4456 if (offset)
4457 {
ef4bddc2 4458 machine_mode addrmode, offmode;
dda2da58 4459
aa847cc8
JJ
4460 if (!MEM_P (op0))
4461 return NULL;
b5b8b0ac 4462
dda2da58
AO
4463 op0 = XEXP (op0, 0);
4464 addrmode = GET_MODE (op0);
4465 if (addrmode == VOIDmode)
4466 addrmode = Pmode;
4467
b5b8b0ac
AO
4468 op1 = expand_debug_expr (offset);
4469 if (!op1)
4470 return NULL;
4471
dda2da58
AO
4472 offmode = GET_MODE (op1);
4473 if (offmode == VOIDmode)
4474 offmode = TYPE_MODE (TREE_TYPE (offset));
4475
4476 if (addrmode != offmode)
3403a1a9 4477 op1 = lowpart_subreg (addrmode, op1, offmode);
dda2da58
AO
4478
 4479 /* Don't use offset_address here; we don't need a
4480 recognizable address, and we don't want to generate
4481 code. */
2ba172e0
JJ
4482 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4483 op0, op1));
b5b8b0ac
AO
4484 }
4485
4486 if (MEM_P (op0))
4487 {
4f2a9af8 4488 if (mode1 == VOIDmode)
dba9c1fd
JJ
4489 {
4490 if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
4491 return NULL;
4492 /* Bitfield. */
4493 mode1 = smallest_int_mode_for_size (bitsize);
4494 }
06889da8
RS
4495 poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
4496 if (maybe_ne (bytepos, 0))
b5b8b0ac 4497 {
06889da8
RS
4498 op0 = adjust_address_nv (op0, mode1, bytepos);
4499 bitpos = num_trailing_bits (bitpos);
b5b8b0ac 4500 }
06889da8
RS
4501 else if (known_eq (bitpos, 0)
4502 && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
b5b8b0ac
AO
4503 op0 = adjust_address_nv (op0, mode, 0);
4504 else if (GET_MODE (op0) != mode1)
4505 op0 = adjust_address_nv (op0, mode1, 0);
4506 else
4507 op0 = copy_rtx (op0);
4508 if (op0 == orig_op0)
4509 op0 = shallow_copy_rtx (op0);
4510 set_mem_attributes (op0, exp, 0);
4511 }
4512
06889da8 4513 if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
b5b8b0ac
AO
4514 return op0;
4515
06889da8 4516 if (maybe_lt (bitpos, 0))
2d3fc6aa
JJ
4517 return NULL;
4518
c54af068 4519 if (GET_MODE (op0) == BLKmode || mode == BLKmode)
88c04a5d
JJ
4520 return NULL;
4521
06889da8
RS
4522 poly_int64 bytepos;
4523 if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
4524 && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
b5b8b0ac 4525 {
ef4bddc2 4526 machine_mode opmode = GET_MODE (op0);
b5b8b0ac 4527
b5b8b0ac 4528 if (opmode == VOIDmode)
9712cba0 4529 opmode = TYPE_MODE (TREE_TYPE (tem));
b5b8b0ac
AO
4530
4531 /* This condition may hold if we're expanding the address
4532 right past the end of an array that turned out not to
4533 be addressable (i.e., the address was only computed in
4534 debug stmts). The gen_subreg below would rightfully
4535 crash, and the address doesn't really exist, so just
4536 drop it. */
06889da8 4537 if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
b5b8b0ac
AO
4538 return NULL;
4539
06889da8
RS
4540 if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
4541 return simplify_gen_subreg (mode, op0, opmode, bytepos);
b5b8b0ac
AO
4542 }
4543
4544 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4545 && TYPE_UNSIGNED (TREE_TYPE (exp))
4546 ? SIGN_EXTRACT
4547 : ZERO_EXTRACT, mode,
4548 GET_MODE (op0) != VOIDmode
9712cba0
JJ
4549 ? GET_MODE (op0)
4550 : TYPE_MODE (TREE_TYPE (tem)),
06889da8
RS
4551 op0, gen_int_mode (bitsize, word_mode),
4552 gen_int_mode (bitpos, word_mode));
b5b8b0ac
AO
4553 }
4554
b5b8b0ac 4555 case ABS_EXPR:
e197e64e 4556 case ABSU_EXPR:
2ba172e0 4557 return simplify_gen_unary (ABS, mode, op0, mode);
b5b8b0ac
AO
4558
4559 case NEGATE_EXPR:
2ba172e0 4560 return simplify_gen_unary (NEG, mode, op0, mode);
b5b8b0ac
AO
4561
4562 case BIT_NOT_EXPR:
2ba172e0 4563 return simplify_gen_unary (NOT, mode, op0, mode);
b5b8b0ac
AO
4564
4565 case FLOAT_EXPR:
2ba172e0
JJ
4566 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4567 0)))
4568 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4569 inner_mode);
b5b8b0ac
AO
4570
4571 case FIX_TRUNC_EXPR:
2ba172e0
JJ
4572 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4573 inner_mode);
b5b8b0ac
AO
4574
4575 case POINTER_PLUS_EXPR:
576319a7
DD
4576 /* For the rare target where pointers are not the same size as
4577 size_t, we need to check for mis-matched modes and correct
4578 the addend. */
4579 if (op0 && op1
673bf5a6
RS
4580 && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4581 && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4582 && op0_mode != op1_mode)
576319a7 4583 {
673bf5a6
RS
4584 if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4585 /* If OP0 is a partial mode, then we must truncate, even
 4586 if it has the same bitsize as OP1, as GCC's
4587 representation of partial modes is opaque. */
4588 || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4589 && (GET_MODE_BITSIZE (op0_mode)
4590 == GET_MODE_BITSIZE (op1_mode))))
4591 op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
576319a7
DD
4592 else
4593 /* We always sign-extend, regardless of the signedness of
4594 the operand, because the operand is always unsigned
4595 here even if the original C expression is signed. */
673bf5a6 4596 op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
576319a7
DD
4597 }
4598 /* Fall through. */
b5b8b0ac 4599 case PLUS_EXPR:
2ba172e0 4600 return simplify_gen_binary (PLUS, mode, op0, op1);
b5b8b0ac
AO
4601
4602 case MINUS_EXPR:
1af4ebf5 4603 case POINTER_DIFF_EXPR:
2ba172e0 4604 return simplify_gen_binary (MINUS, mode, op0, op1);
b5b8b0ac
AO
4605
4606 case MULT_EXPR:
2ba172e0 4607 return simplify_gen_binary (MULT, mode, op0, op1);
b5b8b0ac
AO
4608
4609 case RDIV_EXPR:
4610 case TRUNC_DIV_EXPR:
4611 case EXACT_DIV_EXPR:
4612 if (unsignedp)
2ba172e0 4613 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac 4614 else
2ba172e0 4615 return simplify_gen_binary (DIV, mode, op0, op1);
b5b8b0ac
AO
4616
4617 case TRUNC_MOD_EXPR:
2ba172e0 4618 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
b5b8b0ac
AO
4619
4620 case FLOOR_DIV_EXPR:
4621 if (unsignedp)
2ba172e0 4622 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac
AO
4623 else
4624 {
2ba172e0
JJ
4625 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4626 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4627 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0 4628 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4629 }
4630
4631 case FLOOR_MOD_EXPR:
4632 if (unsignedp)
2ba172e0 4633 return simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac
AO
4634 else
4635 {
2ba172e0 4636 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4637 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4638 adj = simplify_gen_unary (NEG, mode,
4639 simplify_gen_binary (MULT, mode, adj, op1),
4640 mode);
4641 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4642 }
4643
4644 case CEIL_DIV_EXPR:
4645 if (unsignedp)
4646 {
2ba172e0
JJ
4647 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4648 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4649 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0 4650 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4651 }
4652 else
4653 {
2ba172e0
JJ
4654 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4655 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4656 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0 4657 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4658 }
4659
4660 case CEIL_MOD_EXPR:
4661 if (unsignedp)
4662 {
2ba172e0 4663 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4664 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
4665 adj = simplify_gen_unary (NEG, mode,
4666 simplify_gen_binary (MULT, mode, adj, op1),
4667 mode);
4668 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4669 }
4670 else
4671 {
2ba172e0 4672 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4673 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4674 adj = simplify_gen_unary (NEG, mode,
4675 simplify_gen_binary (MULT, mode, adj, op1),
4676 mode);
4677 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4678 }
4679
4680 case ROUND_DIV_EXPR:
4681 if (unsignedp)
4682 {
2ba172e0
JJ
4683 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4684 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4685 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0 4686 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4687 }
4688 else
4689 {
2ba172e0
JJ
4690 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4691 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4692 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0 4693 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4694 }
4695
4696 case ROUND_MOD_EXPR:
4697 if (unsignedp)
4698 {
2ba172e0 4699 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4700 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
4701 adj = simplify_gen_unary (NEG, mode,
4702 simplify_gen_binary (MULT, mode, adj, op1),
4703 mode);
4704 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4705 }
4706 else
4707 {
2ba172e0 4708 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4709 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4710 adj = simplify_gen_unary (NEG, mode,
4711 simplify_gen_binary (MULT, mode, adj, op1),
4712 mode);
4713 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4714 }
4715
4716 case LSHIFT_EXPR:
2ba172e0 4717 return simplify_gen_binary (ASHIFT, mode, op0, op1);
b5b8b0ac
AO
4718
4719 case RSHIFT_EXPR:
4720 if (unsignedp)
2ba172e0 4721 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
b5b8b0ac 4722 else
2ba172e0 4723 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
b5b8b0ac
AO
4724
4725 case LROTATE_EXPR:
2ba172e0 4726 return simplify_gen_binary (ROTATE, mode, op0, op1);
b5b8b0ac
AO
4727
4728 case RROTATE_EXPR:
2ba172e0 4729 return simplify_gen_binary (ROTATERT, mode, op0, op1);
b5b8b0ac
AO
4730
4731 case MIN_EXPR:
2ba172e0 4732 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
b5b8b0ac
AO
4733
4734 case MAX_EXPR:
2ba172e0 4735 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
b5b8b0ac
AO
4736
4737 case BIT_AND_EXPR:
4738 case TRUTH_AND_EXPR:
2ba172e0 4739 return simplify_gen_binary (AND, mode, op0, op1);
b5b8b0ac
AO
4740
4741 case BIT_IOR_EXPR:
4742 case TRUTH_OR_EXPR:
2ba172e0 4743 return simplify_gen_binary (IOR, mode, op0, op1);
b5b8b0ac
AO
4744
4745 case BIT_XOR_EXPR:
4746 case TRUTH_XOR_EXPR:
2ba172e0 4747 return simplify_gen_binary (XOR, mode, op0, op1);
b5b8b0ac
AO
4748
4749 case TRUTH_ANDIF_EXPR:
4750 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4751
4752 case TRUTH_ORIF_EXPR:
4753 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4754
4755 case TRUTH_NOT_EXPR:
2ba172e0 4756 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
b5b8b0ac
AO
4757
4758 case LT_EXPR:
2ba172e0
JJ
4759 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4760 op0, op1);
b5b8b0ac
AO
4761
4762 case LE_EXPR:
2ba172e0
JJ
4763 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4764 op0, op1);
b5b8b0ac
AO
4765
4766 case GT_EXPR:
2ba172e0
JJ
4767 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4768 op0, op1);
b5b8b0ac
AO
4769
4770 case GE_EXPR:
2ba172e0
JJ
4771 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4772 op0, op1);
b5b8b0ac
AO
4773
4774 case EQ_EXPR:
2ba172e0 4775 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4776
4777 case NE_EXPR:
2ba172e0 4778 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4779
4780 case UNORDERED_EXPR:
2ba172e0 4781 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4782
4783 case ORDERED_EXPR:
2ba172e0 4784 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4785
4786 case UNLT_EXPR:
2ba172e0 4787 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4788
4789 case UNLE_EXPR:
2ba172e0 4790 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4791
4792 case UNGT_EXPR:
2ba172e0 4793 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4794
4795 case UNGE_EXPR:
2ba172e0 4796 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4797
4798 case UNEQ_EXPR:
2ba172e0 4799 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4800
4801 case LTGT_EXPR:
2ba172e0 4802 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4803
4804 case COND_EXPR:
4805 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4806
4807 case COMPLEX_EXPR:
4808 gcc_assert (COMPLEX_MODE_P (mode));
4809 if (GET_MODE (op0) == VOIDmode)
4810 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4811 if (GET_MODE (op1) == VOIDmode)
4812 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4813 return gen_rtx_CONCAT (mode, op0, op1);
4814
d02a5a4b
JJ
4815 case CONJ_EXPR:
4816 if (GET_CODE (op0) == CONCAT)
4817 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2ba172e0
JJ
4818 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4819 XEXP (op0, 1),
4820 GET_MODE_INNER (mode)));
d02a5a4b
JJ
4821 else
4822 {
d21cefc2 4823 scalar_mode imode = GET_MODE_INNER (mode);
d02a5a4b
JJ
4824 rtx re, im;
4825
4826 if (MEM_P (op0))
4827 {
4828 re = adjust_address_nv (op0, imode, 0);
4829 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4830 }
4831 else
4832 {
304b9962
RS
4833 scalar_int_mode ifmode;
4834 scalar_int_mode ihmode;
d02a5a4b 4835 rtx halfsize;
304b9962
RS
4836 if (!int_mode_for_mode (mode).exists (&ifmode)
4837 || !int_mode_for_mode (imode).exists (&ihmode))
d02a5a4b
JJ
4838 return NULL;
4839 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4840 re = op0;
4841 if (mode != ifmode)
4842 re = gen_rtx_SUBREG (ifmode, re, 0);
4843 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4844 if (imode != ihmode)
4845 re = gen_rtx_SUBREG (imode, re, 0);
4846 im = copy_rtx (op0);
4847 if (mode != ifmode)
4848 im = gen_rtx_SUBREG (ifmode, im, 0);
4849 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4850 if (imode != ihmode)
4851 im = gen_rtx_SUBREG (imode, im, 0);
4852 }
4853 im = gen_rtx_NEG (imode, im);
4854 return gen_rtx_CONCAT (mode, re, im);
4855 }
4856
b5b8b0ac
AO
4857 case ADDR_EXPR:
4858 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4859 if (!op0 || !MEM_P (op0))
c8a27c40
JJ
4860 {
4861 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4862 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4863 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
f8cca67b
JJ
4864 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4865 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
c8a27c40
JJ
4866 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4867
4868 if (handled_component_p (TREE_OPERAND (exp, 0)))
4869 {
588db50c 4870 poly_int64 bitoffset, bitsize, maxsize, byteoffset;
ee45a32d 4871 bool reverse;
c8a27c40 4872 tree decl
ee45a32d
EB
4873 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4874 &bitsize, &maxsize, &reverse);
8813a647 4875 if ((VAR_P (decl)
c8a27c40
JJ
4876 || TREE_CODE (decl) == PARM_DECL
4877 || TREE_CODE (decl) == RESULT_DECL)
f8cca67b
JJ
4878 && (!TREE_ADDRESSABLE (decl)
4879 || target_for_debug_bind (decl))
588db50c
RS
4880 && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
4881 && known_gt (bitsize, 0)
4882 && known_eq (bitsize, maxsize))
0a81f074
RS
4883 {
4884 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
588db50c 4885 return plus_constant (mode, base, byteoffset);
0a81f074 4886 }
c8a27c40
JJ
4887 }
4888
9430b7ba
JJ
4889 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4890 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4891 == ADDR_EXPR)
4892 {
4893 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4894 0));
4895 if (op0 != NULL
4896 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4897 || (GET_CODE (op0) == PLUS
4898 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4899 && CONST_INT_P (XEXP (op0, 1)))))
4900 {
4901 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4902 1));
5284e559
RS
4903 poly_int64 offset;
4904 if (!op1 || !poly_int_rtx_p (op1, &offset))
9430b7ba
JJ
4905 return NULL;
4906
5284e559 4907 return plus_constant (mode, op0, offset);
9430b7ba
JJ
4908 }
4909 }
4910
c8a27c40
JJ
4911 return NULL;
4912 }
b5b8b0ac 4913
a148c4b2 4914 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7a504f33
RS
4915 addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
4916 op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
dda2da58
AO
4917
4918 return op0;
b5b8b0ac
AO
4919
4920 case VECTOR_CST:
d2a12ae7 4921 {
928686b1
RS
4922 unsigned HOST_WIDE_INT i, nelts;
4923
4924 if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
4925 return NULL;
d2a12ae7 4926
9e822269 4927 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
d2a12ae7 4928
9e822269 4929 for (i = 0; i < nelts; ++i)
d2a12ae7
RG
4930 {
4931 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4932 if (!op1)
4933 return NULL;
4934 XVECEXP (op0, 0, i) = op1;
4935 }
4936
4937 return op0;
4938 }
b5b8b0ac
AO
4939
    case CONSTRUCTOR:
      if (TREE_CLOBBER_P (exp))
	return NULL;
      else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
	{
	  unsigned i;
	  unsigned HOST_WIDE_INT nelts;
	  tree val;

	  if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
	    goto flag_unsupported;

	  op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));

	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
	    {
	      op1 = expand_debug_expr (val);
	      if (!op1)
		return NULL;
	      XVECEXP (op0, 0, i) = op1;
	    }

	  if (i < nelts)
	    {
	      op1 = expand_debug_expr
		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));

	      if (!op1)
		return NULL;

	      for (; i < nelts; i++)
		XVECEXP (op0, 0, i) = op1;
	    }

	  return op0;
	}
      else
	goto flag_unsupported;

    case CALL_EXPR:
      /* ??? Maybe handle some builtins?  */
      return NULL;

    case SSA_NAME:
      {
	gimple *g = get_gimple_for_ssa_name (exp);
	if (g)
	  {
	    tree t = NULL_TREE;
	    if (deep_ter_debug_map)
	      {
		tree *slot = deep_ter_debug_map->get (exp);
		if (slot)
		  t = *slot;
	      }
	    if (t == NULL_TREE)
	      t = gimple_assign_rhs_to_tree (g);
	    op0 = expand_debug_expr (t);
	    if (!op0)
	      return NULL;
	  }
	else
	  {
	    /* If this is a reference to an incoming value of a
	       parameter that is never used in the code or where the
	       incoming value is never used in the code, use
	       PARM_DECL's DECL_RTL if set.  */
	    if (SSA_NAME_IS_DEFAULT_DEF (exp)
		&& SSA_NAME_VAR (exp)
		&& TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
		&& has_zero_uses (exp))
	      {
		op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
		if (op0)
		  goto adjust_mode;
		op0 = expand_debug_expr (SSA_NAME_VAR (exp));
		if (op0)
		  goto adjust_mode;
	      }

	    int part = var_to_partition (SA.map, exp);

	    if (part == NO_PARTITION)
	      return NULL;

	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);

	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
	  }
	goto adjust_mode;
      }

    case ERROR_MARK:
      return NULL;

      /* Vector stuff.  For most of the codes we don't have rtl codes.  */
    case REALIGN_LOAD_EXPR:
    case VEC_COND_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
    case VEC_PERM_EXPR:
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
      return NULL;

      /* Misc codes.  */
    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case OBJ_TYPE_REF:
    case WITH_SIZE_EXPR:
    case BIT_INSERT_EXPR:
      return NULL;

    case DOT_PROD_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  op0
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									  0)))
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
				  inner_mode);
	  op1
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									  1)))
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
				  inner_mode);
	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
	  return simplify_gen_binary (PLUS, mode, op0, op2);
	}
      return NULL;

    case WIDEN_MULT_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  inner_mode = GET_MODE (op0);
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
	  else
	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
	  else
	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
	    return op0;
	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
	    return simplify_gen_binary (PLUS, mode, op0, op2);
	  else
	    return simplify_gen_binary (MINUS, mode, op2, op0);
	}
      return NULL;

    case MULT_HIGHPART_EXPR:
      /* ??? Similar to the above.  */
      return NULL;

    case WIDEN_SUM_EXPR:
    case WIDEN_LSHIFT_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  op0
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									  0)))
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
				  inner_mode);
	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
				      ? ASHIFT : PLUS, mode, op0, op1);
	}
      return NULL;

    default:
    flag_unsupported:
      if (flag_checking)
	{
	  debug_tree (exp);
	  gcc_unreachable ();
	}
      return NULL;
    }
}

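/* Illustrative sketch only (not part of the original file): the
   WIDEN_MULT_EXPR handling above corresponds to source like the
   function below, where a 32x32->64 multiplication may be recognized
   as a widening multiply; for debug purposes it is rebuilt as two
   sign/zero-extensions followed by a full-width MULT.  */
#if 0
static long long
widen_mult_example (int a, int b)
{
  return (long long) a * b;	/* May become WIDEN_MULT_EXPR on 32-bit
				   targets with a widening-mult optab.  */
}
#endif
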
/* Return an RTX equivalent to the source bind value of the tree expression
   EXP.  */

static rtx
expand_debug_source_expr (tree exp)
{
  rtx op0 = NULL_RTX;
  machine_mode mode = VOIDmode, inner_mode;

  switch (TREE_CODE (exp))
    {
    case VAR_DECL:
      if (DECL_ABSTRACT_ORIGIN (exp))
	return expand_debug_source_expr (DECL_ABSTRACT_ORIGIN (exp));
      break;
    case PARM_DECL:
      {
	mode = DECL_MODE (exp);
	op0 = expand_debug_parm_decl (exp);
	if (op0)
	  break;
	/* See if this isn't an argument that has been completely
	   optimized out.  */
	if (!DECL_RTL_SET_P (exp)
	    && !DECL_INCOMING_RTL (exp)
	    && DECL_ABSTRACT_ORIGIN (current_function_decl))
	  {
	    tree aexp = DECL_ORIGIN (exp);
	    if (DECL_CONTEXT (aexp)
		== DECL_ABSTRACT_ORIGIN (current_function_decl))
	      {
		vec<tree, va_gc> **debug_args;
		unsigned int ix;
		tree ddecl;
		debug_args = decl_debug_args_lookup (current_function_decl);
		if (debug_args != NULL)
		  {
		    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
			 ix += 2)
		      if (ddecl == aexp)
			return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
		  }
	      }
	  }
	break;
      }
    default:
      break;
    }

  if (op0 == NULL_RTX)
    return NULL_RTX;

  inner_mode = GET_MODE (op0);
  if (mode == inner_mode)
    return op0;

  if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
    {
      if (GET_MODE_UNIT_BITSIZE (mode)
	  == GET_MODE_UNIT_BITSIZE (inner_mode))
	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
      else if (GET_MODE_UNIT_BITSIZE (mode)
	       < GET_MODE_UNIT_BITSIZE (inner_mode))
	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
      else
	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
    }
  else if (FLOAT_MODE_P (mode))
    gcc_unreachable ();
  else if (FLOAT_MODE_P (inner_mode))
    {
      if (TYPE_UNSIGNED (TREE_TYPE (exp)))
	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
      else
	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
    }
  else if (GET_MODE_UNIT_PRECISION (mode)
	   == GET_MODE_UNIT_PRECISION (inner_mode))
    op0 = lowpart_subreg (mode, op0, inner_mode);
  else if (GET_MODE_UNIT_PRECISION (mode)
	   < GET_MODE_UNIT_PRECISION (inner_mode))
    op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
  else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
  else
    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);

  return op0;
}

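/* A hypothetical example (not from the original sources) of the
   situation expand_debug_source_expr deals with: after optimization
   the parameter N below may have no remaining uses and no DECL_RTL,
   yet a DEBUG_PARAMETER_REF can still describe its incoming value to
   the debugger.  */
#if 0
static int
optimized_out_parm_example (int used, int n)
{
  return used * 2;	/* N is unused and may be optimized away.  */
}
#endif
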
/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
   Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
   deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */

static void
avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
{
  rtx exp = *exp_p;

  if (exp == NULL_RTX)
    return;

  if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
    return;

  if (depth == 4)
    {
      /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
      rtx dval = make_debug_expr_from_rtl (exp);

      /* Emit a debug bind insn before INSN.  */
      rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
				       DEBUG_EXPR_TREE_DECL (dval), exp,
				       VAR_INIT_STATUS_INITIALIZED);

      emit_debug_insn_before (bind, insn);
      *exp_p = dval;
      return;
    }

  const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
  int i, j;
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
	break;

      case 'E':
      case 'V':
	for (j = 0; j < XVECLEN (exp, i); j++)
	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
	break;

      default:
	break;
      }
}

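/* Sketch of the kind of value the depth limit above guards against
   (illustration only, not GCC-internal code): the bind value for X
   below nests more than four rtl levels deep, so the walk would split
   the inner subexpressions out into separate DEBUG_EXPRs instead of
   emitting one huge location expression.  */
#if 0
static int
deeply_nested_debug_value_example (int a, int b, int c, int d, int e)
{
  int x = ((((a + b) * c) - d) ^ e) | 1;	/* Deeply nested RHS.  */
  return x;
}
#endif
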
/* Expand the _LOCs in debug insns.  We run this after expanding all
   regular insns, so that any variables referenced in the function
   will have their DECL_RTLs set.  */

static void
expand_debug_locations (void)
{
  rtx_insn *insn;
  rtx_insn *last = get_last_insn ();
  int save_strict_alias = flag_strict_aliasing;

  /* New alias sets while setting up memory attributes cause
     -fcompare-debug failures, even though they don't bring about any
     codegen changes.  */
  flag_strict_aliasing = 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (DEBUG_BIND_INSN_P (insn))
      {
	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
	rtx val;
	rtx_insn *prev_insn, *insn2;
	machine_mode mode;

	if (value == NULL_TREE)
	  val = NULL_RTX;
	else
	  {
	    if (INSN_VAR_LOCATION_STATUS (insn)
		== VAR_INIT_STATUS_UNINITIALIZED)
	      val = expand_debug_source_expr (value);
	    /* The avoid_deep_ter_for_debug function inserts
	       debug bind stmts after SSA_NAME definition, with the
	       SSA_NAME as the whole bind location.  Temporarily disable
	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
	       being defined in this DEBUG_INSN.  */
	    else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
	      {
		tree *slot = deep_ter_debug_map->get (value);
		if (slot)
		  {
		    if (*slot == INSN_VAR_LOCATION_DECL (insn))
		      *slot = NULL_TREE;
		    else
		      slot = NULL;
		  }
		val = expand_debug_expr (value);
		if (slot)
		  *slot = INSN_VAR_LOCATION_DECL (insn);
	      }
	    else
	      val = expand_debug_expr (value);
	    gcc_assert (last == get_last_insn ());
	  }

	if (!val)
	  val = gen_rtx_UNKNOWN_VAR_LOC ();
	else
	  {
	    mode = GET_MODE (INSN_VAR_LOCATION (insn));

	    gcc_assert (mode == GET_MODE (val)
			|| (GET_MODE (val) == VOIDmode
			    && (CONST_SCALAR_INT_P (val)
				|| GET_CODE (val) == CONST_FIXED
				|| GET_CODE (val) == LABEL_REF)));
	  }

	INSN_VAR_LOCATION_LOC (insn) = val;
	prev_insn = PREV_INSN (insn);
	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
      }

  flag_strict_aliasing = save_strict_alias;
}

/* Swap the operands of commutative operations so that the more
   expensive one is expanded first.  */

static void
reorder_operands (basic_block bb)
{
  unsigned int *lattice;  /* Hold cost of each statement.  */
  unsigned int i = 0, n = 0;
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple *stmt;
  bool swap;
  tree op0, op1;
  ssa_op_iter iter;
  use_operand_p use_p;
  gimple *def0, *def1;

  /* Compute cost of each statement using estimate_num_insns.  */
  stmts = bb_seq (bb);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (!is_gimple_debug (stmt))
	gimple_set_uid (stmt, n++);
    }
  lattice = XNEWVEC (unsigned int, n);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      unsigned cost;
      stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
	continue;
      cost = estimate_num_insns (stmt, &eni_size_weights);
      lattice[i] = cost;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
	{
	  tree use = USE_FROM_PTR (use_p);
	  gimple *def_stmt;
	  if (TREE_CODE (use) != SSA_NAME)
	    continue;
	  def_stmt = get_gimple_for_ssa_name (use);
	  if (!def_stmt)
	    continue;
	  lattice[i] += lattice[gimple_uid (def_stmt)];
	}
      i++;
      if (!is_gimple_assign (stmt)
	  || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
	continue;
      op0 = gimple_op (stmt, 1);
      op1 = gimple_op (stmt, 2);
      if (TREE_CODE (op0) != SSA_NAME
	  || TREE_CODE (op1) != SSA_NAME)
	continue;
      /* Swap operands if the second one is more expensive.  */
      def0 = get_gimple_for_ssa_name (op0);
      def1 = get_gimple_for_ssa_name (op1);
      if (!def1)
	continue;
      swap = false;
      if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
	swap = true;
      if (swap)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Swap operands in stmt:\n");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
		       def0 ? lattice[gimple_uid (def0)] : 0,
		       lattice[gimple_uid (def1)]);
	    }
	  swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
			     gimple_assign_rhs2_ptr (stmt));
	}
    }
  XDELETE (lattice);
}

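/* Worked example (hypothetical, for illustration only): in the return
   expression below the right-hand operand of the final commutative
   PLUS carries the larger accumulated cost, so reorder_operands would
   swap the two SSA operands and the expensive chain gets expanded
   first.  */
#if 0
static int
reorder_operands_example (int a, int b, int c, int d)
{
  int cheap = a + 1;
  int expensive = (b * c) / (d | 1);
  return cheap + expensive;	/* Operands likely swapped at expansion.  */
}
#endif
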
/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
{
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple *stmt = NULL;
  rtx_note *note = NULL;
  rtx_insn *last;
  edge e;
  edge_iterator ei;

  if (dump_file)
    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
	     bb->index);

  /* Note that since we are now transitioning from GIMPLE to RTL, we
     cannot use the gsi_*_bb() routines because they expect the basic
     block to be in GIMPLE, instead of RTL.  Therefore, we need to
     access the BB sequence directly.  */
  if (optimize)
    reorder_operands (bb);
  stmts = bb_seq (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
  rtl_profile_for_bb (bb);
  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
  gsi = gsi_last (stmts);
  if (!gsi_end_p (gsi)
      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    {
      greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));

      if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && !gimple_return_retval (ret_stmt))
	{
	  gsi_remove (&gsi, false);
	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
	}
    }

  gsi = gsi_start (stmts);
  if (!gsi_end_p (gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	stmt = NULL;
    }

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);

  if (stmt || elt)
    {
      gcc_checking_assert (!note);
      last = get_last_insn ();

      if (stmt)
	{
	  expand_gimple_stmt (stmt);
	  gsi_next (&gsi);
	}

      if (elt)
	emit_label (*elt);

      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      gcc_assert (LABEL_P (BB_HEAD (bb)));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_gimple_stmt (stmt, last);
    }
  else
    BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);

  if (note)
    NOTE_BASIC_BLOCK (note) = bb;

  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    {
      basic_block new_bb;

      stmt = gsi_stmt (gsi);

      /* If this statement is a non-debug one, and we generate debug
	 insns, then this one might be the last real use of a TERed
	 SSA_NAME, but where there are still some debug uses further
	 down.  Expanding the current SSA name in such further debug
	 uses by their RHS might lead to wrong debug info, as coalescing
	 might make the operands of such RHS be placed into the same
	 pseudo as something else.  Like so:
	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
	   use(a_1);
	   a_2 = ...
	   #DEBUG ... => a_1
	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
	 If we now would expand a_1 by its RHS (a_0 + 1) in the debug use,
	 the write to a_2 would actually have clobbered the place which
	 formerly held a_0.

	 So, instead of that, we recognize the situation, and generate
	 debug temporaries at the last real use of TERed SSA names:
	   a_1 = a_0 + 1;
	   #DEBUG #D1 => a_1
	   use(a_1);
	   a_2 = ...
	   #DEBUG ... => #D1
	 */
      if (MAY_HAVE_DEBUG_BIND_INSNS
	  && SA.values
	  && !is_gimple_debug (stmt))
	{
	  ssa_op_iter iter;
	  tree op;
	  gimple *def;

	  location_t sloc = curr_insn_location ();

	  /* Look for SSA names that have their last use here (TERed
	     names always have only one real use).  */
	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	    if ((def = get_gimple_for_ssa_name (op)))
	      {
		imm_use_iterator imm_iter;
		use_operand_p use_p;
		bool have_debug_uses = false;

		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
		  {
		    if (gimple_debug_bind_p (USE_STMT (use_p)))
		      {
			have_debug_uses = true;
			break;
		      }
		  }

		if (have_debug_uses)
		  {
		    /* OP is a TERed SSA name, with DEF its defining
		       statement, and where OP is used in further debug
		       instructions.  Generate a debug temporary, and
		       replace all uses of OP in debug insns with that
		       temporary.  */
		    gimple *debugstmt;
		    tree value = gimple_assign_rhs_to_tree (def);
		    tree vexpr = make_node (DEBUG_EXPR_DECL);
		    rtx val;
		    machine_mode mode;

		    set_curr_insn_location (gimple_location (def));

		    DECL_ARTIFICIAL (vexpr) = 1;
		    TREE_TYPE (vexpr) = TREE_TYPE (value);
		    if (DECL_P (value))
		      mode = DECL_MODE (value);
		    else
		      mode = TYPE_MODE (TREE_TYPE (value));
		    SET_DECL_MODE (vexpr, mode);

		    val = gen_rtx_VAR_LOCATION
			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);

		    emit_debug_insn (val);

		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
		      {
			if (!gimple_debug_bind_p (debugstmt))
			  continue;

			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
			  SET_USE (use_p, vexpr);

			update_stmt (debugstmt);
		      }
		  }
	      }
	  set_curr_insn_location (sloc);
	}

      currently_expanding_gimple_stmt = stmt;

      /* Expand this statement, then evaluate the resulting RTL and
	 fixup the CFG accordingly.  */
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
	  if (new_bb)
	    return new_bb;
	}
      else if (is_gimple_debug (stmt))
	{
	  location_t sloc = curr_insn_location ();
	  gimple_stmt_iterator nsi = gsi;

	  for (;;)
	    {
	      tree var;
	      tree value = NULL_TREE;
	      rtx val = NULL_RTX;
	      machine_mode mode;

	      if (!gimple_debug_nonbind_marker_p (stmt))
		{
		  if (gimple_debug_bind_p (stmt))
		    {
		      var = gimple_debug_bind_get_var (stmt);

		      if (TREE_CODE (var) != DEBUG_EXPR_DECL
			  && TREE_CODE (var) != LABEL_DECL
			  && !target_for_debug_bind (var))
			goto delink_debug_stmt;

		      if (DECL_P (var))
			mode = DECL_MODE (var);
		      else
			mode = TYPE_MODE (TREE_TYPE (var));

		      if (gimple_debug_bind_has_value_p (stmt))
			value = gimple_debug_bind_get_value (stmt);

		      val = gen_rtx_VAR_LOCATION
			(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
		    }
		  else if (gimple_debug_source_bind_p (stmt))
		    {
		      var = gimple_debug_source_bind_get_var (stmt);

		      value = gimple_debug_source_bind_get_value (stmt);

		      mode = DECL_MODE (var);

		      val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
						  VAR_INIT_STATUS_UNINITIALIZED);
		    }
		  else
		    gcc_unreachable ();
		}
	      /* If this function was first compiled with markers
		 enabled, but they're now disabled (e.g. LTO), drop
		 them on the floor.  */
	      else if (gimple_debug_nonbind_marker_p (stmt)
		       && !MAY_HAVE_DEBUG_MARKER_INSNS)
		goto delink_debug_stmt;
	      else if (gimple_debug_begin_stmt_p (stmt))
		val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
	      else if (gimple_debug_inline_entry_p (stmt))
		{
		  tree block = gimple_block (stmt);

		  if (block)
		    val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
		  else
		    goto delink_debug_stmt;
		}
	      else
		gcc_unreachable ();

	      last = get_last_insn ();

	      set_curr_insn_location (gimple_location (stmt));

	      emit_debug_insn (val);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  /* We can't dump the insn with a TREE where an RTX
		     is expected.  */
		  if (GET_CODE (val) == VAR_LOCATION)
		    {
		      gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
		      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
		    }
		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
		  if (GET_CODE (val) == VAR_LOCATION)
		    PAT_VAR_LOCATION_LOC (val) = (rtx)value;
		}

	    delink_debug_stmt:
	      /* In order not to generate too many debug temporaries,
		 we delink all uses of debug statements we already expanded.
		 Therefore debug statements between definition and real
		 use of TERed SSA names will continue to use the SSA name,
		 and not be replaced with debug temps.  */
	      delink_stmt_imm_use (stmt);

	      gsi = nsi;
	      gsi_next (&nsi);
	      if (gsi_end_p (nsi))
		break;
	      stmt = gsi_stmt (nsi);
	      if (!is_gimple_debug (stmt))
		break;
	    }

	  set_curr_insn_location (sloc);
	}
      else
	{
	  gcall *call_stmt = dyn_cast <gcall *> (stmt);
	  if (call_stmt
	      && gimple_call_tail_p (call_stmt)
	      && disable_tail_calls)
	    gimple_call_set_tail (call_stmt, false);

	  if (call_stmt && gimple_call_tail_p (call_stmt))
	    {
	      bool can_fallthru;
	      new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
	      if (new_bb)
		{
		  if (can_fallthru)
		    bb = new_bb;
		  else
		    return new_bb;
		}
	    }
	  else
	    {
	      def_operand_p def_p;
	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);

	      if (def_p != NULL)
		{
		  /* Ignore this stmt if it is in the list of
		     replaceable expressions.  */
		  if (SA.values
		      && bitmap_bit_p (SA.values,
				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
		    continue;
		}
	      last = expand_gimple_stmt (stmt);
	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
	    }
	}
    }

  currently_expanding_gimple_stmt = NULL;

  /* Expand implicit goto and convert goto_locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->goto_locus != UNKNOWN_LOCATION)
	set_curr_insn_location (e->goto_locus);
      if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
	{
	  emit_jump (label_rtx_for_bb (e->dest));
	  e->flags &= ~EDGE_FALLTHRU;
	}
    }

  /* Expanded RTL can create a jump in the last instruction of the block.
     This might later be assumed to be a jump to the successor and break
     edge insertion.  We need to insert a dummy move to prevent this.
     PR41440.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
      && (last = get_last_insn ())
      && (JUMP_P (last)
	  || (DEBUG_INSN_P (last)
	      && JUMP_P (prev_nondebug_insn (last)))))
    {
      rtx dummy = gen_reg_rtx (SImode);
      emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}


/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);

  /* When the entry edge points to the first basic block, we don't need a
     jump; otherwise we have to jump into the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
    {
      tree label = gimple_block_label (e->dest);

      emit_jump (jump_target_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
				   get_last_insn (),
				   ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_single_succ_edge (init_block, first_block, flags);
    }
  else
    e = make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
			       EDGE_FALLTHRU);

  update_bb_for_insn (init_block);
  return init_block;
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
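
/* For illustration only (not part of the original file): in a function
   with nested lexical scopes like the one below, set_block_levels gives
   the outermost function BLOCK number 0, the block declaring TMP
   number 1, and the block declaring INNER number 2, so change_scope can
   find common ancestors by walking up from the deeper block.  */
#if 0
static int
block_levels_example (int x)
{
  {				/* BLOCK_NUMBER == 1 */
    int tmp = x + 1;
    {				/* BLOCK_NUMBER == 2 */
      int inner = tmp * 2;
      x = inner;
    }
  }
  return x;
}
#endif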

/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx_insn *head = get_last_insn ();
  rtx_insn *end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;
  basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
  rtx_insn *orig_end = BB_END (prev_bb);

  rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  /* While emitting the function end we could move the end of the last
     basic block.  */
  BB_END (prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  /* But make sure exit_block starts with RETURN_LABEL, otherwise the
     bb count bookkeeping will be confused.  Any instructions before that
     label are emitted for the case where PREV_BB falls through into the
     exit block, so append those instructions to prev_bb in that case.  */
  if (NEXT_INSN (head) != return_label)
    {
      while (NEXT_INSN (head) != return_label)
	{
	  if (!NOTE_P (NEXT_INSN (head)))
	    BB_END (prev_bb) = NEXT_INSN (head);
	  head = NEXT_INSN (head);
	}
    }
  exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
  exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
      if (!(e->flags & EDGE_ABNORMAL))
	redirect_edge_succ (e, exit_block);
      else
	ix++;
    }

  e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
			     EDGE_FALLTHRU);
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e2 != e)
      {
	exit_block->count -= e2->count ();
      }
  update_bb_for_insn (exit_block);
}

/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
				   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
	      && (!TREE_OPERAND (t, 2)
		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || (TREE_CODE (t) == COMPONENT_REF
		 && (!TREE_OPERAND (t, 2)
		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || TREE_CODE (t) == BIT_FIELD_REF
	     || TREE_CODE (t) == REALPART_EXPR
	     || TREE_CODE (t) == IMAGPART_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || CONVERT_EXPR_P (t))
	t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  t = get_base_address (t);
	  if (t && DECL_P (t)
	      && DECL_MODE (t) != BLKmode)
	    TREE_ADDRESSABLE (t) = 1;
	}

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple *stmt = gsi_stmt (gsi);
	if (!is_gimple_debug (stmt))
	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}

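/* A minimal illustration (an assumption for exposition, not
   GCC-internal code) of what the walk above catches: ARR is small
   enough that it could otherwise be promoted into a register, but the
   variable index I forces it to become TREE_ADDRESSABLE and live in
   memory.  */
#if 0
static int
nonconstant_array_ref_example (int i)
{
  int arr[2] = { 1, 2 };	/* Could fit in a single register pair.  */
  return arr[i & 1];		/* Non-constant index: ARR marked
				   addressable.  */
}
#endif
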
/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  The local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update the incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to the incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (cfun->can_throw_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
	      <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (drap_rtx != NULL)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up the REG_EQUIV note if DRAP is
	 needed.  */
      fixup_tail_calls ();
    }
}

static void
expand_main_function (void)
{
#if (defined(INVOKE__main)				\
     || (!defined(HAS_INIT_SECTION)			\
	 && !defined(INIT_SECTION_ASM_OP)		\
	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
#endif
}

/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

static void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  x = expand_normal (crtl->stack_protect_guard);
  if (guard_decl)
    y = expand_normal (guard_decl);
  else
    y = const0_rtx;

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (targetm.have_stack_protect_set ())
    if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
      {
	emit_insn (insn);
	return;
      }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}

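/* Illustrative only (hypothetical, not part of the original file): a
   function like the one below, compiled with -fstack-protector, gets
   the prologue above (the guard value copied into the frame) plus a
   matching epilogue check; the local char buffer is what makes it
   eligible under the default heuristics.  */
#if 0
static void
stack_protect_example (const char *s)
{
  char buf[64];
  __builtin_strcpy (buf, s);	/* The potential overflow the guard
				   is meant to detect.  */
  __builtin_puts (buf);
}
#endif
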
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate CFG during
   the expansion.  */

namespace {

const pass_data pass_data_expand =
{
  RTL_PASS, /* type */
  "expand", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_EXPAND, /* tv_id */
  ( PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx
    | PROP_gimple_lvec
    | PROP_gimple_lva), /* properties_required */
  PROP_rtl, /* properties_provided */
  ( PROP_ssa | PROP_trees ), /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_expand : public rtl_opt_pass
{
public:
  pass_expand (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_expand, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_expand

unsigned int
pass_expand::execute (function *fun)
{
  basic_block bb, init_block;
  edge_iterator ei;
  edge e;
  rtx_insn *var_seq, *var_ret_seq;
  unsigned i;

  timevar_push (TV_OUT_OF_SSA);
  rewrite_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);
  SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);

  if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
    {
      gimple_stmt_iterator gsi;
      FOR_EACH_BB_FN (bb, cfun)
	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	  if (gimple_debug_bind_p (gsi_stmt (gsi)))
	    avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
    }

  /* Make sure all values used by the optimization passes have sane
     defaults.  */
  reg_renumber = 0;

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;
  /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
  free_dominance_info (CDI_DOMINATORS);

  rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));

  insn_locations_init ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
	set_curr_insn_location
	  (DECL_SOURCE_LOCATION (current_function_decl));
      else
	set_curr_insn_location (fun->function_start_locus);
    }
  else
    set_curr_insn_location (UNKNOWN_LOCATION);
  prologue_location = curr_insn_location ();

#ifdef INSN_SCHEDULING
  init_sched_attrs ();
#endif

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->init_stack_alignment ();
  fun->cfg->max_jumptable_ents = 0;

  /* Resolve the function section.  Some targets, like ARM EABI, rely on
     knowledge of the function section at expansion time to predict the
     distance of calls.  */
  resolve_unique_section (current_function_decl, 0, flag_function_sections);

  /* Expand the variables recorded during gimple lowering.  */
  timevar_push (TV_VAR_EXPAND);
  start_sequence ();

  var_ret_seq = expand_used_vars ();

  var_seq = get_insns ();
  end_sequence ();
  timevar_pop (TV_VAR_EXPAND);

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (fun->calls_alloca)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting local variables: "
		 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting function: "
		 "all local arrays are less than %d bytes long",
		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
  if (var_seq)
    {
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
	 before parm_birth_insn.  We've just inserted an alloca call.
	 Adjust the pointer to match.  */
      parm_birth_insn = var_seq;
    }

  /* Now propagate the RTL assignment of each partition to the
     underlying var of each SSA_NAME.  */
  tree name;

  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      /* We might have generated new SSA names in
	 update_alias_info_with_stack_vars.  They will have NULL
	 defining statements, and won't be part of the partitioning,
	 so ignore those.  */
      if (!SSA_NAME_DEF_STMT (name))
	continue;

      adjust_one_expanded_partition_var (name);
    }

  /* Clean up RTL of variables that straddle across multiple
     partitions, and check that the rtl of any PARM_DECLs that are not
     cleaned up is that of their default defs.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      int part;

      /* We might have generated new SSA names in
	 update_alias_info_with_stack_vars.  They will have NULL
	 defining statements, and won't be part of the partitioning,
	 so ignore those.  */
      if (!SSA_NAME_DEF_STMT (name))
	continue;
      part = var_to_partition (SA.map, name);
      if (part == NO_PARTITION)
	continue;

      /* If this decl was marked as living in multiple places, reset
	 this now to NULL.  */
      tree var = SSA_NAME_VAR (name);
      if (var && DECL_RTL_IF_SET (var) == pc_rtx)
	SET_DECL_RTL (var, NULL);
      /* Check that the pseudos chosen by assign_parms are those of
	 the corresponding default defs.  */
      else if (SSA_NAME_IS_DEFAULT_DEF (name)
	       && (TREE_CODE (var) == PARM_DECL
		   || TREE_CODE (var) == RESULT_DECL))
	{
	  rtx in = DECL_RTL_IF_SET (var);
	  gcc_assert (in);
	  rtx out = SA.partition_to_pseudo[part];
	  gcc_assert (in == out);

	  /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
	     those expected by debug backends for each parm and for
	     the result.  This is particularly important for stabs,
	     whose register elimination from parm's DECL_RTL may cause
	     -fcompare-debug differences as SET_DECL_RTL changes reg's
	     attrs.  So, make sure the RTL already has the parm as the
	     EXPR, so that it won't change.  */
	  SET_DECL_RTL (var, NULL_RTX);
	  if (MEM_P (in))
	    set_mem_attributes (in, var, true);
	  SET_DECL_RTL (var, in);
	}
    }

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
    stack_protect_prologue ();

  expand_phi_nodes (&SA);

  /* Release any stale SSA redirection data.  */
  redirect_edge_var_map_empty ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  /* If the function has too many markers, drop them while expanding.  */
  if (cfun->debug_marker_count
      >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
    cfun->debug_nonbind_markers = false;

  lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
		  next_bb)
    bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    expand_debug_locations ();

  if (deep_ter_debug_map)
    {
      delete deep_ter_debug_map;
      deep_ter_debug_map = NULL;
    }

  /* Free stuff we no longer need after GIMPLE optimizations.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  delete_tree_cfg_annotations (fun);

  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  fun->gimple_df->in_ssa_p = false;
  loops_state_clear (LOOP_CLOSED_SSA);

  /* Expansion is used by optimization passes too; set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  delete lab_rtx_for_bb;
  free_histograms (fun);

  construct_exit_block ();
  insn_locations_finalize ();

  if (var_ret_seq)
    {
      rtx_insn *after = return_label;
      rtx_insn *next = NEXT_INSN (after);
      if (next && NOTE_INSN_BASIC_BLOCK_P (next))
	after = next;
      emit_insn_after (var_ret_seq, after);
    }

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (fun, NULL);

  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
     split edges, which edge insertions might do.  */
  rebuild_jump_labels (get_insns ());

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->insns.r)
	    {
	      rebuild_jump_labels_chain (e->insns.r);
	      /* Put insns after parm birth, but before
		 NOTE_INSN_FUNCTION_BEG.  */
	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
		  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
		{
		  rtx_insn *insns = e->insns.r;
		  e->insns.r = NULL;
		  if (NOTE_P (parm_birth_insn)
		      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
		    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
		  else
		    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
		}
	      else
		commit_one_edge_insertion (e);
	    }
	  else
	    ei_next (&ei);
	}
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  flush_mark_addressable_queue ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
	  e->flags &= ~EDGE_EXECUTABLE;

	  /* At the moment not all abnormal edges match the RTL
	     representation.  It is safe to remove them here as
	     find_many_sub_basic_blocks will rediscover them.
	     In the future we should get this fixed properly.  */
	  if ((e->flags & EDGE_ABNORMAL)
	      && !(e->flags & EDGE_SIBCALL))
	    remove_edge (e);
	  else
	    ei_next (&ei);
	}
    }

  auto_sbitmap blocks (last_basic_block_for_fn (fun));
  bitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();

  expand_stack_alignment ();

  /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
     function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();

  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (fun->eh->region_tree != NULL)
    finish_eh_generation ();

  /* BB subdivision may have created basic blocks that are only reachable
     from unlikely bbs but not marked as such in the profile.  */
  if (optimize)
    propagate_unlikely_bbs_forward ();

  /* Remove unreachable blocks, otherwise we cannot compute dominators
     which are needed for loop state verification.  As a side-effect
     this also compacts blocks.
     ??? We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);

  checking_verify_flow_info ();

  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();

  /* And finally unshare all RTL.  */
  unshare_all_rtl ();

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (fun->gimple_df->tm_restart)
    fun->gimple_df->tm_restart = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (fun->decl), 0);
  default_rtl_profile ();

  /* For -dx discard loops now, otherwise IL verify in clean_state will
     ICE.  */
  if (rtl_dump_and_exit)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_finalize ();
    }

  timevar_pop (TV_POST_EXPAND);

  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_expand (gcc::context *ctxt)
{
  return new pass_expand (ctxt);
}