0ec80471 1/* A pass for lowering trees to RTL.
fbd26352 2 Copyright (C) 2004-2019 Free Software Foundation, Inc.
0ec80471 3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8c4c00c1 8the Free Software Foundation; either version 3, or (at your option)
0ec80471 9any later version.
10
11GCC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
0ec80471 19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
9ef16211 23#include "backend.h"
7c29e30e 24#include "target.h"
25#include "rtl.h"
9ef16211 26#include "tree.h"
27#include "gimple.h"
7c29e30e 28#include "cfghooks.h"
29#include "tree-pass.h"
ad7b10a2 30#include "memmodel.h"
7c29e30e 31#include "tm_p.h"
9ef16211 32#include "ssa.h"
7c29e30e 33#include "optabs.h"
34#include "regs.h" /* For reg_renumber. */
35#include "emit-rtl.h"
36#include "recog.h"
37#include "cgraph.h"
38#include "diagnostic.h"
b20a8bb4 39#include "fold-const.h"
9ed99284 40#include "varasm.h"
41#include "stor-layout.h"
42#include "stmt.h"
43#include "print-tree.h"
94ea8568 44#include "cfgrtl.h"
45#include "cfganal.h"
46#include "cfgbuild.h"
47#include "cfgcleanup.h"
d53441c8 48#include "dojump.h"
49#include "explow.h"
50#include "calls.h"
0ec80471 51#include "expr.h"
bc61cadb 52#include "internal-fn.h"
53#include "tree-eh.h"
dcf1a1ec 54#include "gimple-iterator.h"
ea804f86 55#include "gimple-expr.h"
dcf1a1ec 56#include "gimple-walk.h"
073c1fd5 57#include "tree-cfg.h"
073c1fd5 58#include "tree-dfa.h"
69ee5dbb 59#include "tree-ssa.h"
0ec80471 60#include "except.h"
ce084dfc 61#include "gimple-pretty-print.h"
60d03123 62#include "toplev.h"
77fce4cd 63#include "debug.h"
f1a0edff 64#include "params.h"
5a02d67b 65#include "tree-inline.h"
4992f399 66#include "value-prof.h"
b23fb4cb 67#include "tree-ssa-live.h"
f7373a91 68#include "tree-outof-ssa.h"
79f958cb 69#include "cfgloop.h"
fdc86f97 70#include "insn-attr.h" /* For INSN_SCHEDULING. */
30a86690 71#include "stringpool.h"
72#include "attribs.h"
3c919612 73#include "asan.h"
424a4a92 74#include "tree-ssa-address.h"
0e80b01d 75#include "output.h"
f7715905 76#include "builtins.h"
75a70cf9 77
d3211b7e 78/* Some systems use __main in a way incompatible with its use in gcc; in these
79 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
80 give the same symbol without quotes for an alternative entry point. You
81 must define both, or neither. */
82#ifndef NAME__MAIN
83#define NAME__MAIN "__main"
84#endif
85
a8dd994c 86/* This variable holds information helping the rewriting of SSA trees
87 into RTL. */
88struct ssaexpand SA;
89
8cee8dc0 90/* This variable holds the currently expanded gimple statement for purposes
 91 of communicating the profile info to the builtin expanders. */
42acab1c 92gimple *currently_expanding_gimple_stmt;
8cee8dc0 93
841424cc 94static rtx expand_debug_expr (tree);
95
94f92c36 96static bool defer_stack_allocation (tree, bool);
97
b2df3bbf 98static void record_alignment_for_reg_var (unsigned int);
99
75a70cf9 100/* Return an expression tree corresponding to the RHS of GIMPLE
101 statement STMT. */
102
103tree
42acab1c 104gimple_assign_rhs_to_tree (gimple *stmt)
75a70cf9 105{
106 tree t;
f4e36c33 107 enum gimple_rhs_class grhs_class;
48e1416a 108
f4e36c33 109 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
75a70cf9 110
00f4f705 111 if (grhs_class == GIMPLE_TERNARY_RHS)
112 t = build3 (gimple_assign_rhs_code (stmt),
113 TREE_TYPE (gimple_assign_lhs (stmt)),
114 gimple_assign_rhs1 (stmt),
115 gimple_assign_rhs2 (stmt),
116 gimple_assign_rhs3 (stmt));
117 else if (grhs_class == GIMPLE_BINARY_RHS)
75a70cf9 118 t = build2 (gimple_assign_rhs_code (stmt),
119 TREE_TYPE (gimple_assign_lhs (stmt)),
120 gimple_assign_rhs1 (stmt),
121 gimple_assign_rhs2 (stmt));
f4e36c33 122 else if (grhs_class == GIMPLE_UNARY_RHS)
75a70cf9 123 t = build1 (gimple_assign_rhs_code (stmt),
124 TREE_TYPE (gimple_assign_lhs (stmt)),
125 gimple_assign_rhs1 (stmt));
f4e36c33 126 else if (grhs_class == GIMPLE_SINGLE_RHS)
9845d120 127 {
128 t = gimple_assign_rhs1 (stmt);
129 /* Avoid modifying this tree in place below. */
dfecf957 130 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
131 && gimple_location (stmt) != EXPR_LOCATION (t))
132 || (gimple_block (stmt)
133 && currently_expanding_to_rtl
5169661d 134 && EXPR_P (t)))
9845d120 135 t = copy_node (t);
136 }
75a70cf9 137 else
138 gcc_unreachable ();
139
efbcb6de 140 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
141 SET_EXPR_LOCATION (t, gimple_location (stmt));
142
75a70cf9 143 return t;
144}
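
/* Illustrative sketch (editor's addition, not part of the pass): for a
   hypothetical GIMPLE assignment

       x_1 = a_2 + b_3;

   gimple_expr_code yields PLUS_EXPR, which classifies as GIMPLE_BINARY_RHS,
   so gimple_assign_rhs_to_tree effectively builds

       t = build2 (PLUS_EXPR, TREE_TYPE (x_1), a_2, b_3);

   and then copies the statement's location onto T.  The SSA names x_1, a_2
   and b_3 exist only for this example.  */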
145
75a70cf9 146
60d03123 147#ifndef STACK_ALIGNMENT_NEEDED
148#define STACK_ALIGNMENT_NEEDED 1
149#endif
150
a8dd994c 151#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
152
94f92c36 153/* Choose either CUR or NEXT as the leader DECL for a partition.
 154 Prefer ignored decls, to simplify debug dumps and reduce the ambiguity
 155 that arises when the same user variable ends up in multiple partitions
 156 (this is less likely for compiler-introduced temps). */
157
158static tree
159leader_merge (tree cur, tree next)
160{
161 if (cur == NULL || cur == next)
162 return next;
163
164 if (DECL_P (cur) && DECL_IGNORED_P (cur))
165 return cur;
166
167 if (DECL_P (next) && DECL_IGNORED_P (next))
168 return next;
169
170 return cur;
171}
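
/* Example (editor's addition, a sketch of the intent only): when a partition
   currently led by a user variable is merged with one led by a
   DECL_IGNORED_P compiler temporary, leader_merge above keeps the ignored
   temporary as leader, so the user variable's name is not stamped onto
   storage that may end up shared by several partitions.  */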
172
a8dd994c 173/* Associate declaration T with storage space X. If T is not an
 174 SSA name, this is exactly SET_DECL_RTL; otherwise make the
175 partition of T associated with X. */
176static inline void
177set_rtl (tree t, rtx x)
178{
b2df3bbf 179 gcc_checking_assert (!x
180 || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
181 || (use_register_for_decl (t)
182 ? (REG_P (x)
183 || (GET_CODE (x) == CONCAT
184 && (REG_P (XEXP (x, 0))
185 || SUBREG_P (XEXP (x, 0)))
186 && (REG_P (XEXP (x, 1))
187 || SUBREG_P (XEXP (x, 1))))
796bb135 188 /* We need to accept PARALLELs for RESULT_DECLs
189 because of vector types with BLKmode returned
190 in multiple registers, but they are supposed
191 to be uncoalesced. */
b2df3bbf 192 || (GET_CODE (x) == PARALLEL
193 && SSAVAR (t)
194 && TREE_CODE (SSAVAR (t)) == RESULT_DECL
796bb135 195 && (GET_MODE (x) == BLKmode
196 || !flag_tree_coalesce_vars)))
b2df3bbf 197 : (MEM_P (x) || x == pc_rtx
198 || (GET_CODE (x) == CONCAT
199 && MEM_P (XEXP (x, 0))
200 && MEM_P (XEXP (x, 1))))));
201 /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
202 RESULT_DECLs has the expected mode. For memory, we accept
203 unpromoted modes, since that's what we're likely to get. For
204 PARM_DECLs and RESULT_DECLs, we'll have been called by
205 set_parm_rtl, which will give us the default def, so we don't
206 have to compute it ourselves. For RESULT_DECLs, we accept mode
3809b03c 207 mismatches too, as long as we have BLKmode or are not coalescing
208 across variables, so that we don't reject BLKmode PARALLELs or
209 unpromoted REGs. */
b2df3bbf 210 gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
3809b03c 211 || (SSAVAR (t)
212 && TREE_CODE (SSAVAR (t)) == RESULT_DECL
213 && (promote_ssa_mode (t, NULL) == BLKmode
214 || !flag_tree_coalesce_vars))
b2df3bbf 215 || !use_register_for_decl (t)
216 || GET_MODE (x) == promote_ssa_mode (t, NULL));
217
218 if (x)
94f92c36 219 {
220 bool skip = false;
221 tree cur = NULL_TREE;
b2df3bbf 222 rtx xm = x;
223
224 retry:
225 if (MEM_P (xm))
226 cur = MEM_EXPR (xm);
227 else if (REG_P (xm))
228 cur = REG_EXPR (xm);
229 else if (SUBREG_P (xm))
230 {
231 gcc_assert (subreg_lowpart_p (xm));
232 xm = SUBREG_REG (xm);
233 goto retry;
234 }
235 else if (GET_CODE (xm) == CONCAT)
236 {
237 xm = XEXP (xm, 0);
238 goto retry;
239 }
240 else if (GET_CODE (xm) == PARALLEL)
241 {
242 xm = XVECEXP (xm, 0, 0);
243 gcc_assert (GET_CODE (xm) == EXPR_LIST);
244 xm = XEXP (xm, 0);
245 goto retry;
246 }
247 else if (xm == pc_rtx)
94f92c36 248 skip = true;
249 else
250 gcc_unreachable ();
251
b2df3bbf 252 tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);
94f92c36 253
254 if (cur != next)
255 {
256 if (MEM_P (x))
b2df3bbf 257 set_mem_attributes (x,
258 next && TREE_CODE (next) == SSA_NAME
259 ? TREE_TYPE (next)
260 : next, true);
94f92c36 261 else
262 set_reg_attrs_for_decl_rtl (next, x);
263 }
264 }
265
a8dd994c 266 if (TREE_CODE (t) == SSA_NAME)
267 {
94f92c36 268 int part = var_to_partition (SA.map, t);
269 if (part != NO_PARTITION)
270 {
271 if (SA.partition_to_pseudo[part])
272 gcc_assert (SA.partition_to_pseudo[part] == x);
273 else if (x != pc_rtx)
274 SA.partition_to_pseudo[part] = x;
275 }
276 /* For the benefit of debug information at -O0 (where
277 vartracking doesn't run) record the place also in the base
b2df3bbf 278 DECL. For PARMs and RESULTs, do so only when setting the
279 default def. */
280 if (x && x != pc_rtx && SSA_NAME_VAR (t)
281 && (VAR_P (SSA_NAME_VAR (t))
282 || SSA_NAME_IS_DEFAULT_DEF (t)))
e32b531f 283 {
284 tree var = SSA_NAME_VAR (t);
285 /* If we don't yet have something recorded, just record it now. */
286 if (!DECL_RTL_SET_P (var))
287 SET_DECL_RTL (var, x);
3c25489e 288 /* If we have it set already to "multiple places" don't
e32b531f 289 change this. */
290 else if (DECL_RTL (var) == pc_rtx)
291 ;
292 /* If we have something recorded and it's not the same place
293 as we want to record now, we have multiple partitions for the
294 same base variable, with different places. We can't just
 295 randomly choose one, hence we have to say that we don't know.
296 This only happens with optimization, and there var-tracking
297 will figure out the right thing. */
298 else if (DECL_RTL (var) != x)
299 SET_DECL_RTL (var, pc_rtx);
300 }
a8dd994c 301 }
302 else
303 SET_DECL_RTL (t, x);
304}
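
/* Example (editor's addition, hypothetical names): if two SSA names i_1 and
   i_5 of the same user variable "i" land in different partitions that
   receive distinct pseudos, the second call to set_rtl above finds
   DECL_RTL (i) already pointing somewhere else and resets it to pc_rtx, the
   "multiple places" marker; with optimization, var-tracking later recovers
   the per-range locations for the debug info.  */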
60d03123 305
306/* This structure holds data relevant to one variable that will be
307 placed in a stack slot. */
308struct stack_var
309{
310 /* The Variable. */
311 tree decl;
312
60d03123 313 /* Initially, the size of the variable. Later, the size of the partition,
 314 if this variable becomes its partition's representative. */
87ff83f0 315 poly_uint64 size;
60d03123 316
 317 /* The *byte* alignment required for this variable. Or, as with the
318 size, the alignment for this partition. */
319 unsigned int alignb;
320
321 /* The partition representative. */
322 size_t representative;
323
324 /* The next stack variable in the partition, or EOC. */
325 size_t next;
dfa054ff 326
327 /* The numbers of conflicting stack variables. */
328 bitmap conflicts;
60d03123 329};
330
331#define EOC ((size_t)-1)
332
333/* We have an array of such objects while deciding allocation. */
334static struct stack_var *stack_vars;
335static size_t stack_vars_alloc;
336static size_t stack_vars_num;
5f8841a5 337static hash_map<tree, size_t> *decl_to_stack_part;
60d03123 338
4fb07d00 339/* Conflict bitmaps go on this obstack. This allows us to destroy
340 all of them in one big sweep. */
341static bitmap_obstack stack_var_bitmap_obstack;
342
f0b5f617 343/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
60d03123 344 is non-increasing. */
345static size_t *stack_vars_sorted;
346
60d03123 347/* The phase of the stack frame. This is the known misalignment of
348 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
349 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
350static int frame_phase;
351
f1a0edff 352/* Used during expand_used_vars to remember if we saw any decls for
353 which we'd like to enable stack smashing protection. */
354static bool has_protected_decls;
355
 356/* Used during expand_used_vars. Remember if we saw a character buffer
357 smaller than our cutoff threshold. Used for -Wstack-protector. */
358static bool has_short_buffer;
60d03123 359
25c513b9 360/* Compute the byte alignment to use for DECL. Ignore alignment
f64dc32d 361 we cannot honor given the expected alignment of the stack boundary. */
362
363static unsigned int
25c513b9 364align_local_variable (tree decl)
f64dc32d 365{
94f92c36 366 unsigned int align;
367
368 if (TREE_CODE (decl) == SSA_NAME)
369 align = TYPE_ALIGN (TREE_TYPE (decl));
370 else
371 {
372 align = LOCAL_DECL_ALIGNMENT (decl);
5d4b30ea 373 SET_DECL_ALIGN (decl, align);
94f92c36 374 }
60d03123 375 return align / BITS_PER_UNIT;
376}
377
9dbe51a9 378/* Align the given offset BASE to a multiple of ALIGN. Round up if ALIGN_UP
 379 is true, truncate down otherwise. Return the aligned BASE value. */
380
381static inline unsigned HOST_WIDE_INT
382align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
383{
384 return align_up ? (base + align - 1) & -align : base & -align;
385}
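
/* Worked example (editor's addition): ALIGN is a power of two, so -ALIGN is
   a mask that clears the low bits.  With hypothetical values:

       align_base (37, 16, true)  == 48    round up to a multiple of 16
       align_base (37, 16, false) == 32    truncate down
       align_base (48, 16, true)  == 48    already aligned, unchanged  */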
386
60d03123 387/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
388 Return the frame offset. */
389
85aa2f28 390static poly_int64
87ff83f0 391alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
60d03123 392{
85aa2f28 393 poly_int64 offset, new_frame_offset;
60d03123 394
60d03123 395 if (FRAME_GROWS_DOWNWARD)
396 {
9dbe51a9 397 new_frame_offset
85aa2f28 398 = aligned_lower_bound (frame_offset - frame_phase - size,
399 align) + frame_phase;
60d03123 400 offset = new_frame_offset;
401 }
402 else
403 {
9dbe51a9 404 new_frame_offset
85aa2f28 405 = aligned_upper_bound (frame_offset - frame_phase,
406 align) + frame_phase;
60d03123 407 offset = new_frame_offset;
408 new_frame_offset += size;
409 }
410 frame_offset = new_frame_offset;
411
26d04e5f 412 if (frame_offset_overflow (frame_offset, cfun->decl))
413 frame_offset = offset = 0;
414
60d03123 415 return offset;
416}
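
/* Worked example (editor's addition, hypothetical numbers): on a
   FRAME_GROWS_DOWNWARD target with frame_phase == 0, frame_offset == -20,
   SIZE == 8 and ALIGN == 16, the function above computes

       new_frame_offset = aligned_lower_bound (-20 - 0 - 8, 16) + 0 == -32

   so the new object lives at frame offset -32 and frame_offset becomes -32.
   The frame_offset_overflow check is ignored in this sketch.  */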
417
418/* Accumulate DECL into STACK_VARS. */
419
420static void
421add_stack_var (tree decl)
422{
e67bda38 423 struct stack_var *v;
424
60d03123 425 if (stack_vars_num >= stack_vars_alloc)
426 {
427 if (stack_vars_alloc)
428 stack_vars_alloc = stack_vars_alloc * 3 / 2;
429 else
430 stack_vars_alloc = 32;
431 stack_vars
432 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
433 }
3c25489e 434 if (!decl_to_stack_part)
5f8841a5 435 decl_to_stack_part = new hash_map<tree, size_t>;
3c25489e 436
e67bda38 437 v = &stack_vars[stack_vars_num];
5f8841a5 438 decl_to_stack_part->put (decl, stack_vars_num);
e67bda38 439
440 v->decl = decl;
94f92c36 441 tree size = TREE_CODE (decl) == SSA_NAME
442 ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
443 : DECL_SIZE_UNIT (decl);
87ff83f0 444 v->size = tree_to_poly_uint64 (size);
e67bda38 445 /* Ensure that all variables have size, so that &a != &b for any two
446 variables that are simultaneously live. */
87ff83f0 447 if (known_eq (v->size, 0U))
e67bda38 448 v->size = 1;
94f92c36 449 v->alignb = align_local_variable (decl);
0a4cd568 450 /* An alignment of zero can mightily confuse us later. */
451 gcc_assert (v->alignb != 0);
60d03123 452
453 /* All variables are initially in their own partition. */
e67bda38 454 v->representative = stack_vars_num;
455 v->next = EOC;
60d03123 456
dfa054ff 457 /* All variables initially conflict with no other. */
e67bda38 458 v->conflicts = NULL;
dfa054ff 459
60d03123 460 /* Ensure that this decl doesn't get put onto the list twice. */
a8dd994c 461 set_rtl (decl, pc_rtx);
60d03123 462
463 stack_vars_num++;
464}
465
60d03123 466/* Make the decls associated with stack-var indices X and Y conflict. */
467
468static void
469add_stack_var_conflict (size_t x, size_t y)
470{
dfa054ff 471 struct stack_var *a = &stack_vars[x];
472 struct stack_var *b = &stack_vars[y];
b1a639d2 473 if (x == y)
474 return;
dfa054ff 475 if (!a->conflicts)
4fb07d00 476 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
dfa054ff 477 if (!b->conflicts)
4fb07d00 478 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
dfa054ff 479 bitmap_set_bit (a->conflicts, y);
480 bitmap_set_bit (b->conflicts, x);
60d03123 481}
482
 483/* Check whether the decls associated with stack-var indices X and Y conflict. */
484
485static bool
486stack_var_conflict_p (size_t x, size_t y)
487{
dfa054ff 488 struct stack_var *a = &stack_vars[x];
489 struct stack_var *b = &stack_vars[y];
3c25489e 490 if (x == y)
491 return false;
492 /* Partitions containing an SSA name result from gimple registers
493 with things like unsupported modes. They are top-level and
494 hence conflict with everything else. */
495 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
496 return true;
497
dfa054ff 498 if (!a->conflicts || !b->conflicts)
499 return false;
500 return bitmap_bit_p (a->conflicts, y);
60d03123 501}
48e1416a 502
3c25489e 503/* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
504 enter its partition number into bitmap DATA. */
505
506static bool
42acab1c 507visit_op (gimple *, tree op, tree, void *data)
3c25489e 508{
509 bitmap active = (bitmap)data;
510 op = get_base_address (op);
511 if (op
512 && DECL_P (op)
513 && DECL_RTL_IF_SET (op) == pc_rtx)
514 {
5f8841a5 515 size_t *v = decl_to_stack_part->get (op);
3c25489e 516 if (v)
517 bitmap_set_bit (active, *v);
518 }
519 return false;
520}
521
522/* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
523 record conflicts between it and all currently active other partitions
524 from bitmap DATA. */
525
526static bool
42acab1c 527visit_conflict (gimple *, tree op, tree, void *data)
3c25489e 528{
529 bitmap active = (bitmap)data;
530 op = get_base_address (op);
531 if (op
532 && DECL_P (op)
533 && DECL_RTL_IF_SET (op) == pc_rtx)
534 {
5f8841a5 535 size_t *v = decl_to_stack_part->get (op);
3c25489e 536 if (v && bitmap_set_bit (active, *v))
537 {
538 size_t num = *v;
539 bitmap_iterator bi;
540 unsigned i;
541 gcc_assert (num < stack_vars_num);
542 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
543 add_stack_var_conflict (num, i);
544 }
545 }
546 return false;
547}
548
549/* Helper routine for add_scope_conflicts, calculating the active partitions
550 at the end of BB, leaving the result in WORK. We're called to generate
b74338cf 551 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
552 liveness. */
3c25489e 553
554static void
b74338cf 555add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
3c25489e 556{
557 edge e;
558 edge_iterator ei;
559 gimple_stmt_iterator gsi;
5b26a9e3 560 walk_stmt_load_store_addr_fn visit;
3c25489e 561
562 bitmap_clear (work);
563 FOR_EACH_EDGE (e, ei, bb->preds)
564 bitmap_ior_into (work, (bitmap)e->src->aux);
565
07428872 566 visit = visit_op;
3c25489e 567
568 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
569 {
42acab1c 570 gimple *stmt = gsi_stmt (gsi);
07428872 571 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
3c25489e 572 }
07428872 573 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3c25489e 574 {
42acab1c 575 gimple *stmt = gsi_stmt (gsi);
3c25489e 576
577 if (gimple_clobber_p (stmt))
578 {
579 tree lhs = gimple_assign_lhs (stmt);
580 size_t *v;
581 /* Nested function lowering might introduce LHSs
582 that are COMPONENT_REFs. */
53e9c5c4 583 if (!VAR_P (lhs))
3c25489e 584 continue;
585 if (DECL_RTL_IF_SET (lhs) == pc_rtx
5f8841a5 586 && (v = decl_to_stack_part->get (lhs)))
3c25489e 587 bitmap_clear_bit (work, *v);
588 }
589 else if (!is_gimple_debug (stmt))
07428872 590 {
b74338cf 591 if (for_conflict
07428872 592 && visit == visit_op)
593 {
594 /* If this is the first real instruction in this BB we need
0b44da0d 595 to add conflicts for everything live at this point now.
596 Unlike classical liveness for named objects we can't
07428872 597 rely on seeing a def/use of the names we're interested in.
598 There might merely be indirect loads/stores. We'd not add any
b74338cf 599 conflicts for such partitions. */
07428872 600 bitmap_iterator bi;
601 unsigned i;
b74338cf 602 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
07428872 603 {
99fade12 604 struct stack_var *a = &stack_vars[i];
605 if (!a->conflicts)
4fb07d00 606 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
99fade12 607 bitmap_ior_into (a->conflicts, work);
07428872 608 }
609 visit = visit_conflict;
610 }
611 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
612 }
3c25489e 613 }
614}
615
616/* Generate stack partition conflicts between all partitions that are
617 simultaneously live. */
618
619static void
620add_scope_conflicts (void)
621{
622 basic_block bb;
623 bool changed;
624 bitmap work = BITMAP_ALLOC (NULL);
99fade12 625 int *rpo;
626 int n_bbs;
3c25489e 627
0b44da0d 628 /* We approximate the live range of a stack variable by taking the first
3c25489e 629 mention of its name as starting point(s), and by the end-of-scope
630 death clobber added by gimplify as ending point(s) of the range.
 631 This overapproximates in cases where we have, for instance, moved an
 632 address-taken operation upward without also moving a dereference of it.
 633 But it's conservatively correct, as a variable can never hold values
634 before its name is mentioned at least once.
635
0b44da0d 636 We then do a mostly classical bitmap liveness algorithm. */
3c25489e 637
ed7d889a 638 FOR_ALL_BB_FN (bb, cfun)
4fb07d00 639 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
3c25489e 640
fe672ac0 641 rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
99fade12 642 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
643
3c25489e 644 changed = true;
645 while (changed)
646 {
99fade12 647 int i;
3c25489e 648 changed = false;
99fade12 649 for (i = 0; i < n_bbs; i++)
3c25489e 650 {
99fade12 651 bitmap active;
f5a6b05f 652 bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
99fade12 653 active = (bitmap)bb->aux;
b74338cf 654 add_scope_conflicts_1 (bb, work, false);
3c25489e 655 if (bitmap_ior_into (active, work))
656 changed = true;
657 }
658 }
659
fc00614f 660 FOR_EACH_BB_FN (bb, cfun)
b74338cf 661 add_scope_conflicts_1 (bb, work, true);
3c25489e 662
99fade12 663 free (rpo);
3c25489e 664 BITMAP_FREE (work);
ed7d889a 665 FOR_ALL_BB_FN (bb, cfun)
3c25489e 666 BITMAP_FREE (bb->aux);
667}
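
/* Example (editor's addition, hypothetical source): in

       { char buf1[256]; use (buf1); }
       { char buf2[256]; use (buf2); }

   the gimplifier emits a "buf1 = {CLOBBER};" at the end of the first scope,
   so the liveness computed above never sees buf1 and buf2 live at the same
   time, no conflict is recorded between them, and partition_stack_vars may
   later give both the same stack location.  */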
668
60d03123 669/* A subroutine of partition_stack_vars. A comparison function for qsort,
5be42b39 670 sorting an array of indices by the properties of the object. */
60d03123 671
672static int
5be42b39 673stack_var_cmp (const void *a, const void *b)
60d03123 674{
5be42b39 675 size_t ia = *(const size_t *)a;
676 size_t ib = *(const size_t *)b;
677 unsigned int aligna = stack_vars[ia].alignb;
678 unsigned int alignb = stack_vars[ib].alignb;
87ff83f0 679 poly_int64 sizea = stack_vars[ia].size;
680 poly_int64 sizeb = stack_vars[ib].size;
5be42b39 681 tree decla = stack_vars[ia].decl;
682 tree declb = stack_vars[ib].decl;
683 bool largea, largeb;
a8dd994c 684 unsigned int uida, uidb;
60d03123 685
5be42b39 686 /* Primary compare on "large" alignment. Large comes first. */
687 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
688 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
689 if (largea != largeb)
690 return (int)largeb - (int)largea;
691
692 /* Secondary compare on size, decreasing */
87ff83f0 693 int diff = compare_sizes_for_sort (sizeb, sizea);
694 if (diff != 0)
695 return diff;
5be42b39 696
697 /* Tertiary compare on true alignment, decreasing. */
698 if (aligna < alignb)
699 return -1;
700 if (aligna > alignb)
701 return 1;
702
703 /* Final compare on ID for sort stability, increasing.
704 Two SSA names are compared by their version, SSA names come before
705 non-SSA names, and two normal decls are compared by their DECL_UID. */
a8dd994c 706 if (TREE_CODE (decla) == SSA_NAME)
707 {
708 if (TREE_CODE (declb) == SSA_NAME)
709 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
710 else
711 return -1;
712 }
713 else if (TREE_CODE (declb) == SSA_NAME)
714 return 1;
715 else
716 uida = DECL_UID (decla), uidb = DECL_UID (declb);
7615883a 717 if (uida < uidb)
7615883a 718 return 1;
5be42b39 719 if (uida > uidb)
720 return -1;
60d03123 721 return 0;
722}
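
/* Example (editor's addition): assuming none of the variables needs "large"
   alignment, three hypothetical partition representatives of sizes 256, 16
   and 4 bytes come out of a qsort with stack_var_cmp in the order 256, 16,
   4, i.e. largest first; ties are broken by decreasing alignment and
   finally by SSA version or DECL_UID for a stable order.  */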
723
d5fb6135 724struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
5f8841a5 725typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
3a443843 726
727/* If the points-to solution *PI points to variables that are in a partition
728 together with other variables add all partition members to the pointed-to
729 variables bitmap. */
730
731static void
732add_partitioned_vars_to_ptset (struct pt_solution *pt,
5f8841a5 733 part_hashmap *decls_to_partitions,
431205b7 734 hash_set<bitmap> *visited, bitmap temp)
3a443843 735{
736 bitmap_iterator bi;
737 unsigned i;
738 bitmap *part;
739
740 if (pt->anything
741 || pt->vars == NULL
742 /* The pointed-to vars bitmap is shared, it is enough to
743 visit it once. */
431205b7 744 || visited->add (pt->vars))
3a443843 745 return;
746
747 bitmap_clear (temp);
748
749 /* By using a temporary bitmap to store all members of the partitions
750 we have to add we make sure to visit each of the partitions only
751 once. */
752 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
753 if ((!temp
754 || !bitmap_bit_p (temp, i))
5f8841a5 755 && (part = decls_to_partitions->get (i)))
3a443843 756 bitmap_ior_into (temp, *part);
757 if (!bitmap_empty_p (temp))
758 bitmap_ior_into (pt->vars, temp);
759}
760
761/* Update points-to sets based on partition info, so we can use them on RTL.
762 The bitmaps representing stack partitions will be saved until expand,
763 where partitioned decls used as bases in memory expressions will be
764 rewritten. */
765
766static void
767update_alias_info_with_stack_vars (void)
768{
5f8841a5 769 part_hashmap *decls_to_partitions = NULL;
3a443843 770 size_t i, j;
771 tree var = NULL_TREE;
772
773 for (i = 0; i < stack_vars_num; i++)
774 {
775 bitmap part = NULL;
776 tree name;
777 struct ptr_info_def *pi;
778
 779 /* Not interested in partitions with a single variable. */
780 if (stack_vars[i].representative != i
781 || stack_vars[i].next == EOC)
782 continue;
783
784 if (!decls_to_partitions)
785 {
5f8841a5 786 decls_to_partitions = new part_hashmap;
787 cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
3a443843 788 }
789
790 /* Create an SSA_NAME that points to the partition for use
791 as base during alias-oracle queries on RTL for bases that
792 have been partitioned. */
793 if (var == NULL_TREE)
f9e245b2 794 var = create_tmp_var (ptr_type_node);
795 name = make_ssa_name (var);
3a443843 796
797 /* Create bitmaps representing partitions. They will be used for
798 points-to sets later, so use GGC alloc. */
799 part = BITMAP_GGC_ALLOC ();
800 for (j = i; j != EOC; j = stack_vars[j].next)
801 {
802 tree decl = stack_vars[j].decl;
1a981e1a 803 unsigned int uid = DECL_PT_UID (decl);
3a443843 804 bitmap_set_bit (part, uid);
5f8841a5 805 decls_to_partitions->put (uid, part);
806 cfun->gimple_df->decls_to_pointers->put (decl, name);
f7b5f694 807 if (TREE_ADDRESSABLE (decl))
808 TREE_ADDRESSABLE (name) = 1;
3a443843 809 }
810
811 /* Make the SSA name point to all partition members. */
812 pi = get_ptr_info (name);
6fc56905 813 pt_solution_set (&pi->pt, part, false);
3a443843 814 }
815
816 /* Make all points-to sets that contain one member of a partition
817 contain all members of the partition. */
818 if (decls_to_partitions)
819 {
820 unsigned i;
f211616e 821 tree name;
431205b7 822 hash_set<bitmap> visited;
4fb07d00 823 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
3a443843 824
f211616e 825 FOR_EACH_SSA_NAME (i, name, cfun)
3a443843 826 {
3a443843 827 struct ptr_info_def *pi;
828
f211616e 829 if (POINTER_TYPE_P (TREE_TYPE (name))
3a443843 830 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
831 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
431205b7 832 &visited, temp);
3a443843 833 }
834
835 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
431205b7 836 decls_to_partitions, &visited, temp);
3a443843 837
5f8841a5 838 delete decls_to_partitions;
3a443843 839 BITMAP_FREE (temp);
840 }
841}
842
60d03123 843/* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
844 partitioning algorithm. Partitions A and B are known to be non-conflicting.
2a24c3a6 845 Merge them into a single partition A. */
60d03123 846
847static void
2a24c3a6 848union_stack_vars (size_t a, size_t b)
60d03123 849{
dfa054ff 850 struct stack_var *vb = &stack_vars[b];
851 bitmap_iterator bi;
852 unsigned u;
60d03123 853
2a24c3a6 854 gcc_assert (stack_vars[b].next == EOC);
855 /* Add B to A's partition. */
856 stack_vars[b].next = stack_vars[a].next;
857 stack_vars[b].representative = a;
60d03123 858 stack_vars[a].next = b;
859
860 /* Update the required alignment of partition A to account for B. */
861 if (stack_vars[a].alignb < stack_vars[b].alignb)
862 stack_vars[a].alignb = stack_vars[b].alignb;
863
864 /* Update the interference graph and merge the conflicts. */
dfa054ff 865 if (vb->conflicts)
866 {
867 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
868 add_stack_var_conflict (a, stack_vars[u].representative);
869 BITMAP_FREE (vb->conflicts);
870 }
60d03123 871}
872
873/* A subroutine of expand_used_vars. Binpack the variables into
874 partitions constrained by the interference graph. The overall
875 algorithm used is as follows:
876
2a24c3a6 877 Sort the objects by size in descending order.
60d03123 878 For each object A {
879 S = size(A)
880 O = 0
881 loop {
882 Look for the largest non-conflicting object B with size <= S.
883 UNION (A, B)
60d03123 884 }
885 }
886*/
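
/* Example (editor's addition, hypothetical sizes, assuming -fsanitize=address
   is not enabled): if the sorted representatives are A (32 bytes), B (32
   bytes) and C (8 bytes), and the conflict bitmaps record only that A and B
   are live simultaneously, the loop below visits A first, skips B (conflict)
   and unions C into A.  The result is the partitions {A, C} and {B}; C is
   later placed at the same frame offset as A, inside A's 32 bytes.  */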
887
888static void
889partition_stack_vars (void)
890{
891 size_t si, sj, n = stack_vars_num;
892
893 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
894 for (si = 0; si < n; ++si)
895 stack_vars_sorted[si] = si;
896
897 if (n == 1)
898 return;
899
5be42b39 900 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
60d03123 901
60d03123 902 for (si = 0; si < n; ++si)
903 {
904 size_t i = stack_vars_sorted[si];
5be42b39 905 unsigned int ialign = stack_vars[i].alignb;
87ff83f0 906 poly_int64 isize = stack_vars[i].size;
60d03123 907
2a24c3a6 908 /* Ignore objects that aren't partition representatives. If we
909 see a var that is not a partition representative, it must
910 have been merged earlier. */
911 if (stack_vars[i].representative != i)
912 continue;
913
914 for (sj = si + 1; sj < n; ++sj)
60d03123 915 {
916 size_t j = stack_vars_sorted[sj];
60d03123 917 unsigned int jalign = stack_vars[j].alignb;
87ff83f0 918 poly_int64 jsize = stack_vars[j].size;
60d03123 919
920 /* Ignore objects that aren't partition representatives. */
921 if (stack_vars[j].representative != j)
922 continue;
923
5be42b39 924 /* Do not mix objects of "small" (supported) alignment
925 and "large" (unsupported) alignment. */
926 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
927 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
3c919612 928 break;
929
930 /* For Address Sanitizer do not mix objects with different
931 sizes, as the shorter vars wouldn't be adequately protected.
932 Don't do that for "large" (unsupported) alignment objects,
933 those aren't protected anyway. */
87ff83f0 934 if (asan_sanitize_stack_p ()
935 && maybe_ne (isize, jsize)
3c919612 936 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
937 break;
938
939 /* Ignore conflicting objects. */
940 if (stack_var_conflict_p (i, j))
5be42b39 941 continue;
942
60d03123 943 /* UNION the objects, placing J at OFFSET. */
2a24c3a6 944 union_stack_vars (i, j);
60d03123 945 }
946 }
3a443843 947
ba487639 948 update_alias_info_with_stack_vars ();
60d03123 949}
950
951/* A debugging aid for expand_used_vars. Dump the generated partitions. */
952
953static void
954dump_stack_var_partition (void)
955{
956 size_t si, i, j, n = stack_vars_num;
957
958 for (si = 0; si < n; ++si)
959 {
960 i = stack_vars_sorted[si];
961
962 /* Skip variables that aren't partition representatives, for now. */
963 if (stack_vars[i].representative != i)
964 continue;
965
87ff83f0 966 fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
967 print_dec (stack_vars[i].size, dump_file);
968 fprintf (dump_file, " align %u\n", stack_vars[i].alignb);
60d03123 969
970 for (j = i; j != EOC; j = stack_vars[j].next)
971 {
972 fputc ('\t', dump_file);
973 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
60d03123 974 }
2a24c3a6 975 fputc ('\n', dump_file);
60d03123 976 }
977}
978
5be42b39 979/* Assign rtl to DECL at BASE + OFFSET. */
60d03123 980
981static void
5be42b39 982expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
85aa2f28 983 poly_int64 offset)
60d03123 984{
5be42b39 985 unsigned align;
60d03123 986 rtx x;
a0c938f0 987
60d03123 988 /* If this fails, we've overflowed the stack frame. Error nicely? */
85aa2f28 989 gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));
60d03123 990
29c05e22 991 x = plus_constant (Pmode, base, offset);
94f92c36 992 x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
993 ? TYPE_MODE (TREE_TYPE (decl))
994 : DECL_MODE (SSAVAR (decl)), x);
60d03123 995
a8dd994c 996 if (TREE_CODE (decl) != SSA_NAME)
997 {
998 /* Set alignment we actually gave this decl if it isn't an SSA name.
 999 If it is, we generate stack slots only accidentally, so it isn't as
 1000 important; we'll simply use the alignment that is already set. */
5be42b39 1001 if (base == virtual_stack_vars_rtx)
1002 offset -= frame_phase;
85aa2f28 1003 align = known_alignment (offset);
a8dd994c 1004 align *= BITS_PER_UNIT;
5be42b39 1005 if (align == 0 || align > base_align)
1006 align = base_align;
1007
1008 /* One would think that we could assert that we're not decreasing
1009 alignment here, but (at least) the i386 port does exactly this
1010 via the MINIMUM_ALIGNMENT hook. */
a8dd994c 1011
5d4b30ea 1012 SET_DECL_ALIGN (decl, align);
a8dd994c 1013 DECL_USER_ALIGN (decl) = 0;
1014 }
1015
a8dd994c 1016 set_rtl (decl, x);
60d03123 1017}
1018
3c919612 1019struct stack_vars_data
1020{
1021 /* Vector of offset pairs, always end of some padding followed
1022 by start of the padding that needs Address Sanitizer protection.
 1023 The vector is reversed: highest-offset pairs come first. */
d2361e3b 1024 auto_vec<HOST_WIDE_INT> asan_vec;
3c919612 1025
1026 /* Vector of partition representative decls in between the paddings. */
d2361e3b 1027 auto_vec<tree> asan_decl_vec;
683539f6 1028
1029 /* Base pseudo register for Address Sanitizer protected automatic vars. */
1030 rtx asan_base;
1031
1032 /* Alignment needed for the Address Sanitizer protected automatic vars. */
1033 unsigned int asan_alignb;
3c919612 1034};
1035
60d03123 1036/* A subroutine of expand_used_vars. Give each partition representative
1037 a unique location within the stack frame. Update each partition member
1038 with that location. */
1039
1040static void
3c919612 1041expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
60d03123 1042{
1043 size_t si, i, j, n = stack_vars_num;
87ff83f0 1044 poly_uint64 large_size = 0, large_alloc = 0;
5be42b39 1045 rtx large_base = NULL;
1046 unsigned large_align = 0;
a80f37e1 1047 bool large_allocation_done = false;
5be42b39 1048 tree decl;
1049
1050 /* Determine if there are any variables requiring "large" alignment.
1051 Since these are dynamically allocated, we only process these if
 1052 no predicate is involved. */
1053 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1054 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1055 {
1056 /* Find the total size of these variables. */
1057 for (si = 0; si < n; ++si)
1058 {
1059 unsigned alignb;
1060
1061 i = stack_vars_sorted[si];
1062 alignb = stack_vars[i].alignb;
1063
2bb1c7d1 1064 /* All "large" alignment decls come before all "small" alignment
1065 decls, but "large" alignment decls are not sorted based on
1066 their alignment. Increase large_align to track the largest
1067 required alignment. */
1068 if ((alignb * BITS_PER_UNIT) > large_align)
1069 large_align = alignb * BITS_PER_UNIT;
1070
5be42b39 1071 /* Stop when we get to the first decl with "small" alignment. */
1072 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1073 break;
1074
1075 /* Skip variables that aren't partition representatives. */
1076 if (stack_vars[i].representative != i)
1077 continue;
1078
1079 /* Skip variables that have already had rtl assigned. See also
1080 add_stack_var where we perpetrate this pc_rtx hack. */
1081 decl = stack_vars[i].decl;
94f92c36 1082 if (TREE_CODE (decl) == SSA_NAME
1083 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1084 : DECL_RTL (decl) != pc_rtx)
5be42b39 1085 continue;
1086
87ff83f0 1087 large_size = aligned_upper_bound (large_size, alignb);
5be42b39 1088 large_size += stack_vars[i].size;
1089 }
5be42b39 1090 }
60d03123 1091
1092 for (si = 0; si < n; ++si)
1093 {
5be42b39 1094 rtx base;
1095 unsigned base_align, alignb;
85aa2f28 1096 poly_int64 offset;
60d03123 1097
1098 i = stack_vars_sorted[si];
1099
1100 /* Skip variables that aren't partition representatives, for now. */
1101 if (stack_vars[i].representative != i)
1102 continue;
1103
f1a0edff 1104 /* Skip variables that have already had rtl assigned. See also
1105 add_stack_var where we perpetrate this pc_rtx hack. */
5be42b39 1106 decl = stack_vars[i].decl;
94f92c36 1107 if (TREE_CODE (decl) == SSA_NAME
1108 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1109 : DECL_RTL (decl) != pc_rtx)
f1a0edff 1110 continue;
1111
a0c938f0 1112 /* Check the predicate to see whether this variable should be
f1a0edff 1113 allocated in this pass. */
3c919612 1114 if (pred && !pred (i))
f1a0edff 1115 continue;
1116
5be42b39 1117 alignb = stack_vars[i].alignb;
1118 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1119 {
683539f6 1120 base = virtual_stack_vars_rtx;
85aa2f28 1121 /* ASAN description strings don't yet have a syntax for expressing
1122 polynomial offsets. */
1123 HOST_WIDE_INT prev_offset;
1124 if (asan_sanitize_stack_p ()
1125 && pred
87ff83f0 1126 && frame_offset.is_constant (&prev_offset)
1127 && stack_vars[i].size.is_constant ())
3c919612 1128 {
db93a978 1129 if (data->asan_vec.is_empty ())
1130 {
1131 alloc_stack_frame_space (0, ASAN_RED_ZONE_SIZE);
1132 prev_offset = frame_offset.to_constant ();
1133 }
85aa2f28 1134 prev_offset = align_base (prev_offset,
ed779478 1135 ASAN_MIN_RED_ZONE_SIZE,
85aa2f28 1136 !FRAME_GROWS_DOWNWARD);
3c919612 1137 tree repr_decl = NULL_TREE;
57e4ba18 1138 unsigned HOST_WIDE_INT size
1139 = asan_var_and_redzone_size (stack_vars[i].size.to_constant ());
1140 if (data->asan_vec.is_empty ())
1141 size = MAX (size, ASAN_RED_ZONE_SIZE);
1142
1143 unsigned HOST_WIDE_INT alignment = MAX (alignb,
1144 ASAN_MIN_RED_ZONE_SIZE);
1145 offset = alloc_stack_frame_space (size, alignment);
9dbe51a9 1146
f1f41a6c 1147 data->asan_vec.safe_push (prev_offset);
85aa2f28 1148 /* Allocating a constant amount of space from a constant
1149 starting offset must give a constant result. */
1150 data->asan_vec.safe_push ((offset + stack_vars[i].size)
1151 .to_constant ());
3c919612 1152 /* Find the best representative of the partition.
 1153 Prefer those with a DECL_NAME, and better still those
 1154 satisfying the asan_protect_stack_decl predicate. */
1155 for (j = i; j != EOC; j = stack_vars[j].next)
1156 if (asan_protect_stack_decl (stack_vars[j].decl)
1157 && DECL_NAME (stack_vars[j].decl))
1158 {
1159 repr_decl = stack_vars[j].decl;
1160 break;
1161 }
1162 else if (repr_decl == NULL_TREE
1163 && DECL_P (stack_vars[j].decl)
1164 && DECL_NAME (stack_vars[j].decl))
1165 repr_decl = stack_vars[j].decl;
1166 if (repr_decl == NULL_TREE)
1167 repr_decl = stack_vars[i].decl;
f1f41a6c 1168 data->asan_decl_vec.safe_push (repr_decl);
4f28881f 1169
 1170 /* Make sure a representative is unpoisoned if another
1171 variable in the partition is handled by
1172 use-after-scope sanitization. */
1173 if (asan_handled_variables != NULL
1174 && !asan_handled_variables->contains (repr_decl))
1175 {
1176 for (j = i; j != EOC; j = stack_vars[j].next)
1177 if (asan_handled_variables->contains (stack_vars[j].decl))
1178 break;
1179 if (j != EOC)
1180 asan_handled_variables->add (repr_decl);
1181 }
1182
683539f6 1183 data->asan_alignb = MAX (data->asan_alignb, alignb);
1184 if (data->asan_base == NULL)
1185 data->asan_base = gen_reg_rtx (Pmode);
1186 base = data->asan_base;
f89175bb 1187
1188 if (!STRICT_ALIGNMENT)
1189 base_align = crtl->max_used_stack_slot_alignment;
1190 else
1191 base_align = MAX (crtl->max_used_stack_slot_alignment,
1192 GET_MODE_ALIGNMENT (SImode)
1193 << ASAN_SHADOW_SHIFT);
3c919612 1194 }
1195 else
f89175bb 1196 {
1197 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1198 base_align = crtl->max_used_stack_slot_alignment;
1199 }
5be42b39 1200 }
1201 else
1202 {
1203 /* Large alignment is only processed in the last pass. */
1204 if (pred)
1205 continue;
a80f37e1 1206
1207 /* If there were any variables requiring "large" alignment, allocate
1208 space. */
87ff83f0 1209 if (maybe_ne (large_size, 0U) && ! large_allocation_done)
a80f37e1 1210 {
85aa2f28 1211 poly_int64 loffset;
a80f37e1 1212 rtx large_allocsize;
1213
87ff83f0 1214 large_allocsize = gen_int_mode (large_size, Pmode);
a80f37e1 1215 get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
1216 loffset = alloc_stack_frame_space
87ff83f0 1217 (rtx_to_poly_int64 (large_allocsize),
a80f37e1 1218 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
1219 large_base = get_dynamic_stack_base (loffset, large_align);
1220 large_allocation_done = true;
1221 }
e67bda38 1222 gcc_assert (large_base != NULL);
5be42b39 1223
87ff83f0 1224 large_alloc = aligned_upper_bound (large_alloc, alignb);
5be42b39 1225 offset = large_alloc;
1226 large_alloc += stack_vars[i].size;
1227
1228 base = large_base;
1229 base_align = large_align;
1230 }
60d03123 1231
1232 /* Create rtl for each variable based on their location within the
1233 partition. */
1234 for (j = i; j != EOC; j = stack_vars[j].next)
8394f3a0 1235 {
8394f3a0 1236 expand_one_stack_var_at (stack_vars[j].decl,
5be42b39 1237 base, base_align,
2a24c3a6 1238 offset);
8394f3a0 1239 }
60d03123 1240 }
5be42b39 1241
87ff83f0 1242 gcc_assert (known_eq (large_alloc, large_size));
60d03123 1243}
1244
5a02d67b 1245/* Take into account all sizes of partitions and reset DECL_RTLs. */
87ff83f0 1246static poly_uint64
5a02d67b 1247account_stack_vars (void)
1248{
1249 size_t si, j, i, n = stack_vars_num;
87ff83f0 1250 poly_uint64 size = 0;
5a02d67b 1251
1252 for (si = 0; si < n; ++si)
1253 {
1254 i = stack_vars_sorted[si];
1255
1256 /* Skip variables that aren't partition representatives, for now. */
1257 if (stack_vars[i].representative != i)
1258 continue;
1259
1260 size += stack_vars[i].size;
1261 for (j = i; j != EOC; j = stack_vars[j].next)
a8dd994c 1262 set_rtl (stack_vars[j].decl, NULL);
5a02d67b 1263 }
1264 return size;
1265}
1266
b2df3bbf 1267/* Record the RTL assignment X for the default def of PARM. */
1268
1269extern void
1270set_parm_rtl (tree parm, rtx x)
1271{
1272 gcc_assert (TREE_CODE (parm) == PARM_DECL
1273 || TREE_CODE (parm) == RESULT_DECL);
1274
1275 if (x && !MEM_P (x))
1276 {
1277 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
1278 TYPE_MODE (TREE_TYPE (parm)),
1279 TYPE_ALIGN (TREE_TYPE (parm)));
1280
 1281 /* If the variable alignment is very large we'll dynamically
 1282 allocate it, which means that the in-frame portion is just a
1283 pointer. ??? We've got a pseudo for sure here, do we
1284 actually dynamically allocate its spilling area if needed?
2fa87500 1285 ??? Isn't it a problem when Pmode alignment also exceeds
1286 MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32? */
b2df3bbf 1287 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
2fa87500 1288 align = GET_MODE_ALIGNMENT (Pmode);
b2df3bbf 1289
1290 record_alignment_for_reg_var (align);
1291 }
1292
b2df3bbf 1293 tree ssa = ssa_default_def (cfun, parm);
1294 if (!ssa)
1295 return set_rtl (parm, x);
1296
1297 int part = var_to_partition (SA.map, ssa);
1298 gcc_assert (part != NO_PARTITION);
1299
1300 bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
1301 gcc_assert (changed);
1302
1303 set_rtl (ssa, x);
1304 gcc_assert (DECL_RTL (parm) == x);
1305}
1306
60d03123 1307/* A subroutine of expand_one_var. Called to immediately assign rtl
1308 to a variable to be allocated in the stack frame. */
1309
1310static void
94f92c36 1311expand_one_stack_var_1 (tree var)
60d03123 1312{
87ff83f0 1313 poly_uint64 size;
85aa2f28 1314 poly_int64 offset;
5be42b39 1315 unsigned byte_align;
60d03123 1316
94f92c36 1317 if (TREE_CODE (var) == SSA_NAME)
1318 {
1319 tree type = TREE_TYPE (var);
87ff83f0 1320 size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
94f92c36 1321 byte_align = TYPE_ALIGN_UNIT (type);
1322 }
1323 else
1324 {
87ff83f0 1325 size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
94f92c36 1326 byte_align = align_local_variable (var);
1327 }
5be42b39 1328
1329 /* We handle highly aligned variables in expand_stack_vars. */
1330 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
60d03123 1331
5be42b39 1332 offset = alloc_stack_frame_space (size, byte_align);
1333
1334 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1335 crtl->max_used_stack_slot_alignment, offset);
60d03123 1336}
1337
94f92c36 1338/* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1339 already assigned some MEM. */
1340
1341static void
1342expand_one_stack_var (tree var)
1343{
1344 if (TREE_CODE (var) == SSA_NAME)
1345 {
1346 int part = var_to_partition (SA.map, var);
1347 if (part != NO_PARTITION)
1348 {
1349 rtx x = SA.partition_to_pseudo[part];
1350 gcc_assert (x);
1351 gcc_assert (MEM_P (x));
1352 return;
1353 }
1354 }
1355
1356 return expand_one_stack_var_1 (var);
1357}
1358
60d03123 1359/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1360 that will reside in a hard register. */
1361
1362static void
1363expand_one_hard_reg_var (tree var)
1364{
1365 rest_of_decl_compilation (var, 0, 0);
1366}
1367
94f92c36 1368/* Record the alignment requirements of some variable assigned to a
1369 pseudo. */
1370
1371static void
1372record_alignment_for_reg_var (unsigned int align)
1373{
1374 if (SUPPORTS_STACK_ALIGNMENT
1375 && crtl->stack_alignment_estimated < align)
1376 {
1377 /* stack_alignment_estimated shouldn't change after stack
 1378 realign decision has been made. */
1379 gcc_assert (!crtl->stack_realign_processed);
1380 crtl->stack_alignment_estimated = align;
1381 }
1382
1383 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1384 So here we only make sure stack_alignment_needed >= align. */
1385 if (crtl->stack_alignment_needed < align)
1386 crtl->stack_alignment_needed = align;
1387 if (crtl->max_used_stack_slot_alignment < align)
1388 crtl->max_used_stack_slot_alignment = align;
1389}
1390
1391/* Create RTL for an SSA partition. */
1392
1393static void
1394expand_one_ssa_partition (tree var)
1395{
1396 int part = var_to_partition (SA.map, var);
1397 gcc_assert (part != NO_PARTITION);
1398
1399 if (SA.partition_to_pseudo[part])
1400 return;
1401
1402 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1403 TYPE_MODE (TREE_TYPE (var)),
1404 TYPE_ALIGN (TREE_TYPE (var)));
1405
 1406 /* If the variable alignment is very large we'll dynamically allocate
 1407 it, which means that the in-frame portion is just a pointer. */
1408 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
2fa87500 1409 align = GET_MODE_ALIGNMENT (Pmode);
94f92c36 1410
1411 record_alignment_for_reg_var (align);
1412
1413 if (!use_register_for_decl (var))
1414 {
b2df3bbf 1415 if (defer_stack_allocation (var, true))
94f92c36 1416 add_stack_var (var);
1417 else
1418 expand_one_stack_var_1 (var);
1419 return;
1420 }
1421
1422 machine_mode reg_mode = promote_ssa_mode (var, NULL);
94f92c36 1423 rtx x = gen_reg_rtx (reg_mode);
1424
1425 set_rtl (var, x);
31cbcee1 1426
1427 /* For a promoted variable, X will not be used directly but wrapped in a
1428 SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
1429 will assume that its upper bits can be inferred from its lower bits.
1430 Therefore, if X isn't initialized on every path from the entry, then
1431 we must do it manually in order to fulfill the above assumption. */
1432 if (reg_mode != TYPE_MODE (TREE_TYPE (var))
1433 && bitmap_bit_p (SA.partitions_for_undefined_values, part))
1434 emit_move_insn (x, CONST0_RTX (reg_mode));
94f92c36 1435}
1436
b2df3bbf 1437/* Record the association between the RTL generated for partition PART
1438 and the underlying variable of the SSA_NAME VAR. */
94f92c36 1439
1440static void
1441adjust_one_expanded_partition_var (tree var)
1442{
1443 if (!var)
1444 return;
1445
1446 tree decl = SSA_NAME_VAR (var);
1447
1448 int part = var_to_partition (SA.map, var);
1449 if (part == NO_PARTITION)
1450 return;
1451
1452 rtx x = SA.partition_to_pseudo[part];
1453
b2df3bbf 1454 gcc_assert (x);
94f92c36 1455
1456 set_rtl (var, x);
1457
1458 if (!REG_P (x))
1459 return;
1460
1461 /* Note if the object is a user variable. */
1462 if (decl && !DECL_ARTIFICIAL (decl))
1463 mark_user_reg (x);
1464
1465 if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
1466 mark_reg_pointer (x, get_pointer_alignment (var));
1467}
1468
60d03123 1469/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1470 that will reside in a pseudo register. */
1471
1472static void
1473expand_one_register_var (tree var)
1474{
94f92c36 1475 if (TREE_CODE (var) == SSA_NAME)
1476 {
1477 int part = var_to_partition (SA.map, var);
1478 if (part != NO_PARTITION)
1479 {
1480 rtx x = SA.partition_to_pseudo[part];
1481 gcc_assert (x);
1482 gcc_assert (REG_P (x));
1483 return;
1484 }
1485 gcc_unreachable ();
1486 }
1487
1488 tree decl = var;
a8dd994c 1489 tree type = TREE_TYPE (decl);
3754d046 1490 machine_mode reg_mode = promote_decl_mode (decl, NULL);
60d03123 1491 rtx x = gen_reg_rtx (reg_mode);
1492
a8dd994c 1493 set_rtl (var, x);
60d03123 1494
1495 /* Note if the object is a user variable. */
a8dd994c 1496 if (!DECL_ARTIFICIAL (decl))
1497 mark_user_reg (x);
60d03123 1498
9961142a 1499 if (POINTER_TYPE_P (type))
f2ca19b4 1500 mark_reg_pointer (x, get_pointer_alignment (var));
60d03123 1501}
1502
1503/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
c78cbec8 1504 has some associated error, e.g. its type is error-mark. We just need
60d03123 1505 to pick something that won't crash the rest of the compiler. */
1506
1507static void
1508expand_one_error_var (tree var)
1509{
3754d046 1510 machine_mode mode = DECL_MODE (var);
60d03123 1511 rtx x;
1512
1513 if (mode == BLKmode)
1514 x = gen_rtx_MEM (BLKmode, const0_rtx);
1515 else if (mode == VOIDmode)
1516 x = const0_rtx;
1517 else
1518 x = gen_reg_rtx (mode);
1519
1520 SET_DECL_RTL (var, x);
1521}
1522
a0c938f0 1523/* A subroutine of expand_one_var. VAR is a variable that will be
60d03123 1524 allocated to the local stack frame. Return true if we wish to
1525 add VAR to STACK_VARS so that it will be coalesced with other
1526 variables. Return false to allocate VAR immediately.
1527
1528 This function is used to reduce the number of variables considered
1529 for coalescing, which reduces the size of the quadratic problem. */
1530
1531static bool
1532defer_stack_allocation (tree var, bool toplevel)
1533{
94f92c36 1534 tree size_unit = TREE_CODE (var) == SSA_NAME
1535 ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1536 : DECL_SIZE_UNIT (var);
87ff83f0 1537 poly_uint64 size;
94f92c36 1538
da4b9ed5 1539 /* Whether the variable is small enough for immediate allocation not to be
1540 a problem with regard to the frame size. */
1541 bool smallish
87ff83f0 1542 = (poly_int_tree_p (size_unit, &size)
1543 && (estimated_poly_value (size)
1544 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)));
da4b9ed5 1545
f1a0edff 1546 /* If stack protection is enabled, *all* stack variables must be deferred,
3c919612 1547 so that we can re-order the strings to the top of the frame.
1548 Similarly for Address Sanitizer. */
2bc9f5c6 1549 if (flag_stack_protect || asan_sanitize_stack_p ())
f1a0edff 1550 return true;
1551
94f92c36 1552 unsigned int align = TREE_CODE (var) == SSA_NAME
1553 ? TYPE_ALIGN (TREE_TYPE (var))
1554 : DECL_ALIGN (var);
1555
5be42b39 1556 /* We handle "large" alignment via dynamic allocation. We want to handle
1557 this extra complication in only one place, so defer them. */
94f92c36 1558 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
5be42b39 1559 return true;
1560
94f92c36 1561 bool ignored = TREE_CODE (var) == SSA_NAME
1562 ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1563 : DECL_IGNORED_P (var);
1564
da4b9ed5 1565 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1566 might be detached from their block and appear at toplevel when we reach
1567 here. We want to coalesce them with variables from other blocks when
1568 the immediate contribution to the frame size would be noticeable. */
94f92c36 1569 if (toplevel && optimize > 0 && ignored && !smallish)
da4b9ed5 1570 return true;
1571
1572 /* Variables declared in the outermost scope automatically conflict
1573 with every other variable. The only reason to want to defer them
60d03123 1574 at all is that, after sorting, we can more efficiently pack
1575 small variables in the stack frame. Continue to defer at -O2. */
1576 if (toplevel && optimize < 2)
1577 return false;
1578
1579 /* Without optimization, *most* variables are allocated from the
1580 stack, which makes the quadratic problem large exactly when we
a0c938f0 1581 want compilation to proceed as quickly as possible. On the
60d03123 1582 other hand, we don't want the function's stack frame size to
1583 get completely out of hand. So we avoid adding scalars and
1584 "small" aggregates to the list at all. */
da4b9ed5 1585 if (optimize == 0 && smallish)
60d03123 1586 return false;
1587
1588 return true;
1589}
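
/* Example (editor's addition): by the tests above, a local smaller than
   PARAM_MIN_SIZE_FOR_STACK_SHARING is allocated immediately at -O0 rather
   than deferred, keeping the quadratic partitioning problem small, whereas
   -fstack-protector or -fsanitize=address forces every variable to be
   deferred so that buffers can be re-ordered and red zones inserted.  */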
1590
1591/* A subroutine of expand_used_vars. Expand one variable according to
91275768 1592 its flavor. Variables to be placed on the stack are not actually
48e1416a 1593 expanded yet, merely recorded.
5a02d67b 1594 When REALLY_EXPAND is false, only add stack values to be allocated.
 1595 Return the stack usage this variable is supposed to take.
1596*/
60d03123 1597
87ff83f0 1598static poly_uint64
5a02d67b 1599expand_one_var (tree var, bool toplevel, bool really_expand)
60d03123 1600{
5be42b39 1601 unsigned int align = BITS_PER_UNIT;
a8dd994c 1602 tree origvar = var;
5be42b39 1603
a8dd994c 1604 var = SSAVAR (var);
1605
53e9c5c4 1606 if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
27a7a23a 1607 {
7c5f5567 1608 if (is_global_var (var))
1609 return 0;
1610
27a7a23a 1611 /* Because we don't know if VAR will be in register or on stack,
1612 we conservatively assume it will be on stack even if VAR is
1613 eventually put into register after RA pass. For non-automatic
1614 variables, which won't be on stack, we collect alignment of
fc1995c6 1615 type and ignore user specified alignment. Similarly for
1616 SSA_NAMEs for which use_register_for_decl returns true. */
1617 if (TREE_STATIC (var)
1618 || DECL_EXTERNAL (var)
1619 || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
8645d3e7 1620 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1621 TYPE_MODE (TREE_TYPE (var)),
1622 TYPE_ALIGN (TREE_TYPE (var)));
505a6491 1623 else if (DECL_HAS_VALUE_EXPR_P (var)
1624 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1625 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1626 or variables which were assigned a stack slot already by
1627 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1628 changed from the offset chosen to it. */
1629 align = crtl->stack_alignment_estimated;
27a7a23a 1630 else
8645d3e7 1631 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
27a7a23a 1632
5be42b39 1633 /* If the variable alignment is very large we'll dynamicaly allocate
1634 it, which means that in-frame portion is just a pointer. */
1635 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
2fa87500 1636 align = GET_MODE_ALIGNMENT (Pmode);
5be42b39 1637 }
1638
94f92c36 1639 record_alignment_for_reg_var (align);
5be42b39 1640
87ff83f0 1641 poly_uint64 size;
a8dd994c 1642 if (TREE_CODE (origvar) == SSA_NAME)
1643 {
53e9c5c4 1644 gcc_assert (!VAR_P (var)
a8dd994c 1645 || (!DECL_EXTERNAL (var)
1646 && !DECL_HAS_VALUE_EXPR_P (var)
1647 && !TREE_STATIC (var)
a8dd994c 1648 && TREE_TYPE (var) != error_mark_node
1649 && !DECL_HARD_REGISTER (var)
1650 && really_expand));
1651 }
53e9c5c4 1652 if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
1a105fae 1653 ;
60d03123 1654 else if (DECL_EXTERNAL (var))
1655 ;
75fa4f82 1656 else if (DECL_HAS_VALUE_EXPR_P (var))
60d03123 1657 ;
1658 else if (TREE_STATIC (var))
6329636b 1659 ;
e32b531f 1660 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
60d03123 1661 ;
1662 else if (TREE_TYPE (var) == error_mark_node)
5a02d67b 1663 {
1664 if (really_expand)
1665 expand_one_error_var (var);
1666 }
53e9c5c4 1667 else if (VAR_P (var) && DECL_HARD_REGISTER (var))
5a02d67b 1668 {
1669 if (really_expand)
2ea8d869 1670 {
1671 expand_one_hard_reg_var (var);
1672 if (!DECL_HARD_REGISTER (var))
1673 /* Invalid register specification. */
1674 expand_one_error_var (var);
1675 }
5a02d67b 1676 }
60d03123 1677 else if (use_register_for_decl (var))
5a02d67b 1678 {
1679 if (really_expand)
a8dd994c 1680 expand_one_register_var (origvar);
5a02d67b 1681 }
87ff83f0 1682 else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
1683 || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
ce0afe34 1684 {
85d86b55 1685 /* Reject variables which cover more than half of the address-space. */
ce0afe34 1686 if (really_expand)
1687 {
67de90a6 1688 if (DECL_NONLOCAL_FRAME (var))
1689 error_at (DECL_SOURCE_LOCATION (current_function_decl),
1690 "total size of local objects is too large");
1691 else
1692 error_at (DECL_SOURCE_LOCATION (var),
1693 "size of variable %q+D is too large", var);
ce0afe34 1694 expand_one_error_var (var);
1695 }
1696 }
60d03123 1697 else if (defer_stack_allocation (var, toplevel))
a8dd994c 1698 add_stack_var (origvar);
60d03123 1699 else
5a02d67b 1700 {
15083ac3 1701 if (really_expand)
f27f1575 1702 {
1703 if (lookup_attribute ("naked",
1704 DECL_ATTRIBUTES (current_function_decl)))
85b9be9b 1705 error ("cannot allocate stack for variable %q+D, naked function",
f27f1575 1706 var);
1707
1708 expand_one_stack_var (origvar);
1709 }
87ff83f0 1710 return size;
5a02d67b 1711 }
1712 return 0;
60d03123 1713}
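
/* For illustration: a used local "int i" for which use_register_for_decl
   holds is given a pseudo by expand_one_register_var, while a function-scope
   "char buf[100]" is handed to defer_stack_allocation; if that declines
   (e.g. at -O0, as described above), expand_one_stack_var carves out a frame
   slot right away, otherwise the variable is merely recorded with
   add_stack_var and packed later by partition_stack_vars.  */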
1714
1715/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1716 expanding variables. Those variables that can be put into registers
1717 are allocated pseudos; those that can't are put on the stack.
1718
1719 TOPLEVEL is true if this is the outermost BLOCK. */
1720
1721static void
1722expand_used_vars_for_block (tree block, bool toplevel)
1723{
60d03123 1724 tree t;
1725
60d03123 1726 /* Expand all variables at this level. */
1767a056 1727 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
c7c68014 1728 if (TREE_USED (t)
53e9c5c4 1729 && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
c7c68014 1730 || !DECL_NONSHAREABLE (t)))
5a02d67b 1731 expand_one_var (t, toplevel, true);
60d03123 1732
60d03123 1733 /* Expand all variables at containing levels. */
1734 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1735 expand_used_vars_for_block (t, false);
60d03123 1736}
1737
1738/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1739 and clear TREE_USED on all local variables. */
1740
1741static void
1742clear_tree_used (tree block)
1743{
1744 tree t;
1745
1767a056 1746 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
60d03123 1747 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
53e9c5c4 1748 if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
c7c68014 1749 || !DECL_NONSHAREABLE (t))
60d03123 1750 TREE_USED (t) = 0;
1751
1752 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1753 clear_tree_used (t);
1754}
1755
b156ec37 1756enum {
1757 SPCT_FLAG_DEFAULT = 1,
1758 SPCT_FLAG_ALL = 2,
947aa916 1759 SPCT_FLAG_STRONG = 3,
1760 SPCT_FLAG_EXPLICIT = 4
b156ec37 1761};
1762
f1a0edff 1763/* Examine TYPE and determine a bit mask of the following features. */
1764
1765#define SPCT_HAS_LARGE_CHAR_ARRAY 1
1766#define SPCT_HAS_SMALL_CHAR_ARRAY 2
1767#define SPCT_HAS_ARRAY 4
1768#define SPCT_HAS_AGGREGATE 8
1769
1770static unsigned int
1771stack_protect_classify_type (tree type)
1772{
1773 unsigned int ret = 0;
1774 tree t;
1775
1776 switch (TREE_CODE (type))
1777 {
1778 case ARRAY_TYPE:
1779 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1780 if (t == char_type_node
1781 || t == signed_char_type_node
1782 || t == unsigned_char_type_node)
1783 {
b888d9d5 1784 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1785 unsigned HOST_WIDE_INT len;
f1a0edff 1786
b888d9d5 1787 if (!TYPE_SIZE_UNIT (type)
cd4547bf 1788 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
b888d9d5 1789 len = max;
f1a0edff 1790 else
6a0712d4 1791 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
f1a0edff 1792
1793 if (len < max)
1794 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1795 else
1796 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1797 }
1798 else
1799 ret = SPCT_HAS_ARRAY;
1800 break;
1801
1802 case UNION_TYPE:
1803 case QUAL_UNION_TYPE:
1804 case RECORD_TYPE:
1805 ret = SPCT_HAS_AGGREGATE;
1806 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1807 if (TREE_CODE (t) == FIELD_DECL)
1808 ret |= stack_protect_classify_type (TREE_TYPE (t));
1809 break;
1810
1811 default:
1812 break;
1813 }
1814
1815 return ret;
1816}
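
/* For example, assuming the default --param ssp-buffer-size of 8:
   "char buf[64]" classifies as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY,
   "char tag[4]" as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY,
   "int v[16]" as SPCT_HAS_ARRAY only, and a struct containing any of these
   adds SPCT_HAS_AGGREGATE to the bits collected from its fields.  */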
1817
3ce7ff97 1818/* Return nonzero if DECL should be segregated into the "vulnerable" upper
1819 part of the local stack frame. Remember if we ever return nonzero for
f1a0edff 1820 any variable in this function. The return value is the phase number in
1821 which the variable should be allocated. */
1822
1823static int
1824stack_protect_decl_phase (tree decl)
1825{
1826 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1827 int ret = 0;
1828
1829 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1830 has_short_buffer = true;
1831
b156ec37 1832 if (flag_stack_protect == SPCT_FLAG_ALL
947aa916 1833 || flag_stack_protect == SPCT_FLAG_STRONG
1834 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1835 && lookup_attribute ("stack_protect",
1836 DECL_ATTRIBUTES (current_function_decl))))
f1a0edff 1837 {
1838 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1839 && !(bits & SPCT_HAS_AGGREGATE))
1840 ret = 1;
1841 else if (bits & SPCT_HAS_ARRAY)
1842 ret = 2;
1843 }
1844 else
1845 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1846
1847 if (ret)
1848 has_protected_decls = true;
1849
1850 return ret;
1851}
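
/* Continuing the example above: under -fstack-protector-strong (or -all, or
   -fstack-protector-explicit with the attribute) "char buf[64]" and
   "char tag[4]" land in phase 1 and "int v[16]" in phase 2, while under
   plain -fstack-protector only the large character array is segregated
   (phase 1) and everything else stays in phase 0.  */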
1852
1853/* Two helper routines that check for phase 1 and phase 2. These are used
1854 as callbacks for expand_stack_vars. */
1855
1856static bool
3c919612 1857stack_protect_decl_phase_1 (size_t i)
1858{
1859 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1860}
1861
1862static bool
1863stack_protect_decl_phase_2 (size_t i)
f1a0edff 1864{
3c919612 1865 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
f1a0edff 1866}
1867
3c919612 1868/* A helper function that checks for the asan phase (with the stack
         1869   protector it is phase 3).  This is used as a callback for expand_stack_vars.
1870 Returns true if any of the vars in the partition need to be protected. */
1871
f1a0edff 1872static bool
3c919612 1873asan_decl_phase_3 (size_t i)
f1a0edff 1874{
3c919612 1875 while (i != EOC)
1876 {
1877 if (asan_protect_stack_decl (stack_vars[i].decl))
1878 return true;
1879 i = stack_vars[i].next;
1880 }
1881 return false;
f1a0edff 1882}
1883
1884/* Ensure that variables in different stack protection phases conflict
1885 so that they are not merged and share the same stack slot. */
1886
1887static void
1888add_stack_protection_conflicts (void)
1889{
1890 size_t i, j, n = stack_vars_num;
1891 unsigned char *phase;
1892
1893 phase = XNEWVEC (unsigned char, n);
1894 for (i = 0; i < n; ++i)
1895 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1896
1897 for (i = 0; i < n; ++i)
1898 {
1899 unsigned char ph_i = phase[i];
99fade12 1900 for (j = i + 1; j < n; ++j)
f1a0edff 1901 if (ph_i != phase[j])
1902 add_stack_var_conflict (i, j);
1903 }
1904
1905 XDELETEVEC (phase);
1906}
1907
1908/* Create a decl for the guard at the top of the stack frame. */
1909
1910static void
1911create_stack_guard (void)
1912{
e60a6f7b 1913 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1914 VAR_DECL, NULL, ptr_type_node);
f1a0edff 1915 TREE_THIS_VOLATILE (guard) = 1;
1916 TREE_USED (guard) = 1;
1917 expand_one_stack_var (guard);
edb7afe8 1918 crtl->stack_protect_guard = guard;
f1a0edff 1919}
1920
5a02d67b 1921/* Prepare for expanding variables. */
48e1416a 1922static void
5a02d67b 1923init_vars_expansion (void)
1924{
4fb07d00 1925 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1926 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
5a02d67b 1927
4fb07d00 1928 /* A map from decl to stack partition. */
5f8841a5 1929 decl_to_stack_part = new hash_map<tree, size_t>;
5a02d67b 1930
1931 /* Initialize local stack smashing state. */
1932 has_protected_decls = false;
1933 has_short_buffer = false;
1934}
1935
1936/* Free up stack variable graph data. */
1937static void
1938fini_vars_expansion (void)
1939{
4fb07d00 1940 bitmap_obstack_release (&stack_var_bitmap_obstack);
1941 if (stack_vars)
1942 XDELETEVEC (stack_vars);
1943 if (stack_vars_sorted)
1944 XDELETEVEC (stack_vars_sorted);
5a02d67b 1945 stack_vars = NULL;
99fade12 1946 stack_vars_sorted = NULL;
5a02d67b 1947 stack_vars_alloc = stack_vars_num = 0;
5f8841a5 1948 delete decl_to_stack_part;
3c25489e 1949 decl_to_stack_part = NULL;
5a02d67b 1950}
1951
970270ba 1952/* Make a fair guess for the size of the stack frame of the function
 1953   in NODE.  This doesn't have to be exact; the result is only used in
1954 the inline heuristics. So we don't want to run the full stack var
1955 packing algorithm (which is quadratic in the number of stack vars).
1956 Instead, we calculate the total size of all stack vars. This turns
1957 out to be a pretty fair estimate -- packing of stack vars doesn't
1958 happen very often. */
961c8f72 1959
5a02d67b 1960HOST_WIDE_INT
970270ba 1961estimated_stack_frame_size (struct cgraph_node *node)
5a02d67b 1962{
87ff83f0 1963 poly_int64 size = 0;
961c8f72 1964 size_t i;
649597af 1965 tree var;
02774f2d 1966 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
970270ba 1967
649597af 1968 push_cfun (fn);
5a02d67b 1969
4fb07d00 1970 init_vars_expansion ();
1971
24ccd9c6 1972 FOR_EACH_LOCAL_DECL (fn, i, var)
1973 if (auto_var_in_fn_p (var, fn->decl))
1974 size += expand_one_var (var, true, false);
961c8f72 1975
5a02d67b 1976 if (stack_vars_num > 0)
1977 {
961c8f72 1978 /* Fake sorting the stack vars for account_stack_vars (). */
1979 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1980 for (i = 0; i < stack_vars_num; ++i)
1981 stack_vars_sorted[i] = i;
5a02d67b 1982 size += account_stack_vars ();
5a02d67b 1983 }
4fb07d00 1984
1985 fini_vars_expansion ();
73b46517 1986 pop_cfun ();
87ff83f0 1987 return estimated_poly_value (size);
5a02d67b 1988}
1989
b156ec37 1990/* Helper routine to check if a record or union contains an array field. */
1991
1992static int
1993record_or_union_type_has_array_p (const_tree tree_type)
1994{
1995 tree fields = TYPE_FIELDS (tree_type);
1996 tree f;
1997
1998 for (f = fields; f; f = DECL_CHAIN (f))
1999 if (TREE_CODE (f) == FIELD_DECL)
2000 {
2001 tree field_type = TREE_TYPE (f);
2002 if (RECORD_OR_UNION_TYPE_P (field_type)
2003 && record_or_union_type_has_array_p (field_type))
2004 return 1;
2005 if (TREE_CODE (field_type) == ARRAY_TYPE)
2006 return 1;
2007 }
2008 return 0;
2009}
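
/* E.g. "struct s { int n; char buf[8]; }" yields 1, as does
   "struct t { struct s inner; }" through the recursion on field types,
   while "struct u { int n; double d; }" yields 0.  */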
2010
ec4af1be 2011/* Check if the current function has local referenced variables that
2012 have their addresses taken, contain an array, or are arrays. */
2013
2014static bool
2015stack_protect_decl_p ()
2016{
2017 unsigned i;
2018 tree var;
2019
2020 FOR_EACH_LOCAL_DECL (cfun, i, var)
2021 if (!is_global_var (var))
2022 {
2023 tree var_type = TREE_TYPE (var);
53e9c5c4 2024 if (VAR_P (var)
ec4af1be 2025 && (TREE_CODE (var_type) == ARRAY_TYPE
2026 || TREE_ADDRESSABLE (var)
2027 || (RECORD_OR_UNION_TYPE_P (var_type)
2028 && record_or_union_type_has_array_p (var_type))))
2029 return true;
2030 }
2031 return false;
2032}
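
/* E.g. a local "char name[32]", a local whose address is taken (and is
   therefore TREE_ADDRESSABLE), or a local struct containing an array all
   make this return true, which under -fstack-protector-strong leads to a
   stack guard being created for the function (see expand_used_vars).  */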
2033
2034/* Check if the current function has calls that use a return slot. */
2035
2036static bool
2037stack_protect_return_slot_p ()
2038{
2039 basic_block bb;
2040
2041 FOR_ALL_BB_FN (bb, cfun)
2042 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2043 !gsi_end_p (gsi); gsi_next (&gsi))
2044 {
42acab1c 2045 gimple *stmt = gsi_stmt (gsi);
ec4af1be 2046 /* This assumes that calls to internal-only functions never
2047 use a return slot. */
2048 if (is_gimple_call (stmt)
2049 && !gimple_call_internal_p (stmt)
2050 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2051 gimple_call_fndecl (stmt)))
2052 return true;
2053 }
2054 return false;
2055}
2056
60d03123 2057/* Expand all variables used in the function. */
280450fa 2058
74a0cbc4 2059static rtx_insn *
280450fa 2060expand_used_vars (void)
2061{
2ab2ce89 2062 tree var, outer_block = DECL_INITIAL (current_function_decl);
e9258aee 2063 auto_vec<tree> maybe_local_decls;
74a0cbc4 2064 rtx_insn *var_end_seq = NULL;
a8dd994c 2065 unsigned i;
2ab2ce89 2066 unsigned len;
b156ec37 2067 bool gen_stack_protect_signal = false;
280450fa 2068
60d03123 2069 /* Compute the phase of the stack frame for this function. */
2070 {
2071 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
8374586c 2072 int off = targetm.starting_frame_offset () % align;
60d03123 2073 frame_phase = off ? align - off : 0;
2074 }
280450fa 2075
4fb07d00 2076 /* Set TREE_USED on all variables in the local_decls. */
2077 FOR_EACH_LOCAL_DECL (cfun, i, var)
2078 TREE_USED (var) = 1;
2079 /* Clear TREE_USED on all variables associated with a block scope. */
2080 clear_tree_used (DECL_INITIAL (current_function_decl));
2081
5a02d67b 2082 init_vars_expansion ();
f1a0edff 2083
ab0f939c 2084 if (targetm.use_pseudo_pic_reg ())
2085 pic_offset_table_rtx = gen_reg_rtx (Pmode);
2086
a8dd994c 2087 for (i = 0; i < SA.map->num_partitions; i++)
2088 {
b2df3bbf 2089 if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2090 continue;
2091
a8dd994c 2092 tree var = partition_to_var (SA.map, i);
2093
7c782c9b 2094 gcc_assert (!virtual_operand_p (var));
ec11736b 2095
94f92c36 2096 expand_one_ssa_partition (var);
bcde57a0 2097 }
f22255e7 2098
b156ec37 2099 if (flag_stack_protect == SPCT_FLAG_STRONG)
ec4af1be 2100 gen_stack_protect_signal
2101 = stack_protect_decl_p () || stack_protect_return_slot_p ();
b156ec37 2102
edb7afe8 2103 /* At this point all variables on the local_decls with TREE_USED
60d03123 2104 set are not associated with any block scope. Lay them out. */
2ab2ce89 2105
f1f41a6c 2106 len = vec_safe_length (cfun->local_decls);
2ab2ce89 2107 FOR_EACH_LOCAL_DECL (cfun, i, var)
60d03123 2108 {
60d03123 2109 bool expand_now = false;
2110
a8dd994c 2111 /* Expanded above already. */
2112 if (is_gimple_reg (var))
e32b531f 2113 {
2114 TREE_USED (var) = 0;
a45d3ce3 2115 goto next;
e32b531f 2116 }
60d03123 2117 /* We didn't set a block for static or extern because it's hard
2118 to tell the difference between a global variable (re)declared
2119 in a local scope, and one that's really declared there to
2120 begin with. And it doesn't really matter much, since we're
2121 not giving them stack space. Expand them now. */
a8dd994c 2122 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
60d03123 2123 expand_now = true;
2124
da4b9ed5 2125 /* Expand variables not associated with any block now. Those created by
2126 the optimizers could be live anywhere in the function. Those that
2127 could possibly have been scoped originally and detached from their
2128 block will have their allocation deferred so we coalesce them with
2129 others when optimization is enabled. */
60d03123 2130 else if (TREE_USED (var))
2131 expand_now = true;
2132
2133 /* Finally, mark all variables on the list as used. We'll use
2134 this in a moment when we expand those associated with scopes. */
2135 TREE_USED (var) = 1;
2136
2137 if (expand_now)
a45d3ce3 2138 expand_one_var (var, true, true);
2139
2140 next:
2141 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
78fa9ba7 2142 {
a45d3ce3 2143 rtx rtl = DECL_RTL_IF_SET (var);
2144
2145 /* Keep artificial non-ignored vars in cfun->local_decls
2146 chain until instantiate_decls. */
2147 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2ab2ce89 2148 add_local_decl (cfun, var);
257b4da1 2149 else if (rtl == NULL_RTX)
2ab2ce89 2150 /* If rtl isn't set yet, which can happen e.g. with
2151 -fstack-protector, retry before returning from this
2152 function. */
f1f41a6c 2153 maybe_local_decls.safe_push (var);
78fa9ba7 2154 }
60d03123 2155 }
60d03123 2156
2ab2ce89 2157 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2158
2159 +-----------------+-----------------+
2160 | ...processed... | ...duplicates...|
2161 +-----------------+-----------------+
2162 ^
2163 +-- LEN points here.
2164
2165 We just want the duplicates, as those are the artificial
2166 non-ignored vars that we want to keep until instantiate_decls.
2167 Move them down and truncate the array. */
f1f41a6c 2168 if (!vec_safe_is_empty (cfun->local_decls))
2169 cfun->local_decls->block_remove (0, len);
2ab2ce89 2170
60d03123 2171 /* At this point, all variables within the block tree with TREE_USED
2172 set are actually used by the optimized function. Lay them out. */
2173 expand_used_vars_for_block (outer_block, true);
2174
2175 if (stack_vars_num > 0)
2176 {
3c25489e 2177 add_scope_conflicts ();
60d03123 2178
a0c938f0 2179 /* If stack protection is enabled, we don't share space between
f1a0edff 2180 vulnerable data and non-vulnerable data. */
947aa916 2181 if (flag_stack_protect != 0
2182 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2183 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2184 && lookup_attribute ("stack_protect",
2185 DECL_ATTRIBUTES (current_function_decl)))))
f1a0edff 2186 add_stack_protection_conflicts ();
2187
a0c938f0 2188 /* Now that we have collected all stack variables, and have computed a
60d03123 2189 minimal interference graph, attempt to save some stack space. */
2190 partition_stack_vars ();
2191 if (dump_file)
2192 dump_stack_var_partition ();
f1a0edff 2193 }
2194
b156ec37 2195 switch (flag_stack_protect)
2196 {
2197 case SPCT_FLAG_ALL:
2198 create_stack_guard ();
2199 break;
2200
2201 case SPCT_FLAG_STRONG:
2202 if (gen_stack_protect_signal
947aa916 2203 || cfun->calls_alloca || has_protected_decls
2204 || lookup_attribute ("stack_protect",
2205 DECL_ATTRIBUTES (current_function_decl)))
b156ec37 2206 create_stack_guard ();
2207 break;
2208
2209 case SPCT_FLAG_DEFAULT:
947aa916 2210 if (cfun->calls_alloca || has_protected_decls
2211 || lookup_attribute ("stack_protect",
2212 DECL_ATTRIBUTES (current_function_decl)))
9af5ce0c 2213 create_stack_guard ();
b156ec37 2214 break;
2215
947aa916 2216 case SPCT_FLAG_EXPLICIT:
2217 if (lookup_attribute ("stack_protect",
2218 DECL_ATTRIBUTES (current_function_decl)))
2219 create_stack_guard ();
2220 break;
b156ec37 2221 default:
2222 ;
2223 }
60d03123 2224
f1a0edff 2225 /* Assign rtl to each variable based on these partitions. */
2226 if (stack_vars_num > 0)
2227 {
3c919612 2228 struct stack_vars_data data;
2229
683539f6 2230 data.asan_base = NULL_RTX;
2231 data.asan_alignb = 0;
3c919612 2232
f1a0edff 2233 /* Reorder decls to be protected by iterating over the variables
2234 array multiple times, and allocating out of each phase in turn. */
a0c938f0 2235 /* ??? We could probably integrate this into the qsort we did
f1a0edff 2236 earlier, such that we naturally see these variables first,
2237 and thus naturally allocate things in the right order. */
2238 if (has_protected_decls)
2239 {
2240 /* Phase 1 contains only character arrays. */
3c919612 2241 expand_stack_vars (stack_protect_decl_phase_1, &data);
f1a0edff 2242
2243 /* Phase 2 contains other kinds of arrays. */
947aa916 2244 if (flag_stack_protect == SPCT_FLAG_ALL
2245 || flag_stack_protect == SPCT_FLAG_STRONG
2246 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2247 && lookup_attribute ("stack_protect",
2248 DECL_ATTRIBUTES (current_function_decl))))
3c919612 2249 expand_stack_vars (stack_protect_decl_phase_2, &data);
f1a0edff 2250 }
2251
2bc9f5c6 2252 if (asan_sanitize_stack_p ())
3c919612 2253 /* Phase 3, any partitions that need asan protection
2254 in addition to phase 1 and 2. */
2255 expand_stack_vars (asan_decl_phase_3, &data);
2256
85aa2f28 2257 /* ASAN description strings don't yet have a syntax for expressing
2258 polynomial offsets. */
2259 HOST_WIDE_INT prev_offset;
2260 if (!data.asan_vec.is_empty ()
2261 && frame_offset.is_constant (&prev_offset))
3c919612 2262 {
683539f6 2263 HOST_WIDE_INT offset, sz, redzonesz;
2264 redzonesz = ASAN_RED_ZONE_SIZE;
2265 sz = data.asan_vec[0] - prev_offset;
2266 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2267 && data.asan_alignb <= 4096
c8c66351 2268 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
683539f6 2269 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2270 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
85aa2f28 2271 /* Allocating a constant amount of space from a constant
2272 starting offset must give a constant result. */
2273 offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
2274 .to_constant ());
f1f41a6c 2275 data.asan_vec.safe_push (prev_offset);
2276 data.asan_vec.safe_push (offset);
f89175bb 2277 /* Leave space for alignment if STRICT_ALIGNMENT. */
2278 if (STRICT_ALIGNMENT)
2279 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2280 << ASAN_SHADOW_SHIFT)
2281 / BITS_PER_UNIT, 1);
3c919612 2282
2283 var_end_seq
2284 = asan_emit_stack_protection (virtual_stack_vars_rtx,
683539f6 2285 data.asan_base,
2286 data.asan_alignb,
f1f41a6c 2287 data.asan_vec.address (),
683539f6 2288 data.asan_decl_vec.address (),
f1f41a6c 2289 data.asan_vec.length ());
3c919612 2290 }
2291
2292 expand_stack_vars (NULL, &data);
60d03123 2293 }
2294
77c44489 2295 if (asan_sanitize_allocas_p () && cfun->calls_alloca)
d08919a7 2296 var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
2297 virtual_stack_vars_rtx,
2298 var_end_seq);
2299
4fb07d00 2300 fini_vars_expansion ();
2301
257b4da1 2302 /* If there were any artificial non-ignored vars without rtl
2303 found earlier, see if deferred stack allocation hasn't assigned
2304 rtl to them. */
f1f41a6c 2305 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
257b4da1 2306 {
257b4da1 2307 rtx rtl = DECL_RTL_IF_SET (var);
2308
257b4da1 2309 /* Keep artificial non-ignored vars in cfun->local_decls
2310 chain until instantiate_decls. */
2311 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2ab2ce89 2312 add_local_decl (cfun, var);
257b4da1 2313 }
2314
60d03123 2315 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2316 if (STACK_ALIGNMENT_NEEDED)
2317 {
2318 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
85aa2f28 2319 if (FRAME_GROWS_DOWNWARD)
2320 frame_offset = aligned_lower_bound (frame_offset, align);
2321 else
2322 frame_offset = aligned_upper_bound (frame_offset, align);
60d03123 2323 }
3c919612 2324
2325 return var_end_seq;
280450fa 2326}
2327
2328
49377e21 2329/* If we need to produce a detailed dump, print the tree representation
2330 for STMT to the dump file. SINCE is the last RTX after which the RTL
2331 generated for STMT should have been appended. */
2332
2333static void
42acab1c 2334maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
49377e21 2335{
2336 if (dump_file && (dump_flags & TDF_DETAILS))
2337 {
2338 fprintf (dump_file, "\n;; ");
9845d120 2339 print_gimple_stmt (dump_file, stmt, 0,
2340 TDF_SLIM | (dump_flags & TDF_LINENO));
49377e21 2341 fprintf (dump_file, "\n");
2342
2343 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2344 }
2345}
2346
6313ae8b 2347/* Maps the blocks that do not contain tree labels to rtx labels. */
2348
0699065d 2349static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
6313ae8b 2350
63f88450 2351/* Returns the label_rtx expression for a label starting basic block BB. */
2352
f9a00e9e 2353static rtx_code_label *
75a70cf9 2354label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
63f88450 2355{
75a70cf9 2356 gimple_stmt_iterator gsi;
2357 tree lab;
63f88450 2358
2359 if (bb->flags & BB_RTL)
2360 return block_label (bb);
2361
0699065d 2362 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
6313ae8b 2363 if (elt)
5f8841a5 2364 return *elt;
6313ae8b 2365
2366 /* Find the tree label if it is present. */
48e1416a 2367
75a70cf9 2368 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
63f88450 2369 {
1a91d914 2370 glabel *lab_stmt;
2371
2372 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2373 if (!lab_stmt)
63f88450 2374 break;
2375
75a70cf9 2376 lab = gimple_label_label (lab_stmt);
63f88450 2377 if (DECL_NONLOCAL (lab))
2378 break;
2379
f9a00e9e 2380 return jump_target_rtx (lab);
63f88450 2381 }
2382
79f6a8ed 2383 rtx_code_label *l = gen_label_rtx ();
5f8841a5 2384 lab_rtx_for_bb->put (bb, l);
2385 return l;
63f88450 2386}
2387
75a70cf9 2388
f800c469 2389/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2390 of a basic block where we just expanded the conditional at the end,
ee0f3895 2391 possibly clean up the CFG and instruction sequence. LAST is the
2392 last instruction before the just emitted jump sequence. */
f800c469 2393
2394static void
74a0cbc4 2395maybe_cleanup_end_of_block (edge e, rtx_insn *last)
f800c469 2396{
2397 /* Special case: when jumpif decides that the condition is
2398 trivial it emits an unconditional jump (and the necessary
2399 barrier). But we still have two edges, the fallthru one is
2400 wrong. purge_dead_edges would clean this up later. Unfortunately
2401 we have to insert insns (and split edges) before
2402 find_many_sub_basic_blocks and hence before purge_dead_edges.
2403 But splitting edges might create new blocks which depend on the
2404 fact that if there are two edges there's no barrier. So the
2405 barrier would get lost and verify_flow_info would ICE. Instead
2406 of auditing all edge splitters to care for the barrier (which
2407 normally isn't there in a cleaned CFG), fix it here. */
2408 if (BARRIER_P (get_last_insn ()))
2409 {
74a0cbc4 2410 rtx_insn *insn;
f800c469 2411 remove_edge (e);
2412 /* Now, we have a single successor block, if we have insns to
2413 insert on the remaining edge we potentially will insert
2414 it at the end of this block (if the dest block isn't feasible)
2415 in order to avoid splitting the edge. This insertion will take
2416 place in front of the last jump. But we might have emitted
2417 multiple jumps (conditional and one unconditional) to the
2418 same destination. Inserting in front of the last one then
2419 is a problem. See PR 40021. We fix this by deleting all
2420 jumps except the last unconditional one. */
2421 insn = PREV_INSN (get_last_insn ());
2422 /* Make sure we have an unconditional jump. Otherwise we're
2423 confused. */
2424 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
ee0f3895 2425 for (insn = PREV_INSN (insn); insn != last;)
f800c469 2426 {
2427 insn = PREV_INSN (insn);
2428 if (JUMP_P (NEXT_INSN (insn)))
2755d767 2429 {
46a5816d 2430 if (!any_condjump_p (NEXT_INSN (insn)))
2755d767 2431 {
2432 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2433 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2434 }
2435 delete_insn (NEXT_INSN (insn));
2436 }
f800c469 2437 }
2438 }
2439}
2440
75a70cf9 2441/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
3ced8962 2442 Returns a new basic block if we've terminated the current basic
2443 block and created a new one. */
2444
2445static basic_block
1a91d914 2446expand_gimple_cond (basic_block bb, gcond *stmt)
3ced8962 2447{
2448 basic_block new_bb, dest;
3ced8962 2449 edge true_edge;
2450 edge false_edge;
74a0cbc4 2451 rtx_insn *last2, *last;
16c9337c 2452 enum tree_code code;
2453 tree op0, op1;
2454
2455 code = gimple_cond_code (stmt);
2456 op0 = gimple_cond_lhs (stmt);
2457 op1 = gimple_cond_rhs (stmt);
2458 /* We're sometimes presented with such code:
2459 D.123_1 = x < y;
2460 if (D.123_1 != 0)
2461 ...
2462 This would expand to two comparisons which then later might
2463 be cleaned up by combine. But some pattern matchers like if-conversion
2464 work better when there's only one compare, so make up for this
 2465      here as a special exception if TER would have made the same change.  */
9532a315 2466 if (SA.values
16c9337c 2467 && TREE_CODE (op0) == SSA_NAME
9532a315 2468 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2469 && TREE_CODE (op1) == INTEGER_CST
2470 && ((gimple_cond_code (stmt) == NE_EXPR
2471 && integer_zerop (op1))
2472 || (gimple_cond_code (stmt) == EQ_EXPR
2473 && integer_onep (op1)))
16c9337c 2474 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2475 {
42acab1c 2476 gimple *second = SSA_NAME_DEF_STMT (op0);
5905fb26 2477 if (gimple_code (second) == GIMPLE_ASSIGN)
16c9337c 2478 {
5905fb26 2479 enum tree_code code2 = gimple_assign_rhs_code (second);
2480 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2481 {
2482 code = code2;
2483 op0 = gimple_assign_rhs1 (second);
2484 op1 = gimple_assign_rhs2 (second);
2485 }
01ee997b 2486 /* If jumps are cheap and the target does not support conditional
2487 compare, turn some more codes into jumpy sequences. */
2488 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2489 && targetm.gen_ccmp_first == NULL)
5905fb26 2490 {
2491 if ((code2 == BIT_AND_EXPR
2492 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2493 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2494 || code2 == TRUTH_AND_EXPR)
2495 {
2496 code = TRUTH_ANDIF_EXPR;
2497 op0 = gimple_assign_rhs1 (second);
2498 op1 = gimple_assign_rhs2 (second);
2499 }
2500 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2501 {
2502 code = TRUTH_ORIF_EXPR;
2503 op0 = gimple_assign_rhs1 (second);
2504 op1 = gimple_assign_rhs2 (second);
2505 }
2506 }
16c9337c 2507 }
2508 }
49377e21 2509
7c8e9b7d 2510 /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
2511 into (x - C2) * C3 < C4. */
2512 if ((code == EQ_EXPR || code == NE_EXPR)
2513 && TREE_CODE (op0) == SSA_NAME
2514 && TREE_CODE (op1) == INTEGER_CST)
2515 code = maybe_optimize_mod_cmp (code, &op0, &op1);
2516
49377e21 2517 last2 = last = get_last_insn ();
3ced8962 2518
2519 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5169661d 2520 set_curr_insn_location (gimple_location (stmt));
3ced8962 2521
2522 /* These flags have no purpose in RTL land. */
2523 true_edge->flags &= ~EDGE_TRUE_VALUE;
2524 false_edge->flags &= ~EDGE_FALSE_VALUE;
2525
2526 /* We can either have a pure conditional jump with one fallthru edge or
2527 two-way jump that needs to be decomposed into two basic blocks. */
63f88450 2528 if (false_edge->dest == bb->next_bb)
3ced8962 2529 {
79ab74cc 2530 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2531 true_edge->probability);
75a70cf9 2532 maybe_dump_rtl_for_gimple_stmt (stmt, last);
8e7408e3 2533 if (true_edge->goto_locus != UNKNOWN_LOCATION)
5169661d 2534 set_curr_insn_location (true_edge->goto_locus);
63f88450 2535 false_edge->flags |= EDGE_FALLTHRU;
ee0f3895 2536 maybe_cleanup_end_of_block (false_edge, last);
3ced8962 2537 return NULL;
2538 }
63f88450 2539 if (true_edge->dest == bb->next_bb)
3ced8962 2540 {
79ab74cc 2541 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2542 false_edge->probability);
75a70cf9 2543 maybe_dump_rtl_for_gimple_stmt (stmt, last);
8e7408e3 2544 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5169661d 2545 set_curr_insn_location (false_edge->goto_locus);
63f88450 2546 true_edge->flags |= EDGE_FALLTHRU;
ee0f3895 2547 maybe_cleanup_end_of_block (true_edge, last);
3ced8962 2548 return NULL;
2549 }
3ced8962 2550
79ab74cc 2551 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2552 true_edge->probability);
3ced8962 2553 last = get_last_insn ();
8e7408e3 2554 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5169661d 2555 set_curr_insn_location (false_edge->goto_locus);
63f88450 2556 emit_jump (label_rtx_for_bb (false_edge->dest));
3ced8962 2557
26bb3cb2 2558 BB_END (bb) = last;
3ced8962 2559 if (BARRIER_P (BB_END (bb)))
26bb3cb2 2560 BB_END (bb) = PREV_INSN (BB_END (bb));
3ced8962 2561 update_bb_for_insn (bb);
2562
2563 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2564 dest = false_edge->dest;
2565 redirect_edge_succ (false_edge, new_bb);
2566 false_edge->flags |= EDGE_FALLTHRU;
ea5d3981 2567 new_bb->count = false_edge->count ();
d1af79c6 2568 loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2569 add_bb_to_loop (new_bb, loop);
2570 if (loop->latch == bb
2571 && loop->header == dest)
2572 loop->latch = new_bb;
720cfc43 2573 make_single_succ_edge (new_bb, dest, 0);
3ced8962 2574 if (BARRIER_P (BB_END (new_bb)))
26bb3cb2 2575 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
3ced8962 2576 update_bb_for_insn (new_bb);
2577
75a70cf9 2578 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
a0c938f0 2579
8e7408e3 2580 if (true_edge->goto_locus != UNKNOWN_LOCATION)
c4ad3297 2581 {
5169661d 2582 set_curr_insn_location (true_edge->goto_locus);
2583 true_edge->goto_locus = curr_insn_location ();
c4ad3297 2584 }
c4ad3297 2585
3ced8962 2586 return new_bb;
2587}
2588
4c0315d0 2589/* Mark all calls that can have a transaction restart. */
2590
2591static void
42acab1c 2592mark_transaction_restart_calls (gimple *stmt)
4c0315d0 2593{
2594 struct tm_restart_node dummy;
b7aa58e4 2595 tm_restart_node **slot;
4c0315d0 2596
2597 if (!cfun->gimple_df->tm_restart)
2598 return;
2599
2600 dummy.stmt = stmt;
b7aa58e4 2601 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
4c0315d0 2602 if (slot)
2603 {
b7aa58e4 2604 struct tm_restart_node *n = *slot;
4c0315d0 2605 tree list = n->label_or_list;
74a0cbc4 2606 rtx_insn *insn;
4c0315d0 2607
2608 for (insn = next_real_insn (get_last_insn ());
2609 !CALL_P (insn);
2610 insn = next_real_insn (insn))
2611 continue;
2612
2613 if (TREE_CODE (list) == LABEL_DECL)
2614 add_reg_note (insn, REG_TM, label_rtx (list));
2615 else
2616 for (; list ; list = TREE_CHAIN (list))
2617 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2618 }
2619}
2620
16c9337c 2621/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2622 statement STMT. */
2623
2624static void
1a91d914 2625expand_call_stmt (gcall *stmt)
16c9337c 2626{
fb049fba 2627 tree exp, decl, lhs;
facbb5c4 2628 bool builtin_p;
a967d5e5 2629 size_t i;
16c9337c 2630
fb049fba 2631 if (gimple_call_internal_p (stmt))
2632 {
2633 expand_internal_call (stmt);
2634 return;
2635 }
2636
ae62deea 2637 /* If this is a call to a built-in function and it has no effect other
2638 than setting the lhs, try to implement it using an internal function
2639 instead. */
2640 decl = gimple_call_fndecl (stmt);
2641 if (gimple_call_lhs (stmt)
2642 && !gimple_has_side_effects (stmt)
2643 && (optimize || (decl && called_as_built_in (decl))))
2644 {
2645 internal_fn ifn = replacement_internal_fn (stmt);
2646 if (ifn != IFN_LAST)
2647 {
2648 expand_internal_call (ifn, stmt);
2649 return;
2650 }
2651 }
2652
0fcb889c 2653 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
bbc26dcc 2654
0fcb889c 2655 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
a0e9bfbb 2656 builtin_p = decl && fndecl_built_in_p (decl);
0fcb889c 2657
a967d5e5 2658 /* If this is not a builtin function, the function type through which the
2659 call is made may be different from the type of the function. */
2660 if (!builtin_p)
2661 CALL_EXPR_FN (exp)
317bd3b6 2662 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2663 CALL_EXPR_FN (exp));
a967d5e5 2664
16c9337c 2665 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2666 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2667
2668 for (i = 0; i < gimple_call_num_args (stmt); i++)
facbb5c4 2669 {
2670 tree arg = gimple_call_arg (stmt, i);
42acab1c 2671 gimple *def;
facbb5c4 2672 /* TER addresses into arguments of builtin functions so we have a
2673 chance to infer more correct alignment information. See PR39954. */
2674 if (builtin_p
2675 && TREE_CODE (arg) == SSA_NAME
2676 && (def = get_gimple_for_ssa_name (arg))
2677 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2678 arg = gimple_assign_rhs1 (def);
2679 CALL_EXPR_ARG (exp, i) = arg;
2680 }
16c9337c 2681
e1ac6f35 2682 if (gimple_has_side_effects (stmt))
16c9337c 2683 TREE_SIDE_EFFECTS (exp) = 1;
2684
e1ac6f35 2685 if (gimple_call_nothrow_p (stmt))
16c9337c 2686 TREE_NOTHROW (exp) = 1;
2687
e6a18b5a 2688 if (gimple_no_warning_p (stmt))
2689 TREE_NO_WARNING (exp) = 1;
2690
16c9337c 2691 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
b4a61e77 2692 CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
16c9337c 2693 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
a882d754 2694 if (decl
a0e9bfbb 2695 && fndecl_built_in_p (decl, BUILT_IN_NORMAL)
2b34677f 2696 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
a882d754 2697 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2698 else
2699 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
16c9337c 2700 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
a27e3913 2701 CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
16c9337c 2702 SET_EXPR_LOCATION (exp, gimple_location (stmt));
16c9337c 2703
841424cc 2704 /* Ensure RTL is created for debug args. */
2705 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2706 {
f1f41a6c 2707 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
841424cc 2708 unsigned int ix;
2709 tree dtemp;
2710
2711 if (debug_args)
f1f41a6c 2712 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
841424cc 2713 {
2714 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2715 expand_debug_expr (dtemp);
2716 }
2717 }
2718
3c0f15b4 2719 rtx_insn *before_call = get_last_insn ();
fb049fba 2720 lhs = gimple_call_lhs (stmt);
16c9337c 2721 if (lhs)
2722 expand_assignment (lhs, exp, false);
2723 else
a12f023f 2724 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4c0315d0 2725
3c0f15b4 2726 /* If the gimple call is an indirect call and has 'nocf_check'
2727 attribute find a generated CALL insn to mark it as no
2728 control-flow verification is needed. */
2729 if (gimple_call_nocf_check_p (stmt)
2730 && !gimple_call_fndecl (stmt))
2731 {
2732 rtx_insn *last = get_last_insn ();
2733 while (!CALL_P (last)
2734 && last != before_call)
2735 last = PREV_INSN (last);
2736
2737 if (last != before_call)
2738 add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
2739 }
2740
4c0315d0 2741 mark_transaction_restart_calls (stmt);
16c9337c 2742}
2743
0e80b01d 2744
2745/* Generate RTL for an asm statement (explicit assembler code).
2746 STRING is a STRING_CST node containing the assembler code text,
2747 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2748 insn is volatile; don't optimize it. */
2749
2750static void
2751expand_asm_loc (tree string, int vol, location_t locus)
2752{
2753 rtx body;
2754
0e80b01d 2755 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2756 ggc_strdup (TREE_STRING_POINTER (string)),
2757 locus);
2758
2759 MEM_VOLATILE_P (body) = vol;
2760
43ac2f2f 2761 /* Non-empty basic ASM implicitly clobbers memory. */
2762 if (TREE_STRING_LENGTH (string) != 0)
2763 {
2764 rtx asm_op, clob;
2765 unsigned i, nclobbers;
2766 auto_vec<rtx> input_rvec, output_rvec;
2767 auto_vec<const char *> constraints;
2768 auto_vec<rtx> clobber_rvec;
2769 HARD_REG_SET clobbered_regs;
2770 CLEAR_HARD_REG_SET (clobbered_regs);
2771
2772 clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2773 clobber_rvec.safe_push (clob);
2774
2775 if (targetm.md_asm_adjust)
2776 targetm.md_asm_adjust (output_rvec, input_rvec,
2777 constraints, clobber_rvec,
2778 clobbered_regs);
2779
2780 asm_op = body;
2781 nclobbers = clobber_rvec.length ();
2782 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2783
2784 XVECEXP (body, 0, 0) = asm_op;
2785 for (i = 0; i < nclobbers; i++)
2786 XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2787 }
2788
0e80b01d 2789 emit_insn (body);
2790}
2791
2792/* Return the number of times character C occurs in string S. */
2793static int
2794n_occurrences (int c, const char *s)
2795{
2796 int n = 0;
2797 while (*s)
2798 n += (*s++ == c);
2799 return n;
2800}
2801
2802/* A subroutine of expand_asm_operands. Check that all operands have
2803 the same number of alternatives. Return true if so. */
2804
2805static bool
2af3d775 2806check_operand_nalternatives (const vec<const char *> &constraints)
0e80b01d 2807{
2af3d775 2808 unsigned len = constraints.length();
2809 if (len > 0)
0e80b01d 2810 {
2af3d775 2811 int nalternatives = n_occurrences (',', constraints[0]);
0e80b01d 2812
2813 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2814 {
2815 error ("too many alternatives in %<asm%>");
2816 return false;
2817 }
2818
2af3d775 2819 for (unsigned i = 1; i < len; ++i)
2820 if (n_occurrences (',', constraints[i]) != nalternatives)
2821 {
2822 error ("operand constraints for %<asm%> differ "
2823 "in number of alternatives");
2824 return false;
2825 }
0e80b01d 2826 }
0e80b01d 2827 return true;
2828}
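
/* E.g. the constraint strings {"=r,m", "r,r"} agree (two alternatives each),
   whereas {"=r,m", "r"} would be rejected with the "differ in number of
   alternatives" error above.  */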
2829
2830/* Check for overlap between registers marked in CLOBBERED_REGS and
2831 anything inappropriate in T. Emit error and return the register
2832 variable definition for error, NULL_TREE for ok. */
2833
2834static bool
2835tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2836{
2837 /* Conflicts between asm-declared register variables and the clobber
2838 list are not allowed. */
2839 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2840
2841 if (overlap)
2842 {
85b9be9b 2843 error ("%<asm%> specifier for variable %qE conflicts with "
2844 "%<asm%> clobber list",
0e80b01d 2845 DECL_NAME (overlap));
2846
2847 /* Reset registerness to stop multiple errors emitted for a single
2848 variable. */
2849 DECL_REGISTER (overlap) = 0;
2850 return true;
2851 }
2852
2853 return false;
2854}
2855
7675b215 2856/* Check that the given REGNO spanning NREGS is a valid
2857 asm clobber operand. Some HW registers cannot be
2858 saved/restored, hence they should not be clobbered by
2859 asm statements. */
2860static bool
2861asm_clobber_reg_is_valid (int regno, int nregs, const char *regname)
2862{
2863 bool is_valid = true;
2864 HARD_REG_SET regset;
2865
2866 CLEAR_HARD_REG_SET (regset);
2867
2868 add_range_to_hard_reg_set (&regset, regno, nregs);
2869
2870 /* Clobbering the PIC register is an error. */
2871 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
2872 && overlaps_hard_reg_set_p (regset, Pmode, PIC_OFFSET_TABLE_REGNUM))
2873 {
2874 /* ??? Diagnose during gimplification? */
2875 error ("PIC register clobbered by %qs in %<asm%>", regname);
2876 is_valid = false;
2877 }
b16ca977 2878 else if (!in_hard_reg_set_p
2879 (accessible_reg_set, reg_raw_mode[regno], regno))
2880 {
2881 /* ??? Diagnose during gimplification? */
2882 error ("the register %qs cannot be clobbered in %<asm%>"
2883 " for the current target", regname);
2884 is_valid = false;
2885 }
2886
9d1cdb74 2887 /* Clobbering the stack pointer register is deprecated. GCC expects
2888 the value of the stack pointer after an asm statement to be the same
2889 as it was before, so no asm can validly clobber the stack pointer in
2890 the usual sense. Adding the stack pointer to the clobber list has
2891 traditionally had some undocumented and somewhat obscure side-effects. */
2892 if (overlaps_hard_reg_set_p (regset, Pmode, STACK_POINTER_REGNUM)
2893 && warning (OPT_Wdeprecated, "listing the stack pointer register"
2894 " %qs in a clobber list is deprecated", regname))
2895 inform (input_location, "the value of the stack pointer after an %<asm%>"
2896 " statement must be the same as it was before the statement");
7675b215 2897
2898 return is_valid;
2899}
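
/* E.g. an asm whose clobber list names the PIC register (when one is in use)
   is rejected with an error, while naming the stack pointer register is
   still accepted but draws the -Wdeprecated warning above.  */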
2900
0e80b01d 2901/* Generate RTL for an asm statement with arguments.
2902 STRING is the instruction template.
2903 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2904 Each output or input has an expression in the TREE_VALUE and
2905 a tree list in TREE_PURPOSE which in turn contains a constraint
2906 name in TREE_VALUE (or NULL_TREE) and a constraint string
2907 in TREE_PURPOSE.
2908 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2909 that is clobbered by this insn.
2910
2911 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2912 should be the fallthru basic block of the asm goto.
2913
2914 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2915 Some elements of OUTPUTS may be replaced with trees representing temporary
2916 values. The caller should copy those temporary values to the originally
2917 specified lvalues.
2918
2919 VOL nonzero means the insn is volatile; don't optimize it. */
2920
2921static void
ae231cbd 2922expand_asm_stmt (gasm *stmt)
0e80b01d 2923{
2af3d775 2924 class save_input_location
2925 {
2926 location_t old;
ae231cbd 2927
2af3d775 2928 public:
2929 explicit save_input_location(location_t where)
ae231cbd 2930 {
2af3d775 2931 old = input_location;
2932 input_location = where;
ae231cbd 2933 }
2934
2af3d775 2935 ~save_input_location()
ae231cbd 2936 {
2af3d775 2937 input_location = old;
ae231cbd 2938 }
2af3d775 2939 };
ae231cbd 2940
2af3d775 2941 location_t locus = gimple_location (stmt);
ae231cbd 2942
2af3d775 2943 if (gimple_asm_input_p (stmt))
ae231cbd 2944 {
2af3d775 2945 const char *s = gimple_asm_string (stmt);
2946 tree string = build_string (strlen (s), s);
2947 expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2948 return;
ae231cbd 2949 }
2950
2af3d775 2951   /* There are some legacy diagnostics in here, and this also avoids a
         2952      sixth parameter to targetm.md_asm_adjust.  */
2953 save_input_location s_i_l(locus);
ae231cbd 2954
2af3d775 2955 unsigned noutputs = gimple_asm_noutputs (stmt);
2956 unsigned ninputs = gimple_asm_ninputs (stmt);
2957 unsigned nlabels = gimple_asm_nlabels (stmt);
2958 unsigned i;
2959
2960 /* ??? Diagnose during gimplification? */
2961 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
ae231cbd 2962 {
2af3d775 2963 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
ae231cbd 2964 return;
2965 }
2966
2af3d775 2967 auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2968 auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2969 auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
ae231cbd 2970
2af3d775 2971 /* Copy the gimple vectors into new vectors that we can manipulate. */
0e80b01d 2972
2af3d775 2973 output_tvec.safe_grow (noutputs);
2974 input_tvec.safe_grow (ninputs);
2975 constraints.safe_grow (noutputs + ninputs);
0e80b01d 2976
2af3d775 2977 for (i = 0; i < noutputs; ++i)
2978 {
2979 tree t = gimple_asm_output_op (stmt, i);
2980 output_tvec[i] = TREE_VALUE (t);
2981 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2982 }
2983 for (i = 0; i < ninputs; i++)
2984 {
2985 tree t = gimple_asm_input_op (stmt, i);
2986 input_tvec[i] = TREE_VALUE (t);
2987 constraints[i + noutputs]
2988 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2989 }
0e80b01d 2990
2af3d775 2991 /* ??? Diagnose during gimplification? */
2992 if (! check_operand_nalternatives (constraints))
2993 return;
0e80b01d 2994
2995 /* Count the number of meaningful clobbered registers, ignoring what
2996 we would ignore later. */
2af3d775 2997 auto_vec<rtx> clobber_rvec;
2998 HARD_REG_SET clobbered_regs;
0e80b01d 2999 CLEAR_HARD_REG_SET (clobbered_regs);
0e80b01d 3000
2af3d775 3001 if (unsigned n = gimple_asm_nclobbers (stmt))
3002 {
3003 clobber_rvec.reserve (n);
3004 for (i = 0; i < n; i++)
3005 {
3006 tree t = gimple_asm_clobber_op (stmt, i);
3007 const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
3008 int nregs, j;
0e80b01d 3009
2af3d775 3010 j = decode_reg_name_and_count (regname, &nregs);
3011 if (j < 0)
0e80b01d 3012 {
2af3d775 3013 if (j == -2)
0e80b01d 3014 {
2af3d775 3015 /* ??? Diagnose during gimplification? */
3016 error ("unknown register name %qs in %<asm%>", regname);
3017 }
3018 else if (j == -4)
3019 {
3020 rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
3021 clobber_rvec.safe_push (x);
3022 }
3023 else
3024 {
3025 /* Otherwise we should have -1 == empty string
3026 or -3 == cc, which is not a register. */
3027 gcc_assert (j == -1 || j == -3);
0e80b01d 3028 }
0e80b01d 3029 }
2af3d775 3030 else
3031 for (int reg = j; reg < j + nregs; reg++)
3032 {
7675b215 3033 if (!asm_clobber_reg_is_valid (reg, nregs, regname))
3034 return;
2af3d775 3035
3036 SET_HARD_REG_BIT (clobbered_regs, reg);
3037 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
3038 clobber_rvec.safe_push (x);
3039 }
0e80b01d 3040 }
3041 }
2af3d775 3042 unsigned nclobbers = clobber_rvec.length();
0e80b01d 3043
3044 /* First pass over inputs and outputs checks validity and sets
3045 mark_addressable if needed. */
2af3d775 3046 /* ??? Diagnose during gimplification? */
0e80b01d 3047
2af3d775 3048 for (i = 0; i < noutputs; ++i)
0e80b01d 3049 {
2af3d775 3050 tree val = output_tvec[i];
0e80b01d 3051 tree type = TREE_TYPE (val);
3052 const char *constraint;
3053 bool is_inout;
3054 bool allows_reg;
3055 bool allows_mem;
3056
0e80b01d 3057 /* Try to parse the output constraint. If that fails, there's
3058 no point in going further. */
3059 constraint = constraints[i];
3060 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
3061 &allows_mem, &allows_reg, &is_inout))
3062 return;
3063
b782636f 3064 /* If the output is a hard register, verify it doesn't conflict with
3065 any other operand's possible hard register use. */
3066 if (DECL_P (val)
3067 && REG_P (DECL_RTL (val))
3068 && HARD_REGISTER_P (DECL_RTL (val)))
3069 {
3070 unsigned j, output_hregno = REGNO (DECL_RTL (val));
3071 bool early_clobber_p = strchr (constraints[i], '&') != NULL;
3072 unsigned long match;
3073
3074 /* Verify the other outputs do not use the same hard register. */
3075 for (j = i + 1; j < noutputs; ++j)
3076 if (DECL_P (output_tvec[j])
3077 && REG_P (DECL_RTL (output_tvec[j]))
3078 && HARD_REGISTER_P (DECL_RTL (output_tvec[j]))
3079 && output_hregno == REGNO (DECL_RTL (output_tvec[j])))
3080 error ("invalid hard register usage between output operands");
3081
3082 /* Verify matching constraint operands use the same hard register
3083 and that the non-matching constraint operands do not use the same
3084 hard register if the output is an early clobber operand. */
3085 for (j = 0; j < ninputs; ++j)
3086 if (DECL_P (input_tvec[j])
3087 && REG_P (DECL_RTL (input_tvec[j]))
3088 && HARD_REGISTER_P (DECL_RTL (input_tvec[j])))
3089 {
3090 unsigned input_hregno = REGNO (DECL_RTL (input_tvec[j]));
3091 switch (*constraints[j + noutputs])
3092 {
3093 case '0': case '1': case '2': case '3': case '4':
3094 case '5': case '6': case '7': case '8': case '9':
3095 match = strtoul (constraints[j + noutputs], NULL, 10);
3096 break;
3097 default:
3098 match = ULONG_MAX;
3099 break;
3100 }
3101 if (i == match
3102 && output_hregno != input_hregno)
3103 error ("invalid hard register usage between output operand "
3104 "and matching constraint operand");
3105 else if (early_clobber_p
3106 && i != match
3107 && output_hregno == input_hregno)
3108 error ("invalid hard register usage between earlyclobber "
3109 "operand and input operand");
3110 }
3111 }
3112
0e80b01d 3113 if (! allows_reg
3114 && (allows_mem
3115 || is_inout
3116 || (DECL_P (val)
3117 && REG_P (DECL_RTL (val))
3118 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
3119 mark_addressable (val);
0e80b01d 3120 }
3121
2af3d775 3122 for (i = 0; i < ninputs; ++i)
0e80b01d 3123 {
3124 bool allows_reg, allows_mem;
3125 const char *constraint;
3126
0e80b01d 3127 constraint = constraints[i + noutputs];
2af3d775 3128 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3129 constraints.address (),
3130 &allows_mem, &allows_reg))
0e80b01d 3131 return;
3132
3133 if (! allows_reg && allows_mem)
2af3d775 3134 mark_addressable (input_tvec[i]);
0e80b01d 3135 }
3136
3137 /* Second pass evaluates arguments. */
3138
3139 /* Make sure stack is consistent for asm goto. */
3140 if (nlabels > 0)
3141 do_pending_stack_adjust ();
2af3d775 3142 int old_generating_concat_p = generating_concat_p;
3143
3144 /* Vector of RTX's of evaluated output operands. */
3145 auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
3146 auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
3147 rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
0e80b01d 3148
2af3d775 3149 output_rvec.safe_grow (noutputs);
3150
3151 for (i = 0; i < noutputs; ++i)
0e80b01d 3152 {
2af3d775 3153 tree val = output_tvec[i];
0e80b01d 3154 tree type = TREE_TYPE (val);
2af3d775 3155 bool is_inout, allows_reg, allows_mem, ok;
0e80b01d 3156 rtx op;
0e80b01d 3157
3158 ok = parse_output_constraint (&constraints[i], i, ninputs,
3159 noutputs, &allows_mem, &allows_reg,
3160 &is_inout);
3161 gcc_assert (ok);
3162
3163 /* If an output operand is not a decl or indirect ref and our constraint
3164 allows a register, make a temporary to act as an intermediate.
2af3d775 3165 Make the asm insn write into that, then we will copy it to
0e80b01d 3166 the real output operand. Likewise for promoted variables. */
3167
3168 generating_concat_p = 0;
3169
f9659d60 3170 if ((TREE_CODE (val) == INDIRECT_REF && allows_mem)
0e80b01d 3171 || (DECL_P (val)
3172 && (allows_mem || REG_P (DECL_RTL (val)))
3173 && ! (REG_P (DECL_RTL (val))
3174 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3175 || ! allows_reg
f9659d60 3176 || is_inout
3177 || TREE_ADDRESSABLE (type))
0e80b01d 3178 {
3179 op = expand_expr (val, NULL_RTX, VOIDmode,
3180 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3181 if (MEM_P (op))
3182 op = validize_mem (op);
3183
3184 if (! allows_reg && !MEM_P (op))
3185 error ("output number %d not directly addressable", i);
f9659d60 3186 if ((! allows_mem && MEM_P (op) && GET_MODE (op) != BLKmode)
0e80b01d 3187 || GET_CODE (op) == CONCAT)
3188 {
2af3d775 3189 rtx old_op = op;
0e80b01d 3190 op = gen_reg_rtx (GET_MODE (op));
2af3d775 3191
3192 generating_concat_p = old_generating_concat_p;
3193
0e80b01d 3194 if (is_inout)
2af3d775 3195 emit_move_insn (op, old_op);
3196
3197 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3198 emit_move_insn (old_op, op);
3199 after_rtl_seq = get_insns ();
3200 after_rtl_end = get_last_insn ();
3201 end_sequence ();
0e80b01d 3202 }
3203 }
3204 else
3205 {
3206 op = assign_temp (type, 0, 1);
3207 op = validize_mem (op);
2af3d775 3208 if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3209 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
0e80b01d 3210
2af3d775 3211 generating_concat_p = old_generating_concat_p;
0e80b01d 3212
2af3d775 3213 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3214 expand_assignment (val, make_tree (type, op), false);
3215 after_rtl_seq = get_insns ();
3216 after_rtl_end = get_last_insn ();
3217 end_sequence ();
0e80b01d 3218 }
2af3d775 3219 output_rvec[i] = op;
0e80b01d 3220
2af3d775 3221 if (is_inout)
3222 inout_opnum.safe_push (i);
0e80b01d 3223 }
3224
2af3d775 3225 auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3226 auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
0e80b01d 3227
2af3d775 3228 input_rvec.safe_grow (ninputs);
3229 input_mode.safe_grow (ninputs);
0e80b01d 3230
2af3d775 3231 generating_concat_p = 0;
0e80b01d 3232
2af3d775 3233 for (i = 0; i < ninputs; ++i)
0e80b01d 3234 {
2af3d775 3235 tree val = input_tvec[i];
3236 tree type = TREE_TYPE (val);
3237 bool allows_reg, allows_mem, ok;
0e80b01d 3238 const char *constraint;
0e80b01d 3239 rtx op;
0e80b01d 3240
3241 constraint = constraints[i + noutputs];
2af3d775 3242 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3243 constraints.address (),
3244 &allows_mem, &allows_reg);
0e80b01d 3245 gcc_assert (ok);
3246
0e80b01d 3247 /* EXPAND_INITIALIZER will not generate code for valid initializer
3248 constants, but will still generate code for other types of operand.
3249 This is the behavior we want for constant constraints. */
3250 op = expand_expr (val, NULL_RTX, VOIDmode,
3251 allows_reg ? EXPAND_NORMAL
3252 : allows_mem ? EXPAND_MEMORY
3253 : EXPAND_INITIALIZER);
3254
3255 /* Never pass a CONCAT to an ASM. */
3256 if (GET_CODE (op) == CONCAT)
3257 op = force_reg (GET_MODE (op), op);
3258 else if (MEM_P (op))
3259 op = validize_mem (op);
3260
3261 if (asm_operand_ok (op, constraint, NULL) <= 0)
3262 {
3263 if (allows_reg && TYPE_MODE (type) != BLKmode)
3264 op = force_reg (TYPE_MODE (type), op);
3265 else if (!allows_mem)
85b9be9b 3266 warning (0, "%<asm%> operand %d probably does not match "
3267 "constraints",
0e80b01d 3268 i + noutputs);
3269 else if (MEM_P (op))
3270 {
3271 /* We won't recognize either volatile memory or memory
 3272	    with a queued address as an available memory_operand
3273 at this point. Ignore it: clearly this *is* a memory. */
3274 }
3275 else
3276 gcc_unreachable ();
3277 }
2af3d775 3278 input_rvec[i] = op;
3279 input_mode[i] = TYPE_MODE (type);
0e80b01d 3280 }
3281
0e80b01d 3282 /* For in-out operands, copy output rtx to input rtx. */
2af3d775 3283 unsigned ninout = inout_opnum.length();
0e80b01d 3284 for (i = 0; i < ninout; i++)
3285 {
3286 int j = inout_opnum[i];
2af3d775 3287 rtx o = output_rvec[j];
0e80b01d 3288
2af3d775 3289 input_rvec.safe_push (o);
3290 input_mode.safe_push (GET_MODE (o));
0e80b01d 3291
2af3d775 3292 char buffer[16];
0e80b01d 3293 sprintf (buffer, "%d", j);
2af3d775 3294 constraints.safe_push (ggc_strdup (buffer));
3295 }
3296 ninputs += ninout;
3297
3298 /* Sometimes we wish to automatically clobber registers across an asm.
3299 Case in point is when the i386 backend moved from cc0 to a hard reg --
3300 maintaining source-level compatibility means automatically clobbering
3301 the flags register. */
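  /* Illustrative sketch (added comment, not part of the original sources):
     on i386 the hook would append something like
     gen_rtx_REG (CCmode, FLAGS_REG) to CLOBBER_RVEC and record the flags
     in CLOBBERED_REGS, so every asm implicitly clobbers the condition-code
     register; the exact registers involved are target-defined.  */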
3302 rtx_insn *after_md_seq = NULL;
3303 if (targetm.md_asm_adjust)
3304 after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3305 constraints, clobber_rvec,
3306 clobbered_regs);
3307
3308 /* Do not allow the hook to change the output and input count,
3309 lest it mess up the operand numbering. */
3310 gcc_assert (output_rvec.length() == noutputs);
3311 gcc_assert (input_rvec.length() == ninputs);
3312 gcc_assert (constraints.length() == noutputs + ninputs);
3313
3314 /* But it certainly can adjust the clobbers. */
3315 nclobbers = clobber_rvec.length();
3316
3317 /* Third pass checks for easy conflicts. */
3318 /* ??? Why are we doing this on trees instead of rtx. */
3319
3320 bool clobber_conflict_found = 0;
3321 for (i = 0; i < noutputs; ++i)
3322 if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3323 clobber_conflict_found = 1;
3324 for (i = 0; i < ninputs - ninout; ++i)
3325 if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3326 clobber_conflict_found = 1;
3327
3328 /* Make vectors for the expression-rtx, constraint strings,
3329 and named operands. */
3330
3331 rtvec argvec = rtvec_alloc (ninputs);
3332 rtvec constraintvec = rtvec_alloc (ninputs);
3333 rtvec labelvec = rtvec_alloc (nlabels);
3334
3335 rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3336 : GET_MODE (output_rvec[0])),
3337 ggc_strdup (gimple_asm_string (stmt)),
7e93252e 3338 "", 0, argvec, constraintvec,
2af3d775 3339 labelvec, locus);
3340 MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3341
3342 for (i = 0; i < ninputs; ++i)
3343 {
3344 ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3345 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3346 = gen_rtx_ASM_INPUT_loc (input_mode[i],
3347 constraints[i + noutputs],
3348 locus);
0e80b01d 3349 }
3350
3351 /* Copy labels to the vector. */
2af3d775 3352 rtx_code_label *fallthru_label = NULL;
3353 if (nlabels > 0)
3354 {
3355 basic_block fallthru_bb = NULL;
3356 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3357 if (fallthru)
3358 fallthru_bb = fallthru->dest;
3359
3360 for (i = 0; i < nlabels; ++i)
0e80b01d 3361 {
2af3d775 3362 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
9ed997be 3363 rtx_insn *r;
2af3d775 3364 /* If asm goto has any labels in the fallthru basic block, use
3365 a label that we emit immediately after the asm goto. Expansion
3366 may insert further instructions into the same basic block after
3367 asm goto and if we don't do this, insertion of instructions on
3368 the fallthru edge might misbehave. See PR58670. */
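	  /* Illustrative example (added comment, not part of the original
	     sources): for
	       asm goto ("..." : : : : out); out:;
	     where OUT starts the fallthru block, the label emitted right
	     after the asm goto is used in place of OUT here.  */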
0fb4f2ce 3369 if (fallthru_bb && label_to_block (cfun, label) == fallthru_bb)
2af3d775 3370 {
3371 if (fallthru_label == NULL_RTX)
3372 fallthru_label = gen_label_rtx ();
3373 r = fallthru_label;
3374 }
3375 else
3376 r = label_rtx (label);
3377 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
0e80b01d 3378 }
0e80b01d 3379 }
3380
0e80b01d 3381 /* Now, for each output, construct an rtx
3382 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3383 ARGVEC CONSTRAINTS OPNAMES))
3384 If there is more than one, put them inside a PARALLEL. */
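  /* Illustrative shape (added comment, not part of the original sources)
     for a two-output asm with one clobber:
       (parallel [(set (reg:SI 90) (asm_operands ...))
		  (set (reg:SI 91) (asm_operands ...))
		  (clobber (reg:CC 17))])
     where each asm_operands shares ARGVEC and CONSTRAINTVEC and differs
     only in its output constraint and operand number.  */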
3385
3386 if (nlabels > 0 && nclobbers == 0)
3387 {
3388 gcc_assert (noutputs == 0);
3389 emit_jump_insn (body);
3390 }
3391 else if (noutputs == 0 && nclobbers == 0)
3392 {
3393 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3394 emit_insn (body);
3395 }
3396 else if (noutputs == 1 && nclobbers == 0)
3397 {
2af3d775 3398 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3399 emit_insn (gen_rtx_SET (output_rvec[0], body));
0e80b01d 3400 }
3401 else
3402 {
3403 rtx obody = body;
3404 int num = noutputs;
3405
3406 if (num == 0)
3407 num = 1;
3408
3409 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3410
3411 /* For each output operand, store a SET. */
2af3d775 3412 for (i = 0; i < noutputs; ++i)
0e80b01d 3413 {
2af3d775 3414 rtx src, o = output_rvec[i];
3415 if (i == 0)
3416 {
3417 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3418 src = obody;
3419 }
3420 else
3421 {
3422 src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3423 ASM_OPERANDS_TEMPLATE (obody),
3424 constraints[i], i, argvec,
3425 constraintvec, labelvec, locus);
3426 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3427 }
3428 XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
0e80b01d 3429 }
3430
3431 /* If there are no outputs (but there are some clobbers)
3432 store the bare ASM_OPERANDS into the PARALLEL. */
0e80b01d 3433 if (i == 0)
3434 XVECEXP (body, 0, i++) = obody;
3435
3436 /* Store (clobber REG) for each clobbered register specified. */
2af3d775 3437 for (unsigned j = 0; j < nclobbers; ++j)
0e80b01d 3438 {
2af3d775 3439 rtx clobbered_reg = clobber_rvec[j];
0e80b01d 3440
2af3d775 3441	     /* Do a sanity check for any overlap between clobbers and,
 3442		respectively, inputs and outputs that hasn't been handled.  Such
3443 should have been detected and reported above. */
3444 if (!clobber_conflict_found && REG_P (clobbered_reg))
0e80b01d 3445 {
2af3d775 3446 /* We test the old body (obody) contents to avoid
3447 tripping over the under-construction body. */
3448 for (unsigned k = 0; k < noutputs; ++k)
3449 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
85b9be9b 3450 internal_error ("%<asm%> clobber conflict with "
3451 "output operand");
2af3d775 3452
3453 for (unsigned k = 0; k < ninputs - ninout; ++k)
3454 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
85b9be9b 3455 internal_error ("%<asm%> clobber conflict with "
3456 "input operand");
0e80b01d 3457 }
3458
2af3d775 3459 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
0e80b01d 3460 }
3461
3462 if (nlabels > 0)
3463 emit_jump_insn (body);
3464 else
3465 emit_insn (body);
3466 }
3467
2af3d775 3468 generating_concat_p = old_generating_concat_p;
3469
0e80b01d 3470 if (fallthru_label)
3471 emit_label (fallthru_label);
3472
2af3d775 3473 if (after_md_seq)
3474 emit_insn (after_md_seq);
3475 if (after_rtl_seq)
3476 emit_insn (after_rtl_seq);
0e80b01d 3477
ae231cbd 3478 free_temp_slots ();
2af3d775 3479 crtl->has_asm_statement = 1;
0e80b01d 3480}
3481
3482/* Emit code to jump to the address
3483 specified by the pointer expression EXP. */
3484
3485static void
3486expand_computed_goto (tree exp)
3487{
3488 rtx x = expand_normal (exp);
3489
0e80b01d 3490 do_pending_stack_adjust ();
3491 emit_indirect_jump (x);
3492}
3493
3494/* Generate RTL code for a `goto' statement with target label LABEL.
3495 LABEL should be a LABEL_DECL tree node that was or will later be
3496 defined with `expand_label'. */
3497
3498static void
3499expand_goto (tree label)
3500{
382ecba7 3501 if (flag_checking)
3502 {
3503 /* Check for a nonlocal goto to a containing function. Should have
3504 gotten translated to __builtin_nonlocal_goto. */
3505 tree context = decl_function_context (label);
3506 gcc_assert (!context || context == current_function_decl);
3507 }
0e80b01d 3508
f9a00e9e 3509 emit_jump (jump_target_rtx (label));
0e80b01d 3510}
3511
3512/* Output a return with no value. */
3513
3514static void
3515expand_null_return_1 (void)
3516{
3517 clear_pending_stack_adjust ();
3518 do_pending_stack_adjust ();
3519 emit_jump (return_label);
3520}
3521
3522/* Generate RTL to return from the current function, with no value.
3523 (That is, we do not do anything about returning any value.) */
3524
3525void
3526expand_null_return (void)
3527{
3528 /* If this function was declared to return a value, but we
3529 didn't, clobber the return registers so that they are not
3530 propagated live to the rest of the function. */
3531 clobber_return_register ();
3532
3533 expand_null_return_1 ();
3534}
3535
3536/* Generate RTL to return from the current function, with value VAL. */
3537
3538static void
3539expand_value_return (rtx val)
3540{
3541 /* Copy the value to the return location unless it's already there. */
3542
3543 tree decl = DECL_RESULT (current_function_decl);
3544 rtx return_reg = DECL_RTL (decl);
3545 if (return_reg != val)
3546 {
3547 tree funtype = TREE_TYPE (current_function_decl);
3548 tree type = TREE_TYPE (decl);
3549 int unsignedp = TYPE_UNSIGNED (type);
3754d046 3550 machine_mode old_mode = DECL_MODE (decl);
3551 machine_mode mode;
0e80b01d 3552 if (DECL_BY_REFERENCE (decl))
3553 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3554 else
3555 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3556
3557 if (mode != old_mode)
3558 val = convert_modes (mode, old_mode, val, unsignedp);
3559
3560 if (GET_CODE (return_reg) == PARALLEL)
3561 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3562 else
3563 emit_move_insn (return_reg, val);
3564 }
3565
3566 expand_null_return_1 ();
3567}
3568
3569/* Generate RTL to evaluate the expression RETVAL and return it
3570 from the current function. */
3571
3572static void
1e42d5c6 3573expand_return (tree retval)
0e80b01d 3574{
3575 rtx result_rtl;
3576 rtx val = 0;
3577 tree retval_rhs;
3578
3579 /* If function wants no value, give it none. */
3580 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3581 {
3582 expand_normal (retval);
3583 expand_null_return ();
3584 return;
3585 }
3586
3587 if (retval == error_mark_node)
3588 {
3589 /* Treat this like a return of no value from a function that
3590 returns a value. */
3591 expand_null_return ();
3592 return;
3593 }
3594 else if ((TREE_CODE (retval) == MODIFY_EXPR
3595 || TREE_CODE (retval) == INIT_EXPR)
3596 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3597 retval_rhs = TREE_OPERAND (retval, 1);
3598 else
3599 retval_rhs = retval;
3600
3601 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3602
3603 /* If we are returning the RESULT_DECL, then the value has already
3604 been stored into it, so we don't have to do anything special. */
3605 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3606 expand_value_return (result_rtl);
3607
3608 /* If the result is an aggregate that is being returned in one (or more)
3609 registers, load the registers here. */
3610
3611 else if (retval_rhs != 0
3612 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3613 && REG_P (result_rtl))
3614 {
3615 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3616 if (val)
3617 {
3618 /* Use the mode of the result value on the return register. */
3619 PUT_MODE (result_rtl, GET_MODE (val));
3620 expand_value_return (val);
3621 }
3622 else
3623 expand_null_return ();
3624 }
3625 else if (retval_rhs != 0
3626 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3627 && (REG_P (result_rtl)
3628 || (GET_CODE (result_rtl) == PARALLEL)))
3629 {
9f495e8d 3630 /* Compute the return value into a temporary (usually a pseudo reg). */
3631 val
3632 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
0e80b01d 3633 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3634 val = force_not_mem (val);
0e80b01d 3635 expand_value_return (val);
3636 }
3637 else
3638 {
3639 /* No hard reg used; calculate value into hard return reg. */
3640 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3641 expand_value_return (result_rtl);
3642 }
3643}
3644
a74c4ce4 3645/* Expand a clobber of LHS.  If LHS is stored in a multi-part
3646 register, tell the rtl optimizers that its value is no longer
3647 needed. */
3648
3649static void
3650expand_clobber (tree lhs)
3651{
3652 if (DECL_P (lhs))
3653 {
3654 rtx decl_rtl = DECL_RTL_IF_SET (lhs);
3655 if (decl_rtl && REG_P (decl_rtl))
3656 {
3657 machine_mode decl_mode = GET_MODE (decl_rtl);
3658 if (maybe_gt (GET_MODE_SIZE (decl_mode),
3659 REGMODE_NATURAL_SIZE (decl_mode)))
3660 emit_clobber (decl_rtl);
3661 }
3662 }
3663}
3664
16c9337c 3665/* A subroutine of expand_gimple_stmt, expanding one gimple statement
3666 STMT that doesn't require special handling for outgoing edges. That
3667 is no tailcalls and no GIMPLE_COND. */
3668
3669static void
42acab1c 3670expand_gimple_stmt_1 (gimple *stmt)
16c9337c 3671{
3672 tree op0;
8c593757 3673
5169661d 3674 set_curr_insn_location (gimple_location (stmt));
8c593757 3675
16c9337c 3676 switch (gimple_code (stmt))
3677 {
3678 case GIMPLE_GOTO:
3679 op0 = gimple_goto_dest (stmt);
3680 if (TREE_CODE (op0) == LABEL_DECL)
3681 expand_goto (op0);
3682 else
3683 expand_computed_goto (op0);
3684 break;
3685 case GIMPLE_LABEL:
1a91d914 3686 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
16c9337c 3687 break;
3688 case GIMPLE_NOP:
3689 case GIMPLE_PREDICT:
3690 break;
16c9337c 3691 case GIMPLE_SWITCH:
b8daf3d8 3692 {
3693 gswitch *swtch = as_a <gswitch *> (stmt);
3694 if (gimple_switch_num_labels (swtch) == 1)
3695 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3696 else
3697 expand_case (swtch);
3698 }
16c9337c 3699 break;
3700 case GIMPLE_ASM:
1a91d914 3701 expand_asm_stmt (as_a <gasm *> (stmt));
16c9337c 3702 break;
3703 case GIMPLE_CALL:
1a91d914 3704 expand_call_stmt (as_a <gcall *> (stmt));
16c9337c 3705 break;
3706
3707 case GIMPLE_RETURN:
2e6c9e14 3708 {
2e6c9e14 3709 op0 = gimple_return_retval (as_a <greturn *> (stmt));
16c9337c 3710
2e6c9e14 3711 if (op0 && op0 != error_mark_node)
3712 {
3713 tree result = DECL_RESULT (current_function_decl);
16c9337c 3714
2e6c9e14 3715 /* If we are not returning the current function's RESULT_DECL,
3716 build an assignment to it. */
3717 if (op0 != result)
3718 {
3719 /* I believe that a function's RESULT_DECL is unique. */
3720 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3721
3722 /* ??? We'd like to use simply expand_assignment here,
3723 but this fails if the value is of BLKmode but the return
3724 decl is a register. expand_return has special handling
3725 for this combination, which eventually should move
3726 to common code. See comments there. Until then, let's
3727 build a modify expression :-/ */
3728 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3729 result, op0);
3730 }
2e6c9e14 3731 }
3732
3733 if (!op0)
3734 expand_null_return ();
3735 else
1e42d5c6 3736 expand_return (op0);
2e6c9e14 3737 }
16c9337c 3738 break;
3739
3740 case GIMPLE_ASSIGN:
3741 {
1a91d914 3742 gassign *assign_stmt = as_a <gassign *> (stmt);
3743 tree lhs = gimple_assign_lhs (assign_stmt);
16c9337c 3744
3745 /* Tree expand used to fiddle with |= and &= of two bitfield
3746 COMPONENT_REFs here. This can't happen with gimple, the LHS
3747 of binary assigns must be a gimple reg. */
3748
3749 if (TREE_CODE (lhs) != SSA_NAME
3750 || get_gimple_rhs_class (gimple_expr_code (stmt))
3751 == GIMPLE_SINGLE_RHS)
3752 {
1a91d914 3753 tree rhs = gimple_assign_rhs1 (assign_stmt);
16c9337c 3754 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3755 == GIMPLE_SINGLE_RHS);
8f413f95 3756 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3757 /* Do not put locations on possibly shared trees. */
3758 && !is_gimple_min_invariant (rhs))
16c9337c 3759 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3c25489e 3760 if (TREE_CLOBBER_P (rhs))
3761 /* This is a clobber to mark the going out of scope for
3762 this LHS. */
a74c4ce4 3763 expand_clobber (lhs);
3c25489e 3764 else
3765 expand_assignment (lhs, rhs,
1a91d914 3766 gimple_assign_nontemporal_move_p (
3767 assign_stmt));
16c9337c 3768 }
3769 else
3770 {
3771 rtx target, temp;
1a91d914 3772 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
16c9337c 3773 struct separate_ops ops;
3774 bool promoted = false;
3775
3776 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3777 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3778 promoted = true;
3779
1a91d914 3780 ops.code = gimple_assign_rhs_code (assign_stmt);
16c9337c 3781 ops.type = TREE_TYPE (lhs);
1ebce849 3782 switch (get_gimple_rhs_class (ops.code))
16c9337c 3783 {
00f4f705 3784 case GIMPLE_TERNARY_RHS:
1a91d914 3785 ops.op2 = gimple_assign_rhs3 (assign_stmt);
00f4f705 3786 /* Fallthru */
16c9337c 3787 case GIMPLE_BINARY_RHS:
1a91d914 3788 ops.op1 = gimple_assign_rhs2 (assign_stmt);
16c9337c 3789 /* Fallthru */
3790 case GIMPLE_UNARY_RHS:
1a91d914 3791 ops.op0 = gimple_assign_rhs1 (assign_stmt);
16c9337c 3792 break;
3793 default:
3794 gcc_unreachable ();
3795 }
3796 ops.location = gimple_location (stmt);
3797
3798 /* If we want to use a nontemporal store, force the value to
3799 register first. If we store into a promoted register,
3800 don't directly expand to target. */
3801 temp = nontemporal || promoted ? NULL_RTX : target;
3802 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3803 EXPAND_NORMAL);
3804
3805 if (temp == target)
3806 ;
3807 else if (promoted)
3808 {
e8629f9e 3809 int unsignedp = SUBREG_PROMOTED_SIGN (target);
16c9337c 3810 /* If TEMP is a VOIDmode constant, use convert_modes to make
3811 sure that we properly convert it. */
3812 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3813 {
3814 temp = convert_modes (GET_MODE (target),
3815 TYPE_MODE (ops.type),
088c4b7b 3816 temp, unsignedp);
16c9337c 3817 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
088c4b7b 3818 GET_MODE (target), temp, unsignedp);
16c9337c 3819 }
3820
5a9ccd1b 3821 convert_move (SUBREG_REG (target), temp, unsignedp);
16c9337c 3822 }
3823 else if (nontemporal && emit_storent_insn (target, temp))
3824 ;
3825 else
3826 {
3827 temp = force_operand (temp, target);
3828 if (temp != target)
3829 emit_move_insn (target, temp);
3830 }
3831 }
3832 }
3833 break;
3834
3835 default:
3836 gcc_unreachable ();
3837 }
3838}
3839
3840/* Expand one gimple statement STMT and return the last RTL instruction
3841 before any of the newly generated ones.
3842
3843 In addition to generating the necessary RTL instructions this also
3844 sets REG_EH_REGION notes if necessary and sets the current source
3845 location for diagnostics. */
3846
74a0cbc4 3847static rtx_insn *
42acab1c 3848expand_gimple_stmt (gimple *stmt)
16c9337c 3849{
16c9337c 3850 location_t saved_location = input_location;
74a0cbc4 3851 rtx_insn *last = get_last_insn ();
8c593757 3852 int lp_nr;
16c9337c 3853
16c9337c 3854 gcc_assert (cfun);
3855
8c593757 3856 /* We need to save and restore the current source location so that errors
3857 discovered during expansion are emitted with the right location. But
3858 it would be better if the diagnostic routines used the source location
3859 embedded in the tree nodes rather than globals. */
16c9337c 3860 if (gimple_has_location (stmt))
8c593757 3861 input_location = gimple_location (stmt);
16c9337c 3862
3863 expand_gimple_stmt_1 (stmt);
8c593757 3864
16c9337c 3865 /* Free any temporaries used to evaluate this statement. */
3866 free_temp_slots ();
3867
3868 input_location = saved_location;
3869
3870 /* Mark all insns that may trap. */
e38def9c 3871 lp_nr = lookup_stmt_eh_lp (stmt);
3872 if (lp_nr)
16c9337c 3873 {
74a0cbc4 3874 rtx_insn *insn;
16c9337c 3875 for (insn = next_real_insn (last); insn;
3876 insn = next_real_insn (insn))
3877 {
3878 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3879 /* If we want exceptions for non-call insns, any
3880 may_trap_p instruction may throw. */
3881 && GET_CODE (PATTERN (insn)) != CLOBBER
70bdfe23 3882 && GET_CODE (PATTERN (insn)) != CLOBBER_HIGH
16c9337c 3883 && GET_CODE (PATTERN (insn)) != USE
e38def9c 3884 && insn_could_throw_p (insn))
3885 make_reg_eh_region_note (insn, 0, lp_nr);
16c9337c 3886 }
3887 }
3888
3889 return last;
3890}
3891
75a70cf9 3892/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
17ceb1d5 3893 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3894 generated a tail call (something that might be denied by the ABI
c578459e 3895 rules governing the call; see calls.c).
3896
3897 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3898 can still reach the rest of BB. The case here is __builtin_sqrt,
3899 where the NaN result goes through the external function (with a
3900 tailcall) and the normal result happens via a sqrt instruction. */
3ced8962 3901
3902static basic_block
1a91d914 3903expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3ced8962 3904{
74a0cbc4 3905 rtx_insn *last2, *last;
17ceb1d5 3906 edge e;
cd665a06 3907 edge_iterator ei;
720cfc43 3908 profile_probability probability;
3ced8962 3909
16c9337c 3910 last2 = last = expand_gimple_stmt (stmt);
3ced8962 3911
3912 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
17ceb1d5 3913 if (CALL_P (last) && SIBLING_CALL_P (last))
3914 goto found;
3ced8962 3915
75a70cf9 3916 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
49377e21 3917
c578459e 3918 *can_fallthru = true;
17ceb1d5 3919 return NULL;
3ced8962 3920
17ceb1d5 3921 found:
3922 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3923 Any instructions emitted here are about to be deleted. */
3924 do_pending_stack_adjust ();
3925
3926 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3927 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3928 EH or abnormal edges, we shouldn't have created a tail call in
3929 the first place. So it seems to me we should just be removing
3930 all edges here, or redirecting the existing fallthru edge to
3931 the exit block. */
3932
720cfc43 3933 probability = profile_probability::never ();
17ceb1d5 3934
cd665a06 3935 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3936 {
17ceb1d5 3937 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3938 {
34154e27 3939 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
205ce1aa 3940 e->dest->count -= e->count ();
17ceb1d5 3941 probability += e->probability;
3942 remove_edge (e);
3ced8962 3943 }
cd665a06 3944 else
3945 ei_next (&ei);
3ced8962 3946 }
3947
17ceb1d5 3948 /* This is somewhat ugly: the call_expr expander often emits instructions
3949 after the sibcall (to perform the function return). These confuse the
794d8e3f 3950 find_many_sub_basic_blocks code, so we need to get rid of these. */
17ceb1d5 3951 last = NEXT_INSN (last);
cc636d56 3952 gcc_assert (BARRIER_P (last));
c578459e 3953
3954 *can_fallthru = false;
17ceb1d5 3955 while (NEXT_INSN (last))
3956 {
 3957	 /* For instance an sqrt builtin expander expands an if with a
 3958	    sibcall in the then-branch and a label for the else-branch.  */
3959 if (LABEL_P (NEXT_INSN (last)))
c578459e 3960 {
3961 *can_fallthru = true;
3962 break;
3963 }
17ceb1d5 3964 delete_insn (NEXT_INSN (last));
3965 }
3966
34154e27 3967 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3968 | EDGE_SIBCALL);
25d2128b 3969 e->probability = probability;
26bb3cb2 3970 BB_END (bb) = last;
17ceb1d5 3971 update_bb_for_insn (bb);
3972
3973 if (NEXT_INSN (last))
3974 {
3975 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3976
3977 last = BB_END (bb);
3978 if (BARRIER_P (last))
26bb3cb2 3979 BB_END (bb) = PREV_INSN (last);
17ceb1d5 3980 }
3981
75a70cf9 3982 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
49377e21 3983
17ceb1d5 3984 return bb;
3ced8962 3985}
3986
9845d120 3987/* Return the difference between the floor and the truncated result of
3988 a signed division by OP1 with remainder MOD. */
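/* Worked example (added comment, not part of the original sources):
   for -7 / 2 the truncated quotient is -3 with MOD = -1; OP1 / MOD = -2 < 0
   signals that the dividend and OP1 have opposite signs, so the adjustment
   is -1 and the floor result is -3 + -1 = -4.  When MOD is 0 or the signs
   agree, the adjustment is 0.  */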
3989static rtx
3754d046 3990floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
9845d120 3991{
3992 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3993 return gen_rtx_IF_THEN_ELSE
3994 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3995 gen_rtx_IF_THEN_ELSE
3996 (mode, gen_rtx_LT (BImode,
3997 gen_rtx_DIV (mode, op1, mod),
3998 const0_rtx),
3999 constm1_rtx, const0_rtx),
4000 const0_rtx);
4001}
4002
4003/* Return the difference between the ceil and the truncated result of
4004 a signed division by OP1 with remainder MOD. */
4005static rtx
3754d046 4006ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
9845d120 4007{
4008 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
4009 return gen_rtx_IF_THEN_ELSE
4010 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
4011 gen_rtx_IF_THEN_ELSE
4012 (mode, gen_rtx_GT (BImode,
4013 gen_rtx_DIV (mode, op1, mod),
4014 const0_rtx),
4015 const1_rtx, const0_rtx),
4016 const0_rtx);
4017}
4018
4019/* Return the difference between the ceil and the truncated result of
4020 an unsigned division by OP1 with remainder MOD. */
4021static rtx
3754d046 4022ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
9845d120 4023{
4024 /* (mod != 0 ? 1 : 0) */
4025 return gen_rtx_IF_THEN_ELSE
4026 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
4027 const1_rtx, const0_rtx);
4028}
4029
4030/* Return the difference between the rounded and the truncated result
4031 of a signed division by OP1 with remainder MOD. Halfway cases are
4032 rounded away from zero, rather than to the nearest even number. */
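/* Worked example (added comment, not part of the original sources):
   for -7 / 2 the truncated quotient is -3 with MOD = -1; |MOD| = 1 is not
   less than |OP1| - |MOD| = 1, so the remainder is at least half the
   divisor in magnitude, and since OP1 / MOD < 0 the adjustment is -1,
   giving -4, i.e. -3.5 rounded away from zero.  */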
4033static rtx
3754d046 4034round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
9845d120 4035{
4036 /* (abs (mod) >= abs (op1) - abs (mod)
4037 ? (op1 / mod > 0 ? 1 : -1)
4038 : 0) */
4039 return gen_rtx_IF_THEN_ELSE
4040 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
4041 gen_rtx_MINUS (mode,
4042 gen_rtx_ABS (mode, op1),
4043 gen_rtx_ABS (mode, mod))),
4044 gen_rtx_IF_THEN_ELSE
4045 (mode, gen_rtx_GT (BImode,
4046 gen_rtx_DIV (mode, op1, mod),
4047 const0_rtx),
4048 const1_rtx, constm1_rtx),
4049 const0_rtx);
4050}
4051
4052/* Return the difference between the rounded and the truncated result
 4053 of an unsigned division by OP1 with remainder MOD. Halfway cases
4054 are rounded away from zero, rather than to the nearest even
4055 number. */
4056static rtx
3754d046 4057round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
9845d120 4058{
4059 /* (mod >= op1 - mod ? 1 : 0) */
4060 return gen_rtx_IF_THEN_ELSE
4061 (mode, gen_rtx_GE (BImode, mod,
4062 gen_rtx_MINUS (mode, op1, mod)),
4063 const1_rtx, const0_rtx);
4064}
4065
d89c81d6 4066/* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
4067 any rtl. */
4068
4069static rtx
f77c4496 4070convert_debug_memory_address (scalar_int_mode mode, rtx x,
cd799492 4071 addr_space_t as)
d89c81d6 4072{
d89c81d6 4073#ifndef POINTERS_EXTEND_UNSIGNED
cd799492 4074 gcc_assert (mode == Pmode
4075 || mode == targetm.addr_space.address_mode (as));
7a6aeeed 4076 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
d89c81d6 4077#else
cd799492 4078 rtx temp;
cd799492 4079
df7f3935 4080 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
d89c81d6 4081
4082 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
4083 return x;
4084
7a6aeeed 4085 /* X must have some form of address mode already. */
4086 scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
995b44f5 4087 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
a8a727ad 4088 x = lowpart_subreg (mode, x, xmode);
d89c81d6 4089 else if (POINTERS_EXTEND_UNSIGNED > 0)
4090 x = gen_rtx_ZERO_EXTEND (mode, x);
4091 else if (!POINTERS_EXTEND_UNSIGNED)
4092 x = gen_rtx_SIGN_EXTEND (mode, x);
4093 else
cd799492 4094 {
4095 switch (GET_CODE (x))
4096 {
4097 case SUBREG:
4098 if ((SUBREG_PROMOTED_VAR_P (x)
4099 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
4100 || (GET_CODE (SUBREG_REG (x)) == PLUS
4101 && REG_P (XEXP (SUBREG_REG (x), 0))
4102 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
4103 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
4104 && GET_MODE (SUBREG_REG (x)) == mode)
4105 return SUBREG_REG (x);
4106 break;
4107 case LABEL_REF:
c7799456 4108 temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
cd799492 4109 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
4110 return temp;
4111 case SYMBOL_REF:
4112 temp = shallow_copy_rtx (x);
4113 PUT_MODE (temp, mode);
4114 return temp;
4115 case CONST:
4116 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4117 if (temp)
4118 temp = gen_rtx_CONST (mode, temp);
4119 return temp;
4120 case PLUS:
4121 case MINUS:
4122 if (CONST_INT_P (XEXP (x, 1)))
4123 {
4124 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4125 if (temp)
4126 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4127 }
4128 break;
4129 default:
4130 break;
4131 }
 4132 /* Don't know how to express ptr_extend as an operation in debug info. */
4133 return NULL;
4134 }
d89c81d6 4135#endif /* POINTERS_EXTEND_UNSIGNED */
4136
4137 return x;
4138}
4139
54497144 4140/* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4141 by avoid_deep_ter_for_debug. */
4142
4143static hash_map<tree, tree> *deep_ter_debug_map;
4144
4145/* Split too deep TER chains for debug stmts using debug temporaries. */
4146
4147static void
42acab1c 4148avoid_deep_ter_for_debug (gimple *stmt, int depth)
54497144 4149{
4150 use_operand_p use_p;
4151 ssa_op_iter iter;
4152 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4153 {
4154 tree use = USE_FROM_PTR (use_p);
4155 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4156 continue;
42acab1c 4157 gimple *g = get_gimple_for_ssa_name (use);
54497144 4158 if (g == NULL)
4159 continue;
4160 if (depth > 6 && !stmt_ends_bb_p (g))
4161 {
4162 if (deep_ter_debug_map == NULL)
4163 deep_ter_debug_map = new hash_map<tree, tree>;
4164
4165 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4166 if (vexpr != NULL)
4167 continue;
4168 vexpr = make_node (DEBUG_EXPR_DECL);
42acab1c 4169 gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
54497144 4170 DECL_ARTIFICIAL (vexpr) = 1;
4171 TREE_TYPE (vexpr) = TREE_TYPE (use);
adc78298 4172 SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
54497144 4173 gimple_stmt_iterator gsi = gsi_for_stmt (g);
4174 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4175 avoid_deep_ter_for_debug (def_temp, 0);
4176 }
4177 else
4178 avoid_deep_ter_for_debug (g, depth + 1);
4179 }
4180}
4181
8ee59e4e 4182/* Return an RTX equivalent to the value of the parameter DECL. */
4183
4184static rtx
4185expand_debug_parm_decl (tree decl)
4186{
4187 rtx incoming = DECL_INCOMING_RTL (decl);
4188
4189 if (incoming
4190 && GET_MODE (incoming) != BLKmode
4191 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4192 || (MEM_P (incoming)
4193 && REG_P (XEXP (incoming, 0))
4194 && HARD_REGISTER_P (XEXP (incoming, 0)))))
4195 {
4196 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4197
4198#ifdef HAVE_window_save
4199 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4200 If the target machine has an explicit window save instruction, the
4201 actual entry value is the corresponding OUTGOING_REGNO instead. */
4202 if (REG_P (incoming)
4203 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4204 incoming
4205 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4206 OUTGOING_REGNO (REGNO (incoming)), 0);
4207 else if (MEM_P (incoming))
4208 {
4209 rtx reg = XEXP (incoming, 0);
4210 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4211 {
4212 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4213 incoming = replace_equiv_address_nv (incoming, reg);
4214 }
848d0536 4215 else
4216 incoming = copy_rtx (incoming);
8ee59e4e 4217 }
4218#endif
4219
4220 ENTRY_VALUE_EXP (rtl) = incoming;
4221 return rtl;
4222 }
4223
4224 if (incoming
4225 && GET_MODE (incoming) != BLKmode
4226 && !TREE_ADDRESSABLE (decl)
4227 && MEM_P (incoming)
4228 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4229 || (GET_CODE (XEXP (incoming, 0)) == PLUS
4230 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4231 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
848d0536 4232 return copy_rtx (incoming);
8ee59e4e 4233
4234 return NULL_RTX;
4235}
4236
4237/* Return an RTX equivalent to the value of the tree expression EXP. */
9845d120 4238
4239static rtx
4240expand_debug_expr (tree exp)
4241{
4242 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3754d046 4243 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4244 machine_mode inner_mode = VOIDmode;
9845d120 4245 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
bd1a81f7 4246 addr_space_t as;
03b7a719 4247 scalar_int_mode op0_mode, op1_mode, addr_mode;
9845d120 4248
4249 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4250 {
4251 case tcc_expression:
4252 switch (TREE_CODE (exp))
4253 {
4254 case COND_EXPR:
b54ee9da 4255 case DOT_PROD_EXPR:
a2287001 4256 case SAD_EXPR:
00f4f705 4257 case WIDEN_MULT_PLUS_EXPR:
4258 case WIDEN_MULT_MINUS_EXPR:
9845d120 4259 goto ternary;
4260
4261 case TRUTH_ANDIF_EXPR:
4262 case TRUTH_ORIF_EXPR:
4263 case TRUTH_AND_EXPR:
4264 case TRUTH_OR_EXPR:
4265 case TRUTH_XOR_EXPR:
4266 goto binary;
4267
4268 case TRUTH_NOT_EXPR:
4269 goto unary;
4270
4271 default:
4272 break;
4273 }
4274 break;
4275
4276 ternary:
4277 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4278 if (!op2)
4279 return NULL_RTX;
4280 /* Fall through. */
4281
4282 binary:
4283 case tcc_binary:
41e61033 4284 if (mode == BLKmode)
4285 return NULL_RTX;
9845d120 4286 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4287 if (!op1)
4288 return NULL_RTX;
bfaa965e 4289 switch (TREE_CODE (exp))
4290 {
4291 case LSHIFT_EXPR:
4292 case RSHIFT_EXPR:
4293 case LROTATE_EXPR:
4294 case RROTATE_EXPR:
4295 case WIDEN_LSHIFT_EXPR:
4296 /* Ensure second operand isn't wider than the first one. */
4297 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
8974b7a3 4298 if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4299 && (GET_MODE_UNIT_PRECISION (mode)
4300 < GET_MODE_PRECISION (op1_mode)))
4301 op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
bfaa965e 4302 break;
4303 default:
4304 break;
4305 }
9845d120 4306 /* Fall through. */
4307
4308 unary:
4309 case tcc_unary:
41e61033 4310 if (mode == BLKmode)
4311 return NULL_RTX;
9ecadf14 4312 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9845d120 4313 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4314 if (!op0)
4315 return NULL_RTX;
4316 break;
4317
71b39a64 4318 case tcc_comparison:
4319 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4320 goto binary;
4321
9845d120 4322 case tcc_type:
4323 case tcc_statement:
4324 gcc_unreachable ();
4325
4326 case tcc_constant:
4327 case tcc_exceptional:
4328 case tcc_declaration:
4329 case tcc_reference:
4330 case tcc_vl_exp:
4331 break;
4332 }
4333
4334 switch (TREE_CODE (exp))
4335 {
4336 case STRING_CST:
4337 if (!lookup_constant_def (exp))
4338 {
0f89d483 4339 if (strlen (TREE_STRING_POINTER (exp)) + 1
4340 != (size_t) TREE_STRING_LENGTH (exp))
4341 return NULL_RTX;
9845d120 4342 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4343 op0 = gen_rtx_MEM (BLKmode, op0);
4344 set_mem_attributes (op0, exp, 0);
4345 return op0;
4346 }
e3533433 4347 /* Fall through. */
9845d120 4348
4349 case INTEGER_CST:
4350 case REAL_CST:
4351 case FIXED_CST:
4352 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4353 return op0;
4354
8672ee56 4355 case POLY_INT_CST:
4356 return immed_wide_int_const (poly_int_cst_value (exp), mode);
4357
9845d120 4358 case COMPLEX_CST:
4359 gcc_assert (COMPLEX_MODE_P (mode));
4360 op0 = expand_debug_expr (TREE_REALPART (exp));
9845d120 4361 op1 = expand_debug_expr (TREE_IMAGPART (exp));
9845d120 4362 return gen_rtx_CONCAT (mode, op0, op1);
4363
688ff29b 4364 case DEBUG_EXPR_DECL:
4365 op0 = DECL_RTL_IF_SET (exp);
4366
4367 if (op0)
4368 return op0;
4369
4370 op0 = gen_rtx_DEBUG_EXPR (mode);
23dd51cb 4371 DEBUG_EXPR_TREE_DECL (op0) = exp;
688ff29b 4372 SET_DECL_RTL (exp, op0);
4373
4374 return op0;
4375
9845d120 4376 case VAR_DECL:
4377 case PARM_DECL:
4378 case FUNCTION_DECL:
4379 case LABEL_DECL:
4380 case CONST_DECL:
4381 case RESULT_DECL:
4382 op0 = DECL_RTL_IF_SET (exp);
4383
4384 /* This decl was probably optimized away. */
4385 if (!op0)
0f89d483 4386 {
53e9c5c4 4387 if (!VAR_P (exp)
0f89d483 4388 || DECL_EXTERNAL (exp)
4389 || !TREE_STATIC (exp)
4390 || !DECL_NAME (exp)
a5653528 4391 || DECL_HARD_REGISTER (exp)
1cdbcae1 4392 || DECL_IN_CONSTANT_POOL (exp)
a5653528 4393 || mode == VOIDmode)
0f89d483 4394 return NULL;
4395
e6db644e 4396 op0 = make_decl_rtl_for_debug (exp);
0f89d483 4397 if (!MEM_P (op0)
4398 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4399 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4400 return NULL;
4401 }
4402 else
4403 op0 = copy_rtx (op0);
9845d120 4404
5d713e67 4405 if (GET_MODE (op0) == BLKmode
71b39a64 4406 /* If op0 is not BLKmode, but mode is, adjust_mode
5d713e67 4407 below would ICE. While it is likely a FE bug,
4408 try to be robust here. See PR43166. */
0f18e023 4409 || mode == BLKmode
4410 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
9845d120 4411 {
4412 gcc_assert (MEM_P (op0));
4413 op0 = adjust_address_nv (op0, mode, 0);
4414 return op0;
4415 }
4416
4417 /* Fall through. */
4418
4419 adjust_mode:
4420 case PAREN_EXPR:
d09ef31a 4421 CASE_CONVERT:
9845d120 4422 {
9ecadf14 4423 inner_mode = GET_MODE (op0);
9845d120 4424
4425 if (mode == inner_mode)
4426 return op0;
4427
4428 if (inner_mode == VOIDmode)
4429 {
3c800ea7 4430 if (TREE_CODE (exp) == SSA_NAME)
4431 inner_mode = TYPE_MODE (TREE_TYPE (exp));
4432 else
4433 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9845d120 4434 if (mode == inner_mode)
4435 return op0;
4436 }
4437
4438 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4439 {
332d11bd 4440 if (GET_MODE_UNIT_BITSIZE (mode)
4441 == GET_MODE_UNIT_BITSIZE (inner_mode))
9845d120 4442 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
332d11bd 4443 else if (GET_MODE_UNIT_BITSIZE (mode)
4444 < GET_MODE_UNIT_BITSIZE (inner_mode))
9845d120 4445 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4446 else
4447 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4448 }
4449 else if (FLOAT_MODE_P (mode))
4450 {
3c800ea7 4451 gcc_assert (TREE_CODE (exp) != SSA_NAME);
9845d120 4452 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4453 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4454 else
4455 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4456 }
4457 else if (FLOAT_MODE_P (inner_mode))
4458 {
4459 if (unsignedp)
4460 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4461 else
4462 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4463 }
1048c155 4464 else if (GET_MODE_UNIT_PRECISION (mode)
4465 == GET_MODE_UNIT_PRECISION (inner_mode))
a8a727ad 4466 op0 = lowpart_subreg (mode, op0, inner_mode);
1048c155 4467 else if (GET_MODE_UNIT_PRECISION (mode)
4468 < GET_MODE_UNIT_PRECISION (inner_mode))
4469 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
72f8014e 4470 else if (UNARY_CLASS_P (exp)
f84ead57 4471 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4472 : unsignedp)
9ecadf14 4473 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
9845d120 4474 else
9ecadf14 4475 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
9845d120 4476
4477 return op0;
4478 }
4479
182cf5a9 4480 case MEM_REF:
e488c25f 4481 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4482 {
4483 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4484 TREE_OPERAND (exp, 0),
4485 TREE_OPERAND (exp, 1));
4486 if (newexp)
4487 return expand_debug_expr (newexp);
4488 }
4489 /* FALLTHROUGH */
9845d120 4490 case INDIRECT_REF:
29c05e22 4491 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9845d120 4492 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4493 if (!op0)
4494 return NULL;
4495
7ef770fa 4496 if (TREE_CODE (exp) == MEM_REF)
4497 {
f87ea39e 4498 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4499 || (GET_CODE (op0) == PLUS
4500 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4501 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4502 Instead just use get_inner_reference. */
4503 goto component_ref;
4504
7ef770fa 4505 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
7e3747b0 4506 poly_int64 offset;
4507 if (!op1 || !poly_int_rtx_p (op1, &offset))
7ef770fa 4508 return NULL;
4509
7e3747b0 4510 op0 = plus_constant (inner_mode, op0, offset);
7ef770fa 4511 }
4512
14a3093e 4513 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9845d120 4514
cd799492 4515 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4516 op0, as);
4517 if (op0 == NULL_RTX)
4518 return NULL;
9845d120 4519
cd799492 4520 op0 = gen_rtx_MEM (mode, op0);
9845d120 4521 set_mem_attributes (op0, exp, 0);
e488c25f 4522 if (TREE_CODE (exp) == MEM_REF
4523 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4524 set_mem_expr (op0, NULL_TREE);
bd1a81f7 4525 set_mem_addr_space (op0, as);
9845d120 4526
4527 return op0;
4528
4529 case TARGET_MEM_REF:
28daba6f 4530 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4531 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
9845d120 4532 return NULL;
4533
4534 op0 = expand_debug_expr
8d8150c8 4535 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
9845d120 4536 if (!op0)
4537 return NULL;
4538
27628c9e 4539 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
cd799492 4540 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4541 op0, as);
4542 if (op0 == NULL_RTX)
4543 return NULL;
9845d120 4544
4545 op0 = gen_rtx_MEM (mode, op0);
4546
4547 set_mem_attributes (op0, exp, 0);
bd1a81f7 4548 set_mem_addr_space (op0, as);
9845d120 4549
4550 return op0;
4551
f87ea39e 4552 component_ref:
9845d120 4553 case ARRAY_REF:
4554 case ARRAY_RANGE_REF:
4555 case COMPONENT_REF:
4556 case BIT_FIELD_REF:
4557 case REALPART_EXPR:
4558 case IMAGPART_EXPR:
4559 case VIEW_CONVERT_EXPR:
4560 {
3754d046 4561 machine_mode mode1;
73bf92b3 4562 poly_int64 bitsize, bitpos;
9845d120 4563 tree offset;
292237f3 4564 int reversep, volatilep = 0;
4565 tree tem
4566 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
b3b6e4b5 4567 &unsignedp, &reversep, &volatilep);
9845d120 4568 rtx orig_op0;
4569
73bf92b3 4570 if (known_eq (bitsize, 0))
9e3c8673 4571 return NULL;
4572
9845d120 4573 orig_op0 = op0 = expand_debug_expr (tem);
4574
4575 if (!op0)
4576 return NULL;
4577
4578 if (offset)
4579 {
3754d046 4580 machine_mode addrmode, offmode;
d89c81d6 4581
f4b490ea 4582 if (!MEM_P (op0))
4583 return NULL;
9845d120 4584
d89c81d6 4585 op0 = XEXP (op0, 0);
4586 addrmode = GET_MODE (op0);
4587 if (addrmode == VOIDmode)
4588 addrmode = Pmode;
4589
9845d120 4590 op1 = expand_debug_expr (offset);
4591 if (!op1)
4592 return NULL;
4593
d89c81d6 4594 offmode = GET_MODE (op1);
4595 if (offmode == VOIDmode)
4596 offmode = TYPE_MODE (TREE_TYPE (offset));
4597
4598 if (addrmode != offmode)
a8a727ad 4599 op1 = lowpart_subreg (addrmode, op1, offmode);
d89c81d6 4600
4601 /* Don't use offset_address here, we don't need a
4602 recognizable address, and we don't want to generate
4603 code. */
9ecadf14 4604 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4605 op0, op1));
9845d120 4606 }
4607
4608 if (MEM_P (op0))
4609 {
9e3c8673 4610 if (mode1 == VOIDmode)
5acac2af 4611 {
4612 if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
4613 return NULL;
4614 /* Bitfield. */
4615 mode1 = smallest_int_mode_for_size (bitsize);
4616 }
73bf92b3 4617 poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
4618 if (maybe_ne (bytepos, 0))
9845d120 4619 {
73bf92b3 4620 op0 = adjust_address_nv (op0, mode1, bytepos);
4621 bitpos = num_trailing_bits (bitpos);
9845d120 4622 }
73bf92b3 4623 else if (known_eq (bitpos, 0)
4624 && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
9845d120 4625 op0 = adjust_address_nv (op0, mode, 0);
4626 else if (GET_MODE (op0) != mode1)
4627 op0 = adjust_address_nv (op0, mode1, 0);
4628 else
4629 op0 = copy_rtx (op0);
4630 if (op0 == orig_op0)
4631 op0 = shallow_copy_rtx (op0);
4632 set_mem_attributes (op0, exp, 0);
4633 }
4634
73bf92b3 4635 if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
9845d120 4636 return op0;
4637
73bf92b3 4638 if (maybe_lt (bitpos, 0))
5bd71193 4639 return NULL;
4640
c1371560 4641 if (GET_MODE (op0) == BLKmode || mode == BLKmode)
37cd7b09 4642 return NULL;
4643
73bf92b3 4644 poly_int64 bytepos;
4645 if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
4646 && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
9845d120 4647 {
3754d046 4648 machine_mode opmode = GET_MODE (op0);
9845d120 4649
9845d120 4650 if (opmode == VOIDmode)
c8b13e49 4651 opmode = TYPE_MODE (TREE_TYPE (tem));
9845d120 4652
4653 /* This condition may hold if we're expanding the address
4654 right past the end of an array that turned out not to
4655 be addressable (i.e., the address was only computed in
4656 debug stmts). The gen_subreg below would rightfully
4657 crash, and the address doesn't really exist, so just
4658 drop it. */
73bf92b3 4659 if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
9845d120 4660 return NULL;
4661
73bf92b3 4662 if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
4663 return simplify_gen_subreg (mode, op0, opmode, bytepos);
9845d120 4664 }
4665
4666 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4667 && TYPE_UNSIGNED (TREE_TYPE (exp))
4668 ? SIGN_EXTRACT
4669 : ZERO_EXTRACT, mode,
4670 GET_MODE (op0) != VOIDmode
c8b13e49 4671 ? GET_MODE (op0)
4672 : TYPE_MODE (TREE_TYPE (tem)),
73bf92b3 4673 op0, gen_int_mode (bitsize, word_mode),
4674 gen_int_mode (bitpos, word_mode));
9845d120 4675 }
4676
9845d120 4677 case ABS_EXPR:
1c67942e 4678 case ABSU_EXPR:
9ecadf14 4679 return simplify_gen_unary (ABS, mode, op0, mode);
9845d120 4680
4681 case NEGATE_EXPR:
9ecadf14 4682 return simplify_gen_unary (NEG, mode, op0, mode);
9845d120 4683
4684 case BIT_NOT_EXPR:
9ecadf14 4685 return simplify_gen_unary (NOT, mode, op0, mode);
9845d120 4686
4687 case FLOAT_EXPR:
9ecadf14 4688 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4689 0)))
4690 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4691 inner_mode);
9845d120 4692
4693 case FIX_TRUNC_EXPR:
9ecadf14 4694 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4695 inner_mode);
9845d120 4696
4697 case POINTER_PLUS_EXPR:
af3d13d6 4698 /* For the rare target where pointers are not the same size as
4699 size_t, we need to check for mis-matched modes and correct
4700 the addend. */
4701 if (op0 && op1
1aa8738f 4702 && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4703 && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4704 && op0_mode != op1_mode)
af3d13d6 4705 {
1aa8738f 4706 if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4707 /* If OP0 is a partial mode, then we must truncate, even
 4708 if it has the same bitsize as OP1, as GCC's
4709 representation of partial modes is opaque. */
4710 || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4711 && (GET_MODE_BITSIZE (op0_mode)
4712 == GET_MODE_BITSIZE (op1_mode))))
4713 op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
af3d13d6 4714 else
4715 /* We always sign-extend, regardless of the signedness of
4716 the operand, because the operand is always unsigned
4717 here even if the original C expression is signed. */
1aa8738f 4718 op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
af3d13d6 4719 }
4720 /* Fall through. */
9845d120 4721 case PLUS_EXPR:
9ecadf14 4722 return simplify_gen_binary (PLUS, mode, op0, op1);
9845d120 4723
4724 case MINUS_EXPR:
57e83b58 4725 case POINTER_DIFF_EXPR:
9ecadf14 4726 return simplify_gen_binary (MINUS, mode, op0, op1);
9845d120 4727
4728 case MULT_EXPR:
9ecadf14 4729 return simplify_gen_binary (MULT, mode, op0, op1);
9845d120 4730
4731 case RDIV_EXPR:
4732 case TRUNC_DIV_EXPR:
4733 case EXACT_DIV_EXPR:
4734 if (unsignedp)
9ecadf14 4735 return simplify_gen_binary (UDIV, mode, op0, op1);
9845d120 4736 else
9ecadf14 4737 return simplify_gen_binary (DIV, mode, op0, op1);
9845d120 4738
4739 case TRUNC_MOD_EXPR:
9ecadf14 4740 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
9845d120 4741
4742 case FLOOR_DIV_EXPR:
4743 if (unsignedp)
9ecadf14 4744 return simplify_gen_binary (UDIV, mode, op0, op1);
9845d120 4745 else
4746 {
9ecadf14 4747 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4748 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
9845d120 4749 rtx adj = floor_sdiv_adjust (mode, mod, op1);
9ecadf14 4750 return simplify_gen_binary (PLUS, mode, div, adj);
9845d120 4751 }
4752
4753 case FLOOR_MOD_EXPR:
4754 if (unsignedp)
9ecadf14 4755 return simplify_gen_binary (UMOD, mode, op0, op1);
9845d120 4756 else
4757 {
9ecadf14 4758 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
9845d120 4759 rtx adj = floor_sdiv_adjust (mode, mod, op1);
9ecadf14 4760 adj = simplify_gen_unary (NEG, mode,
4761 simplify_gen_binary (MULT, mode, adj, op1),
4762 mode);
4763 return simplify_gen_binary (PLUS, mode, mod, adj);
9845d120 4764 }
4765
4766 case CEIL_DIV_EXPR:
4767 if (unsignedp)
4768 {
9ecadf14 4769 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4770 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
9845d120 4771 rtx adj = ceil_udiv_adjust (mode, mod, op1);
9ecadf14 4772 return simplify_gen_binary (PLUS, mode, div, adj);
9845d120 4773 }
4774 else
4775 {
9ecadf14 4776 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4777 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
9845d120 4778 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
9ecadf14 4779 return simplify_gen_binary (PLUS, mode, div, adj);
9845d120 4780 }
4781
4782 case CEIL_MOD_EXPR:
4783 if (unsignedp)
4784 {
9ecadf14 4785 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
9845d120 4786 rtx adj = ceil_udiv_adjust (mode, mod, op1);
9ecadf14 4787 adj = simplify_gen_unary (NEG, mode,
4788 simplify_gen_binary (MULT, mode, adj, op1),
4789 mode);
4790 return simplify_gen_binary (PLUS, mode, mod, adj);
9845d120 4791 }
4792 else
4793 {
9ecadf14 4794 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
9845d120 4795 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
9ecadf14 4796 adj = simplify_gen_unary (NEG, mode,
4797 simplify_gen_binary (MULT, mode, adj, op1),
4798 mode);
4799 return simplify_gen_binary (PLUS, mode, mod, adj);
9845d120 4800 }
4801
4802 case ROUND_DIV_EXPR:
4803 if (unsignedp)
4804 {
9ecadf14 4805 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4806 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
9845d120 4807 rtx adj = round_udiv_adjust (mode, mod, op1);
9ecadf14 4808 return simplify_gen_binary (PLUS, mode, div, adj);
9845d120 4809 }
4810 else
4811 {
9ecadf14 4812 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4813 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
9845d120 4814 rtx adj = round_sdiv_adjust (mode, mod, op1);
9ecadf14 4815 return simplify_gen_binary (PLUS, mode, div, adj);
9845d120 4816 }
4817
4818 case ROUND_MOD_EXPR:
4819 if (unsignedp)
4820 {
9ecadf14 4821 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
9845d120 4822 rtx adj = round_udiv_adjust (mode, mod, op1);
9ecadf14 4823 adj = simplify_gen_unary (NEG, mode,
4824 simplify_gen_binary (MULT, mode, adj, op1),
4825 mode);
4826 return simplify_gen_binary (PLUS, mode, mod, adj);
9845d120 4827 }
4828 else
4829 {
9ecadf14 4830 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
9845d120 4831 rtx adj = round_sdiv_adjust (mode, mod, op1);
9ecadf14 4832 adj = simplify_gen_unary (NEG, mode,
4833 simplify_gen_binary (MULT, mode, adj, op1),
4834 mode);
4835 return simplify_gen_binary (PLUS, mode, mod, adj);
9845d120 4836 }
4837
4838 case LSHIFT_EXPR:
9ecadf14 4839 return simplify_gen_binary (ASHIFT, mode, op0, op1);
9845d120 4840
4841 case RSHIFT_EXPR:
4842 if (unsignedp)
9ecadf14 4843 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
9845d120 4844 else
9ecadf14 4845 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
9845d120 4846
4847 case LROTATE_EXPR:
9ecadf14 4848 return simplify_gen_binary (ROTATE, mode, op0, op1);
9845d120 4849
4850 case RROTATE_EXPR:
9ecadf14 4851 return simplify_gen_binary (ROTATERT, mode, op0, op1);
9845d120 4852
4853 case MIN_EXPR:
9ecadf14 4854 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
9845d120 4855
4856 case MAX_EXPR:
9ecadf14 4857 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
9845d120 4858
4859 case BIT_AND_EXPR:
4860 case TRUTH_AND_EXPR:
9ecadf14 4861 return simplify_gen_binary (AND, mode, op0, op1);
9845d120 4862
4863 case BIT_IOR_EXPR:
4864 case TRUTH_OR_EXPR:
9ecadf14 4865 return simplify_gen_binary (IOR, mode, op0, op1);
9845d120 4866
4867 case BIT_XOR_EXPR:
4868 case TRUTH_XOR_EXPR:
9ecadf14 4869 return simplify_gen_binary (XOR, mode, op0, op1);
9845d120 4870
4871 case TRUTH_ANDIF_EXPR:
4872 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4873
4874 case TRUTH_ORIF_EXPR:
4875 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4876
4877 case TRUTH_NOT_EXPR:
9ecadf14 4878 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
9845d120 4879
4880 case LT_EXPR:
9ecadf14 4881 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4882 op0, op1);
9845d120 4883
4884 case LE_EXPR:
9ecadf14 4885 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4886 op0, op1);
9845d120 4887
4888 case GT_EXPR:
9ecadf14 4889 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4890 op0, op1);
9845d120 4891
4892 case GE_EXPR:
9ecadf14 4893 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4894 op0, op1);
9845d120 4895
4896 case EQ_EXPR:
9ecadf14 4897 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
9845d120 4898
4899 case NE_EXPR:
9ecadf14 4900 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
9845d120 4901
4902 case UNORDERED_EXPR:
9ecadf14 4903 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
9845d120 4904
4905 case ORDERED_EXPR:
9ecadf14 4906 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
9845d120 4907
4908 case UNLT_EXPR:
9ecadf14 4909 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
9845d120 4910
4911 case UNLE_EXPR:
9ecadf14 4912 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
9845d120 4913
4914 case UNGT_EXPR:
9ecadf14 4915 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
9845d120 4916
4917 case UNGE_EXPR:
9ecadf14 4918 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
9845d120 4919
4920 case UNEQ_EXPR:
9ecadf14 4921 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
9845d120 4922
4923 case LTGT_EXPR:
9ecadf14 4924 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
9845d120 4925
4926 case COND_EXPR:
4927 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4928
4929 case COMPLEX_EXPR:
4930 gcc_assert (COMPLEX_MODE_P (mode));
4931 if (GET_MODE (op0) == VOIDmode)
4932 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4933 if (GET_MODE (op1) == VOIDmode)
4934 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4935 return gen_rtx_CONCAT (mode, op0, op1);
4936
4e6677f8 4937 case CONJ_EXPR:
4938 if (GET_CODE (op0) == CONCAT)
4939 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
9ecadf14 4940 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4941 XEXP (op0, 1),
4942 GET_MODE_INNER (mode)));
4e6677f8 4943 else
4944 {
9fcae33e 4945 scalar_mode imode = GET_MODE_INNER (mode);
4e6677f8 4946 rtx re, im;
4947
4948 if (MEM_P (op0))
4949 {
4950 re = adjust_address_nv (op0, imode, 0);
4951 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4952 }
4953 else
4954 {
2cf1bb25 4955 scalar_int_mode ifmode;
4956 scalar_int_mode ihmode;
4e6677f8 4957 rtx halfsize;
2cf1bb25 4958 if (!int_mode_for_mode (mode).exists (&ifmode)
4959 || !int_mode_for_mode (imode).exists (&ihmode))
4e6677f8 4960 return NULL;
4961 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4962 re = op0;
4963 if (mode != ifmode)
4964 re = gen_rtx_SUBREG (ifmode, re, 0);
4965 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4966 if (imode != ihmode)
4967 re = gen_rtx_SUBREG (imode, re, 0);
4968 im = copy_rtx (op0);
4969 if (mode != ifmode)
4970 im = gen_rtx_SUBREG (ifmode, im, 0);
4971 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4972 if (imode != ihmode)
4973 im = gen_rtx_SUBREG (imode, im, 0);
4974 }
4975 im = gen_rtx_NEG (imode, im);
4976 return gen_rtx_CONCAT (mode, re, im);
4977 }
4978
9845d120 4979 case ADDR_EXPR:
4980 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4981 if (!op0 || !MEM_P (op0))
f9c61ef7 4982 {
4983 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4984 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4985 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
88f2e16b 4986 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4987 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
f9c61ef7 4988 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4989
4990 if (handled_component_p (TREE_OPERAND (exp, 0)))
4991 {
f3c2a387 4992 poly_int64 bitoffset, bitsize, maxsize, byteoffset;
292237f3 4993 bool reverse;
f9c61ef7 4994 tree decl
292237f3 4995 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4996 &bitsize, &maxsize, &reverse);
53e9c5c4 4997 if ((VAR_P (decl)
f9c61ef7 4998 || TREE_CODE (decl) == PARM_DECL
4999 || TREE_CODE (decl) == RESULT_DECL)
88f2e16b 5000 && (!TREE_ADDRESSABLE (decl)
5001 || target_for_debug_bind (decl))
f3c2a387 5002 && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
5003 && known_gt (bitsize, 0)
5004 && known_eq (bitsize, maxsize))
29c05e22 5005 {
5006 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
f3c2a387 5007 return plus_constant (mode, base, byteoffset);
29c05e22 5008 }
f9c61ef7 5009 }
5010
8afb7c4b 5011 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
5012 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5013 == ADDR_EXPR)
5014 {
5015 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
5016 0));
5017 if (op0 != NULL
5018 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
5019 || (GET_CODE (op0) == PLUS
5020 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
5021 && CONST_INT_P (XEXP (op0, 1)))))
5022 {
5023 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
5024 1));
7e3747b0 5025 poly_int64 offset;
5026 if (!op1 || !poly_int_rtx_p (op1, &offset))
8afb7c4b 5027 return NULL;
5028
7e3747b0 5029 return plus_constant (mode, op0, offset);
8afb7c4b 5030 }
5031 }
5032
f9c61ef7 5033 return NULL;
5034 }
9845d120 5035
14a3093e 5036 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
03b7a719 5037 addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5038 op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
d89c81d6 5039
5040 return op0;
9845d120 5041
5042 case VECTOR_CST:
fadf62f4 5043 {
f08ee65f 5044 unsigned HOST_WIDE_INT i, nelts;
5045
5046 if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
5047 return NULL;
fadf62f4 5048
1f547280 5049 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
fadf62f4 5050
1f547280 5051 for (i = 0; i < nelts; ++i)
fadf62f4 5052 {
5053 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
5054 if (!op1)
5055 return NULL;
5056 XVECEXP (op0, 0, i) = op1;
5057 }
5058
5059 return op0;
5060 }
9845d120 5061
5062 case CONSTRUCTOR:
3c25489e 5063 if (TREE_CLOBBER_P (exp))
5064 return NULL;
5065 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
9845d120 5066 {
5067 unsigned i;
f08ee65f 5068 unsigned HOST_WIDE_INT nelts;
9845d120 5069 tree val;
5070
f08ee65f 5071 if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
5072 goto flag_unsupported;
5073
5074 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
9845d120 5075
5076 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
5077 {
5078 op1 = expand_debug_expr (val);
5079 if (!op1)
5080 return NULL;
5081 XVECEXP (op0, 0, i) = op1;
5082 }
5083
f08ee65f 5084 if (i < nelts)
9845d120 5085 {
5086 op1 = expand_debug_expr
385f3f36 5087 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
9845d120 5088
5089 if (!op1)
5090 return NULL;
5091
f08ee65f 5092 for (; i < nelts; i++)
9845d120 5093 XVECEXP (op0, 0, i) = op1;
5094 }
5095
5096 return op0;
5097 }
5098 else
5099 goto flag_unsupported;
5100
5101 case CALL_EXPR:
5102 /* ??? Maybe handle some builtins? */
5103 return NULL;
5104
5105 case SSA_NAME:
5106 {
42acab1c 5107 gimple *g = get_gimple_for_ssa_name (exp);
3c800ea7 5108 if (g)
5109 {
54497144 5110 tree t = NULL_TREE;
5111 if (deep_ter_debug_map)
5112 {
5113 tree *slot = deep_ter_debug_map->get (exp);
5114 if (slot)
5115 t = *slot;
5116 }
5117 if (t == NULL_TREE)
5118 t = gimple_assign_rhs_to_tree (g);
5119 op0 = expand_debug_expr (t);
3c800ea7 5120 if (!op0)
5121 return NULL;
5122 }
5123 else
5124 {
b2df3bbf 5125		  /* If this is a reference to the incoming value of a
 5126		     parameter that is never used in the code, or where that
 5127		     incoming value itself is never used in the code, use the
 5128		     PARM_DECL's DECL_RTL if set.  */
5129 if (SSA_NAME_IS_DEFAULT_DEF (exp)
5130 && SSA_NAME_VAR (exp)
5131 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5132 && has_zero_uses (exp))
5133 {
5134 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5135 if (op0)
5136 goto adjust_mode;
5137 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5138 if (op0)
5139 goto adjust_mode;
5140 }
5141
3c800ea7 5142 int part = var_to_partition (SA.map, exp);
9845d120 5143
3c800ea7 5144 if (part == NO_PARTITION)
b2df3bbf 5145 return NULL;
9845d120 5146
3c800ea7 5147 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
9845d120 5148
ce6d059c 5149 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3c800ea7 5150 }
9845d120 5151 goto adjust_mode;
5152 }
5153
5154 case ERROR_MARK:
5155 return NULL;
5156
b54ee9da 5157    /* Vector stuff.  For most of these codes we don't have corresponding rtl codes.  */
5158 case REALIGN_LOAD_EXPR:
b54ee9da 5159 case VEC_COND_EXPR:
b54ee9da 5160 case VEC_PACK_FIX_TRUNC_EXPR:
0efcdf5a 5161 case VEC_PACK_FLOAT_EXPR:
b54ee9da 5162 case VEC_PACK_SAT_EXPR:
5163 case VEC_PACK_TRUNC_EXPR:
0efcdf5a 5164 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
5165 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
b54ee9da 5166 case VEC_UNPACK_FLOAT_HI_EXPR:
5167 case VEC_UNPACK_FLOAT_LO_EXPR:
5168 case VEC_UNPACK_HI_EXPR:
5169 case VEC_UNPACK_LO_EXPR:
5170 case VEC_WIDEN_MULT_HI_EXPR:
5171 case VEC_WIDEN_MULT_LO_EXPR:
79a78f7f 5172 case VEC_WIDEN_MULT_EVEN_EXPR:
5173 case VEC_WIDEN_MULT_ODD_EXPR:
6083c152 5174 case VEC_WIDEN_LSHIFT_HI_EXPR:
5175 case VEC_WIDEN_LSHIFT_LO_EXPR:
3557cb99 5176 case VEC_PERM_EXPR:
a308fcf8 5177 case VEC_DUPLICATE_EXPR:
7ed29fa2 5178 case VEC_SERIES_EXPR:
b54ee9da 5179 return NULL;
5180
96504875 5181 /* Misc codes. */
b54ee9da 5182 case ADDR_SPACE_CONVERT_EXPR:
5183 case FIXED_CONVERT_EXPR:
5184 case OBJ_TYPE_REF:
5185 case WITH_SIZE_EXPR:
2506d97a 5186 case BIT_INSERT_EXPR:
b54ee9da 5187 return NULL;
5188
5189 case DOT_PROD_EXPR:
5190 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5191 && SCALAR_INT_MODE_P (mode))
5192 {
9ecadf14 5193 op0
5194 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5195 0)))
5196 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5197 inner_mode);
5198 op1
5199 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5200 1)))
5201 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5202 inner_mode);
5203 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5204 return simplify_gen_binary (PLUS, mode, op0, op2);
b54ee9da 5205 }
5206 return NULL;
5207
5208 case WIDEN_MULT_EXPR:
00f4f705 5209 case WIDEN_MULT_PLUS_EXPR:
5210 case WIDEN_MULT_MINUS_EXPR:
b54ee9da 5211 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5212 && SCALAR_INT_MODE_P (mode))
5213 {
9ecadf14 5214 inner_mode = GET_MODE (op0);
b54ee9da 5215 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
62be004c 5216 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
b54ee9da 5217 else
62be004c 5218 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
b54ee9da 5219 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
62be004c 5220 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
b54ee9da 5221 else
62be004c 5222 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
9ecadf14 5223 op0 = simplify_gen_binary (MULT, mode, op0, op1);
00f4f705 5224 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5225 return op0;
5226 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
9ecadf14 5227 return simplify_gen_binary (PLUS, mode, op0, op2);
00f4f705 5228 else
9ecadf14 5229 return simplify_gen_binary (MINUS, mode, op2, op0);
b54ee9da 5230 }
5231 return NULL;
5232
96504875 5233 case MULT_HIGHPART_EXPR:
5234 /* ??? Similar to the above. */
5235 return NULL;
5236
b54ee9da 5237 case WIDEN_SUM_EXPR:
3557cb99 5238 case WIDEN_LSHIFT_EXPR:
b54ee9da 5239 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5240 && SCALAR_INT_MODE_P (mode))
5241 {
9ecadf14 5242 op0
5243 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5244 0)))
5245 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5246 inner_mode);
3557cb99 5247 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5248 ? ASHIFT : PLUS, mode, op0, op1);
b54ee9da 5249 }
5250 return NULL;
5251
9845d120 5252 default:
5253 flag_unsupported:
382ecba7 5254 if (flag_checking)
5255 {
5256 debug_tree (exp);
5257 gcc_unreachable ();
5258 }
9845d120 5259 return NULL;
9845d120 5260 }
5261}
5262
841424cc 5263/* Return an RTX equivalent to the source bind value of the tree expression
5264 EXP. */
5265
5266static rtx
5267expand_debug_source_expr (tree exp)
5268{
5269 rtx op0 = NULL_RTX;
3754d046 5270 machine_mode mode = VOIDmode, inner_mode;
841424cc 5271
5272 switch (TREE_CODE (exp))
5273 {
95e02bd3 5274 case VAR_DECL:
5275 if (DECL_ABSTRACT_ORIGIN (exp))
5276 return expand_debug_source_expr (DECL_ABSTRACT_ORIGIN (exp));
5277 break;
841424cc 5278 case PARM_DECL:
5279 {
841424cc 5280 mode = DECL_MODE (exp);
8ee59e4e 5281 op0 = expand_debug_parm_decl (exp);
5282 if (op0)
5283 break;
841424cc 5284 /* See if this isn't an argument that has been completely
5285 optimized out. */
5286 if (!DECL_RTL_SET_P (exp)
8ee59e4e 5287 && !DECL_INCOMING_RTL (exp)
841424cc 5288 && DECL_ABSTRACT_ORIGIN (current_function_decl))
5289 {
42c442a9 5290 tree aexp = DECL_ORIGIN (exp);
841424cc 5291 if (DECL_CONTEXT (aexp)
5292 == DECL_ABSTRACT_ORIGIN (current_function_decl))
5293 {
f1f41a6c 5294 vec<tree, va_gc> **debug_args;
841424cc 5295 unsigned int ix;
5296 tree ddecl;
841424cc 5297 debug_args = decl_debug_args_lookup (current_function_decl);
5298 if (debug_args != NULL)
5299 {
f1f41a6c 5300 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
841424cc 5301 ix += 2)
5302 if (ddecl == aexp)
5303 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5304 }
5305 }
5306 }
5307 break;
5308 }
5309 default:
5310 break;
5311 }
5312
5313 if (op0 == NULL_RTX)
5314 return NULL_RTX;
5315
5316 inner_mode = GET_MODE (op0);
5317 if (mode == inner_mode)
5318 return op0;
5319
5320 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5321 {
332d11bd 5322 if (GET_MODE_UNIT_BITSIZE (mode)
5323 == GET_MODE_UNIT_BITSIZE (inner_mode))
841424cc 5324 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
332d11bd 5325 else if (GET_MODE_UNIT_BITSIZE (mode)
5326 < GET_MODE_UNIT_BITSIZE (inner_mode))
841424cc 5327 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5328 else
5329 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5330 }
5331 else if (FLOAT_MODE_P (mode))
5332 gcc_unreachable ();
5333 else if (FLOAT_MODE_P (inner_mode))
5334 {
5335 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5336 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5337 else
5338 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5339 }
1048c155 5340 else if (GET_MODE_UNIT_PRECISION (mode)
5341 == GET_MODE_UNIT_PRECISION (inner_mode))
a8a727ad 5342 op0 = lowpart_subreg (mode, op0, inner_mode);
1048c155 5343 else if (GET_MODE_UNIT_PRECISION (mode)
5344 < GET_MODE_UNIT_PRECISION (inner_mode))
5345 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
841424cc 5346 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5347 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5348 else
5349 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5350
5351 return op0;
5352}
5353
848d0536 5354 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5355 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5356 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
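/* Illustrative sketch, not part of the original sources: when a location
   such as (plus (mult (plus (mult ...) ...) ...) ...) nests past the
   four-level limit, the offending subexpression is moved into a fresh
   DEBUG_EXPR decl D#1, a debug bind insn binding D#1 to it is emitted
   before INSN, and the outer location refers to D#1 instead.  */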
5357
5358static void
74a0cbc4 5359avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
848d0536 5360{
5361 rtx exp = *exp_p;
5362
5363 if (exp == NULL_RTX)
5364 return;
5365
5366 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5367 return;
5368
5369 if (depth == 4)
5370 {
5371 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
5372 rtx dval = make_debug_expr_from_rtl (exp);
5373
5374 /* Emit a debug bind insn before INSN. */
5375 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5376 DEBUG_EXPR_TREE_DECL (dval), exp,
5377 VAR_INIT_STATUS_INITIALIZED);
5378
5379 emit_debug_insn_before (bind, insn);
5380 *exp_p = dval;
5381 return;
5382 }
5383
5384 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5385 int i, j;
5386 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5387 switch (*format_ptr++)
5388 {
5389 case 'e':
5390 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5391 break;
5392
5393 case 'E':
5394 case 'V':
5395 for (j = 0; j < XVECLEN (exp, i); j++)
5396 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5397 break;
5398
5399 default:
5400 break;
5401 }
5402}
5403
9845d120 5404/* Expand the _LOCs in debug insns. We run this after expanding all
5405 regular insns, so that any variables referenced in the function
5406 will have their DECL_RTLs set. */
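/* Illustrative sketch, not part of the original sources: a debug bind such
   as  # DEBUG x => a_1 + 4  becomes an insn of roughly the form
     (debug_insn (var_location x (plus:SI (reg:SI 91) (const_int 4))))
   or has its location reset to an "unknown value" marker when the value
   cannot be expressed in RTL.  */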
5407
5408static void
5409expand_debug_locations (void)
5410{
74a0cbc4 5411 rtx_insn *insn;
5412 rtx_insn *last = get_last_insn ();
9845d120 5413 int save_strict_alias = flag_strict_aliasing;
5414
5415 /* New alias sets while setting up memory attributes cause
 5416     -fcompare-debug failures, even though they don't bring about any
5417 codegen changes. */
5418 flag_strict_aliasing = 0;
5419
5420 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
c64f38bf 5421 if (DEBUG_BIND_INSN_P (insn))
9845d120 5422 {
5423 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
74a0cbc4 5424 rtx val;
5425 rtx_insn *prev_insn, *insn2;
3754d046 5426 machine_mode mode;
9845d120 5427
5428 if (value == NULL_TREE)
5429 val = NULL_RTX;
5430 else
5431 {
841424cc 5432 if (INSN_VAR_LOCATION_STATUS (insn)
5433 == VAR_INIT_STATUS_UNINITIALIZED)
5434 val = expand_debug_source_expr (value);
54497144 5435 /* The avoid_deep_ter_for_debug function inserts
5436 debug bind stmts after SSA_NAME definition, with the
 5437	       SSA_NAME as the whole bind location.  Temporarily disable
 5438	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5439 being defined in this DEBUG_INSN. */
5440 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5441 {
5442 tree *slot = deep_ter_debug_map->get (value);
5443 if (slot)
5444 {
5445 if (*slot == INSN_VAR_LOCATION_DECL (insn))
5446 *slot = NULL_TREE;
5447 else
5448 slot = NULL;
5449 }
5450 val = expand_debug_expr (value);
5451 if (slot)
5452 *slot = INSN_VAR_LOCATION_DECL (insn);
5453 }
841424cc 5454 else
5455 val = expand_debug_expr (value);
9845d120 5456 gcc_assert (last == get_last_insn ());
5457 }
5458
5459 if (!val)
5460 val = gen_rtx_UNKNOWN_VAR_LOC ();
5461 else
5462 {
5463 mode = GET_MODE (INSN_VAR_LOCATION (insn));
5464
5465 gcc_assert (mode == GET_MODE (val)
5466 || (GET_MODE (val) == VOIDmode
efa08fc2 5467 && (CONST_SCALAR_INT_P (val)
9845d120 5468 || GET_CODE (val) == CONST_FIXED
9845d120 5469 || GET_CODE (val) == LABEL_REF)));
5470 }
5471
5472 INSN_VAR_LOCATION_LOC (insn) = val;
848d0536 5473 prev_insn = PREV_INSN (insn);
5474 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5475 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
9845d120 5476 }
5477
5478 flag_strict_aliasing = save_strict_alias;
5479}
5480
f7974718 5481 /* Perform swapping of the operands of commutative operations so that
 5482    the more expensive operand is expanded first.  */
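/* Illustrative example, not part of the original sources: given
     x_3 = a_1 + b_2;
   where the statements computing b_2 are costlier than those computing
   a_1, the operands are swapped to b_2 + a_1 so that the more expensive
   operand is expanded first.  */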
5483
5484static void
5485reorder_operands (basic_block bb)
5486{
5487 unsigned int *lattice; /* Hold cost of each statement. */
5488 unsigned int i = 0, n = 0;
5489 gimple_stmt_iterator gsi;
5490 gimple_seq stmts;
42acab1c 5491 gimple *stmt;
f7974718 5492 bool swap;
5493 tree op0, op1;
5494 ssa_op_iter iter;
5495 use_operand_p use_p;
42acab1c 5496 gimple *def0, *def1;
f7974718 5497
5498 /* Compute cost of each statement using estimate_num_insns. */
5499 stmts = bb_seq (bb);
5500 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5501 {
5502 stmt = gsi_stmt (gsi);
31aebeec 5503 if (!is_gimple_debug (stmt))
5504 gimple_set_uid (stmt, n++);
f7974718 5505 }
5506 lattice = XNEWVEC (unsigned int, n);
5507 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5508 {
5509 unsigned cost;
5510 stmt = gsi_stmt (gsi);
31aebeec 5511 if (is_gimple_debug (stmt))
5512 continue;
f7974718 5513 cost = estimate_num_insns (stmt, &eni_size_weights);
5514 lattice[i] = cost;
5515 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5516 {
5517 tree use = USE_FROM_PTR (use_p);
42acab1c 5518 gimple *def_stmt;
f7974718 5519 if (TREE_CODE (use) != SSA_NAME)
5520 continue;
5521 def_stmt = get_gimple_for_ssa_name (use);
5522 if (!def_stmt)
5523 continue;
5524 lattice[i] += lattice[gimple_uid (def_stmt)];
5525 }
5526 i++;
5527 if (!is_gimple_assign (stmt)
5528 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5529 continue;
5530 op0 = gimple_op (stmt, 1);
5531 op1 = gimple_op (stmt, 2);
5532 if (TREE_CODE (op0) != SSA_NAME
5533 || TREE_CODE (op1) != SSA_NAME)
5534 continue;
5535 /* Swap operands if the second one is more expensive. */
5536 def0 = get_gimple_for_ssa_name (op0);
f7974718 5537 def1 = get_gimple_for_ssa_name (op1);
5538 if (!def1)
5539 continue;
5540 swap = false;
4b8069b9 5541 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
f7974718 5542 swap = true;
5543 if (swap)
5544 {
5545 if (dump_file && (dump_flags & TDF_DETAILS))
5546 {
5547 fprintf (dump_file, "Swap operands in stmt:\n");
5548 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5549 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
4b8069b9 5550 def0 ? lattice[gimple_uid (def0)] : 0,
f7974718 5551 lattice[gimple_uid (def1)]);
5552 }
5553 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5554 gimple_assign_rhs2_ptr (stmt));
5555 }
5556 }
5557 XDELETE (lattice);
5558}
5559
0ec80471 5560/* Expand basic block BB from GIMPLE trees to RTL. */
5561
5562static basic_block
3c919612 5563expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
0ec80471 5564{
75a70cf9 5565 gimple_stmt_iterator gsi;
5566 gimple_seq stmts;
42acab1c 5567 gimple *stmt = NULL;
bce107d7 5568 rtx_note *note = NULL;
74a0cbc4 5569 rtx_insn *last;
0ec80471 5570 edge e;
cd665a06 5571 edge_iterator ei;
0ec80471 5572
5573 if (dump_file)
75a70cf9 5574 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5575 bb->index);
5576
5577 /* Note that since we are now transitioning from GIMPLE to RTL, we
5578 cannot use the gsi_*_bb() routines because they expect the basic
5579 block to be in GIMPLE, instead of RTL. Therefore, we need to
5580 access the BB sequence directly. */
f7974718 5581 if (optimize)
5582 reorder_operands (bb);
75a70cf9 5583 stmts = bb_seq (bb);
924c4c71 5584 bb->il.gimple.seq = NULL;
5585 bb->il.gimple.phi_nodes = NULL;
7dfb44a0 5586 rtl_profile_for_bb (bb);
e0dde8f8 5587 init_rtl_bb_info (bb);
5588 bb->flags |= BB_RTL;
5589
63f88450 5590  /* Remove the RETURN_EXPR if we may fall through to the exit
5591 instead. */
75a70cf9 5592 gsi = gsi_last (stmts);
5593 if (!gsi_end_p (gsi)
5594 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
63f88450 5595 {
1a91d914 5596 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
63f88450 5597
5598 gcc_assert (single_succ_p (bb));
34154e27 5599 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
63f88450 5600
34154e27 5601 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
75a70cf9 5602 && !gimple_return_retval (ret_stmt))
63f88450 5603 {
75a70cf9 5604 gsi_remove (&gsi, false);
63f88450 5605 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5606 }
5607 }
5608
d71c7316 5609 gsi = gsi_start (stmts);
75a70cf9 5610 if (!gsi_end_p (gsi))
6313ae8b 5611 {
75a70cf9 5612 stmt = gsi_stmt (gsi);
5613 if (gimple_code (stmt) != GIMPLE_LABEL)
5614 stmt = NULL;
6313ae8b 5615 }
0ec80471 5616
0699065d 5617 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
6313ae8b 5618
186fbc79 5619 if (stmt || elt)
0ec80471 5620 {
bce107d7 5621 gcc_checking_assert (!note);
0ec80471 5622 last = get_last_insn ();
5623
6313ae8b 5624 if (stmt)
5625 {
16c9337c 5626 expand_gimple_stmt (stmt);
d71c7316 5627 gsi_next (&gsi);
6313ae8b 5628 }
5629
5630 if (elt)
5f8841a5 5631 emit_label (*elt);
0ec80471 5632
26bb3cb2 5633 BB_HEAD (bb) = NEXT_INSN (last);
6d7dc5b9 5634 if (NOTE_P (BB_HEAD (bb)))
26bb3cb2 5635 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
bce107d7 5636 gcc_assert (LABEL_P (BB_HEAD (bb)));
0ec80471 5637 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
49377e21 5638
75a70cf9 5639 maybe_dump_rtl_for_gimple_stmt (stmt, last);
0ec80471 5640 }
5641 else
26bb3cb2 5642 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
0ec80471 5643
bce107d7 5644 if (note)
5645 NOTE_BASIC_BLOCK (note) = bb;
0ec80471 5646
75a70cf9 5647 for (; !gsi_end_p (gsi); gsi_next (&gsi))
0ec80471 5648 {
c578459e 5649 basic_block new_bb;
0ec80471 5650
9845d120 5651 stmt = gsi_stmt (gsi);
3c800ea7 5652
5653 /* If this statement is a non-debug one, and we generate debug
5654 insns, then this one might be the last real use of a TERed
5655 SSA_NAME, but where there are still some debug uses further
5656 down. Expanding the current SSA name in such further debug
5657 uses by their RHS might lead to wrong debug info, as coalescing
5658 might make the operands of such RHS be placed into the same
5659 pseudo as something else. Like so:
5660 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5661 use(a_1);
5662 a_2 = ...
5663 #DEBUG ... => a_1
5664 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
 5665	     If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5666 the write to a_2 would actually have clobbered the place which
5667 formerly held a_0.
5668
5669 So, instead of that, we recognize the situation, and generate
5670 debug temporaries at the last real use of TERed SSA names:
5671 a_1 = a_0 + 1;
5672 #DEBUG #D1 => a_1
5673 use(a_1);
5674 a_2 = ...
5675 #DEBUG ... => #D1
5676 */
c64f38bf 5677 if (MAY_HAVE_DEBUG_BIND_INSNS
3c800ea7 5678 && SA.values
5679 && !is_gimple_debug (stmt))
5680 {
5681 ssa_op_iter iter;
5682 tree op;
42acab1c 5683 gimple *def;
3c800ea7 5684
5169661d 5685 location_t sloc = curr_insn_location ();
3c800ea7 5686
5687 /* Look for SSA names that have their last use here (TERed
5688 names always have only one real use). */
5689 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5690 if ((def = get_gimple_for_ssa_name (op)))
5691 {
5692 imm_use_iterator imm_iter;
5693 use_operand_p use_p;
5694 bool have_debug_uses = false;
5695
5696 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5697 {
5698 if (gimple_debug_bind_p (USE_STMT (use_p)))
5699 {
5700 have_debug_uses = true;
5701 break;
5702 }
5703 }
5704
5705 if (have_debug_uses)
5706 {
71b39a64 5707 /* OP is a TERed SSA name, with DEF its defining
3c800ea7 5708 statement, and where OP is used in further debug
5709 instructions. Generate a debug temporary, and
5710 replace all uses of OP in debug insns with that
5711 temporary. */
42acab1c 5712 gimple *debugstmt;
3c800ea7 5713 tree value = gimple_assign_rhs_to_tree (def);
5714 tree vexpr = make_node (DEBUG_EXPR_DECL);
5715 rtx val;
3754d046 5716 machine_mode mode;
3c800ea7 5717
5169661d 5718 set_curr_insn_location (gimple_location (def));
3c800ea7 5719
5720 DECL_ARTIFICIAL (vexpr) = 1;
5721 TREE_TYPE (vexpr) = TREE_TYPE (value);
5722 if (DECL_P (value))
5723 mode = DECL_MODE (value);
5724 else
5725 mode = TYPE_MODE (TREE_TYPE (value));
adc78298 5726 SET_DECL_MODE (vexpr, mode);
3c800ea7 5727
5728 val = gen_rtx_VAR_LOCATION
5729 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5730
3e549002 5731 emit_debug_insn (val);
3c800ea7 5732
5733 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5734 {
5735 if (!gimple_debug_bind_p (debugstmt))
5736 continue;
5737
5738 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5739 SET_USE (use_p, vexpr);
5740
5741 update_stmt (debugstmt);
5742 }
5743 }
5744 }
5169661d 5745 set_curr_insn_location (sloc);
3c800ea7 5746 }
5747
8cee8dc0 5748 currently_expanding_gimple_stmt = stmt;
9845d120 5749
0ec80471 5750 /* Expand this statement, then evaluate the resulting RTL and
5751 fixup the CFG accordingly. */
75a70cf9 5752 if (gimple_code (stmt) == GIMPLE_COND)
c578459e 5753 {
1a91d914 5754 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
c578459e 5755 if (new_bb)
5756 return new_bb;
5757 }
90567983 5758 else if (is_gimple_debug (stmt))
9845d120 5759 {
5169661d 5760 location_t sloc = curr_insn_location ();
9845d120 5761 gimple_stmt_iterator nsi = gsi;
5762
5763 for (;;)
5764 {
90567983 5765 tree var;
5766 tree value = NULL_TREE;
5767 rtx val = NULL_RTX;
3754d046 5768 machine_mode mode;
9845d120 5769
90567983 5770 if (!gimple_debug_nonbind_marker_p (stmt))
5771 {
5772 if (gimple_debug_bind_p (stmt))
5773 {
5774 var = gimple_debug_bind_get_var (stmt);
9bae88bc 5775
90567983 5776 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5777 && TREE_CODE (var) != LABEL_DECL
5778 && !target_for_debug_bind (var))
5779 goto delink_debug_stmt;
9845d120 5780
90567983 5781 if (DECL_P (var))
5782 mode = DECL_MODE (var);
5783 else
5784 mode = TYPE_MODE (TREE_TYPE (var));
9845d120 5785
90567983 5786 if (gimple_debug_bind_has_value_p (stmt))
5787 value = gimple_debug_bind_get_value (stmt);
5788
5789 val = gen_rtx_VAR_LOCATION
5790 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5791 }
5792 else if (gimple_debug_source_bind_p (stmt))
5793 {
5794 var = gimple_debug_source_bind_get_var (stmt);
5795
5796 value = gimple_debug_source_bind_get_value (stmt);
5797
5798 mode = DECL_MODE (var);
9845d120 5799
90567983 5800 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5801 VAR_INIT_STATUS_UNINITIALIZED);
5802 }
5803 else
5804 gcc_unreachable ();
5805 }
5806 /* If this function was first compiled with markers
 5807	     enabled, but they're now disabled (e.g. LTO), drop
5808 them on the floor. */
5809 else if (gimple_debug_nonbind_marker_p (stmt)
5810 && !MAY_HAVE_DEBUG_MARKER_INSNS)
5811 goto delink_debug_stmt;
5812 else if (gimple_debug_begin_stmt_p (stmt))
5813 val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
8f6f3638 5814 else if (gimple_debug_inline_entry_p (stmt))
5815 {
5816 tree block = gimple_block (stmt);
5817
5818 if (block)
5819 val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
5820 else
5821 goto delink_debug_stmt;
5822 }
9845d120 5823 else
90567983 5824 gcc_unreachable ();
9845d120 5825
90567983 5826 last = get_last_insn ();
5827
5828 set_curr_insn_location (gimple_location (stmt));
9845d120 5829
1084097d 5830 emit_debug_insn (val);
9845d120 5831
5832 if (dump_file && (dump_flags & TDF_DETAILS))
5833 {
5834 /* We can't dump the insn with a TREE where an RTX
5835 is expected. */
90567983 5836 if (GET_CODE (val) == VAR_LOCATION)
5837 {
5838 gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
5839 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5840 }
9845d120 5841 maybe_dump_rtl_for_gimple_stmt (stmt, last);
90567983 5842 if (GET_CODE (val) == VAR_LOCATION)
5843 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
9845d120 5844 }
5845
9bae88bc 5846 delink_debug_stmt:
3c800ea7 5847 /* In order not to generate too many debug temporaries,
5848 we delink all uses of debug statements we already expanded.
5849 Therefore debug statements between definition and real
5850 use of TERed SSA names will continue to use the SSA name,
5851 and not be replaced with debug temps. */
5852 delink_stmt_imm_use (stmt);
5853
9845d120 5854 gsi = nsi;
5855 gsi_next (&nsi);
5856 if (gsi_end_p (nsi))
5857 break;
5858 stmt = gsi_stmt (nsi);
90567983 5859 if (!is_gimple_debug (stmt))
9845d120 5860 break;
5861 }
5862
5169661d 5863 set_curr_insn_location (sloc);
9845d120 5864 }
3ced8962 5865 else
0ec80471 5866 {
1a91d914 5867 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5868 if (call_stmt
5869 && gimple_call_tail_p (call_stmt)
3c919612 5870 && disable_tail_calls)
1a91d914 5871 gimple_call_set_tail (call_stmt, false);
3c919612 5872
1a91d914 5873 if (call_stmt && gimple_call_tail_p (call_stmt))
c578459e 5874 {
5875 bool can_fallthru;
1a91d914 5876 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
c578459e 5877 if (new_bb)
5878 {
5879 if (can_fallthru)
5880 bb = new_bb;
5881 else
5882 return new_bb;
5883 }
5884 }
2a3ebafa 5885 else
49377e21 5886 {
a8dd994c 5887 def_operand_p def_p;
a8dd994c 5888 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5889
5890 if (def_p != NULL)
5891 {
5892 /* Ignore this stmt if it is in the list of
5893 replaceable expressions. */
5894 if (SA.values
48e1416a 5895 && bitmap_bit_p (SA.values,
dfdbf3fd 5896 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
a8dd994c 5897 continue;
5898 }
16c9337c 5899 last = expand_gimple_stmt (stmt);
75a70cf9 5900 maybe_dump_rtl_for_gimple_stmt (stmt, last);
49377e21 5901 }
0ec80471 5902 }
5903 }
5904
8cee8dc0 5905 currently_expanding_gimple_stmt = NULL;
5906
9c388755 5907 /* Expand implicit goto and convert goto_locus. */
63f88450 5908 FOR_EACH_EDGE (e, ei, bb->succs)
5909 {
8e7408e3 5910 if (e->goto_locus != UNKNOWN_LOCATION)
5169661d 5911 set_curr_insn_location (e->goto_locus);
9c388755 5912 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5913 {
5914 emit_jump (label_rtx_for_bb (e->dest));
5915 e->flags &= ~EDGE_FALLTHRU;
5916 }
63f88450 5917 }
5918
8a9ad55b 5919  /* Expanded RTL can create a jump in the last instruction of a block.
 5920     This jump might later be assumed to be a jump to the successor and break edge insertion.
 5921     We need to insert a dummy move to prevent this.  PR41440. */
5922 if (single_succ_p (bb)
5923 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5924 && (last = get_last_insn ())
b941a5ed 5925 && (JUMP_P (last)
5926 || (DEBUG_INSN_P (last)
5927 && JUMP_P (prev_nondebug_insn (last)))))
8a9ad55b 5928 {
5929 rtx dummy = gen_reg_rtx (SImode);
5930 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5931 }
5932
0ec80471 5933 do_pending_stack_adjust ();
5934
822e391f 5935 /* Find the block tail. The last insn in the block is the insn
0ec80471 5936 before a barrier and/or table jump insn. */
5937 last = get_last_insn ();
6d7dc5b9 5938 if (BARRIER_P (last))
0ec80471 5939 last = PREV_INSN (last);
5940 if (JUMP_TABLE_DATA_P (last))
5941 last = PREV_INSN (PREV_INSN (last));
3574763e 5942 if (BARRIER_P (last))
5943 last = PREV_INSN (last);
26bb3cb2 5944 BB_END (bb) = last;
491e04ef 5945
0ec80471 5946 update_bb_for_insn (bb);
3ced8962 5947
0ec80471 5948 return bb;
5949}
5950
5951
5952/* Create a basic block for initialization code. */
5953
5954static basic_block
5955construct_init_block (void)
5956{
5957 basic_block init_block, first_block;
9a755727 5958 edge e = NULL;
5959 int flags;
e20bf721 5960
9a755727 5961 /* Multiple entry points not supported yet. */
34154e27 5962 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5963 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5964 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5965 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5966 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
0ec80471 5967
34154e27 5968 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
e20bf721 5969
9a755727 5970  /* When the entry edge points to the first basic block, we don't need a jump;
 5971     otherwise we have to jump to the proper target.  */
34154e27 5972 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
9a755727 5973 {
75a70cf9 5974 tree label = gimple_block_label (e->dest);
9a755727 5975
f9a00e9e 5976 emit_jump (jump_target_rtx (label));
9a755727 5977 flags = 0;
e20bf721 5978 }
9a755727 5979 else
5980 flags = EDGE_FALLTHRU;
0ec80471 5981
5982 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5983 get_last_insn (),
34154e27 5984 ENTRY_BLOCK_PTR_FOR_FN (cfun));
34154e27 5985 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
b3083327 5986 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
0ec80471 5987 if (e)
5988 {
5989 first_block = e->dest;
5990 redirect_edge_succ (e, init_block);
720cfc43 5991 e = make_single_succ_edge (init_block, first_block, flags);
0ec80471 5992 }
5993 else
720cfc43 5994 e = make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5995 EDGE_FALLTHRU);
0ec80471 5996
5997 update_bb_for_insn (init_block);
5998 return init_block;
5999}
6000
375c1c8a 6001/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
6002 found in the block tree. */
6003
6004static void
6005set_block_levels (tree block, int level)
6006{
6007 while (block)
6008 {
6009 BLOCK_NUMBER (block) = level;
6010 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
6011 block = BLOCK_CHAIN (block);
6012 }
6013}
0ec80471 6014
6015/* Create a block containing landing pads and similar stuff. */
6016
6017static void
6018construct_exit_block (void)
6019{
74a0cbc4 6020 rtx_insn *head = get_last_insn ();
6021 rtx_insn *end;
0ec80471 6022 basic_block exit_block;
cd665a06 6023 edge e, e2;
6024 unsigned ix;
6025 edge_iterator ei;
04e7d9cb 6026 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
74a0cbc4 6027 rtx_insn *orig_end = BB_END (prev_bb);
0ec80471 6028
34154e27 6029 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
7dfb44a0 6030
491e04ef 6031 /* Make sure the locus is set to the end of the function, so that
0ec80471 6032 epilogue line numbers and warnings are set properly. */
8e7408e3 6033 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
0ec80471 6034 input_location = cfun->function_end_locus;
6035
0ec80471 6036 /* Generate rtl for function exit. */
6037 expand_function_end ();
6038
6039 end = get_last_insn ();
6040 if (head == end)
6041 return;
04e7d9cb 6042  /* While emitting the function end we could move the end of the last basic
6043 block. */
26bb3cb2 6044 BB_END (prev_bb) = orig_end;
6d7dc5b9 6045 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
0ec80471 6046 head = NEXT_INSN (head);
04e7d9cb 6047 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
205ce1aa 6048     bb counting will be confused.  Any instructions before that
04e7d9cb 6049 label are emitted for the case where PREV_BB falls through into the
6050 exit block, so append those instructions to prev_bb in that case. */
6051 if (NEXT_INSN (head) != return_label)
6052 {
6053 while (NEXT_INSN (head) != return_label)
6054 {
6055 if (!NOTE_P (NEXT_INSN (head)))
26bb3cb2 6056 BB_END (prev_bb) = NEXT_INSN (head);
04e7d9cb 6057 head = NEXT_INSN (head);
6058 }
6059 }
6060 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
34154e27 6061 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
b3083327 6062 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
cd665a06 6063
6064 ix = 0;
34154e27 6065 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
0ec80471 6066 {
34154e27 6067 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
0ec80471 6068 if (!(e->flags & EDGE_ABNORMAL))
cd665a06 6069 redirect_edge_succ (e, exit_block);
6070 else
6071 ix++;
0ec80471 6072 }
cd665a06 6073
720cfc43 6074 e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
6075 EDGE_FALLTHRU);
34154e27 6076 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
0ec80471 6077 if (e2 != e)
6078 {
ea5d3981 6079 exit_block->count -= e2->count ();
0ec80471 6080 }
0ec80471 6081 update_bb_for_insn (exit_block);
6082}
6083
a0c938f0 6084/* Helper function for discover_nonconstant_array_refs.
9d5aa3bd 6085 Look for ARRAY_REF nodes with non-constant indexes and mark them
6086 addressable. */
6087
6088static tree
6089discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
6090 void *data ATTRIBUTE_UNUSED)
6091{
6092 tree t = *tp;
6093
6094 if (IS_TYPE_OR_DECL_P (t))
6095 *walk_subtrees = 0;
6096 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6097 {
6098 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6099 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
6100 && (!TREE_OPERAND (t, 2)
6101 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6102 || (TREE_CODE (t) == COMPONENT_REF
6103 && (!TREE_OPERAND (t,2)
6104 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6105 || TREE_CODE (t) == BIT_FIELD_REF
6106 || TREE_CODE (t) == REALPART_EXPR
6107 || TREE_CODE (t) == IMAGPART_EXPR
6108 || TREE_CODE (t) == VIEW_CONVERT_EXPR
72dd6141 6109 || CONVERT_EXPR_P (t))
9d5aa3bd 6110 t = TREE_OPERAND (t, 0);
6111
6112 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6113 {
6114 t = get_base_address (t);
9a60c3b9 6115 if (t && DECL_P (t)
6116 && DECL_MODE (t) != BLKmode)
9d5aa3bd 6117 TREE_ADDRESSABLE (t) = 1;
6118 }
6119
6120 *walk_subtrees = 0;
6121 }
6122
6123 return NULL_TREE;
6124}
6125
6126/* RTL expansion is not able to compile array references with variable
 6127   offsets for arrays stored in a single register.  Discover such
6128 expressions and mark variables as addressable to avoid this
6129 scenario. */
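/* Illustrative example, not part of the original sources: for a small
   array A whose DECL_MODE fits a single (e.g. vector) register, an access
   A[i] with a non-constant index I cannot be carried out on the register
   form, so A is marked TREE_ADDRESSABLE and is given a memory location
   instead.  */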
6130
6131static void
6132discover_nonconstant_array_refs (void)
6133{
6134 basic_block bb;
75a70cf9 6135 gimple_stmt_iterator gsi;
9d5aa3bd 6136
fc00614f 6137 FOR_EACH_BB_FN (bb, cfun)
75a70cf9 6138 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6139 {
42acab1c 6140 gimple *stmt = gsi_stmt (gsi);
f4b490ea 6141 if (!is_gimple_debug (stmt))
6142 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
75a70cf9 6143 }
9d5aa3bd 6144}
6145
27a7a23a 6146/* This function sets crtl->args.internal_arg_pointer to a virtual
6147 register if DRAP is needed. Local register allocator will replace
6148 virtual_incoming_args_rtx with the virtual register. */
6149
6150static void
6151expand_stack_alignment (void)
6152{
6153 rtx drap_rtx;
9e1c1bf0 6154 unsigned int preferred_stack_boundary;
27a7a23a 6155
6156 if (! SUPPORTS_STACK_ALIGNMENT)
6157 return;
48e1416a 6158
27a7a23a 6159 if (cfun->calls_alloca
6160 || cfun->has_nonlocal_label
6161 || crtl->has_nonlocal_goto)
6162 crtl->need_drap = true;
6163
c0a05dc0 6164 /* Call update_stack_boundary here again to update incoming stack
6165 boundary. It may set incoming stack alignment to a different
6166 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
6167 use the minimum incoming stack alignment to check if it is OK
6168 to perform sibcall optimization since sibcall optimization will
6169 only align the outgoing stack to incoming stack boundary. */
6170 if (targetm.calls.update_stack_boundary)
6171 targetm.calls.update_stack_boundary ();
6172
6173 /* The incoming stack frame has to be aligned at least at
6174 parm_stack_boundary. */
6175 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
27a7a23a 6176
27a7a23a 6177 /* Update crtl->stack_alignment_estimated and use it later to align
6178 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
6179 exceptions since callgraph doesn't collect incoming stack alignment
6180 in this case. */
cbeb677e 6181 if (cfun->can_throw_non_call_exceptions
27a7a23a 6182 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6183 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6184 else
6185 preferred_stack_boundary = crtl->preferred_stack_boundary;
6186 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6187 crtl->stack_alignment_estimated = preferred_stack_boundary;
6188 if (preferred_stack_boundary > crtl->stack_alignment_needed)
6189 crtl->stack_alignment_needed = preferred_stack_boundary;
6190
c0a05dc0 6191 gcc_assert (crtl->stack_alignment_needed
6192 <= crtl->stack_alignment_estimated);
6193
27a7a23a 6194 crtl->stack_realign_needed
9e1c1bf0 6195 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
7b70fdf7 6196 crtl->stack_realign_tried = crtl->stack_realign_needed;
27a7a23a 6197
6198 crtl->stack_realign_processed = true;
6199
6200 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6201 alignment. */
6202 gcc_assert (targetm.calls.get_drap_rtx != NULL);
48e1416a 6203 drap_rtx = targetm.calls.get_drap_rtx ();
27a7a23a 6204
f6754469 6205 /* stack_realign_drap and drap_rtx must match. */
6206 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6207
27a7a23a 6208 /* Do nothing if NULL is returned, which means DRAP is not needed. */
c9281ef8 6209 if (drap_rtx != NULL)
27a7a23a 6210 {
6211 crtl->args.internal_arg_pointer = drap_rtx;
6212
6213 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6214 needed. */
6215 fixup_tail_calls ();
6216 }
6217}
0e80b01d 6218\f
6219
6220static void
6221expand_main_function (void)
6222{
6223#if (defined(INVOKE__main) \
6224 || (!defined(HAS_INIT_SECTION) \
6225 && !defined(INIT_SECTION_ASM_OP) \
6226 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
9e9e5c15 6227 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
0e80b01d 6228#endif
6229}
6230\f
6231
6232/* Expand code to initialize the stack_protect_guard. This is invoked at
6233 the beginning of a function to be protected. */
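/* Illustrative outline, not part of the original sources: on most targets
   this amounts to copying the guard value (typically the global
   __stack_chk_guard) into the function's canary slot, i.e. roughly
     canary_slot = __stack_chk_guard;
   using the target's stack_protect_combined_set or stack_protect_set
   pattern when one exists, and a plain move otherwise.  */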
6234
0e80b01d 6235static void
6236stack_protect_prologue (void)
6237{
6238 tree guard_decl = targetm.stack_protect_guard ();
6239 rtx x, y;
6240
c7a7ba46 6241 crtl->stack_protect_guard_decl = guard_decl;
0e80b01d 6242 x = expand_normal (crtl->stack_protect_guard);
f98495d9 6243
6244 if (targetm.have_stack_protect_combined_set () && guard_decl)
6245 {
6246 gcc_assert (DECL_P (guard_decl));
6247 y = DECL_RTL (guard_decl);
6248
 6249      /* Allow the target to compute the address of Y and copy it to X without
6250 leaking Y into a register. This combined address + copy pattern
6251 allows the target to prevent spilling of any intermediate results by
 6252	 splitting it after the register allocator.  */
6253 if (rtx_insn *insn = targetm.gen_stack_protect_combined_set (x, y))
6254 {
6255 emit_insn (insn);
6256 return;
6257 }
6258 }
6259
8a23256f 6260 if (guard_decl)
6261 y = expand_normal (guard_decl);
6262 else
6263 y = const0_rtx;
0e80b01d 6264
6265 /* Allow the target to copy from Y to X without leaking Y into a
6266 register. */
e9b06442 6267 if (targetm.have_stack_protect_set ())
6268 if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6269 {
6270 emit_insn (insn);
6271 return;
6272 }
0e80b01d 6273
6274 /* Otherwise do a straight move. */
6275 emit_move_insn (x, y);
6276}
27a7a23a 6277
0ec80471 6278/* Translate the intermediate representation contained in the CFG
6279 from GIMPLE trees to RTL.
6280
6281 We do conversion per basic block and preserve/update the tree CFG.
6282 This implies we have to do some magic as the CFG can simultaneously
6283 consist of basic blocks containing RTL and GIMPLE trees. This can
2c763ed4 6284 confuse the CFG hooks, so be careful to not manipulate CFG during
0ec80471 6285 the expansion. */
6286
65b0537f 6287namespace {
6288
6289const pass_data pass_data_expand =
6290{
6291 RTL_PASS, /* type */
6292 "expand", /* name */
6293 OPTGROUP_NONE, /* optinfo_flags */
65b0537f 6294 TV_EXPAND, /* tv_id */
6295 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6296 | PROP_gimple_lcx
82fc0e0a 6297 | PROP_gimple_lvec
6298 | PROP_gimple_lva), /* properties_required */
65b0537f 6299 PROP_rtl, /* properties_provided */
6300 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
8b88439e 6301 0, /* todo_flags_start */
65b0537f 6302 0, /* todo_flags_finish */
6303};
6304
6305class pass_expand : public rtl_opt_pass
6306{
6307public:
6308 pass_expand (gcc::context *ctxt)
6309 : rtl_opt_pass (pass_data_expand, ctxt)
6310 {}
6311
6312 /* opt_pass methods: */
6313 virtual unsigned int execute (function *);
6314
6315}; // class pass_expand
6316
6317unsigned int
6318pass_expand::execute (function *fun)
0ec80471 6319{
6320 basic_block bb, init_block;
ea06d49f 6321 edge_iterator ei;
6322 edge e;
74a0cbc4 6323 rtx_insn *var_seq, *var_ret_seq;
a8dd994c 6324 unsigned i;
6325
e2050933 6326 timevar_push (TV_OUT_OF_SSA);
a8dd994c 6327 rewrite_out_of_ssa (&SA);
e2050933 6328 timevar_pop (TV_OUT_OF_SSA);
ed7e2206 6329 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
0ec80471 6330
c64f38bf 6331 if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
54497144 6332 {
6333 gimple_stmt_iterator gsi;
6334 FOR_EACH_BB_FN (bb, cfun)
6335 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6336 if (gimple_debug_bind_p (gsi_stmt (gsi)))
6337 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6338 }
6339
212dddd3 6340 /* Make sure all values used by the optimization passes have sane
6341 defaults. */
6342 reg_renumber = 0;
6343
723c0ee7 6344 /* Some backends want to know that we are expanding to RTL. */
6345 currently_expanding_to_rtl = 1;
821ac701 6346 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6347 free_dominance_info (CDI_DOMINATORS);
723c0ee7 6348
65b0537f 6349 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
7dfb44a0 6350
5169661d 6351 insn_locations_init ();
c3771ec9 6352 if (!DECL_IS_BUILTIN (current_function_decl))
30099c0c 6353 {
6354 /* Eventually, all FEs should explicitly set function_start_locus. */
65b0537f 6355 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6356 set_curr_insn_location
6357 (DECL_SOURCE_LOCATION (current_function_decl));
30099c0c 6358 else
65b0537f 6359 set_curr_insn_location (fun->function_start_locus);
30099c0c 6360 }
0aecb55e 6361 else
5169661d 6362 set_curr_insn_location (UNKNOWN_LOCATION);
6363 prologue_location = curr_insn_location ();
375c1c8a 6364
fdc86f97 6365#ifdef INSN_SCHEDULING
6366 init_sched_attrs ();
6367#endif
6368
375c1c8a 6369 /* Make sure first insn is a note even if we don't want linenums.
6370 This makes sure the first insn will never be deleted.
6371 Also, final expects a note to appear there. */
6372 emit_note (NOTE_INSN_DELETED);
656047bf 6373
9d5aa3bd 6374 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6375 discover_nonconstant_array_refs ();
6376
bc5e6ea1 6377 targetm.expand_to_rtl_hook ();
2add0b64 6378 crtl->init_stack_alignment ();
65b0537f 6379 fun->cfg->max_jumptable_ents = 0;
edb7afe8 6380
b8a89e7e 6381  /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
 6382     of the function section at expansion time to predict the distance of calls.  */
6383 resolve_unique_section (current_function_decl, 0, flag_function_sections);
6384
280450fa 6385 /* Expand the variables recorded during gimple lowering. */
e2050933 6386 timevar_push (TV_VAR_EXPAND);
5be42b39 6387 start_sequence ();
6388
3c919612 6389 var_ret_seq = expand_used_vars ();
5be42b39 6390
6391 var_seq = get_insns ();
6392 end_sequence ();
e2050933 6393 timevar_pop (TV_VAR_EXPAND);
0ec80471 6394
f1a0edff 6395 /* Honor stack protection warnings. */
6396 if (warn_stack_protect)
6397 {
65b0537f 6398 if (fun->calls_alloca)
48e1416a 6399 warning (OPT_Wstack_protector,
b15b8239 6400 "stack protector not protecting local variables: "
65b0537f 6401 "variable length buffer");
edb7afe8 6402 if (has_short_buffer && !crtl->stack_protect_guard)
48e1416a 6403 warning (OPT_Wstack_protector,
b15b8239 6404 "stack protector not protecting function: "
65b0537f 6405 "all local arrays are less than %d bytes long",
f1a0edff 6406 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6407 }
6408
0ec80471 6409 /* Set up parameters and prepare for return, for the function. */
82aa4bd5 6410 expand_function_start (current_function_decl);
0ec80471 6411
5be42b39 6412 /* If we emitted any instructions for setting up the variables,
6413 emit them before the FUNCTION_START note. */
6414 if (var_seq)
6415 {
6416 emit_insn_before (var_seq, parm_birth_insn);
6417
6418 /* In expand_function_end we'll insert the alloca save/restore
 6419	 before parm_birth_insn.  We've just inserted an alloca call.
6420 Adjust the pointer to match. */
6421 parm_birth_insn = var_seq;
6422 }
6423
b2df3bbf 6424 /* Now propagate the RTL assignment of each partition to the
6425 underlying var of each SSA_NAME. */
f211616e 6426 tree name;
6427
6428 FOR_EACH_SSA_NAME (i, name, cfun)
b2df3bbf 6429 {
f211616e 6430 /* We might have generated new SSA names in
6431 update_alias_info_with_stack_vars. They will have a NULL
 6432	 defining statement, and won't be part of the partitioning,
6433 so ignore those. */
6434 if (!SSA_NAME_DEF_STMT (name))
b2df3bbf 6435 continue;
6436
6437 adjust_one_expanded_partition_var (name);
6438 }
6439
6440 /* Clean up RTL of variables that straddle across multiple
6441 partitions, and check that the rtl of any PARM_DECLs that are not
6442 cleaned up is that of their default defs. */
f211616e 6443 FOR_EACH_SSA_NAME (i, name, cfun)
f2ca19b4 6444 {
f2ca19b4 6445 int part;
f2ca19b4 6446
f211616e 6447 /* We might have generated new SSA names in
6448 update_alias_info_with_stack_vars. They will have a NULL
 6449	 defining statement, and won't be part of the partitioning,
6450 so ignore those. */
6451 if (!SSA_NAME_DEF_STMT (name))
f2ca19b4 6452 continue;
6453 part = var_to_partition (SA.map, name);
6454 if (part == NO_PARTITION)
6455 continue;
ec11736b 6456
94f92c36 6457 /* If this decl was marked as living in multiple places, reset
6458 this now to NULL. */
6459 tree var = SSA_NAME_VAR (name);
6460 if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6461 SET_DECL_RTL (var, NULL);
6462 /* Check that the pseudos chosen by assign_parms are those of
6463 the corresponding default defs. */
6464 else if (SSA_NAME_IS_DEFAULT_DEF (name)
6465 && (TREE_CODE (var) == PARM_DECL
6466 || TREE_CODE (var) == RESULT_DECL))
ec11736b 6467 {
94f92c36 6468 rtx in = DECL_RTL_IF_SET (var);
6469 gcc_assert (in);
6470 rtx out = SA.partition_to_pseudo[part];
b2df3bbf 6471 gcc_assert (in == out);
6472
6473 /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6474 those expected by debug backends for each parm and for
6475 the result. This is particularly important for stabs,
6476 whose register elimination from parm's DECL_RTL may cause
6477 -fcompare-debug differences as SET_DECL_RTL changes reg's
6478 attrs. So, make sure the RTL already has the parm as the
6479 EXPR, so that it won't change. */
6480 SET_DECL_RTL (var, NULL_RTX);
6481 if (MEM_P (in))
6482 set_mem_attributes (in, var, true);
6483 SET_DECL_RTL (var, in);
ec11736b 6484 }
f2ca19b4 6485 }
6486
0ec80471 6487 /* If this function is `main', emit a call to `__main'
6488 to run global initializers, etc. */
6489 if (DECL_NAME (current_function_decl)
6490 && MAIN_NAME_P (DECL_NAME (current_function_decl))
6491 && DECL_FILE_SCOPE_P (current_function_decl))
6492 expand_main_function ();
6493
f1a0edff 6494 /* Initialize the stack_protect_guard field. This must happen after the
6495 call to __main (if any) so that the external decl is initialized. */
783f362b 6496 if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
f1a0edff 6497 stack_protect_prologue ();
6498
a8dd994c 6499 expand_phi_nodes (&SA);
6500
3e292d17 6501 /* Release any stale SSA redirection data. */
b1090780 6502 redirect_edge_var_map_empty ();
3e292d17 6503
011e6b51 6504 /* Register rtl specific functions for cfg. */
0ec80471 6505 rtl_register_cfg_hooks ();
6506
6507 init_block = construct_init_block ();
6508
ea06d49f 6509 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
a8dd994c 6510 remaining edges later. */
65b0537f 6511 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
ea06d49f 6512 e->flags &= ~EDGE_EXECUTABLE;
6513
90567983 6514 /* If the function has too many markers, drop them while expanding. */
6515 if (cfun->debug_marker_count
6516 >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
6517 cfun->debug_nonbind_markers = false;
6518
0699065d 6519 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
65b0537f 6520 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
34154e27 6521 next_bb)
3c919612 6522 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
7dfb44a0 6523
c64f38bf 6524 if (MAY_HAVE_DEBUG_BIND_INSNS)
9845d120 6525 expand_debug_locations ();
6526
54497144 6527 if (deep_ter_debug_map)
6528 {
6529 delete deep_ter_debug_map;
6530 deep_ter_debug_map = NULL;
6531 }
6532
3db65b62 6533 /* Free stuff we no longer need after GIMPLE optimizations. */
6534 free_dominance_info (CDI_DOMINATORS);
6535 free_dominance_info (CDI_POST_DOMINATORS);
d4f078b5 6536 delete_tree_cfg_annotations (fun);
3db65b62 6537
e2050933 6538 timevar_push (TV_OUT_OF_SSA);
a8dd994c 6539 finish_out_of_ssa (&SA);
e2050933 6540 timevar_pop (TV_OUT_OF_SSA);
a8dd994c 6541
e2050933 6542 timevar_push (TV_POST_EXPAND);
67817f0f 6543 /* We are no longer in SSA form. */
65b0537f 6544 fun->gimple_df->in_ssa_p = false;
b3083327 6545 loops_state_clear (LOOP_CLOSED_SSA);
67817f0f 6546
7dfb44a0 6547  /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6548 conservatively to true until they are all profile aware. */
5f8841a5 6549 delete lab_rtx_for_bb;
d4f078b5 6550 free_histograms (fun);
0ec80471 6551
6552 construct_exit_block ();
5169661d 6553 insn_locations_finalize ();
0ec80471 6554
3c919612 6555 if (var_ret_seq)
6556 {
4cd001d5 6557 rtx_insn *after = return_label;
74a0cbc4 6558 rtx_insn *next = NEXT_INSN (after);
3c919612 6559 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6560 after = next;
6561 emit_insn_after (var_ret_seq, after);
6562 }
6563
e38def9c 6564 /* Zap the tree EH table. */
65b0537f 6565 set_eh_throw_stmt_table (fun, NULL);
0ec80471 6566
409e049a 6567  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6568 split edges which edge insertions might do. */
0ec80471 6569 rebuild_jump_labels (get_insns ());
0ec80471 6570
65b0537f 6571 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6572 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
a8dd994c 6573 {
6574 edge e;
6575 edge_iterator ei;
6576 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6577 {
6578 if (e->insns.r)
4547eca6 6579 {
ae5e6486 6580 rebuild_jump_labels_chain (e->insns.r);
d699f73a 6581 /* Put insns after parm birth, but before
6582 NOTE_INSNS_FUNCTION_BEG. */
65b0537f 6583 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6584 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
4547eca6 6585 {
ae5e6486 6586 rtx_insn *insns = e->insns.r;
6587 e->insns.r = NULL;
d699f73a 6588 if (NOTE_P (parm_birth_insn)
6589 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6590 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6591 else
6592 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4547eca6 6593 }
6594 else
6595 commit_one_edge_insertion (e);
6596 }
a8dd994c 6597 else
6598 ei_next (&ei);
6599 }
6600 }
6601
6602 /* We're done expanding trees to RTL. */
6603 currently_expanding_to_rtl = 0;
6604
ea804f86 6605 flush_mark_addressable_queue ();
6606
65b0537f 6607 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6608 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
a8dd994c 6609 {
6610 edge e;
6611 edge_iterator ei;
6612 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6613 {
6614 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6615 e->flags &= ~EDGE_EXECUTABLE;
6616
6617 /* At the moment not all abnormal edges match the RTL
6618 representation. It is safe to remove them here as
6619 find_many_sub_basic_blocks will rediscover them.
6620 In the future we should get this fixed properly. */
6621 if ((e->flags & EDGE_ABNORMAL)
6622 && !(e->flags & EDGE_SIBCALL))
6623 remove_edge (e);
6624 else
6625 ei_next (&ei);
6626 }
6627 }
6628
3c6549f8 6629 auto_sbitmap blocks (last_basic_block_for_fn (fun));
53c5d9d4 6630 bitmap_ones (blocks);
0ec80471 6631 find_many_sub_basic_blocks (blocks);
a8dd994c 6632 purge_all_dead_edges ();
0ec80471 6633
b0be10c3 6634 /* After initial rtl generation, call back to finish generating
6635 exception support code. We need to do this before cleaning up
6636 the CFG as the code does not expect dead landing pads. */
6637 if (fun->eh->region_tree != NULL)
6638 finish_eh_generation ();
6639
6640 /* Call expand_stack_alignment after finishing all
6641 updates to crtl->preferred_stack_boundary. */
27a7a23a 6642 expand_stack_alignment ();
6643
212dddd3 6644 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6645 function. */
6646 if (crtl->tail_call_emit)
6647 fixup_tail_calls ();
6648
c86933f9 6649  /* BB subdivision may have created basic blocks that are only reachable
6650 from unlikely bbs but not marked as such in the profile. */
6651 if (optimize)
6652 propagate_unlikely_bbs_forward ();
6653
1dd4980f 6654 /* Remove unreachable blocks, otherwise we cannot compute dominators
6655 which are needed for loop state verification. As a side-effect
6656 this also compacts blocks.
6657 ??? We cannot remove trivially dead insns here as for example
6658 the DRAP reg on i?86 is not magically live at this point.
6659 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6660 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6661
382ecba7 6662 checking_verify_flow_info ();
0f9005dd 6663
212dddd3 6664 /* Initialize pseudos allocated for hard registers. */
6665 emit_initial_value_sets ();
6666
6667 /* And finally unshare all RTL. */
6668 unshare_all_rtl ();
6669
0f9005dd 6670 /* There's no need to defer outputting this function any more; we
6671 know we want to output it. */
6672 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6673
6674 /* Now that we're done expanding trees to RTL, we shouldn't have any
6675 more CONCATs anywhere. */
6676 generating_concat_p = 0;
6677
49377e21 6678 if (dump_file)
6679 {
6680 fprintf (dump_file,
6681 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6682 /* And the pass manager will dump RTL for us. */
6683 }
77fce4cd 6684
6685 /* If we're emitting a nested function, make sure its parent gets
6686 emitted as well. Doing otherwise confuses debug info. */
65b0537f 6687 {
6688 tree parent;
6689 for (parent = DECL_CONTEXT (current_function_decl);
6690 parent != NULL_TREE;
6691 parent = get_containing_scope (parent))
6692 if (TREE_CODE (parent) == FUNCTION_DECL)
6693 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6694 }
a0c938f0 6695
77fce4cd 6696 TREE_ASM_WRITTEN (current_function_decl) = 1;
1a56c787 6697
6698 /* After expanding, the return labels are no longer needed. */
6699 return_label = NULL;
6700 naked_return_label = NULL;
4c0315d0 6701
6702 /* After expanding, the tm_restart map is no longer needed. */
65b0537f 6703 if (fun->gimple_df->tm_restart)
b7aa58e4 6704 fun->gimple_df->tm_restart = NULL;
4c0315d0 6705
375c1c8a 6706 /* Tag the blocks with a depth number so that change_scope can find
6707 the common parent easily. */
65b0537f 6708 set_block_levels (DECL_INITIAL (fun->decl), 0);
7dfb44a0 6709 default_rtl_profile ();
212dddd3 6710
6d9dcf16 6711 /* For -dx discard loops now, otherwise IL verify in clean_state will
6712 ICE. */
6713 if (rtl_dump_and_exit)
6714 {
6715 cfun->curr_properties &= ~PROP_loops;
6716 loop_optimizer_finalize ();
6717 }
6718
e2050933 6719 timevar_pop (TV_POST_EXPAND);
212dddd3 6720
2a1990e9 6721 return 0;
0ec80471 6722}
6723
cbe8bda8 6724} // anon namespace
6725
6726rtl_opt_pass *
6727make_pass_expand (gcc::context *ctxt)
6728{
6729 return new pass_expand (ctxt);
6730}