/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "ssa.h"
#include "optabs.h"
#include "regs.h" /* For reg_renumber.  */
#include "emit-rtl.h"
#include "recog.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "varasm.h"
#include "stor-layout.h"
#include "stmt.h"
#include "print-tree.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-expr.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "cfgloop.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "tree-ssa-address.h"
#include "output.h"
#include "builtins.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple *currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);

static bool defer_stack_allocation (tree, bool);

static void record_alignment_for_reg_var (unsigned int);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple *stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}

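/* Editorial note, not part of the original source: for a statement like
   "x = a + b" the rhs class is GIMPLE_BINARY_RHS, so the function above
   effectively returns build2 (PLUS_EXPR, TREE_TYPE (x), a, b), while a
   plain copy "x = y" is GIMPLE_SINGLE_RHS and returns (possibly a copy
   of) the rhs operand itself.  */
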
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Choose either CUR or NEXT as the leader DECL for a partition.
   Prefer ignored decls, to simplify debug dumps and reduce ambiguity
   out of the same user variable being in multiple partitions (this is
   less likely for compiler-introduced temps).  */

static tree
leader_merge (tree cur, tree next)
{
  if (cur == NULL || cur == next)
    return next;

  if (DECL_P (cur) && DECL_IGNORED_P (cur))
    return cur;

  if (DECL_P (next) && DECL_IGNORED_P (next))
    return next;

  return cur;
}

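/* Editorial note, not from the original source: given a user variable
   "i" and an ignored compiler temporary "D.1234" in one partition,
   leader_merge (i, D.1234) returns the DECL_IGNORED_P temporary, so
   the dump name of the partition does not suggest it holds only that
   one user variable.  */
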
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  gcc_checking_assert (!x
                       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
                       || (use_register_for_decl (t)
                           ? (REG_P (x)
                              || (GET_CODE (x) == CONCAT
                                  && (REG_P (XEXP (x, 0))
                                      || SUBREG_P (XEXP (x, 0)))
                                  && (REG_P (XEXP (x, 1))
                                      || SUBREG_P (XEXP (x, 1))))
                              /* We need to accept PARALLELs for RESULT_DECLs
                                 because of vector types with BLKmode returned
                                 in multiple registers, but they are supposed
                                 to be uncoalesced.  */
                              || (GET_CODE (x) == PARALLEL
                                  && SSAVAR (t)
                                  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                                  && (GET_MODE (x) == BLKmode
                                      || !flag_tree_coalesce_vars)))
                           : (MEM_P (x) || x == pc_rtx
                              || (GET_CODE (x) == CONCAT
                                  && MEM_P (XEXP (x, 0))
                                  && MEM_P (XEXP (x, 1))))));
  /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
     RESULT_DECLs has the expected mode.  For memory, we accept
     unpromoted modes, since that's what we're likely to get.  For
     PARM_DECLs and RESULT_DECLs, we'll have been called by
     set_parm_rtl, which will give us the default def, so we don't
     have to compute it ourselves.  For RESULT_DECLs, we accept mode
     mismatches too, as long as we have BLKmode or are not coalescing
     across variables, so that we don't reject BLKmode PARALLELs or
     unpromoted REGs.  */
  gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
                       || (SSAVAR (t)
                           && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                           && (promote_ssa_mode (t, NULL) == BLKmode
                               || !flag_tree_coalesce_vars))
                       || !use_register_for_decl (t)
                       || GET_MODE (x) == promote_ssa_mode (t, NULL));

  if (x)
    {
      bool skip = false;
      tree cur = NULL_TREE;
      rtx xm = x;

    retry:
      if (MEM_P (xm))
        cur = MEM_EXPR (xm);
      else if (REG_P (xm))
        cur = REG_EXPR (xm);
      else if (SUBREG_P (xm))
        {
          gcc_assert (subreg_lowpart_p (xm));
          xm = SUBREG_REG (xm);
          goto retry;
        }
      else if (GET_CODE (xm) == CONCAT)
        {
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (GET_CODE (xm) == PARALLEL)
        {
          xm = XVECEXP (xm, 0, 0);
          gcc_assert (GET_CODE (xm) == EXPR_LIST);
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (xm == pc_rtx)
        skip = true;
      else
        gcc_unreachable ();

      tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);

      if (cur != next)
        {
          if (MEM_P (x))
            set_mem_attributes (x,
                                next && TREE_CODE (next) == SSA_NAME
                                ? TREE_TYPE (next)
                                : next, true);
          else
            set_reg_attrs_for_decl_rtl (next, x);
        }
    }

  if (TREE_CODE (t) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, t);
      if (part != NO_PARTITION)
        {
          if (SA.partition_to_pseudo[part])
            gcc_assert (SA.partition_to_pseudo[part] == x);
          else if (x != pc_rtx)
            SA.partition_to_pseudo[part] = x;
        }
      /* For the benefit of debug information at -O0 (where
         vartracking doesn't run) record the place also in the base
         DECL.  For PARMs and RESULTs, do so only when setting the
         default def.  */
      if (x && x != pc_rtx && SSA_NAME_VAR (t)
          && (VAR_P (SSA_NAME_VAR (t))
              || SSA_NAME_IS_DEFAULT_DEF (t)))
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align;

  if (TREE_CODE (decl) == SSA_NAME)
    align = TYPE_ALIGN (TREE_TYPE (decl));
  else
    {
      align = LOCAL_DECL_ALIGNMENT (decl);
      SET_DECL_ALIGN (decl, align);
    }
  return align / BITS_PER_UNIT;
}

/* Align given offset BASE with ALIGN.  Truncate up if ALIGN_UP is true,
   down otherwise.  Return truncated BASE value.  */

static inline unsigned HOST_WIDE_INT
align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
{
  return align_up ? (base + align - 1) & -align : base & -align;
}

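/* Editorial worked example, not from the original source:
   align_base (13, 8, true) is (13 + 7) & -8 == 16, while
   align_base (13, 8, false) is 13 & -8 == 8, i.e. BASE is rounded to
   the next or the previous multiple of ALIGN respectively.  */
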
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset
        = align_base (frame_offset - frame_phase - size,
                      align, false) + frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset
        = align_base (frame_offset - frame_phase, align, true) + frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}

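/* Editorial worked example, not from the original source: on a
   FRAME_GROWS_DOWNWARD target with frame_phase 0 and frame_offset -34,
   a request for SIZE 10 at ALIGN 8 computes
   align_base (-44, 8, false) == -48, returns -48 and leaves
   frame_offset at -48; the new object occupies bytes [-48, -38)
   relative to virtual_stack_vars_rtx.  */
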
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  tree size = TREE_CODE (decl) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
    : DECL_SIZE_UNIT (decl);
  v->size = tree_to_uhwi (size);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (decl);
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}

/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (!VAR_P (lhs))
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = decl_to_stack_part->get (lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}

/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we for instance moved an address-taken
     operation upward, without also moving a dereference to it upwards.
     But it's conservatively correct as a variable never can hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}

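/* Editorial worked example, not from the original source: under
   stack_var_cmp, any decl whose alignment exceeds
   MAX_SUPPORTED_STACK_ALIGNMENT sorts before all "small"-alignment
   decls; among the latter a 64-byte object precedes a 16-byte one,
   equal sizes are ordered by alignment, and the final uid comparison
   keeps the qsort stable.  */
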
struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
typedef hash_map<size_t, bitmap, part_traits> part_hashmap;

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               part_hashmap *decls_to_partitions,
                               hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = new part_hashmap;
          cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          decls_to_partitions->put (uid, part);
          cfun->gimple_df->decls_to_pointers->put (decl, name);
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      tree name;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      FOR_EACH_SSA_NAME (i, name, cfun)
        {
          struct ptr_info_def *pi;

          if (POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           &visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}

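/* Editorial note, not from the original source: if decls "a" and "b"
   share a partition, the code above creates one artificial SSA pointer
   whose points-to set is {a, b} and then widens every points-to set
   mentioning "a" or "b" to contain both, keeping RTL alias queries
   conservatively correct for the shared slot.  */
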
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          HOST_WIDE_INT jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if ((asan_sanitize_stack_p ())
              && isize != jsize
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}

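/* Editorial worked example, not from the original source: for objects
   of sizes 32, 16 and 8 where only the 32- and 16-byte objects
   conflict, the sorted order is 32, 16, 8; the 16-byte object is
   rejected by stack_var_conflict_p, the 8-byte one is unioned into the
   32-byte partition, and the result is the two partitions {32, 8} and
   {16}.  */
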
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
                   ? TYPE_MODE (TREE_TYPE (decl))
                   : DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = least_bit_hwi (offset);
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      SET_DECL_ALIGN (decl, align);
      DECL_USER_ALIGN (decl) = 0;
    }

  set_rtl (decl, x);
}

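/* Editorial worked example, not from the original source: a decl
   placed at OFFSET -24 from virtual_stack_vars_rtx with frame_phase 0
   gets least_bit_hwi (-24) == 8, i.e. a recorded alignment of 64 bits,
   capped to BASE_ALIGN when that is smaller.  */
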
struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reverse order, so the highest offset pairs come
     first.  */
  auto_vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  auto_vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  bool large_allocation_done = false;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* All "large" alignment decls come before all "small" alignment
             decls, but "large" alignment decls are not sorted based on
             their alignment.  Increase large_align to track the largest
             required alignment.  */
          if ((alignb * BITS_PER_UNIT) > large_align)
            large_align = alignb * BITS_PER_UNIT;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if (TREE_CODE (decl) == SSA_NAME
              ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
              : DECL_RTL (decl) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if (TREE_CODE (decl) == SSA_NAME
          ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
          : DECL_RTL (decl) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          base = virtual_stack_vars_rtx;
          if ((asan_sanitize_stack_p ())
              && pred)
            {
              HOST_WIDE_INT prev_offset
                = align_base (frame_offset,
                              MAX (alignb, ASAN_RED_ZONE_SIZE),
                              !FRAME_GROWS_DOWNWARD);
              tree repr_decl = NULL_TREE;
              offset
                = alloc_stack_frame_space (stack_vars[i].size
                                           + ASAN_RED_ZONE_SIZE,
                                           MAX (alignb, ASAN_RED_ZONE_SIZE));

              data->asan_vec.safe_push (prev_offset);
              data->asan_vec.safe_push (offset + stack_vars[i].size);
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);
              data->asan_alignb = MAX (data->asan_alignb, alignb);
              if (data->asan_base == NULL)
                data->asan_base = gen_reg_rtx (Pmode);
              base = data->asan_base;

              if (!STRICT_ALIGNMENT)
                base_align = crtl->max_used_stack_slot_alignment;
              else
                base_align = MAX (crtl->max_used_stack_slot_alignment,
                                  GET_MODE_ALIGNMENT (SImode)
                                  << ASAN_SHADOW_SHIFT);
            }
          else
            {
              offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
              base_align = crtl->max_used_stack_slot_alignment;
            }
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;

          /* If there were any variables requiring "large" alignment, allocate
             space.  */
          if (large_size > 0 && ! large_allocation_done)
            {
              HOST_WIDE_INT loffset;
              rtx large_allocsize;

              large_allocsize = GEN_INT (large_size);
              get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
              loffset = alloc_stack_frame_space
                (INTVAL (large_allocsize),
                 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
              large_base = get_dynamic_stack_base (loffset, large_align);
              large_allocation_done = true;
            }
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* Record the RTL assignment X for the default def of PARM.  */

extern void
set_parm_rtl (tree parm, rtx x)
{
  gcc_assert (TREE_CODE (parm) == PARM_DECL
              || TREE_CODE (parm) == RESULT_DECL);

  if (x && !MEM_P (x))
    {
      unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
                                              TYPE_MODE (TREE_TYPE (parm)),
                                              TYPE_ALIGN (TREE_TYPE (parm)));

      /* If the variable alignment is very large we'll dynamically
         allocate it, which means that in-frame portion is just a
         pointer.  ??? We've got a pseudo for sure here, do we
         actually dynamically allocate its spilling area if needed?
         ??? Isn't it a problem when POINTER_SIZE also exceeds
         MAX_SUPPORTED_STACK_ALIGNMENT, as on cris and lm32?  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;

      record_alignment_for_reg_var (align);
    }

  tree ssa = ssa_default_def (cfun, parm);
  if (!ssa)
    return set_rtl (parm, x);

  int part = var_to_partition (SA.map, ssa);
  gcc_assert (part != NO_PARTITION);

  bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
  gcc_assert (changed);

  set_rtl (ssa, x);
  gcc_assert (DECL_RTL (parm) == x);
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var_1 (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  if (TREE_CODE (var) == SSA_NAME)
    {
      tree type = TREE_TYPE (var);
      size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
      byte_align = TYPE_ALIGN_UNIT (type);
    }
  else
    {
      size = tree_to_uhwi (DECL_SIZE_UNIT (var));
      byte_align = align_local_variable (var);
    }

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
   already assigned some MEM.  */

static void
expand_one_stack_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (MEM_P (x));
          return;
        }
    }

  return expand_one_stack_var_1 (var);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* Record the alignment requirements of some variable assigned to a
   pseudo.  */

static void
record_alignment_for_reg_var (unsigned int align)
{
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;
}

/* Create RTL for an SSA partition.  */

static void
expand_one_ssa_partition (tree var)
{
  int part = var_to_partition (SA.map, var);
  gcc_assert (part != NO_PARTITION);

  if (SA.partition_to_pseudo[part])
    return;

  unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                          TYPE_MODE (TREE_TYPE (var)),
                                          TYPE_ALIGN (TREE_TYPE (var)));

  /* If the variable alignment is very large we'll dynamically allocate
     it, which means that in-frame portion is just a pointer.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = POINTER_SIZE;

  record_alignment_for_reg_var (align);

  if (!use_register_for_decl (var))
    {
      if (defer_stack_allocation (var, true))
        add_stack_var (var);
      else
        expand_one_stack_var_1 (var);
      return;
    }

  machine_mode reg_mode = promote_ssa_mode (var, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* For a promoted variable, X will not be used directly but wrapped in a
     SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
     will assume that its upper bits can be inferred from its lower bits.
     Therefore, if X isn't initialized on every path from the entry, then
     we must do it manually in order to fulfill the above assumption.  */
  if (reg_mode != TYPE_MODE (TREE_TYPE (var))
      && bitmap_bit_p (SA.partitions_for_undefined_values, part))
    emit_move_insn (x, CONST0_RTX (reg_mode));
}

/* Record the association between the RTL generated for partition PART
   and the underlying variable of the SSA_NAME VAR.  */

static void
adjust_one_expanded_partition_var (tree var)
{
  if (!var)
    return;

  tree decl = SSA_NAME_VAR (var);

  int part = var_to_partition (SA.map, var);
  if (part == NO_PARTITION)
    return;

  rtx x = SA.partition_to_pseudo[part];

  gcc_assert (x);

  set_rtl (var, x);

  if (!REG_P (x))
    return;

  /* Note if the object is a user variable.  */
  if (decl && !DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

1441 that will reside in a pseudo register. */
1442
1443static void
1444expand_one_register_var (tree var)
1445{
94f92c36 1446 if (TREE_CODE (var) == SSA_NAME)
1447 {
1448 int part = var_to_partition (SA.map, var);
1449 if (part != NO_PARTITION)
1450 {
1451 rtx x = SA.partition_to_pseudo[part];
1452 gcc_assert (x);
1453 gcc_assert (REG_P (x));
1454 return;
1455 }
1456 gcc_unreachable ();
1457 }
1458
1459 tree decl = var;
a8dd994c 1460 tree type = TREE_TYPE (decl);
3754d046 1461 machine_mode reg_mode = promote_decl_mode (decl, NULL);
60d03123 1462 rtx x = gen_reg_rtx (reg_mode);
1463
a8dd994c 1464 set_rtl (var, x);
60d03123 1465
1466 /* Note if the object is a user variable. */
a8dd994c 1467 if (!DECL_ARTIFICIAL (decl))
1468 mark_user_reg (x);
60d03123 1469
9961142a 1470 if (POINTER_TYPE_P (type))
f2ca19b4 1471 mark_reg_pointer (x, get_pointer_alignment (var));
60d03123 1472}
1473
1474/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
c78cbec8 1475 has some associated error, e.g. its type is error-mark. We just need
60d03123 1476 to pick something that won't crash the rest of the compiler. */
1477
1478static void
1479expand_one_error_var (tree var)
1480{
3754d046 1481 machine_mode mode = DECL_MODE (var);
60d03123 1482 rtx x;
1483
1484 if (mode == BLKmode)
1485 x = gen_rtx_MEM (BLKmode, const0_rtx);
1486 else if (mode == VOIDmode)
1487 x = const0_rtx;
1488 else
1489 x = gen_reg_rtx (mode);
1490
1491 SET_DECL_RTL (var, x);
1492}
1493
a0c938f0 1494/* A subroutine of expand_one_var. VAR is a variable that will be
60d03123 1495 allocated to the local stack frame. Return true if we wish to
1496 add VAR to STACK_VARS so that it will be coalesced with other
1497 variables. Return false to allocate VAR immediately.
1498
1499 This function is used to reduce the number of variables considered
1500 for coalescing, which reduces the size of the quadratic problem. */
1501
1502static bool
1503defer_stack_allocation (tree var, bool toplevel)
1504{
94f92c36 1505 tree size_unit = TREE_CODE (var) == SSA_NAME
1506 ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1507 : DECL_SIZE_UNIT (var);
1508
da4b9ed5 1509 /* Whether the variable is small enough for immediate allocation not to be
1510 a problem with regard to the frame size. */
1511 bool smallish
94f92c36 1512 = ((HOST_WIDE_INT) tree_to_uhwi (size_unit)
da4b9ed5 1513 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));
1514
f1a0edff 1515 /* If stack protection is enabled, *all* stack variables must be deferred,
3c919612 1516 so that we can re-order the strings to the top of the frame.
1517 Similarly for Address Sanitizer. */
2bc9f5c6 1518 if (flag_stack_protect || asan_sanitize_stack_p ())
f1a0edff 1519 return true;
1520
94f92c36 1521 unsigned int align = TREE_CODE (var) == SSA_NAME
1522 ? TYPE_ALIGN (TREE_TYPE (var))
1523 : DECL_ALIGN (var);
1524
5be42b39 1525 /* We handle "large" alignment via dynamic allocation. We want to handle
1526 this extra complication in only one place, so defer them. */
94f92c36 1527 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
5be42b39 1528 return true;
1529
94f92c36 1530 bool ignored = TREE_CODE (var) == SSA_NAME
1531 ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1532 : DECL_IGNORED_P (var);
1533
da4b9ed5 1534 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1535 might be detached from their block and appear at toplevel when we reach
1536 here. We want to coalesce them with variables from other blocks when
1537 the immediate contribution to the frame size would be noticeable. */
94f92c36 1538 if (toplevel && optimize > 0 && ignored && !smallish)
da4b9ed5 1539 return true;
1540
1541 /* Variables declared in the outermost scope automatically conflict
1542 with every other variable. The only reason to want to defer them
60d03123 1543 at all is that, after sorting, we can more efficiently pack
1544 small variables in the stack frame. Continue to defer at -O2. */
1545 if (toplevel && optimize < 2)
1546 return false;
1547
1548 /* Without optimization, *most* variables are allocated from the
1549 stack, which makes the quadratic problem large exactly when we
a0c938f0 1550 want compilation to proceed as quickly as possible. On the
60d03123 1551 other hand, we don't want the function's stack frame size to
1552 get completely out of hand. So we avoid adding scalars and
1553 "small" aggregates to the list at all. */
da4b9ed5 1554 if (optimize == 0 && smallish)
60d03123 1555 return false;
1556
1557 return true;
1558}
1559
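/* Editorial summary of the predicate above, not from the original
   source: everything is deferred under -fstack-protector or ASan stack
   sanitization, as is anything with "large" alignment; otherwise
   toplevel variables are allocated immediately below -O2 (except
   sizable detached DECL_IGNORED_P ones), and at -O0 small variables
   are allocated immediately as well, to keep the quadratic packing
   problem small.  */
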
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
    {
      if (is_global_var (var))
        return 0;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  Similarly for
         SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
          || DECL_EXTERNAL (var)
          || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  record_alignment_for_reg_var (align);

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (!VAR_P (var)
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (VAR_P (var) && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        {
          expand_one_hard_reg_var (var);
          if (!DECL_HARD_REGISTER (var))
            /* Invalid register specification.  */
            expand_one_error_var (var);
        }
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        {
          if (lookup_attribute ("naked",
                                DECL_ATTRIBUTES (current_function_decl)))
            error ("cannot allocate stack for variable %q+D, naked function.",
                   var);

          expand_one_stack_var (origvar);
        }

      return tree_to_uhwi (DECL_SIZE_UNIT (var));
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

b156ec37 1720enum {
1721 SPCT_FLAG_DEFAULT = 1,
1722 SPCT_FLAG_ALL = 2,
947aa916 1723 SPCT_FLAG_STRONG = 3,
1724 SPCT_FLAG_EXPLICIT = 4
b156ec37 1725};
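
/* Editorial note: for orientation, these values appear to correspond to
   the -fstack-protector family of command-line options:
   SPCT_FLAG_DEFAULT to -fstack-protector, SPCT_FLAG_ALL to
   -fstack-protector-all, SPCT_FLAG_STRONG to -fstack-protector-strong,
   and SPCT_FLAG_EXPLICIT to -fstack-protector-explicit.  */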
1726
f1a0edff 1727/* Examine TYPE and determine a bit mask of the following features. */
1728
1729#define SPCT_HAS_LARGE_CHAR_ARRAY 1
1730#define SPCT_HAS_SMALL_CHAR_ARRAY 2
1731#define SPCT_HAS_ARRAY 4
1732#define SPCT_HAS_AGGREGATE 8
1733
1734static unsigned int
1735stack_protect_classify_type (tree type)
1736{
1737 unsigned int ret = 0;
1738 tree t;
1739
1740 switch (TREE_CODE (type))
1741 {
1742 case ARRAY_TYPE:
1743 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1744 if (t == char_type_node
1745 || t == signed_char_type_node
1746 || t == unsigned_char_type_node)
1747 {
b888d9d5 1748 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1749 unsigned HOST_WIDE_INT len;
f1a0edff 1750
b888d9d5 1751 if (!TYPE_SIZE_UNIT (type)
cd4547bf 1752 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
b888d9d5 1753 len = max;
f1a0edff 1754 else
6a0712d4 1755 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
f1a0edff 1756
1757 if (len < max)
1758 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1759 else
1760 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1761 }
1762 else
1763 ret = SPCT_HAS_ARRAY;
1764 break;
1765
1766 case UNION_TYPE:
1767 case QUAL_UNION_TYPE:
1768 case RECORD_TYPE:
1769 ret = SPCT_HAS_AGGREGATE;
1770 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1771 if (TREE_CODE (t) == FIELD_DECL)
1772 ret |= stack_protect_classify_type (TREE_TYPE (t));
1773 break;
1774
1775 default:
1776 break;
1777 }
1778
1779 return ret;
1780}
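
/* Editorial note: a small worked example of the classification above,
   assuming the default --param ssp-buffer-size=8:

     char small[4];                     -> SPCT_HAS_SMALL_CHAR_ARRAY
                                           | SPCT_HAS_ARRAY
     char big[64];                      -> SPCT_HAS_LARGE_CHAR_ARRAY
                                           | SPCT_HAS_ARRAY
     struct { int n; char buf[64]; } s; -> SPCT_HAS_AGGREGATE
                                           | SPCT_HAS_LARGE_CHAR_ARRAY
                                           | SPCT_HAS_ARRAY

   The struct case comes from the RECORD_TYPE arm, which ORs in the
   classification of each FIELD_DECL's type.  */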
1781
3ce7ff97 1782/* Return nonzero if DECL should be segregated into the "vulnerable" upper
1783 part of the local stack frame. Remember if we ever return nonzero for
f1a0edff 1784 any variable in this function. The return value is the phase number in
1785 which the variable should be allocated. */
1786
1787static int
1788stack_protect_decl_phase (tree decl)
1789{
1790 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1791 int ret = 0;
1792
1793 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1794 has_short_buffer = true;
1795
b156ec37 1796 if (flag_stack_protect == SPCT_FLAG_ALL
947aa916 1797 || flag_stack_protect == SPCT_FLAG_STRONG
1798 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1799 && lookup_attribute ("stack_protect",
1800 DECL_ATTRIBUTES (current_function_decl))))
f1a0edff 1801 {
1802 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1803 && !(bits & SPCT_HAS_AGGREGATE))
1804 ret = 1;
1805 else if (bits & SPCT_HAS_ARRAY)
1806 ret = 2;
1807 }
1808 else
1809 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1810
1811 if (ret)
1812 has_protected_decls = true;
1813
1814 return ret;
1815}
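
/* Editorial note: to illustrate the phases, again assuming the default
   ssp-buffer-size of 8: with -fstack-protector-strong, "char buf[4]"
   (small char array, no aggregate) lands in phase 1 and "int v[10]" in
   phase 2, so character arrays are segregated into the vulnerable part
   of the frame first; with plain -fstack-protector only a large
   character array such as "char buf[64]" is protected (phase 1) and
   everything else returns 0.  */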
1816
1817/* Two helper routines that check for phase 1 and phase 2. These are used
1818 as callbacks for expand_stack_vars. */
1819
1820static bool
3c919612 1821stack_protect_decl_phase_1 (size_t i)
1822{
1823 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1824}
1825
1826static bool
1827stack_protect_decl_phase_2 (size_t i)
f1a0edff 1828{
3c919612 1829 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
f1a0edff 1830}
1831
3c919612 1832/* A helper function that checks for the asan phase (phase 3 when the
 1833	 stack protector is also in use). Used as a callback for expand_stack_vars.
 1834	 Returns true if any of the vars in the partition need to be protected. */
1835
f1a0edff 1836static bool
3c919612 1837asan_decl_phase_3 (size_t i)
f1a0edff 1838{
3c919612 1839 while (i != EOC)
1840 {
1841 if (asan_protect_stack_decl (stack_vars[i].decl))
1842 return true;
1843 i = stack_vars[i].next;
1844 }
1845 return false;
f1a0edff 1846}
1847
1848/* Ensure that variables in different stack protection phases conflict
1849 so that they are not merged and share the same stack slot. */
1850
1851static void
1852add_stack_protection_conflicts (void)
1853{
1854 size_t i, j, n = stack_vars_num;
1855 unsigned char *phase;
1856
1857 phase = XNEWVEC (unsigned char, n);
1858 for (i = 0; i < n; ++i)
1859 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1860
1861 for (i = 0; i < n; ++i)
1862 {
1863 unsigned char ph_i = phase[i];
99fade12 1864 for (j = i + 1; j < n; ++j)
f1a0edff 1865 if (ph_i != phase[j])
1866 add_stack_var_conflict (i, j);
1867 }
1868
1869 XDELETEVEC (phase);
1870}
1871
1872/* Create a decl for the guard at the top of the stack frame. */
1873
1874static void
1875create_stack_guard (void)
1876{
e60a6f7b 1877 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1878 VAR_DECL, NULL, ptr_type_node);
f1a0edff 1879 TREE_THIS_VOLATILE (guard) = 1;
1880 TREE_USED (guard) = 1;
1881 expand_one_stack_var (guard);
edb7afe8 1882 crtl->stack_protect_guard = guard;
f1a0edff 1883}
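
/* Editorial note: the guard is made TREE_THIS_VOLATILE so that loads of
   it are never optimized away; crtl->stack_protect_guard is the slot the
   stack-protector prologue/epilogue sequences later initialize from and
   compare against the target's canary (typically __stack_chk_guard).  */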
1884
5a02d67b 1885/* Prepare for expanding variables. */
48e1416a 1886static void
5a02d67b 1887init_vars_expansion (void)
1888{
4fb07d00 1889 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1890 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
5a02d67b 1891
4fb07d00 1892 /* A map from decl to stack partition. */
5f8841a5 1893 decl_to_stack_part = new hash_map<tree, size_t>;
5a02d67b 1894
1895 /* Initialize local stack smashing state. */
1896 has_protected_decls = false;
1897 has_short_buffer = false;
1898}
1899
1900/* Free up stack variable graph data. */
1901static void
1902fini_vars_expansion (void)
1903{
4fb07d00 1904 bitmap_obstack_release (&stack_var_bitmap_obstack);
1905 if (stack_vars)
1906 XDELETEVEC (stack_vars);
1907 if (stack_vars_sorted)
1908 XDELETEVEC (stack_vars_sorted);
5a02d67b 1909 stack_vars = NULL;
99fade12 1910 stack_vars_sorted = NULL;
5a02d67b 1911 stack_vars_alloc = stack_vars_num = 0;
5f8841a5 1912 delete decl_to_stack_part;
3c25489e 1913 decl_to_stack_part = NULL;
5a02d67b 1914}
1915
970270ba 1916/* Make a fair guess for the size of the stack frame of the function
1917 in NODE. This doesn't have to be exact, the result is only used in
1918 the inline heuristics. So we don't want to run the full stack var
1919 packing algorithm (which is quadratic in the number of stack vars).
1920 Instead, we calculate the total size of all stack vars. This turns
1921 out to be a pretty fair estimate -- packing of stack vars doesn't
1922 happen very often. */
961c8f72 1923
5a02d67b 1924HOST_WIDE_INT
970270ba 1925estimated_stack_frame_size (struct cgraph_node *node)
5a02d67b 1926{
1927 HOST_WIDE_INT size = 0;
961c8f72 1928 size_t i;
649597af 1929 tree var;
02774f2d 1930 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
970270ba 1931
649597af 1932 push_cfun (fn);
5a02d67b 1933
4fb07d00 1934 init_vars_expansion ();
1935
24ccd9c6 1936 FOR_EACH_LOCAL_DECL (fn, i, var)
1937 if (auto_var_in_fn_p (var, fn->decl))
1938 size += expand_one_var (var, true, false);
961c8f72 1939
5a02d67b 1940 if (stack_vars_num > 0)
1941 {
961c8f72 1942 /* Fake sorting the stack vars for account_stack_vars (). */
1943 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1944 for (i = 0; i < stack_vars_num; ++i)
1945 stack_vars_sorted[i] = i;
5a02d67b 1946 size += account_stack_vars ();
5a02d67b 1947 }
4fb07d00 1948
1949 fini_vars_expansion ();
73b46517 1950 pop_cfun ();
5a02d67b 1951 return size;
1952}
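
/* Editorial note: this estimate appears to feed the inliner's
   --param large-stack-frame / stack-frame-growth limits, so an
   imprecise total here only skews those heuristics; it never affects
   the correctness of the generated code.  */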
1953
b156ec37 1954/* Helper routine to check if a record or union contains an array field. */
1955
1956static int
1957record_or_union_type_has_array_p (const_tree tree_type)
1958{
1959 tree fields = TYPE_FIELDS (tree_type);
1960 tree f;
1961
1962 for (f = fields; f; f = DECL_CHAIN (f))
1963 if (TREE_CODE (f) == FIELD_DECL)
1964 {
1965 tree field_type = TREE_TYPE (f);
1966 if (RECORD_OR_UNION_TYPE_P (field_type)
1967 && record_or_union_type_has_array_p (field_type))
1968 return 1;
1969 if (TREE_CODE (field_type) == ARRAY_TYPE)
1970 return 1;
1971 }
1972 return 0;
1973}
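
/* Editorial note: for example, the recursion above returns 1 for
   "struct outer { struct inner { char c[8]; } in; }" -- the array is
   found one level down -- and 0 for "struct { int a; float b; }".  */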
1974
ec4af1be 1975/* Check if the current function has local referenced variables that
1976 have their addresses taken, contain an array, or are arrays. */
1977
1978static bool
1979stack_protect_decl_p ()
1980{
1981 unsigned i;
1982 tree var;
1983
1984 FOR_EACH_LOCAL_DECL (cfun, i, var)
1985 if (!is_global_var (var))
1986 {
1987 tree var_type = TREE_TYPE (var);
53e9c5c4 1988 if (VAR_P (var)
ec4af1be 1989 && (TREE_CODE (var_type) == ARRAY_TYPE
1990 || TREE_ADDRESSABLE (var)
1991 || (RECORD_OR_UNION_TYPE_P (var_type)
1992 && record_or_union_type_has_array_p (var_type))))
1993 return true;
1994 }
1995 return false;
1996}
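
/* Editorial note: under -fstack-protector-strong a single local such as
   "int v[2];", or an "int i;" whose address is taken (TREE_ADDRESSABLE),
   is enough for this predicate to request a guard for the whole
   function.  */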
1997
1998/* Check if the current function has calls that use a return slot. */
1999
2000static bool
2001stack_protect_return_slot_p ()
2002{
2003 basic_block bb;
2004
2005 FOR_ALL_BB_FN (bb, cfun)
2006 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2007 !gsi_end_p (gsi); gsi_next (&gsi))
2008 {
42acab1c 2009 gimple *stmt = gsi_stmt (gsi);
ec4af1be 2010 /* This assumes that calls to internal-only functions never
2011 use a return slot. */
2012 if (is_gimple_call (stmt)
2013 && !gimple_call_internal_p (stmt)
2014 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2015 gimple_call_fndecl (stmt)))
2016 return true;
2017 }
2018 return false;
2019}
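
/* Editorial note: the typical match here is a call whose aggregate
   result is returned in memory, e.g. "struct big s = f ();"; internal
   calls are skipped on the assumption stated above.  */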
2020
60d03123 2021/* Expand all variables used in the function. */
280450fa 2022
74a0cbc4 2023static rtx_insn *
280450fa 2024expand_used_vars (void)
2025{
2ab2ce89 2026 tree var, outer_block = DECL_INITIAL (current_function_decl);
e9258aee 2027 auto_vec<tree> maybe_local_decls;
74a0cbc4 2028 rtx_insn *var_end_seq = NULL;
a8dd994c 2029 unsigned i;
2ab2ce89 2030 unsigned len;
b156ec37 2031 bool gen_stack_protect_signal = false;
280450fa 2032
60d03123 2033 /* Compute the phase of the stack frame for this function. */
2034 {
2035 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
8374586c 2036 int off = targetm.starting_frame_offset () % align;
60d03123 2037 frame_phase = off ? align - off : 0;
2038 }
280450fa 2039
4fb07d00 2040 /* Set TREE_USED on all variables in the local_decls. */
2041 FOR_EACH_LOCAL_DECL (cfun, i, var)
2042 TREE_USED (var) = 1;
2043 /* Clear TREE_USED on all variables associated with a block scope. */
2044 clear_tree_used (DECL_INITIAL (current_function_decl));
2045
5a02d67b 2046 init_vars_expansion ();
f1a0edff 2047
ab0f939c 2048 if (targetm.use_pseudo_pic_reg ())
2049 pic_offset_table_rtx = gen_reg_rtx (Pmode);
2050
a8dd994c 2051 for (i = 0; i < SA.map->num_partitions; i++)
2052 {
b2df3bbf 2053 if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2054 continue;
2055
a8dd994c 2056 tree var = partition_to_var (SA.map, i);
2057
7c782c9b 2058 gcc_assert (!virtual_operand_p (var));
ec11736b 2059
94f92c36 2060 expand_one_ssa_partition (var);
bcde57a0 2061 }
f22255e7 2062
b156ec37 2063 if (flag_stack_protect == SPCT_FLAG_STRONG)
ec4af1be 2064 gen_stack_protect_signal
2065 = stack_protect_decl_p () || stack_protect_return_slot_p ();
b156ec37 2066
edb7afe8 2067 /* At this point all variables on the local_decls with TREE_USED
60d03123 2068 set are not associated with any block scope. Lay them out. */
2ab2ce89 2069
f1f41a6c 2070 len = vec_safe_length (cfun->local_decls);
2ab2ce89 2071 FOR_EACH_LOCAL_DECL (cfun, i, var)
60d03123 2072 {
60d03123 2073 bool expand_now = false;
2074
a8dd994c 2075 /* Expanded above already. */
2076 if (is_gimple_reg (var))
e32b531f 2077 {
2078 TREE_USED (var) = 0;
a45d3ce3 2079 goto next;
e32b531f 2080 }
60d03123 2081 /* We didn't set a block for static or extern because it's hard
2082 to tell the difference between a global variable (re)declared
2083 in a local scope, and one that's really declared there to
2084 begin with. And it doesn't really matter much, since we're
2085 not giving them stack space. Expand them now. */
a8dd994c 2086 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
60d03123 2087 expand_now = true;
2088
da4b9ed5 2089 /* Expand variables not associated with any block now. Those created by
2090 the optimizers could be live anywhere in the function. Those that
2091 could possibly have been scoped originally and detached from their
2092 block will have their allocation deferred so we coalesce them with
2093 others when optimization is enabled. */
60d03123 2094 else if (TREE_USED (var))
2095 expand_now = true;
2096
2097 /* Finally, mark all variables on the list as used. We'll use
2098 this in a moment when we expand those associated with scopes. */
2099 TREE_USED (var) = 1;
2100
2101 if (expand_now)
a45d3ce3 2102 expand_one_var (var, true, true);
2103
2104 next:
2105 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
78fa9ba7 2106 {
a45d3ce3 2107 rtx rtl = DECL_RTL_IF_SET (var);
2108
2109 /* Keep artificial non-ignored vars in cfun->local_decls
2110 chain until instantiate_decls. */
2111 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2ab2ce89 2112 add_local_decl (cfun, var);
257b4da1 2113 else if (rtl == NULL_RTX)
2ab2ce89 2114 /* If rtl isn't set yet, which can happen e.g. with
2115 -fstack-protector, retry before returning from this
2116 function. */
f1f41a6c 2117 maybe_local_decls.safe_push (var);
78fa9ba7 2118 }
60d03123 2119 }
60d03123 2120
2ab2ce89 2121 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2122
2123 +-----------------+-----------------+
2124 | ...processed... | ...duplicates...|
2125 +-----------------+-----------------+
2126 ^
2127 +-- LEN points here.
2128
2129 We just want the duplicates, as those are the artificial
2130 non-ignored vars that we want to keep until instantiate_decls.
2131 Move them down and truncate the array. */
f1f41a6c 2132 if (!vec_safe_is_empty (cfun->local_decls))
2133 cfun->local_decls->block_remove (0, len);
2ab2ce89 2134
60d03123 2135 /* At this point, all variables within the block tree with TREE_USED
2136 set are actually used by the optimized function. Lay them out. */
2137 expand_used_vars_for_block (outer_block, true);
2138
2139 if (stack_vars_num > 0)
2140 {
3c25489e 2141 add_scope_conflicts ();
60d03123 2142
a0c938f0 2143 /* If stack protection is enabled, we don't share space between
f1a0edff 2144 vulnerable data and non-vulnerable data. */
947aa916 2145 if (flag_stack_protect != 0
2146 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2147 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2148 && lookup_attribute ("stack_protect",
2149 DECL_ATTRIBUTES (current_function_decl)))))
f1a0edff 2150 add_stack_protection_conflicts ();
2151
a0c938f0 2152 /* Now that we have collected all stack variables, and have computed a
60d03123 2153 minimal interference graph, attempt to save some stack space. */
2154 partition_stack_vars ();
2155 if (dump_file)
2156 dump_stack_var_partition ();
f1a0edff 2157 }
2158
b156ec37 2159 switch (flag_stack_protect)
2160 {
2161 case SPCT_FLAG_ALL:
2162 create_stack_guard ();
2163 break;
2164
2165 case SPCT_FLAG_STRONG:
2166 if (gen_stack_protect_signal
947aa916 2167 || cfun->calls_alloca || has_protected_decls
2168 || lookup_attribute ("stack_protect",
2169 DECL_ATTRIBUTES (current_function_decl)))
b156ec37 2170 create_stack_guard ();
2171 break;
2172
2173 case SPCT_FLAG_DEFAULT:
947aa916 2174 if (cfun->calls_alloca || has_protected_decls
2175 || lookup_attribute ("stack_protect",
2176 DECL_ATTRIBUTES (current_function_decl)))
9af5ce0c 2177 create_stack_guard ();
b156ec37 2178 break;
2179
947aa916 2180 case SPCT_FLAG_EXPLICIT:
2181 if (lookup_attribute ("stack_protect",
2182 DECL_ATTRIBUTES (current_function_decl)))
2183 create_stack_guard ();
2184 break;
b156ec37 2185 default:
2186 ;
2187 }
60d03123 2188
f1a0edff 2189 /* Assign rtl to each variable based on these partitions. */
2190 if (stack_vars_num > 0)
2191 {
3c919612 2192 struct stack_vars_data data;
2193
683539f6 2194 data.asan_base = NULL_RTX;
2195 data.asan_alignb = 0;
3c919612 2196
f1a0edff 2197 /* Reorder decls to be protected by iterating over the variables
2198 array multiple times, and allocating out of each phase in turn. */
a0c938f0 2199 /* ??? We could probably integrate this into the qsort we did
f1a0edff 2200 earlier, such that we naturally see these variables first,
2201 and thus naturally allocate things in the right order. */
2202 if (has_protected_decls)
2203 {
2204 /* Phase 1 contains only character arrays. */
3c919612 2205 expand_stack_vars (stack_protect_decl_phase_1, &data);
f1a0edff 2206
2207 /* Phase 2 contains other kinds of arrays. */
947aa916 2208 if (flag_stack_protect == SPCT_FLAG_ALL
2209 || flag_stack_protect == SPCT_FLAG_STRONG
2210 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2211 && lookup_attribute ("stack_protect",
2212 DECL_ATTRIBUTES (current_function_decl))))
3c919612 2213 expand_stack_vars (stack_protect_decl_phase_2, &data);
f1a0edff 2214 }
2215
2bc9f5c6 2216 if (asan_sanitize_stack_p ())
3c919612 2217 /* Phase 3, any partitions that need asan protection
2218 in addition to phase 1 and 2. */
2219 expand_stack_vars (asan_decl_phase_3, &data);
2220
f1f41a6c 2221 if (!data.asan_vec.is_empty ())
3c919612 2222 {
2223 HOST_WIDE_INT prev_offset = frame_offset;
683539f6 2224 HOST_WIDE_INT offset, sz, redzonesz;
2225 redzonesz = ASAN_RED_ZONE_SIZE;
2226 sz = data.asan_vec[0] - prev_offset;
2227 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2228 && data.asan_alignb <= 4096
c8c66351 2229 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
683539f6 2230 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2231 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
2232 offset
2233 = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
f1f41a6c 2234 data.asan_vec.safe_push (prev_offset);
2235 data.asan_vec.safe_push (offset);
f89175bb 2236 /* Leave space for alignment if STRICT_ALIGNMENT. */
2237 if (STRICT_ALIGNMENT)
2238 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2239 << ASAN_SHADOW_SHIFT)
2240 / BITS_PER_UNIT, 1);
3c919612 2241
2242 var_end_seq
2243 = asan_emit_stack_protection (virtual_stack_vars_rtx,
683539f6 2244 data.asan_base,
2245 data.asan_alignb,
f1f41a6c 2246 data.asan_vec.address (),
683539f6 2247 data.asan_decl_vec.address (),
f1f41a6c 2248 data.asan_vec.length ());
3c919612 2249 }
2250
2251 expand_stack_vars (NULL, &data);
60d03123 2252 }
2253
77c44489 2254 if (asan_sanitize_allocas_p () && cfun->calls_alloca)
d08919a7 2255 var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
2256 virtual_stack_vars_rtx,
2257 var_end_seq);
2258
4fb07d00 2259 fini_vars_expansion ();
2260
257b4da1 2261 /* If there were any artificial non-ignored vars without rtl
2262 found earlier, see if deferred stack allocation hasn't assigned
2263 rtl to them. */
f1f41a6c 2264 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
257b4da1 2265 {
257b4da1 2266 rtx rtl = DECL_RTL_IF_SET (var);
2267
257b4da1 2268 /* Keep artificial non-ignored vars in cfun->local_decls
2269 chain until instantiate_decls. */
2270 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2ab2ce89 2271 add_local_decl (cfun, var);
257b4da1 2272 }
2273
60d03123 2274 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2275 if (STACK_ALIGNMENT_NEEDED)
2276 {
2277 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2278 if (!FRAME_GROWS_DOWNWARD)
2279 frame_offset += align - 1;
2280 frame_offset &= -align;
2281 }
3c919612 2282
2283 return var_end_seq;
280450fa 2284}
2285
2286
49377e21 2287/* If we need to produce a detailed dump, print the tree representation
2288 for STMT to the dump file. SINCE is the last RTX after which the RTL
2289 generated for STMT should have been appended. */
2290
2291static void
42acab1c 2292maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
49377e21 2293{
2294 if (dump_file && (dump_flags & TDF_DETAILS))
2295 {
2296 fprintf (dump_file, "\n;; ");
9845d120 2297 print_gimple_stmt (dump_file, stmt, 0,
2298 TDF_SLIM | (dump_flags & TDF_LINENO));
49377e21 2299 fprintf (dump_file, "\n");
2300
2301 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2302 }
2303}
2304
6313ae8b 2305/* Maps the blocks that do not contain tree labels to rtx labels. */
2306
0699065d 2307static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
6313ae8b 2308
63f88450 2309/* Returns the label_rtx expression for a label starting basic block BB. */
2310
f9a00e9e 2311static rtx_code_label *
75a70cf9 2312label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
63f88450 2313{
75a70cf9 2314 gimple_stmt_iterator gsi;
2315 tree lab;
63f88450 2316
2317 if (bb->flags & BB_RTL)
2318 return block_label (bb);
2319
0699065d 2320 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
6313ae8b 2321 if (elt)
5f8841a5 2322 return *elt;
6313ae8b 2323
2324 /* Find the tree label if it is present. */
48e1416a 2325
75a70cf9 2326 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
63f88450 2327 {
1a91d914 2328 glabel *lab_stmt;
2329
2330 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2331 if (!lab_stmt)
63f88450 2332 break;
2333
75a70cf9 2334 lab = gimple_label_label (lab_stmt);
63f88450 2335 if (DECL_NONLOCAL (lab))
2336 break;
2337
f9a00e9e 2338 return jump_target_rtx (lab);
63f88450 2339 }
2340
79f6a8ed 2341 rtx_code_label *l = gen_label_rtx ();
5f8841a5 2342 lab_rtx_for_bb->put (bb, l);
2343 return l;
63f88450 2344}
2345
75a70cf9 2346
f800c469 2347/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2348 of a basic block where we just expanded the conditional at the end,
ee0f3895 2349 possibly clean up the CFG and instruction sequence. LAST is the
2350 last instruction before the just emitted jump sequence. */
f800c469 2351
2352static void
74a0cbc4 2353maybe_cleanup_end_of_block (edge e, rtx_insn *last)
f800c469 2354{
2355 /* Special case: when jumpif decides that the condition is
2356 trivial it emits an unconditional jump (and the necessary
2357 barrier). But we still have two edges, the fallthru one is
2358 wrong. purge_dead_edges would clean this up later. Unfortunately
2359 we have to insert insns (and split edges) before
2360 find_many_sub_basic_blocks and hence before purge_dead_edges.
2361 But splitting edges might create new blocks which depend on the
2362 fact that if there are two edges there's no barrier. So the
2363 barrier would get lost and verify_flow_info would ICE. Instead
2364 of auditing all edge splitters to care for the barrier (which
2365 normally isn't there in a cleaned CFG), fix it here. */
2366 if (BARRIER_P (get_last_insn ()))
2367 {
74a0cbc4 2368 rtx_insn *insn;
f800c469 2369 remove_edge (e);
2370 /* Now, we have a single successor block, if we have insns to
2371 insert on the remaining edge we potentially will insert
2372 it at the end of this block (if the dest block isn't feasible)
2373 in order to avoid splitting the edge. This insertion will take
2374 place in front of the last jump. But we might have emitted
2375 multiple jumps (conditional and one unconditional) to the
2376 same destination. Inserting in front of the last one then
2377 is a problem. See PR 40021. We fix this by deleting all
2378 jumps except the last unconditional one. */
2379 insn = PREV_INSN (get_last_insn ());
2380 /* Make sure we have an unconditional jump. Otherwise we're
2381 confused. */
2382 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
ee0f3895 2383 for (insn = PREV_INSN (insn); insn != last;)
f800c469 2384 {
2385 insn = PREV_INSN (insn);
2386 if (JUMP_P (NEXT_INSN (insn)))
2755d767 2387 {
46a5816d 2388 if (!any_condjump_p (NEXT_INSN (insn)))
2755d767 2389 {
2390 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2391 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2392 }
2393 delete_insn (NEXT_INSN (insn));
2394 }
f800c469 2395 }
2396 }
2397}
2398
75a70cf9 2399/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
3ced8962 2400 Returns a new basic block if we've terminated the current basic
2401 block and created a new one. */
2402
2403static basic_block
1a91d914 2404expand_gimple_cond (basic_block bb, gcond *stmt)
3ced8962 2405{
2406 basic_block new_bb, dest;
3ced8962 2407 edge true_edge;
2408 edge false_edge;
74a0cbc4 2409 rtx_insn *last2, *last;
16c9337c 2410 enum tree_code code;
2411 tree op0, op1;
2412
2413 code = gimple_cond_code (stmt);
2414 op0 = gimple_cond_lhs (stmt);
2415 op1 = gimple_cond_rhs (stmt);
2416 /* We're sometimes presented with such code:
2417 D.123_1 = x < y;
2418 if (D.123_1 != 0)
2419 ...
2420 This would expand to two comparisons which then later might
2421 be cleaned up by combine. But some pattern matchers like if-conversion
 2422	 work better when there's only one compare, so we compensate for this
 2423	 here, as a special exception, if TER would have made the same change. */
9532a315 2424 if (SA.values
16c9337c 2425 && TREE_CODE (op0) == SSA_NAME
9532a315 2426 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2427 && TREE_CODE (op1) == INTEGER_CST
2428 && ((gimple_cond_code (stmt) == NE_EXPR
2429 && integer_zerop (op1))
2430 || (gimple_cond_code (stmt) == EQ_EXPR
2431 && integer_onep (op1)))
16c9337c 2432 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2433 {
42acab1c 2434 gimple *second = SSA_NAME_DEF_STMT (op0);
5905fb26 2435 if (gimple_code (second) == GIMPLE_ASSIGN)
16c9337c 2436 {
5905fb26 2437 enum tree_code code2 = gimple_assign_rhs_code (second);
2438 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2439 {
2440 code = code2;
2441 op0 = gimple_assign_rhs1 (second);
2442 op1 = gimple_assign_rhs2 (second);
2443 }
01ee997b 2444 /* If jumps are cheap and the target does not support conditional
2445 compare, turn some more codes into jumpy sequences. */
2446 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2447 && targetm.gen_ccmp_first == NULL)
5905fb26 2448 {
2449 if ((code2 == BIT_AND_EXPR
2450 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2451 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2452 || code2 == TRUTH_AND_EXPR)
2453 {
2454 code = TRUTH_ANDIF_EXPR;
2455 op0 = gimple_assign_rhs1 (second);
2456 op1 = gimple_assign_rhs2 (second);
2457 }
2458 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2459 {
2460 code = TRUTH_ORIF_EXPR;
2461 op0 = gimple_assign_rhs1 (second);
2462 op1 = gimple_assign_rhs2 (second);
2463 }
2464 }
16c9337c 2465 }
2466 }
49377e21 2467
2468 last2 = last = get_last_insn ();
3ced8962 2469
2470 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5169661d 2471 set_curr_insn_location (gimple_location (stmt));
3ced8962 2472
2473 /* These flags have no purpose in RTL land. */
2474 true_edge->flags &= ~EDGE_TRUE_VALUE;
2475 false_edge->flags &= ~EDGE_FALSE_VALUE;
2476
2477 /* We can either have a pure conditional jump with one fallthru edge or
2478 two-way jump that needs to be decomposed into two basic blocks. */
63f88450 2479 if (false_edge->dest == bb->next_bb)
3ced8962 2480 {
79ab74cc 2481 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2482 true_edge->probability);
75a70cf9 2483 maybe_dump_rtl_for_gimple_stmt (stmt, last);
8e7408e3 2484 if (true_edge->goto_locus != UNKNOWN_LOCATION)
5169661d 2485 set_curr_insn_location (true_edge->goto_locus);
63f88450 2486 false_edge->flags |= EDGE_FALLTHRU;
ee0f3895 2487 maybe_cleanup_end_of_block (false_edge, last);
3ced8962 2488 return NULL;
2489 }
63f88450 2490 if (true_edge->dest == bb->next_bb)
3ced8962 2491 {
79ab74cc 2492 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2493 false_edge->probability);
75a70cf9 2494 maybe_dump_rtl_for_gimple_stmt (stmt, last);
8e7408e3 2495 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5169661d 2496 set_curr_insn_location (false_edge->goto_locus);
63f88450 2497 true_edge->flags |= EDGE_FALLTHRU;
ee0f3895 2498 maybe_cleanup_end_of_block (true_edge, last);
3ced8962 2499 return NULL;
2500 }
3ced8962 2501
79ab74cc 2502 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2503 true_edge->probability);
3ced8962 2504 last = get_last_insn ();
8e7408e3 2505 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5169661d 2506 set_curr_insn_location (false_edge->goto_locus);
63f88450 2507 emit_jump (label_rtx_for_bb (false_edge->dest));
3ced8962 2508
26bb3cb2 2509 BB_END (bb) = last;
3ced8962 2510 if (BARRIER_P (BB_END (bb)))
26bb3cb2 2511 BB_END (bb) = PREV_INSN (BB_END (bb));
3ced8962 2512 update_bb_for_insn (bb);
2513
2514 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2515 dest = false_edge->dest;
2516 redirect_edge_succ (false_edge, new_bb);
2517 false_edge->flags |= EDGE_FALLTHRU;
ea5d3981 2518 new_bb->count = false_edge->count ();
d1af79c6 2519 loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2520 add_bb_to_loop (new_bb, loop);
2521 if (loop->latch == bb
2522 && loop->header == dest)
2523 loop->latch = new_bb;
720cfc43 2524 make_single_succ_edge (new_bb, dest, 0);
3ced8962 2525 if (BARRIER_P (BB_END (new_bb)))
26bb3cb2 2526 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
3ced8962 2527 update_bb_for_insn (new_bb);
2528
75a70cf9 2529 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
a0c938f0 2530
8e7408e3 2531 if (true_edge->goto_locus != UNKNOWN_LOCATION)
c4ad3297 2532 {
5169661d 2533 set_curr_insn_location (true_edge->goto_locus);
2534 true_edge->goto_locus = curr_insn_location ();
c4ad3297 2535 }
c4ad3297 2536
3ced8962 2537 return new_bb;
2538}
2539
4c0315d0 2540/* Mark all calls that can have a transaction restart. */
2541
2542static void
42acab1c 2543mark_transaction_restart_calls (gimple *stmt)
4c0315d0 2544{
2545 struct tm_restart_node dummy;
b7aa58e4 2546 tm_restart_node **slot;
4c0315d0 2547
2548 if (!cfun->gimple_df->tm_restart)
2549 return;
2550
2551 dummy.stmt = stmt;
b7aa58e4 2552 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
4c0315d0 2553 if (slot)
2554 {
b7aa58e4 2555 struct tm_restart_node *n = *slot;
4c0315d0 2556 tree list = n->label_or_list;
74a0cbc4 2557 rtx_insn *insn;
4c0315d0 2558
2559 for (insn = next_real_insn (get_last_insn ());
2560 !CALL_P (insn);
2561 insn = next_real_insn (insn))
2562 continue;
2563
2564 if (TREE_CODE (list) == LABEL_DECL)
2565 add_reg_note (insn, REG_TM, label_rtx (list));
2566 else
2567 for (; list ; list = TREE_CHAIN (list))
2568 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2569 }
2570}
2571
16c9337c 2572/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2573 statement STMT. */
2574
2575static void
1a91d914 2576expand_call_stmt (gcall *stmt)
16c9337c 2577{
fb049fba 2578 tree exp, decl, lhs;
facbb5c4 2579 bool builtin_p;
a967d5e5 2580 size_t i;
16c9337c 2581
fb049fba 2582 if (gimple_call_internal_p (stmt))
2583 {
2584 expand_internal_call (stmt);
2585 return;
2586 }
2587
ae62deea 2588 /* If this is a call to a built-in function and it has no effect other
2589 than setting the lhs, try to implement it using an internal function
2590 instead. */
2591 decl = gimple_call_fndecl (stmt);
2592 if (gimple_call_lhs (stmt)
2593 && !gimple_has_side_effects (stmt)
2594 && (optimize || (decl && called_as_built_in (decl))))
2595 {
2596 internal_fn ifn = replacement_internal_fn (stmt);
2597 if (ifn != IFN_LAST)
2598 {
2599 expand_internal_call (ifn, stmt);
2600 return;
2601 }
2602 }
2603
0fcb889c 2604 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
bbc26dcc 2605
0fcb889c 2606 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
bbc26dcc 2607 builtin_p = decl && DECL_BUILT_IN (decl);
0fcb889c 2608
a967d5e5 2609 /* If this is not a builtin function, the function type through which the
2610 call is made may be different from the type of the function. */
2611 if (!builtin_p)
2612 CALL_EXPR_FN (exp)
317bd3b6 2613 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2614 CALL_EXPR_FN (exp));
a967d5e5 2615
16c9337c 2616 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2617 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2618
2619 for (i = 0; i < gimple_call_num_args (stmt); i++)
facbb5c4 2620 {
2621 tree arg = gimple_call_arg (stmt, i);
42acab1c 2622 gimple *def;
facbb5c4 2623	 /* TER forwards ADDR_EXPRs into arguments of builtin functions so we
 2624	 have a chance to infer more accurate alignment information. See PR39954. */
2625 if (builtin_p
2626 && TREE_CODE (arg) == SSA_NAME
2627 && (def = get_gimple_for_ssa_name (arg))
2628 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2629 arg = gimple_assign_rhs1 (def);
2630 CALL_EXPR_ARG (exp, i) = arg;
2631 }
16c9337c 2632
e1ac6f35 2633 if (gimple_has_side_effects (stmt))
16c9337c 2634 TREE_SIDE_EFFECTS (exp) = 1;
2635
e1ac6f35 2636 if (gimple_call_nothrow_p (stmt))
16c9337c 2637 TREE_NOTHROW (exp) = 1;
2638
2639 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
b4a61e77 2640 CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
16c9337c 2641 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
a882d754 2642 if (decl
2643 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2b34677f 2644 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
a882d754 2645 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2646 else
2647 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
16c9337c 2648 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
a27e3913 2649 CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
16c9337c 2650 SET_EXPR_LOCATION (exp, gimple_location (stmt));
058a1b7a 2651 CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
16c9337c 2652
841424cc 2653 /* Ensure RTL is created for debug args. */
2654 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2655 {
f1f41a6c 2656 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
841424cc 2657 unsigned int ix;
2658 tree dtemp;
2659
2660 if (debug_args)
f1f41a6c 2661 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
841424cc 2662 {
2663 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2664 expand_debug_expr (dtemp);
2665 }
2666 }
2667
3c0f15b4 2668 rtx_insn *before_call = get_last_insn ();
fb049fba 2669 lhs = gimple_call_lhs (stmt);
16c9337c 2670 if (lhs)
2671 expand_assignment (lhs, exp, false);
2672 else
a12f023f 2673 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4c0315d0 2674
3c0f15b4 2675 /* If the gimple call is an indirect call and has 'nocf_check'
2676 attribute find a generated CALL insn to mark it as no
2677 control-flow verification is needed. */
2678 if (gimple_call_nocf_check_p (stmt)
2679 && !gimple_call_fndecl (stmt))
2680 {
2681 rtx_insn *last = get_last_insn ();
2682 while (!CALL_P (last)
2683 && last != before_call)
2684 last = PREV_INSN (last);
2685
2686 if (last != before_call)
2687 add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
2688 }
2689
4c0315d0 2690 mark_transaction_restart_calls (stmt);
16c9337c 2691}
2692
0e80b01d 2693
2694/* Generate RTL for an asm statement (explicit assembler code).
2695 STRING is a STRING_CST node containing the assembler code text,
2696 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2697 insn is volatile; don't optimize it. */
2698
2699static void
2700expand_asm_loc (tree string, int vol, location_t locus)
2701{
2702 rtx body;
2703
0e80b01d 2704 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2705 ggc_strdup (TREE_STRING_POINTER (string)),
2706 locus);
2707
2708 MEM_VOLATILE_P (body) = vol;
2709
43ac2f2f 2710 /* Non-empty basic ASM implicitly clobbers memory. */
2711 if (TREE_STRING_LENGTH (string) != 0)
2712 {
2713 rtx asm_op, clob;
2714 unsigned i, nclobbers;
2715 auto_vec<rtx> input_rvec, output_rvec;
2716 auto_vec<const char *> constraints;
2717 auto_vec<rtx> clobber_rvec;
2718 HARD_REG_SET clobbered_regs;
2719 CLEAR_HARD_REG_SET (clobbered_regs);
2720
2721 clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2722 clobber_rvec.safe_push (clob);
2723
2724 if (targetm.md_asm_adjust)
2725 targetm.md_asm_adjust (output_rvec, input_rvec,
2726 constraints, clobber_rvec,
2727 clobbered_regs);
2728
2729 asm_op = body;
2730 nclobbers = clobber_rvec.length ();
2731 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2732
2733 XVECEXP (body, 0, 0) = asm_op;
2734 for (i = 0; i < nclobbers; i++)
2735 XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2736 }
2737
0e80b01d 2738 emit_insn (body);
2739}
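
/* Editorial note: so a non-empty basic asm such as 'asm ("nop");' is
   emitted as a PARALLEL of the ASM_INPUT plus at least the implicit
   (clobber (mem:BLK (scratch))) memory clobber built above (and
   targetm.md_asm_adjust may add target-specific clobbers), while the
   degenerate 'asm ("");' stays a bare ASM_INPUT.  */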
2740
2741/* Return the number of times character C occurs in string S. */
2742static int
2743n_occurrences (int c, const char *s)
2744{
2745 int n = 0;
2746 while (*s)
2747 n += (*s++ == c);
2748 return n;
2749}
2750
 2751/* A subroutine of expand_asm_stmt. Check that all operands have
 2752	 the same number of alternatives. Return true if so. */
2753
2754static bool
2af3d775 2755check_operand_nalternatives (const vec<const char *> &constraints)
0e80b01d 2756{
2af3d775 2757 unsigned len = constraints.length();
2758 if (len > 0)
0e80b01d 2759 {
2af3d775 2760 int nalternatives = n_occurrences (',', constraints[0]);
0e80b01d 2761
2762 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2763 {
2764 error ("too many alternatives in %<asm%>");
2765 return false;
2766 }
2767
2af3d775 2768 for (unsigned i = 1; i < len; ++i)
2769 if (n_occurrences (',', constraints[i]) != nalternatives)
2770 {
2771 error ("operand constraints for %<asm%> differ "
2772 "in number of alternatives");
2773 return false;
2774 }
0e80b01d 2775 }
0e80b01d 2776 return true;
2777}
2778
2779/* Check for overlap between registers marked in CLOBBERED_REGS and
 2780	 anything inappropriate in T. Emit an error and return true for a
 2781	 conflict, false for ok. */
2782
2783static bool
2784tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2785{
2786 /* Conflicts between asm-declared register variables and the clobber
2787 list are not allowed. */
2788 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2789
2790 if (overlap)
2791 {
2792 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2793 DECL_NAME (overlap));
2794
2795 /* Reset registerness to stop multiple errors emitted for a single
2796 variable. */
2797 DECL_REGISTER (overlap) = 0;
2798 return true;
2799 }
2800
2801 return false;
2802}
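
/* Editorial note: the error above fires for code along the lines of

     register int x asm ("di");
     asm ("" : : "r" (x) : "di");

   where the clobber list names the hard register backing an
   asm-declared variable (the register name is of course
   target-specific).  */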
2803
 2804/* Generate RTL for an asm statement with arguments, all taken from
 2805	 the gasm statement STMT. The instruction template is
 2806	 gimple_asm_string (STMT); the outputs and inputs are the statement's
 2807	 output/input ops. Each output or input op has an expression in the
 2808	 TREE_VALUE and a tree list in TREE_PURPOSE which in turn contains a
 2809	 constraint name in TREE_VALUE (or NULL_TREE) and a constraint string
 2810	 in TREE_PURPOSE.
 2811	 The clobber ops are STRING_CST nodes each naming a hard register
 2812	 that is clobbered by this insn.
 2813
 2814	 If the statement is an asm goto, its label ops are the jump targets,
 2815	 and the fallthru basic block of the asm goto is looked up here.
 2816
 2817	 Not all kinds of lvalue that may appear as outputs can be stored
 2818	 directly; such outputs are written through temporaries which are
 2819	 then copied back to the originally specified lvalues.
 2820
 2821	 gimple_asm_volatile_p (STMT) nonzero means the insn is volatile;
 2822	 don't optimize it. */
2823
2824static void
ae231cbd 2825expand_asm_stmt (gasm *stmt)
0e80b01d 2826{
2af3d775 2827 class save_input_location
2828 {
2829 location_t old;
ae231cbd 2830
2af3d775 2831 public:
2832 explicit save_input_location(location_t where)
ae231cbd 2833 {
2af3d775 2834 old = input_location;
2835 input_location = where;
ae231cbd 2836 }
2837
2af3d775 2838 ~save_input_location()
ae231cbd 2839 {
2af3d775 2840 input_location = old;
ae231cbd 2841 }
2af3d775 2842 };
ae231cbd 2843
2af3d775 2844 location_t locus = gimple_location (stmt);
ae231cbd 2845
2af3d775 2846 if (gimple_asm_input_p (stmt))
ae231cbd 2847 {
2af3d775 2848 const char *s = gimple_asm_string (stmt);
2849 tree string = build_string (strlen (s), s);
2850 expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2851 return;
ae231cbd 2852 }
2853
2af3d775 2854	 /* There are some legacy diagnostics in here; this also avoids adding
 2855	 a sixth parameter to targetm.md_asm_adjust. */
2856 save_input_location s_i_l(locus);
ae231cbd 2857
2af3d775 2858 unsigned noutputs = gimple_asm_noutputs (stmt);
2859 unsigned ninputs = gimple_asm_ninputs (stmt);
2860 unsigned nlabels = gimple_asm_nlabels (stmt);
2861 unsigned i;
2862
2863 /* ??? Diagnose during gimplification? */
2864 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
ae231cbd 2865 {
2af3d775 2866 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
ae231cbd 2867 return;
2868 }
2869
2af3d775 2870 auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2871 auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2872 auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
ae231cbd 2873
2af3d775 2874 /* Copy the gimple vectors into new vectors that we can manipulate. */
0e80b01d 2875
2af3d775 2876 output_tvec.safe_grow (noutputs);
2877 input_tvec.safe_grow (ninputs);
2878 constraints.safe_grow (noutputs + ninputs);
0e80b01d 2879
2af3d775 2880 for (i = 0; i < noutputs; ++i)
2881 {
2882 tree t = gimple_asm_output_op (stmt, i);
2883 output_tvec[i] = TREE_VALUE (t);
2884 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2885 }
2886 for (i = 0; i < ninputs; i++)
2887 {
2888 tree t = gimple_asm_input_op (stmt, i);
2889 input_tvec[i] = TREE_VALUE (t);
2890 constraints[i + noutputs]
2891 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2892 }
0e80b01d 2893
2af3d775 2894 /* ??? Diagnose during gimplification? */
2895 if (! check_operand_nalternatives (constraints))
2896 return;
0e80b01d 2897
2898 /* Count the number of meaningful clobbered registers, ignoring what
2899 we would ignore later. */
2af3d775 2900 auto_vec<rtx> clobber_rvec;
2901 HARD_REG_SET clobbered_regs;
0e80b01d 2902 CLEAR_HARD_REG_SET (clobbered_regs);
0e80b01d 2903
2af3d775 2904 if (unsigned n = gimple_asm_nclobbers (stmt))
2905 {
2906 clobber_rvec.reserve (n);
2907 for (i = 0; i < n; i++)
2908 {
2909 tree t = gimple_asm_clobber_op (stmt, i);
2910 const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2911 int nregs, j;
0e80b01d 2912
2af3d775 2913 j = decode_reg_name_and_count (regname, &nregs);
2914 if (j < 0)
0e80b01d 2915 {
2af3d775 2916 if (j == -2)
0e80b01d 2917 {
2af3d775 2918 /* ??? Diagnose during gimplification? */
2919 error ("unknown register name %qs in %<asm%>", regname);
2920 }
2921 else if (j == -4)
2922 {
2923 rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2924 clobber_rvec.safe_push (x);
2925 }
2926 else
2927 {
2928 /* Otherwise we should have -1 == empty string
2929 or -3 == cc, which is not a register. */
2930 gcc_assert (j == -1 || j == -3);
0e80b01d 2931 }
0e80b01d 2932 }
2af3d775 2933 else
2934 for (int reg = j; reg < j + nregs; reg++)
2935 {
2936 /* Clobbering the PIC register is an error. */
2937 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2938 {
2939 /* ??? Diagnose during gimplification? */
2940 error ("PIC register clobbered by %qs in %<asm%>",
2941 regname);
2942 return;
2943 }
2944
2945 SET_HARD_REG_BIT (clobbered_regs, reg);
2946 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
2947 clobber_rvec.safe_push (x);
2948 }
0e80b01d 2949 }
2950 }
2af3d775 2951 unsigned nclobbers = clobber_rvec.length();
0e80b01d 2952
2953 /* First pass over inputs and outputs checks validity and sets
2954 mark_addressable if needed. */
2af3d775 2955 /* ??? Diagnose during gimplification? */
0e80b01d 2956
2af3d775 2957 for (i = 0; i < noutputs; ++i)
0e80b01d 2958 {
2af3d775 2959 tree val = output_tvec[i];
0e80b01d 2960 tree type = TREE_TYPE (val);
2961 const char *constraint;
2962 bool is_inout;
2963 bool allows_reg;
2964 bool allows_mem;
2965
0e80b01d 2966 /* Try to parse the output constraint. If that fails, there's
2967 no point in going further. */
2968 constraint = constraints[i];
2969 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2970 &allows_mem, &allows_reg, &is_inout))
2971 return;
2972
2973 if (! allows_reg
2974 && (allows_mem
2975 || is_inout
2976 || (DECL_P (val)
2977 && REG_P (DECL_RTL (val))
2978 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2979 mark_addressable (val);
0e80b01d 2980 }
2981
2af3d775 2982 for (i = 0; i < ninputs; ++i)
0e80b01d 2983 {
2984 bool allows_reg, allows_mem;
2985 const char *constraint;
2986
0e80b01d 2987 constraint = constraints[i + noutputs];
2af3d775 2988 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
2989 constraints.address (),
2990 &allows_mem, &allows_reg))
0e80b01d 2991 return;
2992
2993 if (! allows_reg && allows_mem)
2af3d775 2994 mark_addressable (input_tvec[i]);
0e80b01d 2995 }
2996
2997 /* Second pass evaluates arguments. */
2998
2999 /* Make sure stack is consistent for asm goto. */
3000 if (nlabels > 0)
3001 do_pending_stack_adjust ();
2af3d775 3002 int old_generating_concat_p = generating_concat_p;
3003
3004 /* Vector of RTX's of evaluated output operands. */
3005 auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
3006 auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
3007 rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
0e80b01d 3008
2af3d775 3009 output_rvec.safe_grow (noutputs);
3010
3011 for (i = 0; i < noutputs; ++i)
0e80b01d 3012 {
2af3d775 3013 tree val = output_tvec[i];
0e80b01d 3014 tree type = TREE_TYPE (val);
2af3d775 3015 bool is_inout, allows_reg, allows_mem, ok;
0e80b01d 3016 rtx op;
0e80b01d 3017
3018 ok = parse_output_constraint (&constraints[i], i, ninputs,
3019 noutputs, &allows_mem, &allows_reg,
3020 &is_inout);
3021 gcc_assert (ok);
3022
3023 /* If an output operand is not a decl or indirect ref and our constraint
3024 allows a register, make a temporary to act as an intermediate.
2af3d775 3025 Make the asm insn write into that, then we will copy it to
0e80b01d 3026 the real output operand. Likewise for promoted variables. */
3027
3028 generating_concat_p = 0;
3029
0e80b01d 3030 if ((TREE_CODE (val) == INDIRECT_REF
3031 && allows_mem)
3032 || (DECL_P (val)
3033 && (allows_mem || REG_P (DECL_RTL (val)))
3034 && ! (REG_P (DECL_RTL (val))
3035 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3036 || ! allows_reg
3037 || is_inout)
3038 {
3039 op = expand_expr (val, NULL_RTX, VOIDmode,
3040 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3041 if (MEM_P (op))
3042 op = validize_mem (op);
3043
3044 if (! allows_reg && !MEM_P (op))
3045 error ("output number %d not directly addressable", i);
3046 if ((! allows_mem && MEM_P (op))
3047 || GET_CODE (op) == CONCAT)
3048 {
2af3d775 3049 rtx old_op = op;
0e80b01d 3050 op = gen_reg_rtx (GET_MODE (op));
2af3d775 3051
3052 generating_concat_p = old_generating_concat_p;
3053
0e80b01d 3054 if (is_inout)
2af3d775 3055 emit_move_insn (op, old_op);
3056
3057 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3058 emit_move_insn (old_op, op);
3059 after_rtl_seq = get_insns ();
3060 after_rtl_end = get_last_insn ();
3061 end_sequence ();
0e80b01d 3062 }
3063 }
3064 else
3065 {
3066 op = assign_temp (type, 0, 1);
3067 op = validize_mem (op);
2af3d775 3068 if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3069 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
0e80b01d 3070
2af3d775 3071 generating_concat_p = old_generating_concat_p;
0e80b01d 3072
2af3d775 3073 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3074 expand_assignment (val, make_tree (type, op), false);
3075 after_rtl_seq = get_insns ();
3076 after_rtl_end = get_last_insn ();
3077 end_sequence ();
0e80b01d 3078 }
2af3d775 3079 output_rvec[i] = op;
0e80b01d 3080
2af3d775 3081 if (is_inout)
3082 inout_opnum.safe_push (i);
0e80b01d 3083 }
3084
2af3d775 3085 auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3086 auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
0e80b01d 3087
2af3d775 3088 input_rvec.safe_grow (ninputs);
3089 input_mode.safe_grow (ninputs);
0e80b01d 3090
2af3d775 3091 generating_concat_p = 0;
0e80b01d 3092
2af3d775 3093 for (i = 0; i < ninputs; ++i)
0e80b01d 3094 {
2af3d775 3095 tree val = input_tvec[i];
3096 tree type = TREE_TYPE (val);
3097 bool allows_reg, allows_mem, ok;
0e80b01d 3098 const char *constraint;
0e80b01d 3099 rtx op;
0e80b01d 3100
3101 constraint = constraints[i + noutputs];
2af3d775 3102 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3103 constraints.address (),
3104 &allows_mem, &allows_reg);
0e80b01d 3105 gcc_assert (ok);
3106
0e80b01d 3107 /* EXPAND_INITIALIZER will not generate code for valid initializer
3108 constants, but will still generate code for other types of operand.
3109 This is the behavior we want for constant constraints. */
3110 op = expand_expr (val, NULL_RTX, VOIDmode,
3111 allows_reg ? EXPAND_NORMAL
3112 : allows_mem ? EXPAND_MEMORY
3113 : EXPAND_INITIALIZER);
3114
3115 /* Never pass a CONCAT to an ASM. */
3116 if (GET_CODE (op) == CONCAT)
3117 op = force_reg (GET_MODE (op), op);
3118 else if (MEM_P (op))
3119 op = validize_mem (op);
3120
3121 if (asm_operand_ok (op, constraint, NULL) <= 0)
3122 {
3123 if (allows_reg && TYPE_MODE (type) != BLKmode)
3124 op = force_reg (TYPE_MODE (type), op);
3125 else if (!allows_mem)
3126 warning (0, "asm operand %d probably doesn%'t match constraints",
3127 i + noutputs);
3128 else if (MEM_P (op))
3129 {
3130 /* We won't recognize either volatile memory or memory
 3131	 with a queued address as a valid memory_operand
3132 at this point. Ignore it: clearly this *is* a memory. */
3133 }
3134 else
3135 gcc_unreachable ();
3136 }
2af3d775 3137 input_rvec[i] = op;
3138 input_mode[i] = TYPE_MODE (type);
0e80b01d 3139 }
3140
0e80b01d 3141 /* For in-out operands, copy output rtx to input rtx. */
2af3d775 3142 unsigned ninout = inout_opnum.length();
0e80b01d 3143 for (i = 0; i < ninout; i++)
3144 {
3145 int j = inout_opnum[i];
2af3d775 3146 rtx o = output_rvec[j];
0e80b01d 3147
2af3d775 3148 input_rvec.safe_push (o);
3149 input_mode.safe_push (GET_MODE (o));
0e80b01d 3150
2af3d775 3151 char buffer[16];
0e80b01d 3152 sprintf (buffer, "%d", j);
2af3d775 3153 constraints.safe_push (ggc_strdup (buffer));
3154 }
3155 ninputs += ninout;
3156
3157 /* Sometimes we wish to automatically clobber registers across an asm.
3158 Case in point is when the i386 backend moved from cc0 to a hard reg --
3159 maintaining source-level compatibility means automatically clobbering
3160 the flags register. */
3161 rtx_insn *after_md_seq = NULL;
3162 if (targetm.md_asm_adjust)
3163 after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3164 constraints, clobber_rvec,
3165 clobbered_regs);
3166
3167 /* Do not allow the hook to change the output and input count,
3168 lest it mess up the operand numbering. */
3169 gcc_assert (output_rvec.length() == noutputs);
3170 gcc_assert (input_rvec.length() == ninputs);
3171 gcc_assert (constraints.length() == noutputs + ninputs);
3172
3173 /* But it certainly can adjust the clobbers. */
3174 nclobbers = clobber_rvec.length();
3175
3176 /* Third pass checks for easy conflicts. */
 3177	 /* ??? Why are we doing this on trees instead of rtx? */
3178
3179 bool clobber_conflict_found = 0;
3180 for (i = 0; i < noutputs; ++i)
3181 if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3182 clobber_conflict_found = 1;
3183 for (i = 0; i < ninputs - ninout; ++i)
3184 if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3185 clobber_conflict_found = 1;
3186
3187 /* Make vectors for the expression-rtx, constraint strings,
3188 and named operands. */
3189
3190 rtvec argvec = rtvec_alloc (ninputs);
3191 rtvec constraintvec = rtvec_alloc (ninputs);
3192 rtvec labelvec = rtvec_alloc (nlabels);
3193
3194 rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3195 : GET_MODE (output_rvec[0])),
3196 ggc_strdup (gimple_asm_string (stmt)),
7e93252e 3197 "", 0, argvec, constraintvec,
2af3d775 3198 labelvec, locus);
3199 MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3200
3201 for (i = 0; i < ninputs; ++i)
3202 {
3203 ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3204 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3205 = gen_rtx_ASM_INPUT_loc (input_mode[i],
3206 constraints[i + noutputs],
3207 locus);
0e80b01d 3208 }
3209
3210 /* Copy labels to the vector. */
2af3d775 3211 rtx_code_label *fallthru_label = NULL;
3212 if (nlabels > 0)
3213 {
3214 basic_block fallthru_bb = NULL;
3215 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3216 if (fallthru)
3217 fallthru_bb = fallthru->dest;
3218
3219 for (i = 0; i < nlabels; ++i)
0e80b01d 3220 {
2af3d775 3221 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
9ed997be 3222 rtx_insn *r;
2af3d775 3223 /* If asm goto has any labels in the fallthru basic block, use
3224 a label that we emit immediately after the asm goto. Expansion
3225 may insert further instructions into the same basic block after
3226 asm goto and if we don't do this, insertion of instructions on
3227 the fallthru edge might misbehave. See PR58670. */
3228 if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
3229 {
3230 if (fallthru_label == NULL_RTX)
3231 fallthru_label = gen_label_rtx ();
3232 r = fallthru_label;
3233 }
3234 else
3235 r = label_rtx (label);
3236 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
0e80b01d 3237 }
0e80b01d 3238 }
3239
0e80b01d 3240 /* Now, for each output, construct an rtx
3241 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3242 ARGVEC CONSTRAINTS OPNAMES))
3243 If there is more than one, put them inside a PARALLEL. */
3244
3245 if (nlabels > 0 && nclobbers == 0)
3246 {
3247 gcc_assert (noutputs == 0);
3248 emit_jump_insn (body);
3249 }
3250 else if (noutputs == 0 && nclobbers == 0)
3251 {
3252 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3253 emit_insn (body);
3254 }
3255 else if (noutputs == 1 && nclobbers == 0)
3256 {
2af3d775 3257 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3258 emit_insn (gen_rtx_SET (output_rvec[0], body));
0e80b01d 3259 }
3260 else
3261 {
3262 rtx obody = body;
3263 int num = noutputs;
3264
3265 if (num == 0)
3266 num = 1;
3267
3268 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3269
3270 /* For each output operand, store a SET. */
2af3d775 3271 for (i = 0; i < noutputs; ++i)
0e80b01d 3272 {
2af3d775 3273 rtx src, o = output_rvec[i];
3274 if (i == 0)
3275 {
3276 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3277 src = obody;
3278 }
3279 else
3280 {
3281 src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3282 ASM_OPERANDS_TEMPLATE (obody),
3283 constraints[i], i, argvec,
3284 constraintvec, labelvec, locus);
3285 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3286 }
3287 XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
0e80b01d 3288 }
3289
3290 /* If there are no outputs (but there are some clobbers)
3291 store the bare ASM_OPERANDS into the PARALLEL. */
0e80b01d 3292 if (i == 0)
3293 XVECEXP (body, 0, i++) = obody;
3294
3295 /* Store (clobber REG) for each clobbered register specified. */
2af3d775 3296 for (unsigned j = 0; j < nclobbers; ++j)
0e80b01d 3297 {
2af3d775 3298 rtx clobbered_reg = clobber_rvec[j];
0e80b01d 3299
2af3d775 3300	 /* Sanity-check for overlap between clobbers and, respectively,
 3301	 inputs and outputs that has not been handled. Such overlap
3302 should have been detected and reported above. */
3303 if (!clobber_conflict_found && REG_P (clobbered_reg))
0e80b01d 3304 {
2af3d775 3305 /* We test the old body (obody) contents to avoid
3306 tripping over the under-construction body. */
3307 for (unsigned k = 0; k < noutputs; ++k)
3308 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3309 internal_error ("asm clobber conflict with output operand");
3310
3311 for (unsigned k = 0; k < ninputs - ninout; ++k)
3312 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3313 internal_error ("asm clobber conflict with input operand");
0e80b01d 3314 }
3315
2af3d775 3316 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
0e80b01d 3317 }
3318
3319 if (nlabels > 0)
3320 emit_jump_insn (body);
3321 else
3322 emit_insn (body);
3323 }
3324
2af3d775 3325 generating_concat_p = old_generating_concat_p;
3326
0e80b01d 3327 if (fallthru_label)
3328 emit_label (fallthru_label);
3329
2af3d775 3330 if (after_md_seq)
3331 emit_insn (after_md_seq);
3332 if (after_rtl_seq)
3333 emit_insn (after_rtl_seq);
0e80b01d 3334
ae231cbd 3335 free_temp_slots ();
2af3d775 3336 crtl->has_asm_statement = 1;
0e80b01d 3337}
3338
3339/* Emit code to jump to the address
3340 specified by the pointer expression EXP. */
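/* For example, the GNU C computed goto extension
     void *p = &&lab;  goto *p;
   ends up here with EXP being the pointer operand of the goto
   (hypothetical user code).  */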
3341
3342static void
3343expand_computed_goto (tree exp)
3344{
3345 rtx x = expand_normal (exp);
3346
0e80b01d 3347 do_pending_stack_adjust ();
3348 emit_indirect_jump (x);
3349}
3350
3351/* Generate RTL code for a `goto' statement with target label LABEL.
3352 LABEL should be a LABEL_DECL tree node that was or will later be
3353 defined with `expand_label'. */
3354
3355static void
3356expand_goto (tree label)
3357{
382ecba7 3358 if (flag_checking)
3359 {
3360 /* Check for a nonlocal goto to a containing function. Should have
3361 gotten translated to __builtin_nonlocal_goto. */
3362 tree context = decl_function_context (label);
3363 gcc_assert (!context || context == current_function_decl);
3364 }
0e80b01d 3365
f9a00e9e 3366 emit_jump (jump_target_rtx (label));
0e80b01d 3367}
3368
3369/* Output a return with no value. */
3370
3371static void
3372expand_null_return_1 (void)
3373{
3374 clear_pending_stack_adjust ();
3375 do_pending_stack_adjust ();
3376 emit_jump (return_label);
3377}
3378
3379/* Generate RTL to return from the current function, with no value.
3380 (That is, we do not do anything about returning any value.) */
3381
3382void
3383expand_null_return (void)
3384{
3385 /* If this function was declared to return a value, but we
3386 didn't, clobber the return registers so that they are not
3387 propagated live to the rest of the function. */
3388 clobber_return_register ();
3389
3390 expand_null_return_1 ();
3391}
3392
3393/* Generate RTL to return from the current function, with value VAL. */
3394
3395static void
3396expand_value_return (rtx val)
3397{
3398 /* Copy the value to the return location unless it's already there. */
3399
3400 tree decl = DECL_RESULT (current_function_decl);
3401 rtx return_reg = DECL_RTL (decl);
3402 if (return_reg != val)
3403 {
3404 tree funtype = TREE_TYPE (current_function_decl);
3405 tree type = TREE_TYPE (decl);
3406 int unsignedp = TYPE_UNSIGNED (type);
3754d046 3407 machine_mode old_mode = DECL_MODE (decl);
3408 machine_mode mode;
0e80b01d 3409 if (DECL_BY_REFERENCE (decl))
3410 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3411 else
3412 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3413
3414 if (mode != old_mode)
3415 val = convert_modes (mode, old_mode, val, unsignedp);
3416
3417 if (GET_CODE (return_reg) == PARALLEL)
3418 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3419 else
3420 emit_move_insn (return_reg, val);
3421 }
3422
3423 expand_null_return_1 ();
3424}
3425
3426/* Generate RTL to evaluate the expression RETVAL and return it
3427 from the current function. */
3428
3429static void
058a1b7a 3430expand_return (tree retval, tree bounds)
0e80b01d 3431{
3432 rtx result_rtl;
3433 rtx val = 0;
3434 tree retval_rhs;
058a1b7a 3435 rtx bounds_rtl;
0e80b01d 3436
3437 /* If function wants no value, give it none. */
3438 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3439 {
3440 expand_normal (retval);
3441 expand_null_return ();
3442 return;
3443 }
3444
3445 if (retval == error_mark_node)
3446 {
3447 /* Treat this like a return of no value from a function that
3448 returns a value. */
3449 expand_null_return ();
3450 return;
3451 }
3452 else if ((TREE_CODE (retval) == MODIFY_EXPR
3453 || TREE_CODE (retval) == INIT_EXPR)
3454 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3455 retval_rhs = TREE_OPERAND (retval, 1);
3456 else
3457 retval_rhs = retval;
3458
3459 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3460
058a1b7a 3461  /* Put the returned bounds in the right place.  */
3462 bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3463 if (bounds_rtl)
3464 {
2e6c9e14 3465 rtx addr = NULL;
3466 rtx bnd = NULL;
058a1b7a 3467
2e6c9e14 3468 if (bounds && bounds != error_mark_node)
058a1b7a 3469 {
3470 bnd = expand_normal (bounds);
3471 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3472 }
3473 else if (REG_P (bounds_rtl))
3474 {
2e6c9e14 3475 if (bounds)
3476 bnd = chkp_expand_zero_bounds ();
3477 else
3478 {
3479 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3480 addr = gen_rtx_MEM (Pmode, addr);
3481 bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3482 }
3483
058a1b7a 3484 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3485 }
3486 else
3487 {
3488 int n;
3489
3490 gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3491
2e6c9e14 3492 if (bounds)
3493 bnd = chkp_expand_zero_bounds ();
3494 else
3495 {
3496 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3497 addr = gen_rtx_MEM (Pmode, addr);
3498 }
058a1b7a 3499
3500 for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3501 {
058a1b7a 3502 rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
2e6c9e14 3503 if (!bounds)
3504 {
3505 rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3506 rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3507 bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3508 }
058a1b7a 3509 targetm.calls.store_returned_bounds (slot, bnd);
3510 }
3511 }
3512 }
3513 else if (chkp_function_instrumented_p (current_function_decl)
3514 && !BOUNDED_P (retval_rhs)
3515 && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3516 && TREE_CODE (retval_rhs) != RESULT_DECL)
3517 {
3518 rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3519 addr = gen_rtx_MEM (Pmode, addr);
3520
3521 gcc_assert (MEM_P (result_rtl));
3522
3523 chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3524 }
3525
0e80b01d 3526 /* If we are returning the RESULT_DECL, then the value has already
3527 been stored into it, so we don't have to do anything special. */
3528 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3529 expand_value_return (result_rtl);
3530
3531 /* If the result is an aggregate that is being returned in one (or more)
3532 registers, load the registers here. */
3533
3534 else if (retval_rhs != 0
3535 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3536 && REG_P (result_rtl))
3537 {
3538 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3539 if (val)
3540 {
3541 /* Use the mode of the result value on the return register. */
3542 PUT_MODE (result_rtl, GET_MODE (val));
3543 expand_value_return (val);
3544 }
3545 else
3546 expand_null_return ();
3547 }
3548 else if (retval_rhs != 0
3549 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3550 && (REG_P (result_rtl)
3551 || (GET_CODE (result_rtl) == PARALLEL)))
3552 {
9f495e8d 3553 /* Compute the return value into a temporary (usually a pseudo reg). */
3554 val
3555 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
0e80b01d 3556 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3557 val = force_not_mem (val);
0e80b01d 3558 expand_value_return (val);
3559 }
3560 else
3561 {
3562 /* No hard reg used; calculate value into hard return reg. */
3563 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3564 expand_value_return (result_rtl);
3565 }
3566}
3567
16c9337c 3568/* A subroutine of expand_gimple_stmt, expanding one gimple statement
3569 STMT that doesn't require special handling for outgoing edges. That
 3570   is, no tail calls and no GIMPLE_COND.  */
3571
3572static void
42acab1c 3573expand_gimple_stmt_1 (gimple *stmt)
16c9337c 3574{
3575 tree op0;
8c593757 3576
5169661d 3577 set_curr_insn_location (gimple_location (stmt));
8c593757 3578
16c9337c 3579 switch (gimple_code (stmt))
3580 {
3581 case GIMPLE_GOTO:
3582 op0 = gimple_goto_dest (stmt);
3583 if (TREE_CODE (op0) == LABEL_DECL)
3584 expand_goto (op0);
3585 else
3586 expand_computed_goto (op0);
3587 break;
3588 case GIMPLE_LABEL:
1a91d914 3589 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
16c9337c 3590 break;
3591 case GIMPLE_NOP:
3592 case GIMPLE_PREDICT:
3593 break;
16c9337c 3594 case GIMPLE_SWITCH:
b8daf3d8 3595 {
3596 gswitch *swtch = as_a <gswitch *> (stmt);
3597 if (gimple_switch_num_labels (swtch) == 1)
3598 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3599 else
3600 expand_case (swtch);
3601 }
16c9337c 3602 break;
3603 case GIMPLE_ASM:
1a91d914 3604 expand_asm_stmt (as_a <gasm *> (stmt));
16c9337c 3605 break;
3606 case GIMPLE_CALL:
1a91d914 3607 expand_call_stmt (as_a <gcall *> (stmt));
16c9337c 3608 break;
3609
3610 case GIMPLE_RETURN:
2e6c9e14 3611 {
3612 tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3613 op0 = gimple_return_retval (as_a <greturn *> (stmt));
16c9337c 3614
2e6c9e14 3615 if (op0 && op0 != error_mark_node)
3616 {
3617 tree result = DECL_RESULT (current_function_decl);
16c9337c 3618
eeda041b 3619	    /* Mark that we have a return statement with missing bounds.  */
3620 if (!bnd
3621 && chkp_function_instrumented_p (cfun->decl)
3622 && !DECL_P (op0))
3623 bnd = error_mark_node;
3624
2e6c9e14 3625 /* If we are not returning the current function's RESULT_DECL,
3626 build an assignment to it. */
3627 if (op0 != result)
3628 {
3629 /* I believe that a function's RESULT_DECL is unique. */
3630 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3631
3632 /* ??? We'd like to use simply expand_assignment here,
3633 but this fails if the value is of BLKmode but the return
3634 decl is a register. expand_return has special handling
3635 for this combination, which eventually should move
3636 to common code. See comments there. Until then, let's
3637 build a modify expression :-/ */
3638 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3639 result, op0);
3640 }
2e6c9e14 3641 }
3642
3643 if (!op0)
3644 expand_null_return ();
3645 else
3646 expand_return (op0, bnd);
3647 }
16c9337c 3648 break;
3649
3650 case GIMPLE_ASSIGN:
3651 {
1a91d914 3652 gassign *assign_stmt = as_a <gassign *> (stmt);
3653 tree lhs = gimple_assign_lhs (assign_stmt);
16c9337c 3654
3655 /* Tree expand used to fiddle with |= and &= of two bitfield
 3656	   COMPONENT_REFs here.  This can't happen with gimple; the LHS
 3657	   of a binary assign must be a gimple reg.  */
3658
3659 if (TREE_CODE (lhs) != SSA_NAME
3660 || get_gimple_rhs_class (gimple_expr_code (stmt))
3661 == GIMPLE_SINGLE_RHS)
3662 {
1a91d914 3663 tree rhs = gimple_assign_rhs1 (assign_stmt);
16c9337c 3664 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3665 == GIMPLE_SINGLE_RHS);
8f413f95 3666 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3667 /* Do not put locations on possibly shared trees. */
3668 && !is_gimple_min_invariant (rhs))
16c9337c 3669 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3c25489e 3670 if (TREE_CLOBBER_P (rhs))
3671 /* This is a clobber to mark the going out of scope for
3672 this LHS. */
3673 ;
3674 else
3675 expand_assignment (lhs, rhs,
1a91d914 3676 gimple_assign_nontemporal_move_p (
3677 assign_stmt));
16c9337c 3678 }
3679 else
3680 {
3681 rtx target, temp;
1a91d914 3682 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
16c9337c 3683 struct separate_ops ops;
3684 bool promoted = false;
3685
3686 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3687 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3688 promoted = true;
3689
1a91d914 3690 ops.code = gimple_assign_rhs_code (assign_stmt);
16c9337c 3691 ops.type = TREE_TYPE (lhs);
1ebce849 3692 switch (get_gimple_rhs_class (ops.code))
16c9337c 3693 {
00f4f705 3694 case GIMPLE_TERNARY_RHS:
1a91d914 3695 ops.op2 = gimple_assign_rhs3 (assign_stmt);
00f4f705 3696 /* Fallthru */
16c9337c 3697 case GIMPLE_BINARY_RHS:
1a91d914 3698 ops.op1 = gimple_assign_rhs2 (assign_stmt);
16c9337c 3699 /* Fallthru */
3700 case GIMPLE_UNARY_RHS:
1a91d914 3701 ops.op0 = gimple_assign_rhs1 (assign_stmt);
16c9337c 3702 break;
3703 default:
3704 gcc_unreachable ();
3705 }
3706 ops.location = gimple_location (stmt);
3707
3708 /* If we want to use a nontemporal store, force the value to
 3709	     a register first.  If we store into a promoted register,
3710 don't directly expand to target. */
3711 temp = nontemporal || promoted ? NULL_RTX : target;
3712 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3713 EXPAND_NORMAL);
3714
3715 if (temp == target)
3716 ;
3717 else if (promoted)
3718 {
e8629f9e 3719 int unsignedp = SUBREG_PROMOTED_SIGN (target);
16c9337c 3720 /* If TEMP is a VOIDmode constant, use convert_modes to make
3721 sure that we properly convert it. */
3722 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3723 {
3724 temp = convert_modes (GET_MODE (target),
3725 TYPE_MODE (ops.type),
088c4b7b 3726 temp, unsignedp);
16c9337c 3727 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
088c4b7b 3728 GET_MODE (target), temp, unsignedp);
16c9337c 3729 }
3730
5a9ccd1b 3731 convert_move (SUBREG_REG (target), temp, unsignedp);
16c9337c 3732 }
3733 else if (nontemporal && emit_storent_insn (target, temp))
3734 ;
3735 else
3736 {
3737 temp = force_operand (temp, target);
3738 if (temp != target)
3739 emit_move_insn (target, temp);
3740 }
3741 }
3742 }
3743 break;
3744
3745 default:
3746 gcc_unreachable ();
3747 }
3748}
3749
3750/* Expand one gimple statement STMT and return the last RTL instruction
3751 before any of the newly generated ones.
3752
3753 In addition to generating the necessary RTL instructions this also
3754 sets REG_EH_REGION notes if necessary and sets the current source
3755 location for diagnostics. */
3756
74a0cbc4 3757static rtx_insn *
42acab1c 3758expand_gimple_stmt (gimple *stmt)
16c9337c 3759{
16c9337c 3760 location_t saved_location = input_location;
74a0cbc4 3761 rtx_insn *last = get_last_insn ();
8c593757 3762 int lp_nr;
16c9337c 3763
16c9337c 3764 gcc_assert (cfun);
3765
8c593757 3766 /* We need to save and restore the current source location so that errors
3767 discovered during expansion are emitted with the right location. But
3768 it would be better if the diagnostic routines used the source location
3769 embedded in the tree nodes rather than globals. */
16c9337c 3770 if (gimple_has_location (stmt))
8c593757 3771 input_location = gimple_location (stmt);
16c9337c 3772
3773 expand_gimple_stmt_1 (stmt);
8c593757 3774
16c9337c 3775 /* Free any temporaries used to evaluate this statement. */
3776 free_temp_slots ();
3777
3778 input_location = saved_location;
3779
3780 /* Mark all insns that may trap. */
e38def9c 3781 lp_nr = lookup_stmt_eh_lp (stmt);
3782 if (lp_nr)
16c9337c 3783 {
74a0cbc4 3784 rtx_insn *insn;
16c9337c 3785 for (insn = next_real_insn (last); insn;
3786 insn = next_real_insn (insn))
3787 {
3788 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3789 /* If we want exceptions for non-call insns, any
3790 may_trap_p instruction may throw. */
3791 && GET_CODE (PATTERN (insn)) != CLOBBER
3792 && GET_CODE (PATTERN (insn)) != USE
e38def9c 3793 && insn_could_throw_p (insn))
3794 make_reg_eh_region_note (insn, 0, lp_nr);
16c9337c 3795 }
3796 }
3797
3798 return last;
3799}
3800
75a70cf9 3801/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
17ceb1d5 3802 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3803 generated a tail call (something that might be denied by the ABI
c578459e 3804 rules governing the call; see calls.c).
3805
3806 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3807 can still reach the rest of BB. The case here is __builtin_sqrt,
3808 where the NaN result goes through the external function (with a
3809 tailcall) and the normal result happens via a sqrt instruction. */
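/* Schematically (target-dependent, illustrative only), such an
   expansion looks like
     r = sqrt-insn (x);  if (r is NaN) sibcall sqrt (x);  lab:
   so control can still fall through to the rest of the block.  */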
3ced8962 3810
3811static basic_block
1a91d914 3812expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3ced8962 3813{
74a0cbc4 3814 rtx_insn *last2, *last;
17ceb1d5 3815 edge e;
cd665a06 3816 edge_iterator ei;
720cfc43 3817 profile_probability probability;
3ced8962 3818
16c9337c 3819 last2 = last = expand_gimple_stmt (stmt);
3ced8962 3820
3821 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
17ceb1d5 3822 if (CALL_P (last) && SIBLING_CALL_P (last))
3823 goto found;
3ced8962 3824
75a70cf9 3825 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
49377e21 3826
c578459e 3827 *can_fallthru = true;
17ceb1d5 3828 return NULL;
3ced8962 3829
17ceb1d5 3830 found:
3831 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3832 Any instructions emitted here are about to be deleted. */
3833 do_pending_stack_adjust ();
3834
3835 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3836 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3837 EH or abnormal edges, we shouldn't have created a tail call in
3838 the first place. So it seems to me we should just be removing
3839 all edges here, or redirecting the existing fallthru edge to
3840 the exit block. */
3841
720cfc43 3842 probability = profile_probability::never ();
17ceb1d5 3843
cd665a06 3844 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3845 {
17ceb1d5 3846 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3847 {
34154e27 3848 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
205ce1aa 3849 e->dest->count -= e->count ();
17ceb1d5 3850 probability += e->probability;
3851 remove_edge (e);
3ced8962 3852 }
cd665a06 3853 else
3854 ei_next (&ei);
3ced8962 3855 }
3856
17ceb1d5 3857 /* This is somewhat ugly: the call_expr expander often emits instructions
3858 after the sibcall (to perform the function return). These confuse the
794d8e3f 3859      find_many_sub_basic_blocks code, so we need to get rid of them.  */
17ceb1d5 3860 last = NEXT_INSN (last);
cc636d56 3861 gcc_assert (BARRIER_P (last));
c578459e 3862
3863 *can_fallthru = false;
17ceb1d5 3864 while (NEXT_INSN (last))
3865 {
 3866      /* For instance, the sqrt builtin expander may expand to an if with
 3867	 a sibcall in the then arm and a label for the else arm.  */
3868 if (LABEL_P (NEXT_INSN (last)))
c578459e 3869 {
3870 *can_fallthru = true;
3871 break;
3872 }
17ceb1d5 3873 delete_insn (NEXT_INSN (last));
3874 }
3875
34154e27 3876 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3877 | EDGE_SIBCALL);
25d2128b 3878 e->probability = probability;
26bb3cb2 3879 BB_END (bb) = last;
17ceb1d5 3880 update_bb_for_insn (bb);
3881
3882 if (NEXT_INSN (last))
3883 {
3884 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3885
3886 last = BB_END (bb);
3887 if (BARRIER_P (last))
26bb3cb2 3888 BB_END (bb) = PREV_INSN (last);
17ceb1d5 3889 }
3890
75a70cf9 3891 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
49377e21 3892
17ceb1d5 3893 return bb;
3ced8962 3894}
3895
9845d120 3896/* Return the difference between the floor and the truncated result of
3897 a signed division by OP1 with remainder MOD. */
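/* Worked example: -7 / 2 truncates to -3 with remainder -1, while
   the floor result is -4, so the adjustment below is -1.  It is 0
   whenever MOD is zero or has the same sign as OP1.  */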
3898static rtx
3754d046 3899floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
9845d120 3900{
3901 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3902 return gen_rtx_IF_THEN_ELSE
3903 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3904 gen_rtx_IF_THEN_ELSE
3905 (mode, gen_rtx_LT (BImode,
3906 gen_rtx_DIV (mode, op1, mod),
3907 const0_rtx),
3908 constm1_rtx, const0_rtx),
3909 const0_rtx);
3910}
3911
3912/* Return the difference between the ceil and the truncated result of
3913 a signed division by OP1 with remainder MOD. */
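/* Worked example: 7 / 2 truncates to 3 with remainder 1, while the
   ceiling result is 4, so the adjustment below is 1.  It is 0
   whenever MOD is zero or its sign differs from OP1's.  */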
3914static rtx
3754d046 3915ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
9845d120 3916{
3917 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3918 return gen_rtx_IF_THEN_ELSE
3919 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3920 gen_rtx_IF_THEN_ELSE
3921 (mode, gen_rtx_GT (BImode,
3922 gen_rtx_DIV (mode, op1, mod),
3923 const0_rtx),
3924 const1_rtx, const0_rtx),
3925 const0_rtx);
3926}
3927
3928/* Return the difference between the ceil and the truncated result of
3929 an unsigned division by OP1 with remainder MOD. */
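/* Worked example: 7u / 2 truncates to 3 with remainder 1, while the
   ceiling result is 4, so the adjustment below is 1; it is 0 exactly
   when MOD is zero.  */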
3930static rtx
3754d046 3931ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
9845d120 3932{
3933 /* (mod != 0 ? 1 : 0) */
3934 return gen_rtx_IF_THEN_ELSE
3935 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3936 const1_rtx, const0_rtx);
3937}
3938
3939/* Return the difference between the rounded and the truncated result
3940 of a signed division by OP1 with remainder MOD. Halfway cases are
3941 rounded away from zero, rather than to the nearest even number. */
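/* Worked example: 7 / 2 = 3.5 rounds to 4 while truncation gives 3,
   so the adjustment below is 1; for -7 / 2 = -3.5 it is -1.  The
   adjustment is 0 when abs (MOD) < abs (OP1) - abs (MOD).  */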
3942static rtx
3754d046 3943round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
9845d120 3944{
3945 /* (abs (mod) >= abs (op1) - abs (mod)
3946 ? (op1 / mod > 0 ? 1 : -1)
3947 : 0) */
3948 return gen_rtx_IF_THEN_ELSE
3949 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3950 gen_rtx_MINUS (mode,
3951 gen_rtx_ABS (mode, op1),
3952 gen_rtx_ABS (mode, mod))),
3953 gen_rtx_IF_THEN_ELSE
3954 (mode, gen_rtx_GT (BImode,
3955 gen_rtx_DIV (mode, op1, mod),
3956 const0_rtx),
3957 const1_rtx, constm1_rtx),
3958 const0_rtx);
3959}
3960
3961/* Return the difference between the rounded and the truncated result
 3962   of an unsigned division by OP1 with remainder MOD.  Halfway cases
3963 are rounded away from zero, rather than to the nearest even
3964 number. */
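/* Worked example: 7u / 2 truncates to 3 with remainder 1; since
   1 >= 2 - 1, the result rounds up to 4 and the adjustment below
   is 1.  */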
3965static rtx
3754d046 3966round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
9845d120 3967{
3968 /* (mod >= op1 - mod ? 1 : 0) */
3969 return gen_rtx_IF_THEN_ELSE
3970 (mode, gen_rtx_GE (BImode, mod,
3971 gen_rtx_MINUS (mode, op1, mod)),
3972 const1_rtx, const0_rtx);
3973}
3974
d89c81d6 3975/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3976 any rtl. */
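/* For example (illustrative sizes): with a 64-bit Pmode and a 32-bit
   MODE the conversion is a lowpart SUBREG, while widening instead
   uses ZERO_EXTEND or SIGN_EXTEND according to
   POINTERS_EXTEND_UNSIGNED.  */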
3977
3978static rtx
f77c4496 3979convert_debug_memory_address (scalar_int_mode mode, rtx x,
cd799492 3980 addr_space_t as)
d89c81d6 3981{
d89c81d6 3982#ifndef POINTERS_EXTEND_UNSIGNED
cd799492 3983 gcc_assert (mode == Pmode
3984 || mode == targetm.addr_space.address_mode (as));
7a6aeeed 3985 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
d89c81d6 3986#else
cd799492 3987 rtx temp;
cd799492 3988
df7f3935 3989 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
d89c81d6 3990
3991 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3992 return x;
3993
7a6aeeed 3994 /* X must have some form of address mode already. */
3995 scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
995b44f5 3996 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
a8a727ad 3997 x = lowpart_subreg (mode, x, xmode);
d89c81d6 3998 else if (POINTERS_EXTEND_UNSIGNED > 0)
3999 x = gen_rtx_ZERO_EXTEND (mode, x);
4000 else if (!POINTERS_EXTEND_UNSIGNED)
4001 x = gen_rtx_SIGN_EXTEND (mode, x);
4002 else
cd799492 4003 {
4004 switch (GET_CODE (x))
4005 {
4006 case SUBREG:
4007 if ((SUBREG_PROMOTED_VAR_P (x)
4008 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
4009 || (GET_CODE (SUBREG_REG (x)) == PLUS
4010 && REG_P (XEXP (SUBREG_REG (x), 0))
4011 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
4012 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
4013 && GET_MODE (SUBREG_REG (x)) == mode)
4014 return SUBREG_REG (x);
4015 break;
4016 case LABEL_REF:
c7799456 4017 temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
cd799492 4018 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
4019 return temp;
4020 case SYMBOL_REF:
4021 temp = shallow_copy_rtx (x);
4022 PUT_MODE (temp, mode);
4023 return temp;
4024 case CONST:
4025 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4026 if (temp)
4027 temp = gen_rtx_CONST (mode, temp);
4028 return temp;
4029 case PLUS:
4030 case MINUS:
4031 if (CONST_INT_P (XEXP (x, 1)))
4032 {
4033 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4034 if (temp)
4035 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4036 }
4037 break;
4038 default:
4039 break;
4040 }
4041 /* Don't know how to express ptr_extend as operation in debug info. */
4042 return NULL;
4043 }
d89c81d6 4044#endif /* POINTERS_EXTEND_UNSIGNED */
4045
4046 return x;
4047}
4048
54497144 4049/* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4050 by avoid_deep_ter_for_debug. */
4051
4052static hash_map<tree, tree> *deep_ter_debug_map;
4053
4054/* Split overly deep TER chains for debug stmts using debug temporaries.  */
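/* For instance, given a hypothetical single-use chain
     t1 = a + b; t2 = t1 * c; ...; t8 = t7 - d;
   a use more than six TER links deep is rebound to a fresh
   DEBUG_EXPR_DECL so that expand_debug_expr need not rebuild the
   whole chain as a single tree.  */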
4055
4056static void
42acab1c 4057avoid_deep_ter_for_debug (gimple *stmt, int depth)
54497144 4058{
4059 use_operand_p use_p;
4060 ssa_op_iter iter;
4061 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4062 {
4063 tree use = USE_FROM_PTR (use_p);
4064 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4065 continue;
42acab1c 4066 gimple *g = get_gimple_for_ssa_name (use);
54497144 4067 if (g == NULL)
4068 continue;
4069 if (depth > 6 && !stmt_ends_bb_p (g))
4070 {
4071 if (deep_ter_debug_map == NULL)
4072 deep_ter_debug_map = new hash_map<tree, tree>;
4073
4074 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4075 if (vexpr != NULL)
4076 continue;
4077 vexpr = make_node (DEBUG_EXPR_DECL);
42acab1c 4078 gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
54497144 4079 DECL_ARTIFICIAL (vexpr) = 1;
4080 TREE_TYPE (vexpr) = TREE_TYPE (use);
adc78298 4081 SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
54497144 4082 gimple_stmt_iterator gsi = gsi_for_stmt (g);
4083 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4084 avoid_deep_ter_for_debug (def_temp, 0);
4085 }
4086 else
4087 avoid_deep_ter_for_debug (g, depth + 1);
4088 }
4089}
4090
8ee59e4e 4091/* Return an RTX equivalent to the value of the parameter DECL. */
4092
4093static rtx
4094expand_debug_parm_decl (tree decl)
4095{
4096 rtx incoming = DECL_INCOMING_RTL (decl);
4097
4098 if (incoming
4099 && GET_MODE (incoming) != BLKmode
4100 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4101 || (MEM_P (incoming)
4102 && REG_P (XEXP (incoming, 0))
4103 && HARD_REGISTER_P (XEXP (incoming, 0)))))
4104 {
4105 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4106
4107#ifdef HAVE_window_save
4108 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4109 If the target machine has an explicit window save instruction, the
4110 actual entry value is the corresponding OUTGOING_REGNO instead. */
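	 /* E.g. on SPARC, the classic window-save target, a parameter
	    seen in %i0 inside the function arrived in the caller's %o0,
	    and the ENTRY_VALUE must refer to the latter.  */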
4111 if (REG_P (incoming)
4112 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4113 incoming
4114 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4115 OUTGOING_REGNO (REGNO (incoming)), 0);
4116 else if (MEM_P (incoming))
4117 {
4118 rtx reg = XEXP (incoming, 0);
4119 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4120 {
4121 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4122 incoming = replace_equiv_address_nv (incoming, reg);
4123 }
848d0536 4124 else
4125 incoming = copy_rtx (incoming);
8ee59e4e 4126 }
4127#endif
4128
4129 ENTRY_VALUE_EXP (rtl) = incoming;
4130 return rtl;
4131 }
4132
4133 if (incoming
4134 && GET_MODE (incoming) != BLKmode
4135 && !TREE_ADDRESSABLE (decl)
4136 && MEM_P (incoming)
4137 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4138 || (GET_CODE (XEXP (incoming, 0)) == PLUS
4139 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4140 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
848d0536 4141 return copy_rtx (incoming);
8ee59e4e 4142
4143 return NULL_RTX;
4144}
4145
4146/* Return an RTX equivalent to the value of the tree expression EXP. */
9845d120 4147
4148static rtx
4149expand_debug_expr (tree exp)
4150{
4151 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3754d046 4152 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4153 machine_mode inner_mode = VOIDmode;
9845d120 4154 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
bd1a81f7 4155 addr_space_t as;
03b7a719 4156 scalar_int_mode op0_mode, op1_mode, addr_mode;
9845d120 4157
4158 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4159 {
4160 case tcc_expression:
4161 switch (TREE_CODE (exp))
4162 {
4163 case COND_EXPR:
b54ee9da 4164 case DOT_PROD_EXPR:
a2287001 4165 case SAD_EXPR:
00f4f705 4166 case WIDEN_MULT_PLUS_EXPR:
4167 case WIDEN_MULT_MINUS_EXPR:
156f51b9 4168 case FMA_EXPR:
9845d120 4169 goto ternary;
4170
4171 case TRUTH_ANDIF_EXPR:
4172 case TRUTH_ORIF_EXPR:
4173 case TRUTH_AND_EXPR:
4174 case TRUTH_OR_EXPR:
4175 case TRUTH_XOR_EXPR:
4176 goto binary;
4177
4178 case TRUTH_NOT_EXPR:
4179 goto unary;
4180
4181 default:
4182 break;
4183 }
4184 break;
4185
4186 ternary:
4187 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4188 if (!op2)
4189 return NULL_RTX;
4190 /* Fall through. */
4191
4192 binary:
4193 case tcc_binary:
9845d120 4194 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4195 if (!op1)
4196 return NULL_RTX;
bfaa965e 4197 switch (TREE_CODE (exp))
4198 {
4199 case LSHIFT_EXPR:
4200 case RSHIFT_EXPR:
4201 case LROTATE_EXPR:
4202 case RROTATE_EXPR:
4203 case WIDEN_LSHIFT_EXPR:
4204 /* Ensure second operand isn't wider than the first one. */
4205 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
8974b7a3 4206 if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4207 && (GET_MODE_UNIT_PRECISION (mode)
4208 < GET_MODE_PRECISION (op1_mode)))
4209 op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
bfaa965e 4210 break;
4211 default:
4212 break;
4213 }
9845d120 4214 /* Fall through. */
4215
4216 unary:
4217 case tcc_unary:
9ecadf14 4218 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9845d120 4219 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4220 if (!op0)
4221 return NULL_RTX;
4222 break;
4223
71b39a64 4224 case tcc_comparison:
4225 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4226 goto binary;
4227
9845d120 4228 case tcc_type:
4229 case tcc_statement:
4230 gcc_unreachable ();
4231
4232 case tcc_constant:
4233 case tcc_exceptional:
4234 case tcc_declaration:
4235 case tcc_reference:
4236 case tcc_vl_exp:
4237 break;
4238 }
4239
4240 switch (TREE_CODE (exp))
4241 {
4242 case STRING_CST:
4243 if (!lookup_constant_def (exp))
4244 {
0f89d483 4245 if (strlen (TREE_STRING_POINTER (exp)) + 1
4246 != (size_t) TREE_STRING_LENGTH (exp))
4247 return NULL_RTX;
9845d120 4248 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4249 op0 = gen_rtx_MEM (BLKmode, op0);
4250 set_mem_attributes (op0, exp, 0);
4251 return op0;
4252 }
e3533433 4253 /* Fall through. */
9845d120 4254
4255 case INTEGER_CST:
4256 case REAL_CST:
4257 case FIXED_CST:
4258 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4259 return op0;
4260
4261 case COMPLEX_CST:
4262 gcc_assert (COMPLEX_MODE_P (mode));
4263 op0 = expand_debug_expr (TREE_REALPART (exp));
9845d120 4264 op1 = expand_debug_expr (TREE_IMAGPART (exp));
9845d120 4265 return gen_rtx_CONCAT (mode, op0, op1);
4266
688ff29b 4267 case DEBUG_EXPR_DECL:
4268 op0 = DECL_RTL_IF_SET (exp);
4269
4270 if (op0)
4271 return op0;
4272
4273 op0 = gen_rtx_DEBUG_EXPR (mode);
23dd51cb 4274 DEBUG_EXPR_TREE_DECL (op0) = exp;
688ff29b 4275 SET_DECL_RTL (exp, op0);
4276
4277 return op0;
4278
9845d120 4279 case VAR_DECL:
4280 case PARM_DECL:
4281 case FUNCTION_DECL:
4282 case LABEL_DECL:
4283 case CONST_DECL:
4284 case RESULT_DECL:
4285 op0 = DECL_RTL_IF_SET (exp);
4286
4287 /* This decl was probably optimized away. */
4288 if (!op0)
0f89d483 4289 {
53e9c5c4 4290 if (!VAR_P (exp)
0f89d483 4291 || DECL_EXTERNAL (exp)
4292 || !TREE_STATIC (exp)
4293 || !DECL_NAME (exp)
a5653528 4294 || DECL_HARD_REGISTER (exp)
1cdbcae1 4295 || DECL_IN_CONSTANT_POOL (exp)
a5653528 4296 || mode == VOIDmode)
0f89d483 4297 return NULL;
4298
e6db644e 4299 op0 = make_decl_rtl_for_debug (exp);
0f89d483 4300 if (!MEM_P (op0)
4301 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4302 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4303 return NULL;
4304 }
4305 else
4306 op0 = copy_rtx (op0);
9845d120 4307
5d713e67 4308 if (GET_MODE (op0) == BLKmode
71b39a64 4309 /* If op0 is not BLKmode, but mode is, adjust_mode
5d713e67 4310 below would ICE. While it is likely a FE bug,
4311 try to be robust here. See PR43166. */
0f18e023 4312 || mode == BLKmode
4313 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
9845d120 4314 {
4315 gcc_assert (MEM_P (op0));
4316 op0 = adjust_address_nv (op0, mode, 0);
4317 return op0;
4318 }
4319
4320 /* Fall through. */
4321
4322 adjust_mode:
4323 case PAREN_EXPR:
d09ef31a 4324 CASE_CONVERT:
9845d120 4325 {
9ecadf14 4326 inner_mode = GET_MODE (op0);
9845d120 4327
4328 if (mode == inner_mode)
4329 return op0;
4330
4331 if (inner_mode == VOIDmode)
4332 {
3c800ea7 4333 if (TREE_CODE (exp) == SSA_NAME)
4334 inner_mode = TYPE_MODE (TREE_TYPE (exp));
4335 else
4336 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9845d120 4337 if (mode == inner_mode)
4338 return op0;
4339 }
4340
4341 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4342 {
332d11bd 4343 if (GET_MODE_UNIT_BITSIZE (mode)
4344 == GET_MODE_UNIT_BITSIZE (inner_mode))
9845d120 4345 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
332d11bd 4346 else if (GET_MODE_UNIT_BITSIZE (mode)
4347 < GET_MODE_UNIT_BITSIZE (inner_mode))
9845d120 4348 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4349 else
4350 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4351 }
4352 else if (FLOAT_MODE_P (mode))
4353 {
3c800ea7 4354 gcc_assert (TREE_CODE (exp) != SSA_NAME);
9845d120 4355 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4356 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4357 else
4358 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4359 }
4360 else if (FLOAT_MODE_P (inner_mode))
4361 {
4362 if (unsignedp)
4363 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4364 else
4365 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4366 }
1048c155 4367 else if (GET_MODE_UNIT_PRECISION (mode)
4368 == GET_MODE_UNIT_PRECISION (inner_mode))
a8a727ad 4369 op0 = lowpart_subreg (mode, op0, inner_mode);
1048c155 4370 else if (GET_MODE_UNIT_PRECISION (mode)
4371 < GET_MODE_UNIT_PRECISION (inner_mode))
4372 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
72f8014e 4373 else if (UNARY_CLASS_P (exp)
f84ead57 4374 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4375 : unsignedp)
9ecadf14 4376 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
9845d120 4377 else
9ecadf14 4378 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
9845d120 4379
4380 return op0;
4381 }
4382
182cf5a9 4383 case MEM_REF:
e488c25f 4384 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4385 {
4386 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4387 TREE_OPERAND (exp, 0),
4388 TREE_OPERAND (exp, 1));
4389 if (newexp)
4390 return expand_debug_expr (newexp);
4391 }
4392 /* FALLTHROUGH */
9845d120 4393 case INDIRECT_REF:
29c05e22 4394 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9845d120 4395 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4396 if (!op0)
4397 return NULL;
4398
7ef770fa 4399 if (TREE_CODE (exp) == MEM_REF)
4400 {
f87ea39e 4401 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4402 || (GET_CODE (op0) == PLUS
4403 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4404 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4405 Instead just use get_inner_reference. */
4406 goto component_ref;
4407
7ef770fa 4408 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4409 if (!op1 || !CONST_INT_P (op1))
4410 return NULL;
4411
29c05e22 4412 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
7ef770fa 4413 }
4414
14a3093e 4415 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9845d120 4416
cd799492 4417 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4418 op0, as);
4419 if (op0 == NULL_RTX)
4420 return NULL;
9845d120 4421
cd799492 4422 op0 = gen_rtx_MEM (mode, op0);
9845d120 4423 set_mem_attributes (op0, exp, 0);
e488c25f 4424 if (TREE_CODE (exp) == MEM_REF
4425 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4426 set_mem_expr (op0, NULL_TREE);
bd1a81f7 4427 set_mem_addr_space (op0, as);
9845d120 4428
4429 return op0;
4430
4431 case TARGET_MEM_REF:
28daba6f 4432 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4433 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
9845d120 4434 return NULL;
4435
4436 op0 = expand_debug_expr
8d8150c8 4437 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
9845d120 4438 if (!op0)
4439 return NULL;
4440
27628c9e 4441 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
cd799492 4442 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4443 op0, as);
4444 if (op0 == NULL_RTX)
4445 return NULL;
9845d120 4446
4447 op0 = gen_rtx_MEM (mode, op0);
4448
4449 set_mem_attributes (op0, exp, 0);
bd1a81f7 4450 set_mem_addr_space (op0, as);
9845d120 4451
4452 return op0;
4453
f87ea39e 4454 component_ref:
9845d120 4455 case ARRAY_REF:
4456 case ARRAY_RANGE_REF:
4457 case COMPONENT_REF:
4458 case BIT_FIELD_REF:
4459 case REALPART_EXPR:
4460 case IMAGPART_EXPR:
4461 case VIEW_CONVERT_EXPR:
4462 {
3754d046 4463 machine_mode mode1;
9845d120 4464 HOST_WIDE_INT bitsize, bitpos;
4465 tree offset;
292237f3 4466 int reversep, volatilep = 0;
4467 tree tem
4468 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
b3b6e4b5 4469 &unsignedp, &reversep, &volatilep);
9845d120 4470 rtx orig_op0;
4471
9e3c8673 4472 if (bitsize == 0)
4473 return NULL;
4474
9845d120 4475 orig_op0 = op0 = expand_debug_expr (tem);
4476
4477 if (!op0)
4478 return NULL;
4479
4480 if (offset)
4481 {
3754d046 4482 machine_mode addrmode, offmode;
d89c81d6 4483
f4b490ea 4484 if (!MEM_P (op0))
4485 return NULL;
9845d120 4486
d89c81d6 4487 op0 = XEXP (op0, 0);
4488 addrmode = GET_MODE (op0);
4489 if (addrmode == VOIDmode)
4490 addrmode = Pmode;
4491
9845d120 4492 op1 = expand_debug_expr (offset);
4493 if (!op1)
4494 return NULL;
4495
d89c81d6 4496 offmode = GET_MODE (op1);
4497 if (offmode == VOIDmode)
4498 offmode = TYPE_MODE (TREE_TYPE (offset));
4499
4500 if (addrmode != offmode)
a8a727ad 4501 op1 = lowpart_subreg (addrmode, op1, offmode);
d89c81d6 4502
 4503	    /* Don't use offset_address here; we don't need a
4504 recognizable address, and we don't want to generate
4505 code. */
9ecadf14 4506 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4507 op0, op1));
9845d120 4508 }
4509
4510 if (MEM_P (op0))
4511 {
9e3c8673 4512 if (mode1 == VOIDmode)
4513 /* Bitfield. */
1a5d4b27 4514 mode1 = smallest_int_mode_for_size (bitsize);
9845d120 4515 if (bitpos >= BITS_PER_UNIT)
4516 {
4517 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4518 bitpos %= BITS_PER_UNIT;
4519 }
4520 else if (bitpos < 0)
4521 {
9e3c8673 4522 HOST_WIDE_INT units
4523 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2ca1ca3a 4524 op0 = adjust_address_nv (op0, mode1, -units);
9845d120 4525 bitpos += units * BITS_PER_UNIT;
4526 }
4527 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4528 op0 = adjust_address_nv (op0, mode, 0);
4529 else if (GET_MODE (op0) != mode1)
4530 op0 = adjust_address_nv (op0, mode1, 0);
4531 else
4532 op0 = copy_rtx (op0);
4533 if (op0 == orig_op0)
4534 op0 = shallow_copy_rtx (op0);
4535 set_mem_attributes (op0, exp, 0);
4536 }
4537
4538 if (bitpos == 0 && mode == GET_MODE (op0))
4539 return op0;
4540
5bd71193 4541 if (bitpos < 0)
4542 return NULL;
4543
37cd7b09 4544 if (GET_MODE (op0) == BLKmode)
4545 return NULL;
4546
9845d120 4547 if ((bitpos % BITS_PER_UNIT) == 0
4548 && bitsize == GET_MODE_BITSIZE (mode1))
4549 {
3754d046 4550 machine_mode opmode = GET_MODE (op0);
9845d120 4551
9845d120 4552 if (opmode == VOIDmode)
c8b13e49 4553 opmode = TYPE_MODE (TREE_TYPE (tem));
9845d120 4554
4555 /* This condition may hold if we're expanding the address
4556 right past the end of an array that turned out not to
4557 be addressable (i.e., the address was only computed in
4558 debug stmts). The gen_subreg below would rightfully
4559 crash, and the address doesn't really exist, so just
4560 drop it. */
4561 if (bitpos >= GET_MODE_BITSIZE (opmode))
4562 return NULL;
4563
406aed5d 4564 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4565 return simplify_gen_subreg (mode, op0, opmode,
4566 bitpos / BITS_PER_UNIT);
9845d120 4567 }
4568
4569 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4570 && TYPE_UNSIGNED (TREE_TYPE (exp))
4571 ? SIGN_EXTRACT
4572 : ZERO_EXTRACT, mode,
4573 GET_MODE (op0) != VOIDmode
c8b13e49 4574 ? GET_MODE (op0)
4575 : TYPE_MODE (TREE_TYPE (tem)),
9845d120 4576 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4577 }
4578
9845d120 4579 case ABS_EXPR:
9ecadf14 4580 return simplify_gen_unary (ABS, mode, op0, mode);
9845d120 4581
4582 case NEGATE_EXPR:
9ecadf14 4583 return simplify_gen_unary (NEG, mode, op0, mode);
9845d120 4584
4585 case BIT_NOT_EXPR:
9ecadf14 4586 return simplify_gen_unary (NOT, mode, op0, mode);
9845d120 4587
4588 case FLOAT_EXPR:
9ecadf14 4589 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4590 0)))
4591 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4592 inner_mode);
9845d120 4593
4594 case FIX_TRUNC_EXPR:
9ecadf14 4595 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4596 inner_mode);
9845d120 4597
4598 case POINTER_PLUS_EXPR:
af3d13d6 4599 /* For the rare target where pointers are not the same size as
 4600	 size_t, we need to check for mismatched modes and correct
4601 the addend. */
4602 if (op0 && op1
1aa8738f 4603 && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4604 && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4605 && op0_mode != op1_mode)
af3d13d6 4606 {
1aa8738f 4607 if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4608 /* If OP0 is a partial mode, then we must truncate, even
4609 if it has the same bitsize as OP1 as GCC's
4610 representation of partial modes is opaque. */
4611 || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4612 && (GET_MODE_BITSIZE (op0_mode)
4613 == GET_MODE_BITSIZE (op1_mode))))
4614 op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
af3d13d6 4615 else
4616 /* We always sign-extend, regardless of the signedness of
4617 the operand, because the operand is always unsigned
4618 here even if the original C expression is signed. */
1aa8738f 4619 op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
af3d13d6 4620 }
4621 /* Fall through. */
9845d120 4622 case PLUS_EXPR:
9ecadf14 4623 return simplify_gen_binary (PLUS, mode, op0, op1);
9845d120 4624
4625 case MINUS_EXPR:
57e83b58 4626 case POINTER_DIFF_EXPR:
9ecadf14 4627 return simplify_gen_binary (MINUS, mode, op0, op1);
9845d120 4628
4629 case MULT_EXPR:
9ecadf14 4630 return simplify_gen_binary (MULT, mode, op0, op1);
9845d120 4631
4632 case RDIV_EXPR:
4633 case TRUNC_DIV_EXPR:
4634 case EXACT_DIV_EXPR:
4635 if (unsignedp)
9ecadf14 4636 return simplify_gen_binary (UDIV, mode, op0, op1);
9845d120 4637 else
9ecadf14 4638 return simplify_gen_binary (DIV, mode, op0, op1);
9845d120 4639
4640 case TRUNC_MOD_EXPR:
9ecadf14 4641 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
9845d120 4642
4643 case FLOOR_DIV_EXPR:
4644 if (unsignedp)
9ecadf14 4645 return simplify_gen_binary (UDIV, mode, op0, op1);
9845d120 4646 else
4647 {
9ecadf14 4648 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4649 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
9845d120 4650 rtx adj = floor_sdiv_adjust (mode, mod, op1);
9ecadf14 4651 return simplify_gen_binary (PLUS, mode, div, adj);
9845d120 4652 }
4653
4654 case FLOOR_MOD_EXPR:
4655 if (unsignedp)
9ecadf14 4656 return simplify_gen_binary (UMOD, mode, op0, op1);
9845d120 4657 else
4658 {
9ecadf14 4659 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
9845d120 4660 rtx adj = floor_sdiv_adjust (mode, mod, op1);
9ecadf14 4661 adj = simplify_gen_unary (NEG, mode,
4662 simplify_gen_binary (MULT, mode, adj, op1),
4663 mode);
4664 return simplify_gen_binary (PLUS, mode, mod, adj);
9845d120 4665 }
4666
4667 case CEIL_DIV_EXPR:
4668 if (unsignedp)
4669 {
9ecadf14 4670 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4671 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
9845d120 4672 rtx adj = ceil_udiv_adjust (mode, mod, op1);
9ecadf14 4673 return simplify_gen_binary (PLUS, mode, div, adj);
9845d120 4674 }
4675 else
4676 {
9ecadf14 4677 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4678 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
9845d120 4679 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
9ecadf14 4680 return simplify_gen_binary (PLUS, mode, div, adj);
9845d120 4681 }
4682
4683 case CEIL_MOD_EXPR:
4684 if (unsignedp)
4685 {
9ecadf14 4686 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
9845d120 4687 rtx adj = ceil_udiv_adjust (mode, mod, op1);
9ecadf14 4688 adj = simplify_gen_unary (NEG, mode,
4689 simplify_gen_binary (MULT, mode, adj, op1),
4690 mode);
4691 return simplify_gen_binary (PLUS, mode, mod, adj);
9845d120 4692 }
4693 else
4694 {
9ecadf14 4695 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
9845d120 4696 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
9ecadf14 4697 adj = simplify_gen_unary (NEG, mode,
4698 simplify_gen_binary (MULT, mode, adj, op1),
4699 mode);
4700 return simplify_gen_binary (PLUS, mode, mod, adj);
9845d120 4701 }
4702
4703 case ROUND_DIV_EXPR:
4704 if (unsignedp)
4705 {
9ecadf14 4706 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4707 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
9845d120 4708 rtx adj = round_udiv_adjust (mode, mod, op1);
9ecadf14 4709 return simplify_gen_binary (PLUS, mode, div, adj);
9845d120 4710 }
4711 else
4712 {
9ecadf14 4713 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4714 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
9845d120 4715 rtx adj = round_sdiv_adjust (mode, mod, op1);
9ecadf14 4716 return simplify_gen_binary (PLUS, mode, div, adj);
9845d120 4717 }
4718
4719 case ROUND_MOD_EXPR:
4720 if (unsignedp)
4721 {
9ecadf14 4722 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
9845d120 4723 rtx adj = round_udiv_adjust (mode, mod, op1);
9ecadf14 4724 adj = simplify_gen_unary (NEG, mode,
4725 simplify_gen_binary (MULT, mode, adj, op1),
4726 mode);
4727 return simplify_gen_binary (PLUS, mode, mod, adj);
9845d120 4728 }
4729 else
4730 {
9ecadf14 4731 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
9845d120 4732 rtx adj = round_sdiv_adjust (mode, mod, op1);
9ecadf14 4733 adj = simplify_gen_unary (NEG, mode,
4734 simplify_gen_binary (MULT, mode, adj, op1),
4735 mode);
4736 return simplify_gen_binary (PLUS, mode, mod, adj);
9845d120 4737 }
4738
4739 case LSHIFT_EXPR:
9ecadf14 4740 return simplify_gen_binary (ASHIFT, mode, op0, op1);
9845d120 4741
4742 case RSHIFT_EXPR:
4743 if (unsignedp)
9ecadf14 4744 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
9845d120 4745 else
9ecadf14 4746 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
9845d120 4747
4748 case LROTATE_EXPR:
9ecadf14 4749 return simplify_gen_binary (ROTATE, mode, op0, op1);
9845d120 4750
4751 case RROTATE_EXPR:
9ecadf14 4752 return simplify_gen_binary (ROTATERT, mode, op0, op1);
9845d120 4753
4754 case MIN_EXPR:
9ecadf14 4755 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
9845d120 4756
4757 case MAX_EXPR:
9ecadf14 4758 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
9845d120 4759
4760 case BIT_AND_EXPR:
4761 case TRUTH_AND_EXPR:
9ecadf14 4762 return simplify_gen_binary (AND, mode, op0, op1);
9845d120 4763
4764 case BIT_IOR_EXPR:
4765 case TRUTH_OR_EXPR:
9ecadf14 4766 return simplify_gen_binary (IOR, mode, op0, op1);
9845d120 4767
4768 case BIT_XOR_EXPR:
4769 case TRUTH_XOR_EXPR:
9ecadf14 4770 return simplify_gen_binary (XOR, mode, op0, op1);
9845d120 4771
4772 case TRUTH_ANDIF_EXPR:
4773 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4774
4775 case TRUTH_ORIF_EXPR:
4776 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4777
4778 case TRUTH_NOT_EXPR:
9ecadf14 4779 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
9845d120 4780
4781 case LT_EXPR:
9ecadf14 4782 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4783 op0, op1);
9845d120 4784
4785 case LE_EXPR:
9ecadf14 4786 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4787 op0, op1);
9845d120 4788
4789 case GT_EXPR:
9ecadf14 4790 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4791 op0, op1);
9845d120 4792
4793 case GE_EXPR:
9ecadf14 4794 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4795 op0, op1);
9845d120 4796
4797 case EQ_EXPR:
9ecadf14 4798 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
9845d120 4799
4800 case NE_EXPR:
9ecadf14 4801 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
9845d120 4802
4803 case UNORDERED_EXPR:
9ecadf14 4804 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
9845d120 4805
4806 case ORDERED_EXPR:
9ecadf14 4807 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
9845d120 4808
4809 case UNLT_EXPR:
9ecadf14 4810 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
9845d120 4811
4812 case UNLE_EXPR:
9ecadf14 4813 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
9845d120 4814
4815 case UNGT_EXPR:
9ecadf14 4816 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
9845d120 4817
4818 case UNGE_EXPR:
9ecadf14 4819 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
9845d120 4820
4821 case UNEQ_EXPR:
9ecadf14 4822 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
9845d120 4823
4824 case LTGT_EXPR:
9ecadf14 4825 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
9845d120 4826
4827 case COND_EXPR:
4828 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4829
4830 case COMPLEX_EXPR:
4831 gcc_assert (COMPLEX_MODE_P (mode));
4832 if (GET_MODE (op0) == VOIDmode)
4833 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4834 if (GET_MODE (op1) == VOIDmode)
4835 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4836 return gen_rtx_CONCAT (mode, op0, op1);
4837
4e6677f8 4838 case CONJ_EXPR:
4839 if (GET_CODE (op0) == CONCAT)
4840 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
9ecadf14 4841 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4842 XEXP (op0, 1),
4843 GET_MODE_INNER (mode)));
4e6677f8 4844 else
4845 {
9fcae33e 4846 scalar_mode imode = GET_MODE_INNER (mode);
4e6677f8 4847 rtx re, im;
4848
4849 if (MEM_P (op0))
4850 {
4851 re = adjust_address_nv (op0, imode, 0);
4852 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4853 }
4854 else
4855 {
2cf1bb25 4856 scalar_int_mode ifmode;
4857 scalar_int_mode ihmode;
4e6677f8 4858 rtx halfsize;
2cf1bb25 4859 if (!int_mode_for_mode (mode).exists (&ifmode)
4860 || !int_mode_for_mode (imode).exists (&ihmode))
4e6677f8 4861 return NULL;
4862 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4863 re = op0;
4864 if (mode != ifmode)
4865 re = gen_rtx_SUBREG (ifmode, re, 0);
4866 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4867 if (imode != ihmode)
4868 re = gen_rtx_SUBREG (imode, re, 0);
4869 im = copy_rtx (op0);
4870 if (mode != ifmode)
4871 im = gen_rtx_SUBREG (ifmode, im, 0);
4872 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4873 if (imode != ihmode)
4874 im = gen_rtx_SUBREG (imode, im, 0);
4875 }
4876 im = gen_rtx_NEG (imode, im);
4877 return gen_rtx_CONCAT (mode, re, im);
4878 }
4879
9845d120 4880 case ADDR_EXPR:
4881 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4882 if (!op0 || !MEM_P (op0))
f9c61ef7 4883 {
4884 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4885 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4886 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
88f2e16b 4887 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4888 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
f9c61ef7 4889 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4890
4891 if (handled_component_p (TREE_OPERAND (exp, 0)))
4892 {
4893 HOST_WIDE_INT bitoffset, bitsize, maxsize;
292237f3 4894 bool reverse;
f9c61ef7 4895 tree decl
292237f3 4896 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4897 &bitsize, &maxsize, &reverse);
53e9c5c4 4898 if ((VAR_P (decl)
f9c61ef7 4899 || TREE_CODE (decl) == PARM_DECL
4900 || TREE_CODE (decl) == RESULT_DECL)
88f2e16b 4901 && (!TREE_ADDRESSABLE (decl)
4902 || target_for_debug_bind (decl))
f9c61ef7 4903 && (bitoffset % BITS_PER_UNIT) == 0
4904 && bitsize > 0
4905 && bitsize == maxsize)
29c05e22 4906 {
4907 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4908 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4909 }
f9c61ef7 4910 }
4911
8afb7c4b 4912 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4913 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4914 == ADDR_EXPR)
4915 {
4916 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4917 0));
4918 if (op0 != NULL
4919 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4920 || (GET_CODE (op0) == PLUS
4921 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4922 && CONST_INT_P (XEXP (op0, 1)))))
4923 {
4924 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4925 1));
4926 if (!op1 || !CONST_INT_P (op1))
4927 return NULL;
4928
4929 return plus_constant (mode, op0, INTVAL (op1));
4930 }
4931 }
4932
f9c61ef7 4933 return NULL;
4934 }
9845d120 4935
14a3093e 4936 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
03b7a719 4937 addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
4938 op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
d89c81d6 4939
4940 return op0;
9845d120 4941
4942 case VECTOR_CST:
fadf62f4 4943 {
1f547280 4944 unsigned i, nelts;
fadf62f4 4945
1f547280 4946 nelts = VECTOR_CST_NELTS (exp);
4947 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
fadf62f4 4948
1f547280 4949 for (i = 0; i < nelts; ++i)
fadf62f4 4950 {
4951 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4952 if (!op1)
4953 return NULL;
4954 XVECEXP (op0, 0, i) = op1;
4955 }
4956
4957 return op0;
4958 }
9845d120 4959
4960 case CONSTRUCTOR:
3c25489e 4961 if (TREE_CLOBBER_P (exp))
4962 return NULL;
4963 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
9845d120 4964 {
4965 unsigned i;
4966 tree val;
4967
4968 op0 = gen_rtx_CONCATN
4969 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4970
4971 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4972 {
4973 op1 = expand_debug_expr (val);
4974 if (!op1)
4975 return NULL;
4976 XVECEXP (op0, 0, i) = op1;
4977 }
4978
4979 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4980 {
4981 op1 = expand_debug_expr
385f3f36 4982 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
9845d120 4983
4984 if (!op1)
4985 return NULL;
4986
4987 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4988 XVECEXP (op0, 0, i) = op1;
4989 }
4990
4991 return op0;
4992 }
4993 else
4994 goto flag_unsupported;
4995
4996 case CALL_EXPR:
4997 /* ??? Maybe handle some builtins? */
4998 return NULL;
4999
5000 case SSA_NAME:
5001 {
42acab1c 5002 gimple *g = get_gimple_for_ssa_name (exp);
3c800ea7 5003 if (g)
5004 {
54497144 5005 tree t = NULL_TREE;
5006 if (deep_ter_debug_map)
5007 {
5008 tree *slot = deep_ter_debug_map->get (exp);
5009 if (slot)
5010 t = *slot;
5011 }
5012 if (t == NULL_TREE)
5013 t = gimple_assign_rhs_to_tree (g);
5014 op0 = expand_debug_expr (t);
3c800ea7 5015 if (!op0)
5016 return NULL;
5017 }
5018 else
5019 {
b2df3bbf 5020	      /* If this is a reference to the incoming value of a
 5021		 parameter whose incoming value is never used in the
 5022		 code, use the PARM_DECL's DECL_RTL if set.  */
5024 if (SSA_NAME_IS_DEFAULT_DEF (exp)
5025 && SSA_NAME_VAR (exp)
5026 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5027 && has_zero_uses (exp))
5028 {
5029 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5030 if (op0)
5031 goto adjust_mode;
5032 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5033 if (op0)
5034 goto adjust_mode;
5035 }
5036
3c800ea7 5037 int part = var_to_partition (SA.map, exp);
9845d120 5038
3c800ea7 5039 if (part == NO_PARTITION)
b2df3bbf 5040 return NULL;
9845d120 5041
3c800ea7 5042 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
9845d120 5043
ce6d059c 5044 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3c800ea7 5045 }
9845d120 5046 goto adjust_mode;
5047 }
5048
5049 case ERROR_MARK:
5050 return NULL;
5051
b54ee9da 5052    /* Vector stuff.  For most of these tree codes there is no rtl code.  */
5053 case REALIGN_LOAD_EXPR:
b54ee9da 5054 case VEC_COND_EXPR:
b54ee9da 5055 case VEC_PACK_FIX_TRUNC_EXPR:
5056 case VEC_PACK_SAT_EXPR:
5057 case VEC_PACK_TRUNC_EXPR:
b54ee9da 5058 case VEC_UNPACK_FLOAT_HI_EXPR:
5059 case VEC_UNPACK_FLOAT_LO_EXPR:
5060 case VEC_UNPACK_HI_EXPR:
5061 case VEC_UNPACK_LO_EXPR:
5062 case VEC_WIDEN_MULT_HI_EXPR:
5063 case VEC_WIDEN_MULT_LO_EXPR:
79a78f7f 5064 case VEC_WIDEN_MULT_EVEN_EXPR:
5065 case VEC_WIDEN_MULT_ODD_EXPR:
6083c152 5066 case VEC_WIDEN_LSHIFT_HI_EXPR:
5067 case VEC_WIDEN_LSHIFT_LO_EXPR:
3557cb99 5068 case VEC_PERM_EXPR:
b54ee9da 5069 return NULL;
5070
96504875 5071 /* Misc codes. */
b54ee9da 5072 case ADDR_SPACE_CONVERT_EXPR:
5073 case FIXED_CONVERT_EXPR:
5074 case OBJ_TYPE_REF:
5075 case WITH_SIZE_EXPR:
2506d97a 5076 case BIT_INSERT_EXPR:
b54ee9da 5077 return NULL;
5078
5079 case DOT_PROD_EXPR:
5080 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5081 && SCALAR_INT_MODE_P (mode))
5082 {
9ecadf14 5083 op0
5084 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5085 0)))
5086 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5087 inner_mode);
5088 op1
5089 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5090 1)))
5091 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5092 inner_mode);
5093 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5094 return simplify_gen_binary (PLUS, mode, op0, op2);
b54ee9da 5095 }
5096 return NULL;
5097
5098 case WIDEN_MULT_EXPR:
00f4f705 5099 case WIDEN_MULT_PLUS_EXPR:
5100 case WIDEN_MULT_MINUS_EXPR:
b54ee9da 5101 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5102 && SCALAR_INT_MODE_P (mode))
5103 {
9ecadf14 5104 inner_mode = GET_MODE (op0);
b54ee9da 5105 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
62be004c 5106 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
b54ee9da 5107 else
62be004c 5108 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
b54ee9da 5109 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
62be004c 5110 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
b54ee9da 5111 else
62be004c 5112 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
9ecadf14 5113 op0 = simplify_gen_binary (MULT, mode, op0, op1);
00f4f705 5114 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5115 return op0;
5116 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
9ecadf14 5117 return simplify_gen_binary (PLUS, mode, op0, op2);
00f4f705 5118 else
9ecadf14 5119 return simplify_gen_binary (MINUS, mode, op2, op0);
b54ee9da 5120 }
5121 return NULL;
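
      /* Illustrative example (not from the sources): for a signed
	 WIDEN_MULT_PLUS_EXPR such as w = (long) a * (long) b + c, the
	 handling above yields, on a target where long is DImode, the
	 debug location
	   (plus:DI (mult:DI (sign_extend:DI a) (sign_extend:DI b)) c).  */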
5122
96504875 5123 case MULT_HIGHPART_EXPR:
5124 /* ??? Similar to the above. */
5125 return NULL;
5126
b54ee9da 5127 case WIDEN_SUM_EXPR:
3557cb99 5128 case WIDEN_LSHIFT_EXPR:
b54ee9da 5129 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5130 && SCALAR_INT_MODE_P (mode))
5131 {
9ecadf14 5132 op0
5133 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5134 0)))
5135 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5136 inner_mode);
3557cb99 5137 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5138 ? ASHIFT : PLUS, mode, op0, op1);
b54ee9da 5139 }
5140 return NULL;
5141
156f51b9 5142 case FMA_EXPR:
9ecadf14 5143 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
156f51b9 5144
9845d120 5145 default:
5146 flag_unsupported:
382ecba7 5147 if (flag_checking)
5148 {
5149 debug_tree (exp);
5150 gcc_unreachable ();
5151 }
9845d120 5152 return NULL;
9845d120 5153 }
5154}
5155
841424cc 5156/* Return an RTX equivalent to the source bind value of the tree expression
5157 EXP. */
5158
5159static rtx
5160expand_debug_source_expr (tree exp)
5161{
5162 rtx op0 = NULL_RTX;
3754d046 5163 machine_mode mode = VOIDmode, inner_mode;
841424cc 5164
5165 switch (TREE_CODE (exp))
5166 {
5167 case PARM_DECL:
5168 {
841424cc 5169 mode = DECL_MODE (exp);
8ee59e4e 5170 op0 = expand_debug_parm_decl (exp);
5171 if (op0)
5172 break;
841424cc 5173 /* See if this isn't an argument that has been completely
5174 optimized out. */
5175 if (!DECL_RTL_SET_P (exp)
8ee59e4e 5176 && !DECL_INCOMING_RTL (exp)
841424cc 5177 && DECL_ABSTRACT_ORIGIN (current_function_decl))
5178 {
42c442a9 5179 tree aexp = DECL_ORIGIN (exp);
841424cc 5180 if (DECL_CONTEXT (aexp)
5181 == DECL_ABSTRACT_ORIGIN (current_function_decl))
5182 {
f1f41a6c 5183 vec<tree, va_gc> **debug_args;
841424cc 5184 unsigned int ix;
5185 tree ddecl;
841424cc 5186 debug_args = decl_debug_args_lookup (current_function_decl);
5187 if (debug_args != NULL)
5188 {
f1f41a6c 5189 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
841424cc 5190 ix += 2)
5191 if (ddecl == aexp)
5192 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5193 }
5194 }
5195 }
5196 break;
5197 }
5198 default:
5199 break;
5200 }
5201
5202 if (op0 == NULL_RTX)
5203 return NULL_RTX;
5204
5205 inner_mode = GET_MODE (op0);
5206 if (mode == inner_mode)
5207 return op0;
5208
5209 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5210 {
332d11bd 5211 if (GET_MODE_UNIT_BITSIZE (mode)
5212 == GET_MODE_UNIT_BITSIZE (inner_mode))
841424cc 5213 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
332d11bd 5214 else if (GET_MODE_UNIT_BITSIZE (mode)
5215 < GET_MODE_UNIT_BITSIZE (inner_mode))
841424cc 5216 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5217 else
5218 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5219 }
5220 else if (FLOAT_MODE_P (mode))
5221 gcc_unreachable ();
5222 else if (FLOAT_MODE_P (inner_mode))
5223 {
5224 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5225 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5226 else
5227 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5228 }
1048c155 5229 else if (GET_MODE_UNIT_PRECISION (mode)
5230 == GET_MODE_UNIT_PRECISION (inner_mode))
a8a727ad 5231 op0 = lowpart_subreg (mode, op0, inner_mode);
1048c155 5232 else if (GET_MODE_UNIT_PRECISION (mode)
5233 < GET_MODE_UNIT_PRECISION (inner_mode))
5234 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
841424cc 5235 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5236 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5237 else
5238 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5239
5240 return op0;
5241}
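
/* Illustrative sketch (not part of the sources): for an inlined callee
   whose parameter N was entirely optimized away, the PARM_DECL case
   above can map N back to its abstract origin via decl_debug_args and
   return (debug_parameter_ref:SImode N) -- the mode only by way of
   example -- letting the debug back end still refer to the parameter's
   value at call sites.  */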
5242
848d0536 5243/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5244 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5245 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
5246
5247static void
74a0cbc4 5248avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
848d0536 5249{
5250 rtx exp = *exp_p;
5251
5252 if (exp == NULL_RTX)
5253 return;
5254
5255 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5256 return;
5257
5258 if (depth == 4)
5259 {
5260 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
5261 rtx dval = make_debug_expr_from_rtl (exp);
5262
5263 /* Emit a debug bind insn before INSN. */
5264 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5265 DEBUG_EXPR_TREE_DECL (dval), exp,
5266 VAR_INIT_STATUS_INITIALIZED);
5267
5268 emit_debug_insn_before (bind, insn);
5269 *exp_p = dval;
5270 return;
5271 }
5272
5273 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5274 int i, j;
5275 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5276 switch (*format_ptr++)
5277 {
5278 case 'e':
5279 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5280 break;
5281
5282 case 'E':
5283 case 'V':
5284 for (j = 0; j < XVECLEN (exp, i); j++)
5285 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5286 break;
5287
5288 default:
5289 break;
5290 }
5291}
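
/* Worked example (illustrative rtl, not from the sources): with the
   depth limit of 4, a location that nests five levels deep, say
     (plus (mult (plus (mult (plus x y) z) w) v) u)
   has its innermost (plus x y) at depth 4, so that subexpression is
   split out into a new DEBUG_EXPR D#1 bound by an extra debug insn
   emitted just before INSN, and the location becomes
     (plus (mult (plus (mult D#1 z) w) v) u).  */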
5292
9845d120 5293/* Expand the _LOCs in debug insns. We run this after expanding all
5294 regular insns, so that any variables referenced in the function
5295 will have their DECL_RTLs set. */
5296
5297static void
5298expand_debug_locations (void)
5299{
74a0cbc4 5300 rtx_insn *insn;
5301 rtx_insn *last = get_last_insn ();
9845d120 5302 int save_strict_alias = flag_strict_aliasing;
5303
5304 /* New alias sets while setting up memory attributes cause
 5305     -fcompare-debug failures, even though they don't bring about any
5306 codegen changes. */
5307 flag_strict_aliasing = 0;
5308
5309 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
c64f38bf 5310 if (DEBUG_BIND_INSN_P (insn))
9845d120 5311 {
5312 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
74a0cbc4 5313 rtx val;
5314 rtx_insn *prev_insn, *insn2;
3754d046 5315 machine_mode mode;
9845d120 5316
5317 if (value == NULL_TREE)
5318 val = NULL_RTX;
5319 else
5320 {
841424cc 5321 if (INSN_VAR_LOCATION_STATUS (insn)
5322 == VAR_INIT_STATUS_UNINITIALIZED)
5323 val = expand_debug_source_expr (value);
54497144 5324 /* The avoid_deep_ter_for_debug function inserts
5325 debug bind stmts after SSA_NAME definition, with the
 5326	       SSA_NAME as the whole bind location.  Temporarily disable
 5327	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5328 being defined in this DEBUG_INSN. */
5329 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5330 {
5331 tree *slot = deep_ter_debug_map->get (value);
5332 if (slot)
5333 {
5334 if (*slot == INSN_VAR_LOCATION_DECL (insn))
5335 *slot = NULL_TREE;
5336 else
5337 slot = NULL;
5338 }
5339 val = expand_debug_expr (value);
5340 if (slot)
5341 *slot = INSN_VAR_LOCATION_DECL (insn);
5342 }
841424cc 5343 else
5344 val = expand_debug_expr (value);
9845d120 5345 gcc_assert (last == get_last_insn ());
5346 }
5347
5348 if (!val)
5349 val = gen_rtx_UNKNOWN_VAR_LOC ();
5350 else
5351 {
5352 mode = GET_MODE (INSN_VAR_LOCATION (insn));
5353
5354 gcc_assert (mode == GET_MODE (val)
5355 || (GET_MODE (val) == VOIDmode
efa08fc2 5356 && (CONST_SCALAR_INT_P (val)
9845d120 5357 || GET_CODE (val) == CONST_FIXED
9845d120 5358 || GET_CODE (val) == LABEL_REF)));
5359 }
5360
5361 INSN_VAR_LOCATION_LOC (insn) = val;
848d0536 5362 prev_insn = PREV_INSN (insn);
5363 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5364 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
9845d120 5365 }
5366
5367 flag_strict_aliasing = save_strict_alias;
5368}
5369
f7974718 5370/* Swap the operands of commutative operations so that the more
 5371   expensive operand is expanded first.  */
5372
5373static void
5374reorder_operands (basic_block bb)
5375{
5376 unsigned int *lattice; /* Hold cost of each statement. */
5377 unsigned int i = 0, n = 0;
5378 gimple_stmt_iterator gsi;
5379 gimple_seq stmts;
42acab1c 5380 gimple *stmt;
f7974718 5381 bool swap;
5382 tree op0, op1;
5383 ssa_op_iter iter;
5384 use_operand_p use_p;
42acab1c 5385 gimple *def0, *def1;
f7974718 5386
5387 /* Compute cost of each statement using estimate_num_insns. */
5388 stmts = bb_seq (bb);
5389 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5390 {
5391 stmt = gsi_stmt (gsi);
31aebeec 5392 if (!is_gimple_debug (stmt))
5393 gimple_set_uid (stmt, n++);
f7974718 5394 }
5395 lattice = XNEWVEC (unsigned int, n);
5396 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5397 {
5398 unsigned cost;
5399 stmt = gsi_stmt (gsi);
31aebeec 5400 if (is_gimple_debug (stmt))
5401 continue;
f7974718 5402 cost = estimate_num_insns (stmt, &eni_size_weights);
5403 lattice[i] = cost;
5404 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5405 {
5406 tree use = USE_FROM_PTR (use_p);
42acab1c 5407 gimple *def_stmt;
f7974718 5408 if (TREE_CODE (use) != SSA_NAME)
5409 continue;
5410 def_stmt = get_gimple_for_ssa_name (use);
5411 if (!def_stmt)
5412 continue;
5413 lattice[i] += lattice[gimple_uid (def_stmt)];
5414 }
5415 i++;
5416 if (!is_gimple_assign (stmt)
5417 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5418 continue;
5419 op0 = gimple_op (stmt, 1);
5420 op1 = gimple_op (stmt, 2);
5421 if (TREE_CODE (op0) != SSA_NAME
5422 || TREE_CODE (op1) != SSA_NAME)
5423 continue;
5424 /* Swap operands if the second one is more expensive. */
5425 def0 = get_gimple_for_ssa_name (op0);
f7974718 5426 def1 = get_gimple_for_ssa_name (op1);
5427 if (!def1)
5428 continue;
5429 swap = false;
4b8069b9 5430 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
f7974718 5431 swap = true;
5432 if (swap)
5433 {
5434 if (dump_file && (dump_flags & TDF_DETAILS))
5435 {
5436 fprintf (dump_file, "Swap operands in stmt:\n");
5437 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5438 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
4b8069b9 5439 def0 ? lattice[gimple_uid (def0)] : 0,
f7974718 5440 lattice[gimple_uid (def1)]);
5441 }
5442 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5443 gimple_assign_rhs2_ptr (stmt));
5444 }
5445 }
5446 XDELETE (lattice);
5447}
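
/* Illustrative example (hypothetical costs): in
     t1 = x + y;	// lattice cost 1
     t2 = a / b;	// lattice cost 4, say, per estimate_num_insns
     r  = t1 + t2;
   the definition of t2 is costlier than that of t1, so the last
   statement is rewritten as r = t2 + t1 and the expensive operand is
   expanded first.  */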
5448
0ec80471 5449/* Expand basic block BB from GIMPLE trees to RTL. */
5450
5451static basic_block
3c919612 5452expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
0ec80471 5453{
75a70cf9 5454 gimple_stmt_iterator gsi;
5455 gimple_seq stmts;
42acab1c 5456 gimple *stmt = NULL;
cef3d8ad 5457 rtx_note *note;
74a0cbc4 5458 rtx_insn *last;
0ec80471 5459 edge e;
cd665a06 5460 edge_iterator ei;
0ec80471 5461
5462 if (dump_file)
75a70cf9 5463 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5464 bb->index);
5465
5466 /* Note that since we are now transitioning from GIMPLE to RTL, we
5467 cannot use the gsi_*_bb() routines because they expect the basic
5468 block to be in GIMPLE, instead of RTL. Therefore, we need to
5469 access the BB sequence directly. */
f7974718 5470 if (optimize)
5471 reorder_operands (bb);
75a70cf9 5472 stmts = bb_seq (bb);
924c4c71 5473 bb->il.gimple.seq = NULL;
5474 bb->il.gimple.phi_nodes = NULL;
7dfb44a0 5475 rtl_profile_for_bb (bb);
e0dde8f8 5476 init_rtl_bb_info (bb);
5477 bb->flags |= BB_RTL;
5478
63f88450 5479  /* Remove the RETURN_EXPR if we may fall through to the exit
5480 instead. */
75a70cf9 5481 gsi = gsi_last (stmts);
5482 if (!gsi_end_p (gsi)
5483 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
63f88450 5484 {
1a91d914 5485 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
63f88450 5486
5487 gcc_assert (single_succ_p (bb));
34154e27 5488 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
63f88450 5489
34154e27 5490 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
75a70cf9 5491 && !gimple_return_retval (ret_stmt))
63f88450 5492 {
75a70cf9 5493 gsi_remove (&gsi, false);
63f88450 5494 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5495 }
5496 }
5497
75a70cf9 5498 gsi = gsi_start (stmts);
5499 if (!gsi_end_p (gsi))
6313ae8b 5500 {
75a70cf9 5501 stmt = gsi_stmt (gsi);
5502 if (gimple_code (stmt) != GIMPLE_LABEL)
5503 stmt = NULL;
6313ae8b 5504 }
0ec80471 5505
0699065d 5506 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
6313ae8b 5507
5508 if (stmt || elt)
0ec80471 5509 {
5510 last = get_last_insn ();
5511
6313ae8b 5512 if (stmt)
5513 {
16c9337c 5514 expand_gimple_stmt (stmt);
75a70cf9 5515 gsi_next (&gsi);
6313ae8b 5516 }
5517
5518 if (elt)
5f8841a5 5519 emit_label (*elt);
0ec80471 5520
26bb3cb2 5521 BB_HEAD (bb) = NEXT_INSN (last);
6d7dc5b9 5522 if (NOTE_P (BB_HEAD (bb)))
26bb3cb2 5523 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
0ec80471 5524 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
49377e21 5525
75a70cf9 5526 maybe_dump_rtl_for_gimple_stmt (stmt, last);
0ec80471 5527 }
5528 else
26bb3cb2 5529 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
0ec80471 5530
5531 NOTE_BASIC_BLOCK (note) = bb;
5532
75a70cf9 5533 for (; !gsi_end_p (gsi); gsi_next (&gsi))
0ec80471 5534 {
c578459e 5535 basic_block new_bb;
0ec80471 5536
9845d120 5537 stmt = gsi_stmt (gsi);
3c800ea7 5538
5539 /* If this statement is a non-debug one, and we generate debug
5540 insns, then this one might be the last real use of a TERed
5541 SSA_NAME, but where there are still some debug uses further
5542 down. Expanding the current SSA name in such further debug
5543 uses by their RHS might lead to wrong debug info, as coalescing
5544 might make the operands of such RHS be placed into the same
5545 pseudo as something else. Like so:
5546 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5547 use(a_1);
5548 a_2 = ...
5549 #DEBUG ... => a_1
5550 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
 5551	 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5552 the write to a_2 would actually have clobbered the place which
5553 formerly held a_0.
5554
5555 So, instead of that, we recognize the situation, and generate
5556 debug temporaries at the last real use of TERed SSA names:
5557 a_1 = a_0 + 1;
5558 #DEBUG #D1 => a_1
5559 use(a_1);
5560 a_2 = ...
5561 #DEBUG ... => #D1
5562 */
c64f38bf 5563 if (MAY_HAVE_DEBUG_BIND_INSNS
3c800ea7 5564 && SA.values
5565 && !is_gimple_debug (stmt))
5566 {
5567 ssa_op_iter iter;
5568 tree op;
42acab1c 5569 gimple *def;
3c800ea7 5570
5169661d 5571 location_t sloc = curr_insn_location ();
3c800ea7 5572
5573 /* Look for SSA names that have their last use here (TERed
5574 names always have only one real use). */
5575 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5576 if ((def = get_gimple_for_ssa_name (op)))
5577 {
5578 imm_use_iterator imm_iter;
5579 use_operand_p use_p;
5580 bool have_debug_uses = false;
5581
5582 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5583 {
5584 if (gimple_debug_bind_p (USE_STMT (use_p)))
5585 {
5586 have_debug_uses = true;
5587 break;
5588 }
5589 }
5590
5591 if (have_debug_uses)
5592 {
71b39a64 5593 /* OP is a TERed SSA name, with DEF its defining
3c800ea7 5594 statement, and where OP is used in further debug
5595 instructions. Generate a debug temporary, and
5596 replace all uses of OP in debug insns with that
5597 temporary. */
42acab1c 5598 gimple *debugstmt;
3c800ea7 5599 tree value = gimple_assign_rhs_to_tree (def);
5600 tree vexpr = make_node (DEBUG_EXPR_DECL);
5601 rtx val;
3754d046 5602 machine_mode mode;
3c800ea7 5603
5169661d 5604 set_curr_insn_location (gimple_location (def));
3c800ea7 5605
5606 DECL_ARTIFICIAL (vexpr) = 1;
5607 TREE_TYPE (vexpr) = TREE_TYPE (value);
5608 if (DECL_P (value))
5609 mode = DECL_MODE (value);
5610 else
5611 mode = TYPE_MODE (TREE_TYPE (value));
adc78298 5612 SET_DECL_MODE (vexpr, mode);
3c800ea7 5613
5614 val = gen_rtx_VAR_LOCATION
5615 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5616
3e549002 5617 emit_debug_insn (val);
3c800ea7 5618
5619 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5620 {
5621 if (!gimple_debug_bind_p (debugstmt))
5622 continue;
5623
5624 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5625 SET_USE (use_p, vexpr);
5626
5627 update_stmt (debugstmt);
5628 }
5629 }
5630 }
5169661d 5631 set_curr_insn_location (sloc);
3c800ea7 5632 }
5633
8cee8dc0 5634 currently_expanding_gimple_stmt = stmt;
9845d120 5635
0ec80471 5636 /* Expand this statement, then evaluate the resulting RTL and
5637 fixup the CFG accordingly. */
75a70cf9 5638 if (gimple_code (stmt) == GIMPLE_COND)
c578459e 5639 {
1a91d914 5640 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
c578459e 5641 if (new_bb)
5642 return new_bb;
5643 }
9845d120 5644 else if (gimple_debug_bind_p (stmt))
5645 {
5169661d 5646 location_t sloc = curr_insn_location ();
9845d120 5647 gimple_stmt_iterator nsi = gsi;
5648
5649 for (;;)
5650 {
5651 tree var = gimple_debug_bind_get_var (stmt);
5652 tree value;
5653 rtx val;
3754d046 5654 machine_mode mode;
9845d120 5655
9bae88bc 5656 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5657 && TREE_CODE (var) != LABEL_DECL
5658 && !target_for_debug_bind (var))
5659 goto delink_debug_stmt;
5660
9845d120 5661 if (gimple_debug_bind_has_value_p (stmt))
5662 value = gimple_debug_bind_get_value (stmt);
5663 else
5664 value = NULL_TREE;
5665
5666 last = get_last_insn ();
5667
5169661d 5668 set_curr_insn_location (gimple_location (stmt));
9845d120 5669
5670 if (DECL_P (var))
5671 mode = DECL_MODE (var);
5672 else
5673 mode = TYPE_MODE (TREE_TYPE (var));
5674
5675 val = gen_rtx_VAR_LOCATION
5676 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5677
1084097d 5678 emit_debug_insn (val);
9845d120 5679
5680 if (dump_file && (dump_flags & TDF_DETAILS))
5681 {
5682 /* We can't dump the insn with a TREE where an RTX
5683 is expected. */
3e549002 5684 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
9845d120 5685 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3e549002 5686 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
9845d120 5687 }
5688
9bae88bc 5689 delink_debug_stmt:
3c800ea7 5690 /* In order not to generate too many debug temporaries,
5691 we delink all uses of debug statements we already expanded.
5692 Therefore debug statements between definition and real
5693 use of TERed SSA names will continue to use the SSA name,
5694 and not be replaced with debug temps. */
5695 delink_stmt_imm_use (stmt);
5696
9845d120 5697 gsi = nsi;
5698 gsi_next (&nsi);
5699 if (gsi_end_p (nsi))
5700 break;
5701 stmt = gsi_stmt (nsi);
5702 if (!gimple_debug_bind_p (stmt))
5703 break;
5704 }
5705
5169661d 5706 set_curr_insn_location (sloc);
841424cc 5707 }
5708 else if (gimple_debug_source_bind_p (stmt))
5709 {
5169661d 5710 location_t sloc = curr_insn_location ();
841424cc 5711 tree var = gimple_debug_source_bind_get_var (stmt);
5712 tree value = gimple_debug_source_bind_get_value (stmt);
5713 rtx val;
3754d046 5714 machine_mode mode;
841424cc 5715
5716 last = get_last_insn ();
5717
5169661d 5718 set_curr_insn_location (gimple_location (stmt));
841424cc 5719
5720 mode = DECL_MODE (var);
5721
5722 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5723 VAR_INIT_STATUS_UNINITIALIZED);
5724
5725 emit_debug_insn (val);
5726
5727 if (dump_file && (dump_flags & TDF_DETAILS))
5728 {
5729 /* We can't dump the insn with a TREE where an RTX
5730 is expected. */
5731 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5732 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5733 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5734 }
5735
5169661d 5736 set_curr_insn_location (sloc);
9845d120 5737 }
3ced8962 5738 else
0ec80471 5739 {
1a91d914 5740 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5741 if (call_stmt
5742 && gimple_call_tail_p (call_stmt)
3c919612 5743 && disable_tail_calls)
1a91d914 5744 gimple_call_set_tail (call_stmt, false);
3c919612 5745
1a91d914 5746 if (call_stmt && gimple_call_tail_p (call_stmt))
c578459e 5747 {
5748 bool can_fallthru;
1a91d914 5749 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
c578459e 5750 if (new_bb)
5751 {
5752 if (can_fallthru)
5753 bb = new_bb;
5754 else
5755 return new_bb;
5756 }
5757 }
2a3ebafa 5758 else
49377e21 5759 {
a8dd994c 5760 def_operand_p def_p;
a8dd994c 5761 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5762
5763 if (def_p != NULL)
5764 {
5765 /* Ignore this stmt if it is in the list of
5766 replaceable expressions. */
5767 if (SA.values
48e1416a 5768 && bitmap_bit_p (SA.values,
dfdbf3fd 5769 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
a8dd994c 5770 continue;
5771 }
16c9337c 5772 last = expand_gimple_stmt (stmt);
75a70cf9 5773 maybe_dump_rtl_for_gimple_stmt (stmt, last);
49377e21 5774 }
0ec80471 5775 }
5776 }
5777
8cee8dc0 5778 currently_expanding_gimple_stmt = NULL;
5779
9c388755 5780 /* Expand implicit goto and convert goto_locus. */
63f88450 5781 FOR_EACH_EDGE (e, ei, bb->succs)
5782 {
8e7408e3 5783 if (e->goto_locus != UNKNOWN_LOCATION)
5169661d 5784 set_curr_insn_location (e->goto_locus);
9c388755 5785 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5786 {
5787 emit_jump (label_rtx_for_bb (e->dest));
5788 e->flags &= ~EDGE_FALLTHRU;
5789 }
63f88450 5790 }
5791
8a9ad55b 5792  /* Expanded RTL can create a jump as the last instruction of the block.
 5793     Later this might be mistaken for a jump to the successor and break edge insertion.
 5794     We need to insert a dummy move to prevent this.  PR41440. */
5795 if (single_succ_p (bb)
5796 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5797 && (last = get_last_insn ())
b941a5ed 5798 && (JUMP_P (last)
5799 || (DEBUG_INSN_P (last)
5800 && JUMP_P (prev_nondebug_insn (last)))))
8a9ad55b 5801 {
5802 rtx dummy = gen_reg_rtx (SImode);
5803 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5804 }
5805
0ec80471 5806 do_pending_stack_adjust ();
5807
822e391f 5808 /* Find the block tail. The last insn in the block is the insn
0ec80471 5809 before a barrier and/or table jump insn. */
5810 last = get_last_insn ();
6d7dc5b9 5811 if (BARRIER_P (last))
0ec80471 5812 last = PREV_INSN (last);
5813 if (JUMP_TABLE_DATA_P (last))
5814 last = PREV_INSN (PREV_INSN (last));
26bb3cb2 5815 BB_END (bb) = last;
491e04ef 5816
0ec80471 5817 update_bb_for_insn (bb);
3ced8962 5818
0ec80471 5819 return bb;
5820}
5821
5822
5823/* Create a basic block for initialization code. */
5824
5825static basic_block
5826construct_init_block (void)
5827{
5828 basic_block init_block, first_block;
9a755727 5829 edge e = NULL;
5830 int flags;
e20bf721 5831
9a755727 5832 /* Multiple entry points not supported yet. */
34154e27 5833 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5834 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5835 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5836 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5837 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
0ec80471 5838
34154e27 5839 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
e20bf721 5840
9a755727 5841  /* When the entry edge points to the first basic block, we don't need a
 5842     jump; otherwise we have to jump to the proper target.  */
34154e27 5843 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
9a755727 5844 {
75a70cf9 5845 tree label = gimple_block_label (e->dest);
9a755727 5846
f9a00e9e 5847 emit_jump (jump_target_rtx (label));
9a755727 5848 flags = 0;
e20bf721 5849 }
9a755727 5850 else
5851 flags = EDGE_FALLTHRU;
0ec80471 5852
5853 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5854 get_last_insn (),
34154e27 5855 ENTRY_BLOCK_PTR_FOR_FN (cfun));
34154e27 5856 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
b3083327 5857 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
0ec80471 5858 if (e)
5859 {
5860 first_block = e->dest;
5861 redirect_edge_succ (e, init_block);
720cfc43 5862 e = make_single_succ_edge (init_block, first_block, flags);
0ec80471 5863 }
5864 else
720cfc43 5865 e = make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5866 EDGE_FALLTHRU);
0ec80471 5867
5868 update_bb_for_insn (init_block);
5869 return init_block;
5870}
5871
375c1c8a 5872/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5873 found in the block tree. */
5874
5875static void
5876set_block_levels (tree block, int level)
5877{
5878 while (block)
5879 {
5880 BLOCK_NUMBER (block) = level;
5881 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5882 block = BLOCK_CHAIN (block);
5883 }
5884}
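
/* For example (illustrative): given the block tree
     BLOCK A
       BLOCK B
	 BLOCK C
       BLOCK D
   this assigns BLOCK_NUMBER 0 to A, 1 to B and D, and 2 to C.  */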
0ec80471 5885
5886/* Create a block containing landing pads and similar stuff. */
5887
5888static void
5889construct_exit_block (void)
5890{
74a0cbc4 5891 rtx_insn *head = get_last_insn ();
5892 rtx_insn *end;
0ec80471 5893 basic_block exit_block;
cd665a06 5894 edge e, e2;
5895 unsigned ix;
5896 edge_iterator ei;
04e7d9cb 5897 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
74a0cbc4 5898 rtx_insn *orig_end = BB_END (prev_bb);
0ec80471 5899
34154e27 5900 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
7dfb44a0 5901
491e04ef 5902 /* Make sure the locus is set to the end of the function, so that
0ec80471 5903 epilogue line numbers and warnings are set properly. */
8e7408e3 5904 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
0ec80471 5905 input_location = cfun->function_end_locus;
5906
0ec80471 5907 /* Generate rtl for function exit. */
5908 expand_function_end ();
5909
5910 end = get_last_insn ();
5911 if (head == end)
5912 return;
04e7d9cb 5913  /* While emitting the function end we may have moved the end of the last
 5914     basic block.  */
26bb3cb2 5915 BB_END (prev_bb) = orig_end;
6d7dc5b9 5916 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
0ec80471 5917 head = NEXT_INSN (head);
04e7d9cb 5918 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
205ce1aa 5919     bb count accounting will be confused.  Any instructions before that
04e7d9cb 5920 label are emitted for the case where PREV_BB falls through into the
5921 exit block, so append those instructions to prev_bb in that case. */
5922 if (NEXT_INSN (head) != return_label)
5923 {
5924 while (NEXT_INSN (head) != return_label)
5925 {
5926 if (!NOTE_P (NEXT_INSN (head)))
26bb3cb2 5927 BB_END (prev_bb) = NEXT_INSN (head);
04e7d9cb 5928 head = NEXT_INSN (head);
5929 }
5930 }
5931 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
34154e27 5932 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
b3083327 5933 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
cd665a06 5934
5935 ix = 0;
34154e27 5936 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
0ec80471 5937 {
34154e27 5938 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
0ec80471 5939 if (!(e->flags & EDGE_ABNORMAL))
cd665a06 5940 redirect_edge_succ (e, exit_block);
5941 else
5942 ix++;
0ec80471 5943 }
cd665a06 5944
720cfc43 5945 e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5946 EDGE_FALLTHRU);
34154e27 5947 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
0ec80471 5948 if (e2 != e)
5949 {
ea5d3981 5950 exit_block->count -= e2->count ();
0ec80471 5951 }
0ec80471 5952 update_bb_for_insn (exit_block);
5953}
5954
a0c938f0 5955/* Helper function for discover_nonconstant_array_refs.
9d5aa3bd 5956 Look for ARRAY_REF nodes with non-constant indexes and mark them
5957 addressable. */
5958
5959static tree
5960discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5961 void *data ATTRIBUTE_UNUSED)
5962{
5963 tree t = *tp;
5964
5965 if (IS_TYPE_OR_DECL_P (t))
5966 *walk_subtrees = 0;
5967 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5968 {
5969 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5970 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5971 && (!TREE_OPERAND (t, 2)
5972 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5973 || (TREE_CODE (t) == COMPONENT_REF
5974 && (!TREE_OPERAND (t,2)
5975 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5976 || TREE_CODE (t) == BIT_FIELD_REF
5977 || TREE_CODE (t) == REALPART_EXPR
5978 || TREE_CODE (t) == IMAGPART_EXPR
5979 || TREE_CODE (t) == VIEW_CONVERT_EXPR
72dd6141 5980 || CONVERT_EXPR_P (t))
9d5aa3bd 5981 t = TREE_OPERAND (t, 0);
5982
5983 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5984 {
5985 t = get_base_address (t);
9a60c3b9 5986 if (t && DECL_P (t)
5987 && DECL_MODE (t) != BLKmode)
9d5aa3bd 5988 TREE_ADDRESSABLE (t) = 1;
5989 }
5990
5991 *walk_subtrees = 0;
5992 }
5993
5994 return NULL_TREE;
5995}
5996
5997/* RTL expansion is not able to compile array references with variable
 5998   offsets for arrays stored in a single register.  Discover such
5999 expressions and mark variables as addressable to avoid this
6000 scenario. */
6001
6002static void
6003discover_nonconstant_array_refs (void)
6004{
6005 basic_block bb;
75a70cf9 6006 gimple_stmt_iterator gsi;
9d5aa3bd 6007
fc00614f 6008 FOR_EACH_BB_FN (bb, cfun)
75a70cf9 6009 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6010 {
42acab1c 6011 gimple *stmt = gsi_stmt (gsi);
f4b490ea 6012 if (!is_gimple_debug (stmt))
6013 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
75a70cf9 6014 }
9d5aa3bd 6015}
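
/* Illustrative example (hypothetical C): in
     int f (int i) { int a[2] = { 1, 2 }; return a[i]; }
   the walk above finds the ARRAY_REF a[i] with a non-constant index
   and, on targets where such a small array is given an integer mode
   rather than BLKmode, marks 'a' TREE_ADDRESSABLE, forcing it into
   memory where the variable offset can be applied.  */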
6016
27a7a23a 6017/* This function sets crtl->args.internal_arg_pointer to a virtual
6018 register if DRAP is needed. Local register allocator will replace
6019 virtual_incoming_args_rtx with the virtual register. */
6020
6021static void
6022expand_stack_alignment (void)
6023{
6024 rtx drap_rtx;
9e1c1bf0 6025 unsigned int preferred_stack_boundary;
27a7a23a 6026
6027 if (! SUPPORTS_STACK_ALIGNMENT)
6028 return;
48e1416a 6029
27a7a23a 6030 if (cfun->calls_alloca
6031 || cfun->has_nonlocal_label
6032 || crtl->has_nonlocal_goto)
6033 crtl->need_drap = true;
6034
c0a05dc0 6035 /* Call update_stack_boundary here again to update incoming stack
6036 boundary. It may set incoming stack alignment to a different
6037 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
6038 use the minimum incoming stack alignment to check if it is OK
6039 to perform sibcall optimization since sibcall optimization will
6040 only align the outgoing stack to incoming stack boundary. */
6041 if (targetm.calls.update_stack_boundary)
6042 targetm.calls.update_stack_boundary ();
6043
6044 /* The incoming stack frame has to be aligned at least at
6045 parm_stack_boundary. */
6046 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
27a7a23a 6047
27a7a23a 6048 /* Update crtl->stack_alignment_estimated and use it later to align
6049 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
6050 exceptions since callgraph doesn't collect incoming stack alignment
6051 in this case. */
cbeb677e 6052 if (cfun->can_throw_non_call_exceptions
27a7a23a 6053 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6054 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6055 else
6056 preferred_stack_boundary = crtl->preferred_stack_boundary;
6057 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6058 crtl->stack_alignment_estimated = preferred_stack_boundary;
6059 if (preferred_stack_boundary > crtl->stack_alignment_needed)
6060 crtl->stack_alignment_needed = preferred_stack_boundary;
6061
c0a05dc0 6062 gcc_assert (crtl->stack_alignment_needed
6063 <= crtl->stack_alignment_estimated);
6064
27a7a23a 6065 crtl->stack_realign_needed
9e1c1bf0 6066 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
7b70fdf7 6067 crtl->stack_realign_tried = crtl->stack_realign_needed;
27a7a23a 6068
6069 crtl->stack_realign_processed = true;
6070
6071 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6072 alignment. */
6073 gcc_assert (targetm.calls.get_drap_rtx != NULL);
48e1416a 6074 drap_rtx = targetm.calls.get_drap_rtx ();
27a7a23a 6075
f6754469 6076 /* stack_realign_drap and drap_rtx must match. */
6077 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6078
27a7a23a 6079 /* Do nothing if NULL is returned, which means DRAP is not needed. */
6080 if (NULL != drap_rtx)
6081 {
6082 crtl->args.internal_arg_pointer = drap_rtx;
6083
6084 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6085 needed. */
6086 fixup_tail_calls ();
6087 }
6088}
0e80b01d 6089\f
6090
6091static void
6092expand_main_function (void)
6093{
6094#if (defined(INVOKE__main) \
6095 || (!defined(HAS_INIT_SECTION) \
6096 && !defined(INIT_SECTION_ASM_OP) \
6097 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
9e9e5c15 6098 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
0e80b01d 6099#endif
6100}
6101\f
6102
6103/* Expand code to initialize the stack_protect_guard. This is invoked at
6104 the beginning of a function to be protected. */
6105
0e80b01d 6106static void
6107stack_protect_prologue (void)
6108{
6109 tree guard_decl = targetm.stack_protect_guard ();
6110 rtx x, y;
6111
6112 x = expand_normal (crtl->stack_protect_guard);
8a23256f 6113 if (guard_decl)
6114 y = expand_normal (guard_decl);
6115 else
6116 y = const0_rtx;
0e80b01d 6117
6118 /* Allow the target to copy from Y to X without leaking Y into a
6119 register. */
e9b06442 6120 if (targetm.have_stack_protect_set ())
6121 if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6122 {
6123 emit_insn (insn);
6124 return;
6125 }
0e80b01d 6126
6127 /* Otherwise do a straight move. */
6128 emit_move_insn (x, y);
6129}
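
/* Sketch of the emitted sequence (illustrative; the exact rtl is
   target-specific): on targets providing a stack_protect_set pattern
   the copy becomes roughly
     (set (mem <canary slot>) (mem <guard>))
   via targetm.gen_stack_protect_set, which is expected not to leave the
   guard value live in a scratch register; otherwise a plain
   emit_move_insn performs the copy.  */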
27a7a23a 6130
0ec80471 6131/* Translate the intermediate representation contained in the CFG
6132 from GIMPLE trees to RTL.
6133
6134 We do conversion per basic block and preserve/update the tree CFG.
6135 This implies we have to do some magic as the CFG can simultaneously
6136 consist of basic blocks containing RTL and GIMPLE trees. This can
2c763ed4 6137   confuse the CFG hooks, so be careful not to manipulate the CFG during
0ec80471 6138 the expansion. */
6139
65b0537f 6140namespace {
6141
6142const pass_data pass_data_expand =
6143{
6144 RTL_PASS, /* type */
6145 "expand", /* name */
6146 OPTGROUP_NONE, /* optinfo_flags */
65b0537f 6147 TV_EXPAND, /* tv_id */
6148 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6149 | PROP_gimple_lcx
82fc0e0a 6150 | PROP_gimple_lvec
6151 | PROP_gimple_lva), /* properties_required */
65b0537f 6152 PROP_rtl, /* properties_provided */
6153 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
8b88439e 6154 0, /* todo_flags_start */
65b0537f 6155 0, /* todo_flags_finish */
6156};
6157
6158class pass_expand : public rtl_opt_pass
6159{
6160public:
6161 pass_expand (gcc::context *ctxt)
6162 : rtl_opt_pass (pass_data_expand, ctxt)
6163 {}
6164
6165 /* opt_pass methods: */
6166 virtual unsigned int execute (function *);
6167
6168}; // class pass_expand
6169
6170unsigned int
6171pass_expand::execute (function *fun)
0ec80471 6172{
6173 basic_block bb, init_block;
ea06d49f 6174 edge_iterator ei;
6175 edge e;
74a0cbc4 6176 rtx_insn *var_seq, *var_ret_seq;
a8dd994c 6177 unsigned i;
6178
e2050933 6179 timevar_push (TV_OUT_OF_SSA);
a8dd994c 6180 rewrite_out_of_ssa (&SA);
e2050933 6181 timevar_pop (TV_OUT_OF_SSA);
ed7e2206 6182 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
0ec80471 6183
c64f38bf 6184 if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
54497144 6185 {
6186 gimple_stmt_iterator gsi;
6187 FOR_EACH_BB_FN (bb, cfun)
6188 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6189 if (gimple_debug_bind_p (gsi_stmt (gsi)))
6190 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6191 }
6192
212dddd3 6193 /* Make sure all values used by the optimization passes have sane
6194 defaults. */
6195 reg_renumber = 0;
6196
723c0ee7 6197 /* Some backends want to know that we are expanding to RTL. */
6198 currently_expanding_to_rtl = 1;
821ac701 6199 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6200 free_dominance_info (CDI_DOMINATORS);
723c0ee7 6201
65b0537f 6202 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
7dfb44a0 6203
058a1b7a 6204 if (chkp_function_instrumented_p (current_function_decl))
6205 chkp_reset_rtl_bounds ();
6206
5169661d 6207 insn_locations_init ();
c3771ec9 6208 if (!DECL_IS_BUILTIN (current_function_decl))
30099c0c 6209 {
6210 /* Eventually, all FEs should explicitly set function_start_locus. */
65b0537f 6211 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6212 set_curr_insn_location
6213 (DECL_SOURCE_LOCATION (current_function_decl));
30099c0c 6214 else
65b0537f 6215 set_curr_insn_location (fun->function_start_locus);
30099c0c 6216 }
0aecb55e 6217 else
5169661d 6218 set_curr_insn_location (UNKNOWN_LOCATION);
6219 prologue_location = curr_insn_location ();
375c1c8a 6220
fdc86f97 6221#ifdef INSN_SCHEDULING
6222 init_sched_attrs ();
6223#endif
6224
375c1c8a 6225 /* Make sure first insn is a note even if we don't want linenums.
6226 This makes sure the first insn will never be deleted.
6227 Also, final expects a note to appear there. */
6228 emit_note (NOTE_INSN_DELETED);
656047bf 6229
9d5aa3bd 6230 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6231 discover_nonconstant_array_refs ();
6232
bc5e6ea1 6233 targetm.expand_to_rtl_hook ();
2add0b64 6234 crtl->init_stack_alignment ();
65b0537f 6235 fun->cfg->max_jumptable_ents = 0;
edb7afe8 6236
b8a89e7e 6237  /* Resolve the function section.  Some targets, like ARM EABI, rely on
 6238     knowledge of the function section at expansion time to predict the distance of calls.  */
6239 resolve_unique_section (current_function_decl, 0, flag_function_sections);
6240
280450fa 6241 /* Expand the variables recorded during gimple lowering. */
e2050933 6242 timevar_push (TV_VAR_EXPAND);
5be42b39 6243 start_sequence ();
6244
3c919612 6245 var_ret_seq = expand_used_vars ();
5be42b39 6246
6247 var_seq = get_insns ();
6248 end_sequence ();
e2050933 6249 timevar_pop (TV_VAR_EXPAND);
0ec80471 6250
f1a0edff 6251 /* Honor stack protection warnings. */
6252 if (warn_stack_protect)
6253 {
65b0537f 6254 if (fun->calls_alloca)
48e1416a 6255 warning (OPT_Wstack_protector,
b15b8239 6256 "stack protector not protecting local variables: "
65b0537f 6257 "variable length buffer");
edb7afe8 6258 if (has_short_buffer && !crtl->stack_protect_guard)
48e1416a 6259 warning (OPT_Wstack_protector,
b15b8239 6260 "stack protector not protecting function: "
65b0537f 6261 "all local arrays are less than %d bytes long",
f1a0edff 6262 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6263 }
6264
0ec80471 6265 /* Set up parameters and prepare for return, for the function. */
82aa4bd5 6266 expand_function_start (current_function_decl);
0ec80471 6267
5be42b39 6268 /* If we emitted any instructions for setting up the variables,
6269 emit them before the FUNCTION_START note. */
6270 if (var_seq)
6271 {
6272 emit_insn_before (var_seq, parm_birth_insn);
6273
6274 /* In expand_function_end we'll insert the alloca save/restore
 6275	 before parm_birth_insn.  We've just inserted an alloca call.
6276 Adjust the pointer to match. */
6277 parm_birth_insn = var_seq;
6278 }
6279
b2df3bbf 6280 /* Now propagate the RTL assignment of each partition to the
6281 underlying var of each SSA_NAME. */
f211616e 6282 tree name;
6283
6284 FOR_EACH_SSA_NAME (i, name, cfun)
b2df3bbf 6285 {
f211616e 6286 /* We might have generated new SSA names in
6287 update_alias_info_with_stack_vars. They will have a NULL
 6288	 defining statement, and won't be part of the partitioning,
6289 so ignore those. */
6290 if (!SSA_NAME_DEF_STMT (name))
b2df3bbf 6291 continue;
6292
6293 adjust_one_expanded_partition_var (name);
6294 }
6295
6296 /* Clean up RTL of variables that straddle across multiple
6297 partitions, and check that the rtl of any PARM_DECLs that are not
6298 cleaned up is that of their default defs. */
f211616e 6299 FOR_EACH_SSA_NAME (i, name, cfun)
f2ca19b4 6300 {
f2ca19b4 6301 int part;
f2ca19b4 6302
f211616e 6303 /* We might have generated new SSA names in
6304 update_alias_info_with_stack_vars. They will have a NULL
 6305	 defining statement, and won't be part of the partitioning,
6306 so ignore those. */
6307 if (!SSA_NAME_DEF_STMT (name))
f2ca19b4 6308 continue;
6309 part = var_to_partition (SA.map, name);
6310 if (part == NO_PARTITION)
6311 continue;
ec11736b 6312
94f92c36 6313 /* If this decl was marked as living in multiple places, reset
6314 this now to NULL. */
6315 tree var = SSA_NAME_VAR (name);
6316 if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6317 SET_DECL_RTL (var, NULL);
6318 /* Check that the pseudos chosen by assign_parms are those of
6319 the corresponding default defs. */
6320 else if (SSA_NAME_IS_DEFAULT_DEF (name)
6321 && (TREE_CODE (var) == PARM_DECL
6322 || TREE_CODE (var) == RESULT_DECL))
ec11736b 6323 {
94f92c36 6324 rtx in = DECL_RTL_IF_SET (var);
6325 gcc_assert (in);
6326 rtx out = SA.partition_to_pseudo[part];
b2df3bbf 6327 gcc_assert (in == out);
6328
6329 /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6330 those expected by debug backends for each parm and for
6331 the result. This is particularly important for stabs,
6332 whose register elimination from parm's DECL_RTL may cause
6333 -fcompare-debug differences as SET_DECL_RTL changes reg's
6334 attrs. So, make sure the RTL already has the parm as the
6335 EXPR, so that it won't change. */
6336 SET_DECL_RTL (var, NULL_RTX);
6337 if (MEM_P (in))
6338 set_mem_attributes (in, var, true);
6339 SET_DECL_RTL (var, in);
ec11736b 6340 }
f2ca19b4 6341 }
6342
0ec80471 6343 /* If this function is `main', emit a call to `__main'
6344 to run global initializers, etc. */
6345 if (DECL_NAME (current_function_decl)
6346 && MAIN_NAME_P (DECL_NAME (current_function_decl))
6347 && DECL_FILE_SCOPE_P (current_function_decl))
6348 expand_main_function ();
6349
f1a0edff 6350 /* Initialize the stack_protect_guard field. This must happen after the
6351 call to __main (if any) so that the external decl is initialized. */
783f362b 6352 if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
f1a0edff 6353 stack_protect_prologue ();
6354
a8dd994c 6355 expand_phi_nodes (&SA);
6356
3e292d17 6357 /* Release any stale SSA redirection data. */
b1090780 6358 redirect_edge_var_map_empty ();
3e292d17 6359
011e6b51 6360 /* Register rtl specific functions for cfg. */
0ec80471 6361 rtl_register_cfg_hooks ();
6362
6363 init_block = construct_init_block ();
6364
ea06d49f 6365  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleared from the
a8dd994c 6366 remaining edges later. */
65b0537f 6367 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
ea06d49f 6368 e->flags &= ~EDGE_EXECUTABLE;
6369
0699065d 6370 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
65b0537f 6371 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
34154e27 6372 next_bb)
3c919612 6373 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
7dfb44a0 6374
c64f38bf 6375 if (MAY_HAVE_DEBUG_BIND_INSNS)
9845d120 6376 expand_debug_locations ();
6377
54497144 6378 if (deep_ter_debug_map)
6379 {
6380 delete deep_ter_debug_map;
6381 deep_ter_debug_map = NULL;
6382 }
6383
3db65b62 6384 /* Free stuff we no longer need after GIMPLE optimizations. */
6385 free_dominance_info (CDI_DOMINATORS);
6386 free_dominance_info (CDI_POST_DOMINATORS);
d4f078b5 6387 delete_tree_cfg_annotations (fun);
3db65b62 6388
e2050933 6389 timevar_push (TV_OUT_OF_SSA);
a8dd994c 6390 finish_out_of_ssa (&SA);
e2050933 6391 timevar_pop (TV_OUT_OF_SSA);
a8dd994c 6392
e2050933 6393 timevar_push (TV_POST_EXPAND);
67817f0f 6394 /* We are no longer in SSA form. */
65b0537f 6395 fun->gimple_df->in_ssa_p = false;
b3083327 6396 loops_state_clear (LOOP_CLOSED_SSA);
67817f0f 6397
7dfb44a0 6398  /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6399 conservatively to true until they are all profile aware. */
5f8841a5 6400 delete lab_rtx_for_bb;
d4f078b5 6401 free_histograms (fun);
0ec80471 6402
6403 construct_exit_block ();
5169661d 6404 insn_locations_finalize ();
0ec80471 6405
3c919612 6406 if (var_ret_seq)
6407 {
4cd001d5 6408 rtx_insn *after = return_label;
74a0cbc4 6409 rtx_insn *next = NEXT_INSN (after);
3c919612 6410 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6411 after = next;
6412 emit_insn_after (var_ret_seq, after);
6413 }
6414
e38def9c 6415 /* Zap the tree EH table. */
65b0537f 6416 set_eh_throw_stmt_table (fun, NULL);
0ec80471 6417
409e049a 6418  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
 6419     to split edges, which edge insertions might do.  */
0ec80471 6420 rebuild_jump_labels (get_insns ());
0ec80471 6421
65b0537f 6422 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6423 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
a8dd994c 6424 {
6425 edge e;
6426 edge_iterator ei;
6427 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6428 {
6429 if (e->insns.r)
4547eca6 6430 {
ae5e6486 6431 rebuild_jump_labels_chain (e->insns.r);
d699f73a 6432 /* Put insns after parm birth, but before
 6433	       NOTE_INSN_FUNCTION_BEG.  */
65b0537f 6434 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6435 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
4547eca6 6436 {
ae5e6486 6437 rtx_insn *insns = e->insns.r;
6438 e->insns.r = NULL;
d699f73a 6439 if (NOTE_P (parm_birth_insn)
6440 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6441 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6442 else
6443 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4547eca6 6444 }
6445 else
6446 commit_one_edge_insertion (e);
6447 }
a8dd994c 6448 else
6449 ei_next (&ei);
6450 }
6451 }
6452
6453 /* We're done expanding trees to RTL. */
6454 currently_expanding_to_rtl = 0;
6455
ea804f86 6456 flush_mark_addressable_queue ();
6457
65b0537f 6458 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6459 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
a8dd994c 6460 {
6461 edge e;
6462 edge_iterator ei;
6463 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6464 {
6465 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6466 e->flags &= ~EDGE_EXECUTABLE;
6467
6468 /* At the moment not all abnormal edges match the RTL
6469 representation. It is safe to remove them here as
6470 find_many_sub_basic_blocks will rediscover them.
6471 In the future we should get this fixed properly. */
6472 if ((e->flags & EDGE_ABNORMAL)
6473 && !(e->flags & EDGE_SIBCALL))
6474 remove_edge (e);
6475 else
6476 ei_next (&ei);
6477 }
6478 }
6479
3c6549f8 6480 auto_sbitmap blocks (last_basic_block_for_fn (fun));
53c5d9d4 6481 bitmap_ones (blocks);
0ec80471 6482 find_many_sub_basic_blocks (blocks);
a8dd994c 6483 purge_all_dead_edges ();
0ec80471 6484
27a7a23a 6485 expand_stack_alignment ();
6486
212dddd3 6487 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6488 function. */
6489 if (crtl->tail_call_emit)
6490 fixup_tail_calls ();
6491
1dd4980f 6492 /* After initial rtl generation, call back to finish generating
6493 exception support code. We need to do this before cleaning up
6494 the CFG as the code does not expect dead landing pads. */
65b0537f 6495 if (fun->eh->region_tree != NULL)
1dd4980f 6496 finish_eh_generation ();
6497
c86933f9 6498  /* BB subdivision may have created basic blocks that are only reachable
6499 from unlikely bbs but not marked as such in the profile. */
6500 if (optimize)
6501 propagate_unlikely_bbs_forward ();
6502
1dd4980f 6503 /* Remove unreachable blocks, otherwise we cannot compute dominators
6504 which are needed for loop state verification. As a side-effect
6505 this also compacts blocks.
6506 ??? We cannot remove trivially dead insns here as for example
6507 the DRAP reg on i?86 is not magically live at this point.
6508 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6509 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6510
382ecba7 6511 checking_verify_flow_info ();
0f9005dd 6512
212dddd3 6513 /* Initialize pseudos allocated for hard registers. */
6514 emit_initial_value_sets ();
6515
6516 /* And finally unshare all RTL. */
6517 unshare_all_rtl ();
6518
0f9005dd 6519 /* There's no need to defer outputting this function any more; we
6520 know we want to output it. */
6521 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6522
6523 /* Now that we're done expanding trees to RTL, we shouldn't have any
6524 more CONCATs anywhere. */
6525 generating_concat_p = 0;
6526
49377e21 6527 if (dump_file)
6528 {
6529 fprintf (dump_file,
6530 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6531 /* And the pass manager will dump RTL for us. */
6532 }
77fce4cd 6533
6534 /* If we're emitting a nested function, make sure its parent gets
6535 emitted as well. Doing otherwise confuses debug info. */
65b0537f 6536 {
6537 tree parent;
6538 for (parent = DECL_CONTEXT (current_function_decl);
6539 parent != NULL_TREE;
6540 parent = get_containing_scope (parent))
6541 if (TREE_CODE (parent) == FUNCTION_DECL)
6542 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6543 }
a0c938f0 6544
77fce4cd 6545 TREE_ASM_WRITTEN (current_function_decl) = 1;
1a56c787 6546
6547 /* After expanding, the return labels are no longer needed. */
6548 return_label = NULL;
6549 naked_return_label = NULL;
4c0315d0 6550
6551 /* After expanding, the tm_restart map is no longer needed. */
65b0537f 6552 if (fun->gimple_df->tm_restart)
b7aa58e4 6553 fun->gimple_df->tm_restart = NULL;
4c0315d0 6554
375c1c8a 6555 /* Tag the blocks with a depth number so that change_scope can find
6556 the common parent easily. */
65b0537f 6557 set_block_levels (DECL_INITIAL (fun->decl), 0);
7dfb44a0 6558 default_rtl_profile ();
212dddd3 6559
6d9dcf16 6560 /* For -dx discard loops now, otherwise IL verify in clean_state will
6561 ICE. */
6562 if (rtl_dump_and_exit)
6563 {
6564 cfun->curr_properties &= ~PROP_loops;
6565 loop_optimizer_finalize ();
6566 }
6567
e2050933 6568 timevar_pop (TV_POST_EXPAND);
212dddd3 6569
2a1990e9 6570 return 0;
0ec80471 6571}
6572
cbe8bda8 6573} // anon namespace
6574
6575rtl_opt_pass *
6576make_pass_expand (gcc::context *ctxt)
6577{
6578 return new pass_expand (ctxt);
6579}