/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "ssa.h"
#include "optabs.h"
#include "regs.h" /* For reg_renumber.  */
#include "emit-rtl.h"
#include "recog.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "varasm.h"
#include "stor-layout.h"
#include "stmt.h"
#include "print-tree.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-expr.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "cfgloop.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "tree-ssa-address.h"
#include "output.h"
#include "builtins.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple *currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);

static bool defer_stack_allocation (tree, bool);

static void record_alignment_for_reg_var (unsigned int);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple *stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
		TREE_TYPE (gimple_assign_lhs (stmt)),
		gimple_assign_rhs1 (stmt),
		gimple_assign_rhs2 (stmt),
		gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
		TREE_TYPE (gimple_assign_lhs (stmt)),
		gimple_assign_rhs1 (stmt),
		gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
		TREE_TYPE (gimple_assign_lhs (stmt)),
		gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
	   && gimple_location (stmt) != EXPR_LOCATION (t))
	  || (gimple_block (stmt)
	      && currently_expanding_to_rtl
	      && EXPR_P (t)))
	t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}


#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Choose either CUR or NEXT as the leader DECL for a partition.
   Prefer ignored decls, to simplify debug dumps and reduce ambiguity
   out of the same user variable being in multiple partitions (this is
   less likely for compiler-introduced temps).  */

static tree
leader_merge (tree cur, tree next)
{
  if (cur == NULL || cur == next)
    return next;

  if (DECL_P (cur) && DECL_IGNORED_P (cur))
    return cur;

  if (DECL_P (next) && DECL_IGNORED_P (next))
    return next;

  return cur;
}

/* Associate declaration T with storage space X.  If T is no
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  gcc_checking_assert (!x
		       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
		       || (use_register_for_decl (t)
			   ? (REG_P (x)
			      || (GET_CODE (x) == CONCAT
				  && (REG_P (XEXP (x, 0))
				      || SUBREG_P (XEXP (x, 0)))
				  && (REG_P (XEXP (x, 1))
				      || SUBREG_P (XEXP (x, 1))))
			      /* We need to accept PARALLELs for RESULT_DECLs
				 because of vector types with BLKmode returned
				 in multiple registers, but they are supposed
				 to be uncoalesced.  */
			      || (GET_CODE (x) == PARALLEL
				  && SSAVAR (t)
				  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
				  && (GET_MODE (x) == BLKmode
				      || !flag_tree_coalesce_vars)))
			   : (MEM_P (x) || x == pc_rtx
			      || (GET_CODE (x) == CONCAT
				  && MEM_P (XEXP (x, 0))
				  && MEM_P (XEXP (x, 1))))));
  /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
     RESULT_DECLs has the expected mode.  For memory, we accept
     unpromoted modes, since that's what we're likely to get.  For
     PARM_DECLs and RESULT_DECLs, we'll have been called by
     set_parm_rtl, which will give us the default def, so we don't
     have to compute it ourselves.  For RESULT_DECLs, we accept mode
     mismatches too, as long as we have BLKmode or are not coalescing
     across variables, so that we don't reject BLKmode PARALLELs or
     unpromoted REGs.  */
  gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
		       || (SSAVAR (t)
			   && TREE_CODE (SSAVAR (t)) == RESULT_DECL
			   && (promote_ssa_mode (t, NULL) == BLKmode
			       || !flag_tree_coalesce_vars))
		       || !use_register_for_decl (t)
		       || GET_MODE (x) == promote_ssa_mode (t, NULL));

  if (x)
    {
      bool skip = false;
      tree cur = NULL_TREE;
      rtx xm = x;

    retry:
      if (MEM_P (xm))
	cur = MEM_EXPR (xm);
      else if (REG_P (xm))
	cur = REG_EXPR (xm);
      else if (SUBREG_P (xm))
	{
	  gcc_assert (subreg_lowpart_p (xm));
	  xm = SUBREG_REG (xm);
	  goto retry;
	}
      else if (GET_CODE (xm) == CONCAT)
	{
	  xm = XEXP (xm, 0);
	  goto retry;
	}
      else if (GET_CODE (xm) == PARALLEL)
	{
	  xm = XVECEXP (xm, 0, 0);
	  gcc_assert (GET_CODE (xm) == EXPR_LIST);
	  xm = XEXP (xm, 0);
	  goto retry;
	}
      else if (xm == pc_rtx)
	skip = true;
      else
	gcc_unreachable ();

      tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);

      if (cur != next)
	{
	  if (MEM_P (x))
	    set_mem_attributes (x,
				next && TREE_CODE (next) == SSA_NAME
				? TREE_TYPE (next)
				: next, true);
	  else
	    set_reg_attrs_for_decl_rtl (next, x);
	}
    }

  if (TREE_CODE (t) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, t);
      if (part != NO_PARTITION)
	{
	  if (SA.partition_to_pseudo[part])
	    gcc_assert (SA.partition_to_pseudo[part] == x);
	  else if (x != pc_rtx)
	    SA.partition_to_pseudo[part] = x;
	}
      /* For the benefit of debug information at -O0 (where
	 vartracking doesn't run) record the place also in the base
	 DECL.  For PARMs and RESULTs, do so only when setting the
	 default def.  */
      if (x && x != pc_rtx && SSA_NAME_VAR (t)
	  && (VAR_P (SSA_NAME_VAR (t))
	      || SSA_NAME_IS_DEFAULT_DEF (t)))
	{
	  tree var = SSA_NAME_VAR (t);
	  /* If we don't yet have something recorded, just record it now.  */
	  if (!DECL_RTL_SET_P (var))
	    SET_DECL_RTL (var, x);
	  /* If we have it set already to "multiple places" don't
	     change this.  */
	  else if (DECL_RTL (var) == pc_rtx)
	    ;
	  /* If we have something recorded and it's not the same place
	     as we want to record now, we have multiple partitions for the
	     same base variable, with different places.  We can't just
	     randomly choose one, hence we have to say that we don't know.
	     This only happens with optimization, and there var-tracking
	     will figure out the right thing.  */
	  else if (DECL_RTL (var) != x)
	    SET_DECL_RTL (var, pc_rtx);
	}
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  poly_uint64 size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align;

  if (TREE_CODE (decl) == SSA_NAME)
    align = TYPE_ALIGN (TREE_TYPE (decl));
  else
    {
      align = LOCAL_DECL_ALIGNMENT (decl);
      SET_DECL_ALIGN (decl, align);
    }
  return align / BITS_PER_UNIT;
}

/* Align given offset BASE with ALIGN.  Truncate up if ALIGN_UP is true,
   down otherwise.  Return truncated BASE value.  */

static inline unsigned HOST_WIDE_INT
align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
{
  return align_up ? (base + align - 1) & -align : base & -align;
}

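/* For example, with a power-of-two ALIGN the value -align is the mask
   that clears the low bits, so align_base (37, 16, true) yields 48
   (rounded up) while align_base (37, 16, false) yields 32 (rounded
   down).  */
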
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static poly_int64
alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
{
  poly_int64 offset, new_frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset
	= aligned_lower_bound (frame_offset - frame_phase - size,
			       align) + frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset
	= aligned_upper_bound (frame_offset - frame_phase,
			       align) + frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}

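/* For illustration, on a target where FRAME_GROWS_DOWNWARD and with
   frame_phase == 0, requesting 24 bytes at 16-byte alignment when
   frame_offset is -32 computes aligned_lower_bound (-32 - 24, 16) == -64,
   so the new slot is returned at offset -64 and frame_offset moves
   down to -64.  */
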
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
	stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
	stack_vars_alloc = 32;
      stack_vars
	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  tree size = TREE_CODE (decl) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
    : DECL_SIZE_UNIT (decl);
  v->size = tree_to_poly_uint64 (size);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (known_eq (v->size, 0U))
    v->size = 1;
  v->alignb = align_local_variable (decl);
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
	bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
	{
	  size_t num = *v;
	  bitmap_iterator bi;
	  unsigned i;
	  gcc_assert (num < stack_vars_num);
	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
	    add_stack_var_conflict (num, i);
	}
    }
  return false;
}

/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  size_t *v;
	  /* Nested function lowering might introduce LHSs
	     that are COMPONENT_REFs.  */
	  if (!VAR_P (lhs))
	    continue;
	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
	      && (v = decl_to_stack_part->get (lhs)))
	    bitmap_clear_bit (work, *v);
	}
      else if (!is_gimple_debug (stmt))
	{
	  if (for_conflict
	      && visit == visit_op)
	    {
	      /* If this is the first real instruction in this BB we need
		 to add conflicts for everything live at this point now.
		 Unlike classical liveness for named objects we can't
		 rely on seeing a def/use of the names we're interested in.
		 There might merely be indirect loads/stores.  We'd not add any
		 conflicts for such partitions.  */
	      bitmap_iterator bi;
	      unsigned i;
	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
		{
		  struct stack_var *a = &stack_vars[i];
		  if (!a->conflicts)
		    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
		  bitmap_ior_into (a->conflicts, work);
		}
	      visit = visit_conflict;
	    }
	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
	}
    }
}

/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we for instance moved an address-taken
     operation upward, without also moving a dereference to it upwards.
     But it's conservatively correct as a variable never can hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
	{
	  bitmap active;
	  bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
	  active = (bitmap)bb->aux;
	  add_scope_conflicts_1 (bb, work, false);
	  if (bitmap_ior_into (active, work))
	    changed = true;
	}
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}

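/* For illustration of the liveness approximation above, in

     {
       char buf1[256];
       use (&buf1);
     }				<- gimplify adds buf1 = {CLOBBER} here
     {
       char buf2[256];
       use (&buf2);
     }				<- and buf2 = {CLOBBER} here

   each clobber ends the approximated live range of its buffer, so the
   partitions for BUF1 and BUF2 are never simultaneously live, no conflict
   is recorded, and partition_stack_vars may later let them share one
   stack slot.  */
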
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  poly_int64 sizea = stack_vars[ia].size;
  poly_int64 sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing  */
  int diff = compare_sizes_for_sort (sizeb, sizea);
  if (diff != 0)
    return diff;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
	return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}

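/* As an example of the resulting order, assuming a target whose
   MAX_SUPPORTED_STACK_ALIGNMENT is 128 bits: a variable with
   alignb == 32 (256-bit, "large") sorts first, then a 64-byte variable
   with alignb == 8, then a 16-byte variable with alignb == 8; remaining
   ties fall back to SSA version or DECL_UID so the order is stable.  */
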
struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
typedef hash_map<size_t, bitmap, part_traits> part_hashmap;

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
			       part_hashmap *decls_to_partitions,
			       hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
	 visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
	 || !bitmap_bit_p (temp, i))
	&& (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
	  || stack_vars[i].next == EOC)
	continue;

      if (!decls_to_partitions)
	{
	  decls_to_partitions = new part_hashmap;
	  cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
	}

      /* Create an SSA_NAME that points to the partition for use
	 as base during alias-oracle queries on RTL for bases that
	 have been partitioned.  */
      if (var == NULL_TREE)
	var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
	 points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  tree decl = stack_vars[j].decl;
	  unsigned int uid = DECL_PT_UID (decl);
	  bitmap_set_bit (part, uid);
	  decls_to_partitions->put (uid, part);
	  cfun->gimple_df->decls_to_pointers->put (decl, name);
	  if (TREE_ADDRESSABLE (decl))
	    TREE_ADDRESSABLE (name) = 1;
	}

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      tree name;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      FOR_EACH_SSA_NAME (i, name, cfun)
	{
	  struct ptr_info_def *pi;

	  if (POINTER_TYPE_P (TREE_TYPE (name))
	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
					   &visited, temp);
	}

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
				     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
	add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size in descending order.
	For each object A {
	  S = size(A)
	  O = 0
	  loop {
	    Look for the largest non-conflicting object B with size <= S.
	    UNION (A, B)
	  }
	}
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      poly_int64 isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
	 see a var that is not a partition representative, it must
	 have been merged earlier.  */
      if (stack_vars[i].representative != i)
	continue;

      for (sj = si + 1; sj < n; ++sj)
	{
	  size_t j = stack_vars_sorted[sj];
	  unsigned int jalign = stack_vars[j].alignb;
	  poly_int64 jsize = stack_vars[j].size;

	  /* Ignore objects that aren't partition representatives.  */
	  if (stack_vars[j].representative != j)
	    continue;

	  /* Do not mix objects of "small" (supported) alignment
	     and "large" (unsupported) alignment.  */
	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
	    break;

	  /* For Address Sanitizer do not mix objects with different
	     sizes, as the shorter vars wouldn't be adequately protected.
	     Don't do that for "large" (unsupported) alignment objects,
	     those aren't protected anyway.  */
	  if (asan_sanitize_stack_p ()
	      && maybe_ne (isize, jsize)
	      && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	    break;

	  /* Ignore conflicting objects.  */
	  if (stack_var_conflict_p (i, j))
	    continue;

	  /* UNION the objects, placing J at OFFSET.  */
	  union_stack_vars (i, j);
	}
    }

  update_alias_info_with_stack_vars ();
}

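/* A worked example of the binpacking above: with three representatives
   sorted as A (32 bytes), B (16 bytes) and C (8 bytes), where only B
   conflicts with A, the loop unions C into A's partition; by the time
   the outer loop reaches B, C is no longer a representative and is
   skipped, leaving the partitions {A, C} and {B}.  expand_stack_vars
   then assigns each partition a single frame location.  */
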
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
      print_dec (stack_vars[i].size, dump_file);
      fprintf (dump_file, " align %u\n", stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  fputc ('\t', dump_file);
	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
	}
      fputc ('\n', dump_file);
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
			 poly_int64 offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
		   ? TYPE_MODE (TREE_TYPE (decl))
		   : DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
	 If it is we generate stack slots only accidentally so it isn't as
	 important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
	offset -= frame_phase;
      align = known_alignment (offset);
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
	align = base_align;

      /* One would think that we could assert that we're not decreasing
	 alignment here, but (at least) the i386 port does exactly this
	 via the MINIMUM_ALIGNMENT hook.  */

      SET_DECL_ALIGN (decl, align);
      DECL_USER_ALIGN (decl) = 0;
    }

  set_rtl (decl, x);
}

struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reversed, highest offset pairs come first.  */
  auto_vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  auto_vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  poly_uint64 large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  bool large_allocation_done = false;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
	{
	  unsigned alignb;

	  i = stack_vars_sorted[si];
	  alignb = stack_vars[i].alignb;

	  /* All "large" alignment decls come before all "small" alignment
	     decls, but "large" alignment decls are not sorted based on
	     their alignment.  Increase large_align to track the largest
	     required alignment.  */
	  if ((alignb * BITS_PER_UNIT) > large_align)
	    large_align = alignb * BITS_PER_UNIT;

	  /* Stop when we get to the first decl with "small" alignment.  */
	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	    break;

	  /* Skip variables that aren't partition representatives.  */
	  if (stack_vars[i].representative != i)
	    continue;

	  /* Skip variables that have already had rtl assigned.  See also
	     add_stack_var where we perpetrate this pc_rtx hack.  */
	  decl = stack_vars[i].decl;
	  if (TREE_CODE (decl) == SSA_NAME
	      ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
	      : DECL_RTL (decl) != pc_rtx)
	    continue;

	  large_size = aligned_upper_bound (large_size, alignb);
	  large_size += stack_vars[i].size;
	}
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      poly_int64 offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      /* Skip variables that have already had rtl assigned.  See also
	 add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if (TREE_CODE (decl) == SSA_NAME
	  ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
	  : DECL_RTL (decl) != pc_rtx)
	continue;

      /* Check the predicate to see whether this variable should be
	 allocated in this pass.  */
      if (pred && !pred (i))
	continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	{
	  base = virtual_stack_vars_rtx;
	  /* ASAN description strings don't yet have a syntax for expressing
	     polynomial offsets.  */
	  HOST_WIDE_INT prev_offset;
	  if (asan_sanitize_stack_p ()
	      && pred
	      && frame_offset.is_constant (&prev_offset)
	      && stack_vars[i].size.is_constant ())
	    {
	      prev_offset = align_base (prev_offset,
					MAX (alignb, ASAN_RED_ZONE_SIZE),
					!FRAME_GROWS_DOWNWARD);
	      tree repr_decl = NULL_TREE;
	      offset
		= alloc_stack_frame_space (stack_vars[i].size
					   + ASAN_RED_ZONE_SIZE,
					   MAX (alignb, ASAN_RED_ZONE_SIZE));

	      data->asan_vec.safe_push (prev_offset);
	      /* Allocating a constant amount of space from a constant
		 starting offset must give a constant result.  */
	      data->asan_vec.safe_push ((offset + stack_vars[i].size)
					.to_constant ());
	      /* Find best representative of the partition.
		 Prefer those with DECL_NAME, even better
		 satisfying asan_protect_stack_decl predicate.  */
	      for (j = i; j != EOC; j = stack_vars[j].next)
		if (asan_protect_stack_decl (stack_vars[j].decl)
		    && DECL_NAME (stack_vars[j].decl))
		  {
		    repr_decl = stack_vars[j].decl;
		    break;
		  }
		else if (repr_decl == NULL_TREE
			 && DECL_P (stack_vars[j].decl)
			 && DECL_NAME (stack_vars[j].decl))
		  repr_decl = stack_vars[j].decl;
	      if (repr_decl == NULL_TREE)
		repr_decl = stack_vars[i].decl;
	      data->asan_decl_vec.safe_push (repr_decl);
	      data->asan_alignb = MAX (data->asan_alignb, alignb);
	      if (data->asan_base == NULL)
		data->asan_base = gen_reg_rtx (Pmode);
	      base = data->asan_base;

	      if (!STRICT_ALIGNMENT)
		base_align = crtl->max_used_stack_slot_alignment;
	      else
		base_align = MAX (crtl->max_used_stack_slot_alignment,
				  GET_MODE_ALIGNMENT (SImode)
				  << ASAN_SHADOW_SHIFT);
	    }
	  else
	    {
	      offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
	      base_align = crtl->max_used_stack_slot_alignment;
	    }
	}
      else
	{
	  /* Large alignment is only processed in the last pass.  */
	  if (pred)
	    continue;

	  /* If there were any variables requiring "large" alignment, allocate
	     space.  */
	  if (maybe_ne (large_size, 0U) && ! large_allocation_done)
	    {
	      poly_int64 loffset;
	      rtx large_allocsize;

	      large_allocsize = gen_int_mode (large_size, Pmode);
	      get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
	      loffset = alloc_stack_frame_space
		(rtx_to_poly_int64 (large_allocsize),
		 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
	      large_base = get_dynamic_stack_base (loffset, large_align);
	      large_allocation_done = true;
	    }
	  gcc_assert (large_base != NULL);

	  large_alloc = aligned_upper_bound (large_alloc, alignb);
	  offset = large_alloc;
	  large_alloc += stack_vars[i].size;

	  base = large_base;
	  base_align = large_align;
	}

      /* Create rtl for each variable based on their location within the
	 partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  expand_one_stack_var_at (stack_vars[j].decl,
				   base, base_align,
				   offset);
	}
    }

  gcc_assert (known_eq (large_alloc, large_size));
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static poly_uint64
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  poly_uint64 size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
	set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

1243
f11a7b6d
AO
1244/* Record the RTL assignment X for the default def of PARM. */
1245
1246extern void
1247set_parm_rtl (tree parm, rtx x)
1248{
1249 gcc_assert (TREE_CODE (parm) == PARM_DECL
1250 || TREE_CODE (parm) == RESULT_DECL);
1251
1252 if (x && !MEM_P (x))
1253 {
1254 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
1255 TYPE_MODE (TREE_TYPE (parm)),
1256 TYPE_ALIGN (TREE_TYPE (parm)));
1257
1258 /* If the variable alignment is very large we'll dynamicaly
1259 allocate it, which means that in-frame portion is just a
1260 pointer. ??? We've got a pseudo for sure here, do we
1261 actually dynamically allocate its spilling area if needed?
1262 ??? Isn't it a problem when POINTER_SIZE also exceeds
1263 MAX_SUPPORTED_STACK_ALIGNMENT, as on cris and lm32? */
1264 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1265 align = POINTER_SIZE;
1266
1267 record_alignment_for_reg_var (align);
1268 }
1269
f11a7b6d
AO
1270 tree ssa = ssa_default_def (cfun, parm);
1271 if (!ssa)
1272 return set_rtl (parm, x);
1273
1274 int part = var_to_partition (SA.map, ssa);
1275 gcc_assert (part != NO_PARTITION);
1276
1277 bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
1278 gcc_assert (changed);
1279
1280 set_rtl (ssa, x);
1281 gcc_assert (DECL_RTL (parm) == x);
1282}
1283
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var_1 (tree var)
{
  poly_uint64 size;
  poly_int64 offset;
  unsigned byte_align;

  if (TREE_CODE (var) == SSA_NAME)
    {
      tree type = TREE_TYPE (var);
      size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
      byte_align = TYPE_ALIGN_UNIT (type);
    }
  else
    {
      size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
      byte_align = align_local_variable (var);
    }

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
			   crtl->max_used_stack_slot_alignment, offset);
}

/* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
   already assigned some MEM.  */

static void
expand_one_stack_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
	{
	  rtx x = SA.partition_to_pseudo[part];
	  gcc_assert (x);
	  gcc_assert (MEM_P (x));
	  return;
	}
    }

  return expand_one_stack_var_1 (var);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* Record the alignment requirements of some variable assigned to a
   pseudo.  */

static void
record_alignment_for_reg_var (unsigned int align)
{
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
	 realign decision made */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;
}

/* Create RTL for an SSA partition.  */

static void
expand_one_ssa_partition (tree var)
{
  int part = var_to_partition (SA.map, var);
  gcc_assert (part != NO_PARTITION);

  if (SA.partition_to_pseudo[part])
    return;

  unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
					  TYPE_MODE (TREE_TYPE (var)),
					  TYPE_ALIGN (TREE_TYPE (var)));

  /* If the variable alignment is very large we'll dynamically allocate
     it, which means that in-frame portion is just a pointer.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = POINTER_SIZE;

  record_alignment_for_reg_var (align);

  if (!use_register_for_decl (var))
    {
      if (defer_stack_allocation (var, true))
	add_stack_var (var);
      else
	expand_one_stack_var_1 (var);
      return;
    }

  machine_mode reg_mode = promote_ssa_mode (var, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* For a promoted variable, X will not be used directly but wrapped in a
     SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
     will assume that its upper bits can be inferred from its lower bits.
     Therefore, if X isn't initialized on every path from the entry, then
     we must do it manually in order to fulfill the above assumption.  */
  if (reg_mode != TYPE_MODE (TREE_TYPE (var))
      && bitmap_bit_p (SA.partitions_for_undefined_values, part))
    emit_move_insn (x, CONST0_RTX (reg_mode));
}

/* Record the association between the RTL generated for partition PART
   and the underlying variable of the SSA_NAME VAR.  */

static void
adjust_one_expanded_partition_var (tree var)
{
  if (!var)
    return;

  tree decl = SSA_NAME_VAR (var);

  int part = var_to_partition (SA.map, var);
  if (part == NO_PARTITION)
    return;

  rtx x = SA.partition_to_pseudo[part];

  gcc_assert (x);

  set_rtl (var, x);

  if (!REG_P (x))
    return;

  /* Note if the object is a user variable.  */
  if (decl && !DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
	{
	  rtx x = SA.partition_to_pseudo[part];
	  gcc_assert (x);
	  gcc_assert (REG_P (x));
	  return;
	}
      gcc_unreachable ();
    }

  tree decl = var;
  tree type = TREE_TYPE (decl);
  machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  tree size_unit = TREE_CODE (var) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (var))
    : DECL_SIZE_UNIT (var);
  poly_uint64 size;

  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = (poly_int_tree_p (size_unit, &size)
       && (estimated_poly_value (size)
	   < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || asan_sanitize_stack_p ())
    return true;

  unsigned int align = TREE_CODE (var) == SSA_NAME
    ? TYPE_ALIGN (TREE_TYPE (var))
    : DECL_ALIGN (var);

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  bool ignored = TREE_CODE (var) == SSA_NAME
    ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
    : DECL_IGNORED_P (var);

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && ignored && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}

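/* To illustrate the decision above: with -fstack-protector or when the
   address sanitizer instruments the stack, every variable is deferred.
   Otherwise, at -O0 variables in the outermost scope, as well as
   block-local variables smaller than PARAM_MIN_SIZE_FOR_STACK_SHARING,
   get their frame slot immediately, while larger block-local variables
   are still deferred so that disjoint scopes can share stack space.
   At -O2 and above even outermost-scope variables are deferred, purely
   so that the sorted packing in expand_stack_vars can do a better job.  */
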
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.
*/

static poly_uint64
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
    {
      if (is_global_var (var))
	return 0;

      /* Because we don't know if VAR will be in register or on stack,
	 we conservatively assume it will be on stack even if VAR is
	 eventually put into register after RA pass.  For non-automatic
	 variables, which won't be on stack, we collect alignment of
	 type and ignore user specified alignment.  Similarly for
	 SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
	  || DECL_EXTERNAL (var)
	  || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
				   TYPE_MODE (TREE_TYPE (var)),
				   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
	/* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
	   or variables which were assigned a stack slot already by
	   expand_one_stack_var_at - in the latter case DECL_ALIGN has been
	   changed from the offset chosen to it.  */
	align = crtl->stack_alignment_estimated;
      else
	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
	 it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
	align = POINTER_SIZE;
    }

  record_alignment_for_reg_var (align);

  poly_uint64 size;
  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (!VAR_P (var)
		  || (!DECL_EXTERNAL (var)
		      && !DECL_HAS_VALUE_EXPR_P (var)
		      && !TREE_STATIC (var)
		      && TREE_TYPE (var) != error_mark_node
		      && !DECL_HARD_REGISTER (var)
		      && really_expand));
    }
  if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
	expand_one_error_var (var);
    }
  else if (VAR_P (var) && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
	{
	  expand_one_hard_reg_var (var);
	  if (!DECL_HARD_REGISTER (var))
	    /* Invalid register specification.  */
	    expand_one_error_var (var);
	}
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
	expand_one_register_var (origvar);
    }
  else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
	   || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
	{
	  error ("size of variable %q+D is too large", var);
	  expand_one_error_var (var);
	}
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
	{
	  if (lookup_attribute ("naked",
				DECL_ATTRIBUTES (current_function_decl)))
	    error ("cannot allocate stack for variable %q+D, naked function.",
		   var);

	  expand_one_stack_var (origvar);
	}
      return size;
    }
  return 0;
}

1687/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1688 expanding variables. Those variables that can be put into registers
1689 are allocated pseudos; those that can't are put on the stack.
1690
1691 TOPLEVEL is true if this is the outermost BLOCK. */
1692
1693static void
1694expand_used_vars_for_block (tree block, bool toplevel)
1695{
1f6d3a08
RH
1696 tree t;
1697
1f6d3a08 1698 /* Expand all variables at this level. */
910ad8de 1699 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1ace6185 1700 if (TREE_USED (t)
8813a647 1701 && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1ace6185 1702 || !DECL_NONSHAREABLE (t)))
ff28a94d 1703 expand_one_var (t, toplevel, true);
1f6d3a08 1704
1f6d3a08
RH
1705 /* Expand all variables at containing levels. */
1706 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1707 expand_used_vars_for_block (t, false);
1f6d3a08
RH
1708}
1709
1710/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1711 and clear TREE_USED on all local variables. */
1712
1713static void
1714clear_tree_used (tree block)
1715{
1716 tree t;
1717
910ad8de 1718 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1f6d3a08 1719 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
8813a647 1720 if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1ace6185 1721 || !DECL_NONSHAREABLE (t))
1f6d3a08
RH
1722 TREE_USED (t) = 0;
1723
1724 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1725 clear_tree_used (t);
1726}
1727
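/* Values of flag_stack_protect, corresponding to the -fstack-protector,
   -fstack-protector-all, -fstack-protector-strong and
   -fstack-protector-explicit command-line options.  */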
f6bc1c4a
HS
1728enum {
1729 SPCT_FLAG_DEFAULT = 1,
1730 SPCT_FLAG_ALL = 2,
5434dc07
MD
1731 SPCT_FLAG_STRONG = 3,
1732 SPCT_FLAG_EXPLICIT = 4
f6bc1c4a
HS
1733};
1734
7d69de61
RH
1735/* Examine TYPE and determine a bit mask of the following features. */
1736
1737#define SPCT_HAS_LARGE_CHAR_ARRAY 1
1738#define SPCT_HAS_SMALL_CHAR_ARRAY 2
1739#define SPCT_HAS_ARRAY 4
1740#define SPCT_HAS_AGGREGATE 8
1741
1742static unsigned int
1743stack_protect_classify_type (tree type)
1744{
1745 unsigned int ret = 0;
1746 tree t;
1747
1748 switch (TREE_CODE (type))
1749 {
1750 case ARRAY_TYPE:
1751 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1752 if (t == char_type_node
1753 || t == signed_char_type_node
1754 || t == unsigned_char_type_node)
1755 {
15362b89
JJ
1756 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1757 unsigned HOST_WIDE_INT len;
7d69de61 1758
15362b89 1759 if (!TYPE_SIZE_UNIT (type)
cc269bb6 1760 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
15362b89 1761 len = max;
7d69de61 1762 else
ae7e9ddd 1763 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
7d69de61
RH
1764
1765 if (len < max)
1766 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1767 else
1768 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1769 }
1770 else
1771 ret = SPCT_HAS_ARRAY;
1772 break;
1773
1774 case UNION_TYPE:
1775 case QUAL_UNION_TYPE:
1776 case RECORD_TYPE:
1777 ret = SPCT_HAS_AGGREGATE;
1778 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1779 if (TREE_CODE (t) == FIELD_DECL)
1780 ret |= stack_protect_classify_type (TREE_TYPE (t));
1781 break;
1782
1783 default:
1784 break;
1785 }
1786
1787 return ret;
1788}
1789
a4d05547
KH
1790/* Return nonzero if DECL should be segregated into the "vulnerable" upper
1791 part of the local stack frame. Remember if we ever return nonzero for
7d69de61
RH
1792 any variable in this function. The return value is the phase number in
1793 which the variable should be allocated. */
1794
1795static int
1796stack_protect_decl_phase (tree decl)
1797{
1798 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1799 int ret = 0;
1800
1801 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1802 has_short_buffer = true;
1803
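  /* With -fstack-protector-all, -fstack-protector-strong, or an explicit
     stack_protect attribute, character arrays go into phase 1 and all
     other arrays into phase 2; the default mode only segregates large
     character arrays.  */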
f6bc1c4a 1804 if (flag_stack_protect == SPCT_FLAG_ALL
5434dc07
MD
1805 || flag_stack_protect == SPCT_FLAG_STRONG
1806 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1807 && lookup_attribute ("stack_protect",
1808 DECL_ATTRIBUTES (current_function_decl))))
7d69de61
RH
1809 {
1810 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1811 && !(bits & SPCT_HAS_AGGREGATE))
1812 ret = 1;
1813 else if (bits & SPCT_HAS_ARRAY)
1814 ret = 2;
1815 }
1816 else
1817 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1818
1819 if (ret)
1820 has_protected_decls = true;
1821
1822 return ret;
1823}
1824
1825/* Two helper routines that check for phase 1 and phase 2. These are used
1826 as callbacks for expand_stack_vars. */
1827
1828static bool
f3ddd692
JJ
1829stack_protect_decl_phase_1 (size_t i)
1830{
1831 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1832}
1833
1834static bool
1835stack_protect_decl_phase_2 (size_t i)
7d69de61 1836{
f3ddd692 1837 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
7d69de61
RH
1838}
1839
f3ddd692
JJ
 1840 /* A helper function that checks for the asan phase (with the stack
 1841 protector it is phase 3).  This is used as a callback for
 1842 expand_stack_vars.  Returns true if any of the vars in the partition
 need to be protected. */
1843
7d69de61 1844static bool
f3ddd692 1845asan_decl_phase_3 (size_t i)
7d69de61 1846{
f3ddd692
JJ
1847 while (i != EOC)
1848 {
1849 if (asan_protect_stack_decl (stack_vars[i].decl))
1850 return true;
1851 i = stack_vars[i].next;
1852 }
1853 return false;
7d69de61
RH
1854}
1855
 1856/* Ensure that variables in different stack protection phases conflict,
 1857 so that they are never merged into the same stack slot. */
1858
1859static void
1860add_stack_protection_conflicts (void)
1861{
1862 size_t i, j, n = stack_vars_num;
1863 unsigned char *phase;
1864
1865 phase = XNEWVEC (unsigned char, n);
1866 for (i = 0; i < n; ++i)
1867 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1868
1869 for (i = 0; i < n; ++i)
1870 {
1871 unsigned char ph_i = phase[i];
9b44f5d9 1872 for (j = i + 1; j < n; ++j)
7d69de61
RH
1873 if (ph_i != phase[j])
1874 add_stack_var_conflict (i, j);
1875 }
1876
1877 XDELETEVEC (phase);
1878}
1879
1880/* Create a decl for the guard at the top of the stack frame. */
1881
1882static void
1883create_stack_guard (void)
1884{
c2255bc4
AH
1885 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1886 VAR_DECL, NULL, ptr_type_node);
7d69de61
RH
1887 TREE_THIS_VOLATILE (guard) = 1;
1888 TREE_USED (guard) = 1;
1889 expand_one_stack_var (guard);
cb91fab0 1890 crtl->stack_protect_guard = guard;
7d69de61
RH
1891}
1892
ff28a94d 1893/* Prepare for expanding variables. */
b8698a0f 1894static void
ff28a94d
JH
1895init_vars_expansion (void)
1896{
3f9b14ff
SB
1897 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1898 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
ff28a94d 1899
3f9b14ff 1900 /* A map from decl to stack partition. */
39c8aaa4 1901 decl_to_stack_part = new hash_map<tree, size_t>;
ff28a94d
JH
1902
1903 /* Initialize local stack smashing state. */
1904 has_protected_decls = false;
1905 has_short_buffer = false;
1906}
1907
1908/* Free up stack variable graph data. */
1909static void
1910fini_vars_expansion (void)
1911{
3f9b14ff
SB
1912 bitmap_obstack_release (&stack_var_bitmap_obstack);
1913 if (stack_vars)
1914 XDELETEVEC (stack_vars);
1915 if (stack_vars_sorted)
1916 XDELETEVEC (stack_vars_sorted);
ff28a94d 1917 stack_vars = NULL;
9b44f5d9 1918 stack_vars_sorted = NULL;
ff28a94d 1919 stack_vars_alloc = stack_vars_num = 0;
39c8aaa4 1920 delete decl_to_stack_part;
47598145 1921 decl_to_stack_part = NULL;
ff28a94d
JH
1922}
1923
30925d94
AO
1924/* Make a fair guess for the size of the stack frame of the function
1925 in NODE. This doesn't have to be exact, the result is only used in
1926 the inline heuristics. So we don't want to run the full stack var
1927 packing algorithm (which is quadratic in the number of stack vars).
1928 Instead, we calculate the total size of all stack vars. This turns
1929 out to be a pretty fair estimate -- packing of stack vars doesn't
1930 happen very often. */
b5a430f3 1931
ff28a94d 1932HOST_WIDE_INT
30925d94 1933estimated_stack_frame_size (struct cgraph_node *node)
ff28a94d 1934{
5e48d894 1935 poly_int64 size = 0;
b5a430f3 1936 size_t i;
bb7e6d55 1937 tree var;
67348ccc 1938 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
30925d94 1939
bb7e6d55 1940 push_cfun (fn);
ff28a94d 1941
3f9b14ff
SB
1942 init_vars_expansion ();
1943
824f71b9
RG
1944 FOR_EACH_LOCAL_DECL (fn, i, var)
1945 if (auto_var_in_fn_p (var, fn->decl))
1946 size += expand_one_var (var, true, false);
b5a430f3 1947
ff28a94d
JH
1948 if (stack_vars_num > 0)
1949 {
b5a430f3
SB
1950 /* Fake sorting the stack vars for account_stack_vars (). */
1951 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1952 for (i = 0; i < stack_vars_num; ++i)
1953 stack_vars_sorted[i] = i;
ff28a94d 1954 size += account_stack_vars ();
ff28a94d 1955 }
3f9b14ff
SB
1956
1957 fini_vars_expansion ();
2e1ec94f 1958 pop_cfun ();
5e48d894 1959 return estimated_poly_value (size);
ff28a94d
JH
1960}
1961
f6bc1c4a
HS
1962/* Helper routine to check if a record or union contains an array field. */
1963
1964static int
1965record_or_union_type_has_array_p (const_tree tree_type)
1966{
1967 tree fields = TYPE_FIELDS (tree_type);
1968 tree f;
1969
1970 for (f = fields; f; f = DECL_CHAIN (f))
1971 if (TREE_CODE (f) == FIELD_DECL)
1972 {
1973 tree field_type = TREE_TYPE (f);
1974 if (RECORD_OR_UNION_TYPE_P (field_type)
1975 && record_or_union_type_has_array_p (field_type))
1976 return 1;
1977 if (TREE_CODE (field_type) == ARRAY_TYPE)
1978 return 1;
1979 }
1980 return 0;
1981}
1982
6545746e
FW
1983/* Check if the current function has local referenced variables that
1984 have their addresses taken, contain an array, or are arrays. */
1985
1986static bool
1987stack_protect_decl_p ()
1988{
1989 unsigned i;
1990 tree var;
1991
1992 FOR_EACH_LOCAL_DECL (cfun, i, var)
1993 if (!is_global_var (var))
1994 {
1995 tree var_type = TREE_TYPE (var);
8813a647 1996 if (VAR_P (var)
6545746e
FW
1997 && (TREE_CODE (var_type) == ARRAY_TYPE
1998 || TREE_ADDRESSABLE (var)
1999 || (RECORD_OR_UNION_TYPE_P (var_type)
2000 && record_or_union_type_has_array_p (var_type))))
2001 return true;
2002 }
2003 return false;
2004}
2005
2006/* Check if the current function has calls that use a return slot. */
2007
2008static bool
2009stack_protect_return_slot_p ()
2010{
2011 basic_block bb;
2012
2013 FOR_ALL_BB_FN (bb, cfun)
2014 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2015 !gsi_end_p (gsi); gsi_next (&gsi))
2016 {
355fe088 2017 gimple *stmt = gsi_stmt (gsi);
6545746e
FW
2018 /* This assumes that calls to internal-only functions never
2019 use a return slot. */
2020 if (is_gimple_call (stmt)
2021 && !gimple_call_internal_p (stmt)
2022 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2023 gimple_call_fndecl (stmt)))
2024 return true;
2025 }
2026 return false;
2027}
2028
1f6d3a08 2029/* Expand all variables used in the function. */
727a31fa 2030
b47aae36 2031static rtx_insn *
727a31fa
RH
2032expand_used_vars (void)
2033{
c021f10b 2034 tree var, outer_block = DECL_INITIAL (current_function_decl);
8c681247 2035 auto_vec<tree> maybe_local_decls;
b47aae36 2036 rtx_insn *var_end_seq = NULL;
4e3825db 2037 unsigned i;
c021f10b 2038 unsigned len;
f6bc1c4a 2039 bool gen_stack_protect_signal = false;
727a31fa 2040
1f6d3a08
RH
2041 /* Compute the phase of the stack frame for this function. */
2042 {
2043 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2a31c321 2044 int off = targetm.starting_frame_offset () % align;
1f6d3a08
RH
2045 frame_phase = off ? align - off : 0;
2046 }
727a31fa 2047
3f9b14ff
SB
2048 /* Set TREE_USED on all variables in the local_decls. */
2049 FOR_EACH_LOCAL_DECL (cfun, i, var)
2050 TREE_USED (var) = 1;
2051 /* Clear TREE_USED on all variables associated with a block scope. */
2052 clear_tree_used (DECL_INITIAL (current_function_decl));
2053
ff28a94d 2054 init_vars_expansion ();
7d69de61 2055
8f51aa6b
IZ
2056 if (targetm.use_pseudo_pic_reg ())
2057 pic_offset_table_rtx = gen_reg_rtx (Pmode);
2058
4e3825db
MM
2059 for (i = 0; i < SA.map->num_partitions; i++)
2060 {
f11a7b6d
AO
2061 if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2062 continue;
2063
4e3825db
MM
2064 tree var = partition_to_var (SA.map, i);
2065
ea057359 2066 gcc_assert (!virtual_operand_p (var));
70b5e7dc 2067
1f9ceff1 2068 expand_one_ssa_partition (var);
64d7fb90 2069 }
7eb9f42e 2070
f6bc1c4a 2071 if (flag_stack_protect == SPCT_FLAG_STRONG)
6545746e
FW
2072 gen_stack_protect_signal
2073 = stack_protect_decl_p () || stack_protect_return_slot_p ();
f6bc1c4a 2074
cb91fab0 2075 /* At this point all variables on the local_decls with TREE_USED
1f6d3a08 2076 set are not associated with any block scope. Lay them out. */
c021f10b 2077
9771b263 2078 len = vec_safe_length (cfun->local_decls);
c021f10b 2079 FOR_EACH_LOCAL_DECL (cfun, i, var)
1f6d3a08 2080 {
1f6d3a08
RH
2081 bool expand_now = false;
2082
4e3825db
MM
2083 /* Expanded above already. */
2084 if (is_gimple_reg (var))
eb7adebc
MM
2085 {
2086 TREE_USED (var) = 0;
3adcf52c 2087 goto next;
eb7adebc 2088 }
1f6d3a08
RH
2089 /* We didn't set a block for static or extern because it's hard
2090 to tell the difference between a global variable (re)declared
2091 in a local scope, and one that's really declared there to
2092 begin with. And it doesn't really matter much, since we're
2093 not giving them stack space. Expand them now. */
4e3825db 2094 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1f6d3a08
RH
2095 expand_now = true;
2096
ee2e8462
EB
2097 /* Expand variables not associated with any block now. Those created by
2098 the optimizers could be live anywhere in the function. Those that
2099 could possibly have been scoped originally and detached from their
2100 block will have their allocation deferred so we coalesce them with
2101 others when optimization is enabled. */
1f6d3a08
RH
2102 else if (TREE_USED (var))
2103 expand_now = true;
2104
2105 /* Finally, mark all variables on the list as used. We'll use
2106 this in a moment when we expand those associated with scopes. */
2107 TREE_USED (var) = 1;
2108
2109 if (expand_now)
3adcf52c
JM
2110 expand_one_var (var, true, true);
2111
2112 next:
2113 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
802e9f8e 2114 {
3adcf52c
JM
2115 rtx rtl = DECL_RTL_IF_SET (var);
2116
2117 /* Keep artificial non-ignored vars in cfun->local_decls
2118 chain until instantiate_decls. */
2119 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 2120 add_local_decl (cfun, var);
6c6366f6 2121 else if (rtl == NULL_RTX)
c021f10b
NF
2122 /* If rtl isn't set yet, which can happen e.g. with
2123 -fstack-protector, retry before returning from this
2124 function. */
9771b263 2125 maybe_local_decls.safe_push (var);
802e9f8e 2126 }
1f6d3a08 2127 }
1f6d3a08 2128
c021f10b
NF
2129 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2130
2131 +-----------------+-----------------+
2132 | ...processed... | ...duplicates...|
2133 +-----------------+-----------------+
2134 ^
2135 +-- LEN points here.
2136
2137 We just want the duplicates, as those are the artificial
2138 non-ignored vars that we want to keep until instantiate_decls.
2139 Move them down and truncate the array. */
9771b263
DN
2140 if (!vec_safe_is_empty (cfun->local_decls))
2141 cfun->local_decls->block_remove (0, len);
c021f10b 2142
1f6d3a08
RH
2143 /* At this point, all variables within the block tree with TREE_USED
2144 set are actually used by the optimized function. Lay them out. */
2145 expand_used_vars_for_block (outer_block, true);
2146
2147 if (stack_vars_num > 0)
2148 {
47598145 2149 add_scope_conflicts ();
1f6d3a08 2150
c22cacf3 2151 /* If stack protection is enabled, we don't share space between
7d69de61 2152 vulnerable data and non-vulnerable data. */
5434dc07
MD
2153 if (flag_stack_protect != 0
2154 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2155 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2156 && lookup_attribute ("stack_protect",
2157 DECL_ATTRIBUTES (current_function_decl)))))
7d69de61
RH
2158 add_stack_protection_conflicts ();
2159
c22cacf3 2160 /* Now that we have collected all stack variables, and have computed a
1f6d3a08
RH
2161 minimal interference graph, attempt to save some stack space. */
2162 partition_stack_vars ();
2163 if (dump_file)
2164 dump_stack_var_partition ();
7d69de61
RH
2165 }
2166
f6bc1c4a
HS
2167 switch (flag_stack_protect)
2168 {
2169 case SPCT_FLAG_ALL:
2170 create_stack_guard ();
2171 break;
2172
2173 case SPCT_FLAG_STRONG:
2174 if (gen_stack_protect_signal
5434dc07
MD
2175 || cfun->calls_alloca || has_protected_decls
2176 || lookup_attribute ("stack_protect",
2177 DECL_ATTRIBUTES (current_function_decl)))
f6bc1c4a
HS
2178 create_stack_guard ();
2179 break;
2180
2181 case SPCT_FLAG_DEFAULT:
5434dc07
MD
2182 if (cfun->calls_alloca || has_protected_decls
2183 || lookup_attribute ("stack_protect",
2184 DECL_ATTRIBUTES (current_function_decl)))
c3284718 2185 create_stack_guard ();
f6bc1c4a
HS
2186 break;
2187
5434dc07
MD
2188 case SPCT_FLAG_EXPLICIT:
2189 if (lookup_attribute ("stack_protect",
2190 DECL_ATTRIBUTES (current_function_decl)))
2191 create_stack_guard ();
2192 break;
f6bc1c4a
HS
2193 default:
2194 ;
2195 }
1f6d3a08 2196
7d69de61
RH
2197 /* Assign rtl to each variable based on these partitions. */
2198 if (stack_vars_num > 0)
2199 {
f3ddd692
JJ
2200 struct stack_vars_data data;
2201
e361382f
JJ
2202 data.asan_base = NULL_RTX;
2203 data.asan_alignb = 0;
f3ddd692 2204
7d69de61
RH
2205 /* Reorder decls to be protected by iterating over the variables
2206 array multiple times, and allocating out of each phase in turn. */
c22cacf3 2207 /* ??? We could probably integrate this into the qsort we did
7d69de61
RH
2208 earlier, such that we naturally see these variables first,
2209 and thus naturally allocate things in the right order. */
2210 if (has_protected_decls)
2211 {
2212 /* Phase 1 contains only character arrays. */
f3ddd692 2213 expand_stack_vars (stack_protect_decl_phase_1, &data);
7d69de61
RH
2214
2215 /* Phase 2 contains other kinds of arrays. */
5434dc07
MD
2216 if (flag_stack_protect == SPCT_FLAG_ALL
2217 || flag_stack_protect == SPCT_FLAG_STRONG
2218 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2219 && lookup_attribute ("stack_protect",
2220 DECL_ATTRIBUTES (current_function_decl))))
f3ddd692 2221 expand_stack_vars (stack_protect_decl_phase_2, &data);
7d69de61
RH
2222 }
2223
c461d263 2224 if (asan_sanitize_stack_p ())
f3ddd692
JJ
2225 /* Phase 3, any partitions that need asan protection
2226 in addition to phase 1 and 2. */
2227 expand_stack_vars (asan_decl_phase_3, &data);
2228
f075bd95
RS
2229 /* ASAN description strings don't yet have a syntax for expressing
2230 polynomial offsets. */
2231 HOST_WIDE_INT prev_offset;
2232 if (!data.asan_vec.is_empty ()
2233 && frame_offset.is_constant (&prev_offset))
f3ddd692 2234 {
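	  /* Allocate a red zone adjacent to the protected variables.  When
	     the partition needs an alignment larger than ASAN_RED_ZONE_SIZE,
	     grow the red zone so that sz + redzonesz stays a multiple of
	     that alignment.  */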
e361382f
JJ
2235 HOST_WIDE_INT offset, sz, redzonesz;
2236 redzonesz = ASAN_RED_ZONE_SIZE;
2237 sz = data.asan_vec[0] - prev_offset;
2238 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2239 && data.asan_alignb <= 4096
3dc87cc0 2240 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
e361382f
JJ
2241 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2242 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
f075bd95
RS
2243 /* Allocating a constant amount of space from a constant
2244 starting offset must give a constant result. */
2245 offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
2246 .to_constant ());
9771b263
DN
2247 data.asan_vec.safe_push (prev_offset);
2248 data.asan_vec.safe_push (offset);
e5dcd695
LZ
2249 /* Leave space for alignment if STRICT_ALIGNMENT. */
2250 if (STRICT_ALIGNMENT)
2251 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2252 << ASAN_SHADOW_SHIFT)
2253 / BITS_PER_UNIT, 1);
f3ddd692
JJ
2254
2255 var_end_seq
2256 = asan_emit_stack_protection (virtual_stack_vars_rtx,
e361382f
JJ
2257 data.asan_base,
2258 data.asan_alignb,
9771b263 2259 data.asan_vec.address (),
e361382f 2260 data.asan_decl_vec.address (),
9771b263 2261 data.asan_vec.length ());
f3ddd692
JJ
2262 }
2263
2264 expand_stack_vars (NULL, &data);
1f6d3a08
RH
2265 }
2266
5094f7d5 2267 if (asan_sanitize_allocas_p () && cfun->calls_alloca)
e3174bdf
MO
2268 var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
2269 virtual_stack_vars_rtx,
2270 var_end_seq);
2271
3f9b14ff
SB
2272 fini_vars_expansion ();
2273
6c6366f6
JJ
2274 /* If there were any artificial non-ignored vars without rtl
2275 found earlier, see if deferred stack allocation hasn't assigned
2276 rtl to them. */
9771b263 2277 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
6c6366f6 2278 {
6c6366f6
JJ
2279 rtx rtl = DECL_RTL_IF_SET (var);
2280
6c6366f6
JJ
2281 /* Keep artificial non-ignored vars in cfun->local_decls
2282 chain until instantiate_decls. */
2283 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 2284 add_local_decl (cfun, var);
6c6366f6
JJ
2285 }
2286
1f6d3a08
RH
2287 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2288 if (STACK_ALIGNMENT_NEEDED)
2289 {
2290 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
f075bd95
RS
2291 if (FRAME_GROWS_DOWNWARD)
2292 frame_offset = aligned_lower_bound (frame_offset, align);
2293 else
2294 frame_offset = aligned_upper_bound (frame_offset, align);
1f6d3a08 2295 }
f3ddd692
JJ
2296
2297 return var_end_seq;
727a31fa
RH
2298}
2299
2300
b7211528
SB
2301/* If we need to produce a detailed dump, print the tree representation
2302 for STMT to the dump file. SINCE is the last RTX after which the RTL
2303 generated for STMT should have been appended. */
2304
2305static void
355fe088 2306maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
b7211528
SB
2307{
2308 if (dump_file && (dump_flags & TDF_DETAILS))
2309 {
2310 fprintf (dump_file, "\n;; ");
b5b8b0ac
AO
2311 print_gimple_stmt (dump_file, stmt, 0,
2312 TDF_SLIM | (dump_flags & TDF_LINENO));
b7211528
SB
2313 fprintf (dump_file, "\n");
2314
2315 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2316 }
2317}
2318
8b11009b
ZD
2319/* Maps the blocks that do not contain tree labels to rtx labels. */
2320
134aa83c 2321static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
8b11009b 2322
a9b77cd1
ZD
2323/* Returns the label_rtx expression for a label starting basic block BB. */
2324
1476d1bd 2325static rtx_code_label *
726a989a 2326label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
a9b77cd1 2327{
726a989a
RB
2328 gimple_stmt_iterator gsi;
2329 tree lab;
a9b77cd1
ZD
2330
2331 if (bb->flags & BB_RTL)
2332 return block_label (bb);
2333
134aa83c 2334 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
8b11009b 2335 if (elt)
39c8aaa4 2336 return *elt;
8b11009b
ZD
2337
2338 /* Find the tree label if it is present. */
b8698a0f 2339
726a989a 2340 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
a9b77cd1 2341 {
538dd0b7
DM
2342 glabel *lab_stmt;
2343
2344 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2345 if (!lab_stmt)
a9b77cd1
ZD
2346 break;
2347
726a989a 2348 lab = gimple_label_label (lab_stmt);
a9b77cd1
ZD
2349 if (DECL_NONLOCAL (lab))
2350 break;
2351
1476d1bd 2352 return jump_target_rtx (lab);
a9b77cd1
ZD
2353 }
2354
19f8b229 2355 rtx_code_label *l = gen_label_rtx ();
39c8aaa4
TS
2356 lab_rtx_for_bb->put (bb, l);
2357 return l;
a9b77cd1
ZD
2358}
2359
726a989a 2360
529ff441
MM
2361/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2362 of a basic block where we just expanded the conditional at the end,
315adeda
MM
2363 possibly clean up the CFG and instruction sequence. LAST is the
2364 last instruction before the just emitted jump sequence. */
529ff441
MM
2365
2366static void
b47aae36 2367maybe_cleanup_end_of_block (edge e, rtx_insn *last)
529ff441
MM
2368{
2369 /* Special case: when jumpif decides that the condition is
2370 trivial it emits an unconditional jump (and the necessary
2371 barrier). But we still have two edges, the fallthru one is
2372 wrong. purge_dead_edges would clean this up later. Unfortunately
2373 we have to insert insns (and split edges) before
2374 find_many_sub_basic_blocks and hence before purge_dead_edges.
2375 But splitting edges might create new blocks which depend on the
2376 fact that if there are two edges there's no barrier. So the
2377 barrier would get lost and verify_flow_info would ICE. Instead
2378 of auditing all edge splitters to care for the barrier (which
2379 normally isn't there in a cleaned CFG), fix it here. */
2380 if (BARRIER_P (get_last_insn ()))
2381 {
b47aae36 2382 rtx_insn *insn;
529ff441
MM
2383 remove_edge (e);
2384 /* Now, we have a single successor block, if we have insns to
2385 insert on the remaining edge we potentially will insert
2386 it at the end of this block (if the dest block isn't feasible)
2387 in order to avoid splitting the edge. This insertion will take
2388 place in front of the last jump. But we might have emitted
2389 multiple jumps (conditional and one unconditional) to the
2390 same destination. Inserting in front of the last one then
2391 is a problem. See PR 40021. We fix this by deleting all
2392 jumps except the last unconditional one. */
2393 insn = PREV_INSN (get_last_insn ());
2394 /* Make sure we have an unconditional jump. Otherwise we're
2395 confused. */
2396 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
315adeda 2397 for (insn = PREV_INSN (insn); insn != last;)
529ff441
MM
2398 {
2399 insn = PREV_INSN (insn);
2400 if (JUMP_P (NEXT_INSN (insn)))
90eb3e33 2401 {
8a269cb7 2402 if (!any_condjump_p (NEXT_INSN (insn)))
90eb3e33
JJ
2403 {
2404 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2405 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2406 }
2407 delete_insn (NEXT_INSN (insn));
2408 }
529ff441
MM
2409 }
2410 }
2411}
2412
726a989a 2413/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
80c7a9eb
RH
2414 Returns a new basic block if we've terminated the current basic
2415 block and created a new one. */
2416
2417static basic_block
538dd0b7 2418expand_gimple_cond (basic_block bb, gcond *stmt)
80c7a9eb
RH
2419{
2420 basic_block new_bb, dest;
80c7a9eb
RH
2421 edge true_edge;
2422 edge false_edge;
b47aae36 2423 rtx_insn *last2, *last;
28ed065e
MM
2424 enum tree_code code;
2425 tree op0, op1;
2426
2427 code = gimple_cond_code (stmt);
2428 op0 = gimple_cond_lhs (stmt);
2429 op1 = gimple_cond_rhs (stmt);
2430 /* We're sometimes presented with such code:
2431 D.123_1 = x < y;
2432 if (D.123_1 != 0)
2433 ...
2434 This would expand to two comparisons which then later might
2435 be cleaned up by combine. But some pattern matchers like if-conversion
2436 work better when there's only one compare, so make up for this
 2437 here as a special exception if TER would have made the same change. */
31348d52 2438 if (SA.values
28ed065e 2439 && TREE_CODE (op0) == SSA_NAME
31348d52
RB
2440 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2441 && TREE_CODE (op1) == INTEGER_CST
2442 && ((gimple_cond_code (stmt) == NE_EXPR
2443 && integer_zerop (op1))
2444 || (gimple_cond_code (stmt) == EQ_EXPR
2445 && integer_onep (op1)))
28ed065e
MM
2446 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2447 {
355fe088 2448 gimple *second = SSA_NAME_DEF_STMT (op0);
e83f4b68 2449 if (gimple_code (second) == GIMPLE_ASSIGN)
28ed065e 2450 {
e83f4b68
MM
2451 enum tree_code code2 = gimple_assign_rhs_code (second);
2452 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2453 {
2454 code = code2;
2455 op0 = gimple_assign_rhs1 (second);
2456 op1 = gimple_assign_rhs2 (second);
2457 }
2d52a3a1
ZC
2458 /* If jumps are cheap and the target does not support conditional
2459 compare, turn some more codes into jumpy sequences. */
2460 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2461 && targetm.gen_ccmp_first == NULL)
e83f4b68
MM
2462 {
2463 if ((code2 == BIT_AND_EXPR
2464 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2465 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2466 || code2 == TRUTH_AND_EXPR)
2467 {
2468 code = TRUTH_ANDIF_EXPR;
2469 op0 = gimple_assign_rhs1 (second);
2470 op1 = gimple_assign_rhs2 (second);
2471 }
2472 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2473 {
2474 code = TRUTH_ORIF_EXPR;
2475 op0 = gimple_assign_rhs1 (second);
2476 op1 = gimple_assign_rhs2 (second);
2477 }
2478 }
28ed065e
MM
2479 }
2480 }
b7211528
SB
2481
2482 last2 = last = get_last_insn ();
80c7a9eb
RH
2483
2484 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5368224f 2485 set_curr_insn_location (gimple_location (stmt));
80c7a9eb
RH
2486
2487 /* These flags have no purpose in RTL land. */
2488 true_edge->flags &= ~EDGE_TRUE_VALUE;
2489 false_edge->flags &= ~EDGE_FALSE_VALUE;
2490
2491 /* We can either have a pure conditional jump with one fallthru edge or
2492 two-way jump that needs to be decomposed into two basic blocks. */
a9b77cd1 2493 if (false_edge->dest == bb->next_bb)
80c7a9eb 2494 {
40e90eac
JJ
2495 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2496 true_edge->probability);
726a989a 2497 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2f13f2de 2498 if (true_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2499 set_curr_insn_location (true_edge->goto_locus);
a9b77cd1 2500 false_edge->flags |= EDGE_FALLTHRU;
315adeda 2501 maybe_cleanup_end_of_block (false_edge, last);
80c7a9eb
RH
2502 return NULL;
2503 }
a9b77cd1 2504 if (true_edge->dest == bb->next_bb)
80c7a9eb 2505 {
40e90eac
JJ
2506 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2507 false_edge->probability);
726a989a 2508 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2f13f2de 2509 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2510 set_curr_insn_location (false_edge->goto_locus);
a9b77cd1 2511 true_edge->flags |= EDGE_FALLTHRU;
315adeda 2512 maybe_cleanup_end_of_block (true_edge, last);
80c7a9eb
RH
2513 return NULL;
2514 }
80c7a9eb 2515
40e90eac
JJ
2516 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2517 true_edge->probability);
80c7a9eb 2518 last = get_last_insn ();
2f13f2de 2519 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2520 set_curr_insn_location (false_edge->goto_locus);
a9b77cd1 2521 emit_jump (label_rtx_for_bb (false_edge->dest));
80c7a9eb 2522
1130d5e3 2523 BB_END (bb) = last;
80c7a9eb 2524 if (BARRIER_P (BB_END (bb)))
1130d5e3 2525 BB_END (bb) = PREV_INSN (BB_END (bb));
80c7a9eb
RH
2526 update_bb_for_insn (bb);
2527
2528 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2529 dest = false_edge->dest;
2530 redirect_edge_succ (false_edge, new_bb);
2531 false_edge->flags |= EDGE_FALLTHRU;
ef30ab83 2532 new_bb->count = false_edge->count ();
ba7629e2
RB
2533 loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2534 add_bb_to_loop (new_bb, loop);
2535 if (loop->latch == bb
2536 && loop->header == dest)
2537 loop->latch = new_bb;
357067f2 2538 make_single_succ_edge (new_bb, dest, 0);
80c7a9eb 2539 if (BARRIER_P (BB_END (new_bb)))
1130d5e3 2540 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
80c7a9eb
RH
2541 update_bb_for_insn (new_bb);
2542
726a989a 2543 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
c22cacf3 2544
2f13f2de 2545 if (true_edge->goto_locus != UNKNOWN_LOCATION)
7787b4aa 2546 {
5368224f
DC
2547 set_curr_insn_location (true_edge->goto_locus);
2548 true_edge->goto_locus = curr_insn_location ();
7787b4aa 2549 }
7787b4aa 2550
80c7a9eb
RH
2551 return new_bb;
2552}
2553
0a35513e
AH
2554/* Mark all calls that can have a transaction restart. */
2555
2556static void
355fe088 2557mark_transaction_restart_calls (gimple *stmt)
0a35513e
AH
2558{
2559 struct tm_restart_node dummy;
50979347 2560 tm_restart_node **slot;
0a35513e
AH
2561
2562 if (!cfun->gimple_df->tm_restart)
2563 return;
2564
2565 dummy.stmt = stmt;
50979347 2566 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
0a35513e
AH
2567 if (slot)
2568 {
50979347 2569 struct tm_restart_node *n = *slot;
0a35513e 2570 tree list = n->label_or_list;
b47aae36 2571 rtx_insn *insn;
0a35513e
AH
2572
2573 for (insn = next_real_insn (get_last_insn ());
2574 !CALL_P (insn);
2575 insn = next_real_insn (insn))
2576 continue;
2577
2578 if (TREE_CODE (list) == LABEL_DECL)
2579 add_reg_note (insn, REG_TM, label_rtx (list));
2580 else
2581 for (; list ; list = TREE_CHAIN (list))
2582 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2583 }
2584}
2585
28ed065e
MM
2586/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2587 statement STMT. */
2588
2589static void
538dd0b7 2590expand_call_stmt (gcall *stmt)
28ed065e 2591{
25583c4f 2592 tree exp, decl, lhs;
e23817b3 2593 bool builtin_p;
e7925582 2594 size_t i;
28ed065e 2595
25583c4f
RS
2596 if (gimple_call_internal_p (stmt))
2597 {
2598 expand_internal_call (stmt);
2599 return;
2600 }
2601
4cfe7a6c
RS
2602 /* If this is a call to a built-in function and it has no effect other
2603 than setting the lhs, try to implement it using an internal function
2604 instead. */
2605 decl = gimple_call_fndecl (stmt);
2606 if (gimple_call_lhs (stmt)
2607 && !gimple_has_side_effects (stmt)
2608 && (optimize || (decl && called_as_built_in (decl))))
2609 {
2610 internal_fn ifn = replacement_internal_fn (stmt);
2611 if (ifn != IFN_LAST)
2612 {
2613 expand_internal_call (ifn, stmt);
2614 return;
2615 }
2616 }
2617
01156003 2618 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
089d1227 2619
01156003 2620 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
089d1227 2621 builtin_p = decl && DECL_BUILT_IN (decl);
01156003 2622
e7925582
EB
2623 /* If this is not a builtin function, the function type through which the
2624 call is made may be different from the type of the function. */
2625 if (!builtin_p)
2626 CALL_EXPR_FN (exp)
b25aa0e8
EB
2627 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2628 CALL_EXPR_FN (exp));
e7925582 2629
28ed065e
MM
2630 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2631 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2632
2633 for (i = 0; i < gimple_call_num_args (stmt); i++)
e23817b3
RG
2634 {
2635 tree arg = gimple_call_arg (stmt, i);
355fe088 2636 gimple *def;
e23817b3
RG
 2637 /* Substitute addresses (TER) into arguments of builtin functions so we
 2638 have a chance to infer more correct alignment information. See PR39954. */
2639 if (builtin_p
2640 && TREE_CODE (arg) == SSA_NAME
2641 && (def = get_gimple_for_ssa_name (arg))
2642 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2643 arg = gimple_assign_rhs1 (def);
2644 CALL_EXPR_ARG (exp, i) = arg;
2645 }
28ed065e 2646
93f28ca7 2647 if (gimple_has_side_effects (stmt))
28ed065e
MM
2648 TREE_SIDE_EFFECTS (exp) = 1;
2649
93f28ca7 2650 if (gimple_call_nothrow_p (stmt))
28ed065e
MM
2651 TREE_NOTHROW (exp) = 1;
2652
cc8bea0a
MS
2653 if (gimple_no_warning_p (stmt))
2654 TREE_NO_WARNING (exp) = 1;
2655
28ed065e 2656 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
9a385c2d 2657 CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
28ed065e 2658 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
63d2a353
MM
2659 if (decl
2660 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
9e878cf1 2661 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
63d2a353
MM
2662 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2663 else
2664 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
28ed065e 2665 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
4c640e26 2666 CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
28ed065e 2667 SET_EXPR_LOCATION (exp, gimple_location (stmt));
d5e254e1 2668 CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
28ed065e 2669
ddb555ed
JJ
2670 /* Ensure RTL is created for debug args. */
2671 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2672 {
9771b263 2673 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
ddb555ed
JJ
2674 unsigned int ix;
2675 tree dtemp;
2676
2677 if (debug_args)
9771b263 2678 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
ddb555ed
JJ
2679 {
2680 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2681 expand_debug_expr (dtemp);
2682 }
2683 }
2684
5c5f0b65 2685 rtx_insn *before_call = get_last_insn ();
25583c4f 2686 lhs = gimple_call_lhs (stmt);
28ed065e
MM
2687 if (lhs)
2688 expand_assignment (lhs, exp, false);
2689 else
4c437f02 2690 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
0a35513e 2691
5c5f0b65
IT
 2692 /* If the gimple call is an indirect call and has the 'nocf_check'
 2693 attribute, find the generated CALL insn and mark it so that no
 2694 control-flow verification is needed for it. */
2695 if (gimple_call_nocf_check_p (stmt)
2696 && !gimple_call_fndecl (stmt))
2697 {
2698 rtx_insn *last = get_last_insn ();
2699 while (!CALL_P (last)
2700 && last != before_call)
2701 last = PREV_INSN (last);
2702
2703 if (last != before_call)
2704 add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
2705 }
2706
0a35513e 2707 mark_transaction_restart_calls (stmt);
28ed065e
MM
2708}
2709
862d0b35
DN
2710
2711/* Generate RTL for an asm statement (explicit assembler code).
2712 STRING is a STRING_CST node containing the assembler code text,
2713 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2714 insn is volatile; don't optimize it. */
2715
2716static void
2717expand_asm_loc (tree string, int vol, location_t locus)
2718{
2719 rtx body;
2720
862d0b35
DN
2721 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2722 ggc_strdup (TREE_STRING_POINTER (string)),
2723 locus);
2724
2725 MEM_VOLATILE_P (body) = vol;
2726
93671519
BE
2727 /* Non-empty basic ASM implicitly clobbers memory. */
2728 if (TREE_STRING_LENGTH (string) != 0)
2729 {
2730 rtx asm_op, clob;
2731 unsigned i, nclobbers;
2732 auto_vec<rtx> input_rvec, output_rvec;
2733 auto_vec<const char *> constraints;
2734 auto_vec<rtx> clobber_rvec;
2735 HARD_REG_SET clobbered_regs;
2736 CLEAR_HARD_REG_SET (clobbered_regs);
2737
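      /* A (mem:BLK (scratch)) clobber is the RTL idiom for "clobbers all
	 memory".  */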
2738 clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2739 clobber_rvec.safe_push (clob);
2740
2741 if (targetm.md_asm_adjust)
2742 targetm.md_asm_adjust (output_rvec, input_rvec,
2743 constraints, clobber_rvec,
2744 clobbered_regs);
2745
2746 asm_op = body;
2747 nclobbers = clobber_rvec.length ();
2748 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2749
2750 XVECEXP (body, 0, 0) = asm_op;
2751 for (i = 0; i < nclobbers; i++)
2752 XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2753 }
2754
862d0b35
DN
2755 emit_insn (body);
2756}
2757
2758/* Return the number of times character C occurs in string S. */
2759static int
2760n_occurrences (int c, const char *s)
2761{
2762 int n = 0;
2763 while (*s)
2764 n += (*s++ == c);
2765 return n;
2766}
2767
2768/* A subroutine of expand_asm_operands. Check that all operands have
2769 the same number of alternatives. Return true if so. */
2770
2771static bool
7ca35180 2772check_operand_nalternatives (const vec<const char *> &constraints)
862d0b35 2773{
7ca35180
RH
2774 unsigned len = constraints.length();
2775 if (len > 0)
862d0b35 2776 {
7ca35180 2777 int nalternatives = n_occurrences (',', constraints[0]);
862d0b35
DN
2778
2779 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2780 {
2781 error ("too many alternatives in %<asm%>");
2782 return false;
2783 }
2784
7ca35180
RH
2785 for (unsigned i = 1; i < len; ++i)
2786 if (n_occurrences (',', constraints[i]) != nalternatives)
2787 {
2788 error ("operand constraints for %<asm%> differ "
2789 "in number of alternatives");
2790 return false;
2791 }
862d0b35 2792 }
862d0b35
DN
2793 return true;
2794}
2795
2796/* Check for overlap between registers marked in CLOBBERED_REGS and
2797 anything inappropriate in T. Emit error and return the register
2798 variable definition for error, NULL_TREE for ok. */
2799
2800static bool
2801tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2802{
2803 /* Conflicts between asm-declared register variables and the clobber
2804 list are not allowed. */
2805 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2806
2807 if (overlap)
2808 {
2809 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2810 DECL_NAME (overlap));
2811
2812 /* Reset registerness to stop multiple errors emitted for a single
2813 variable. */
2814 DECL_REGISTER (overlap) = 0;
2815 return true;
2816 }
2817
2818 return false;
2819}
2820
2821/* Generate RTL for an asm statement with arguments.
2822 STRING is the instruction template.
2823 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2824 Each output or input has an expression in the TREE_VALUE and
2825 a tree list in TREE_PURPOSE which in turn contains a constraint
 2826 name in TREE_PURPOSE (or NULL_TREE) and a constraint string
 2827 in TREE_VALUE.
2828 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2829 that is clobbered by this insn.
2830
2831 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2832 should be the fallthru basic block of the asm goto.
2833
2834 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2835 Some elements of OUTPUTS may be replaced with trees representing temporary
2836 values. The caller should copy those temporary values to the originally
2837 specified lvalues.
2838
2839 VOL nonzero means the insn is volatile; don't optimize it. */
2840
2841static void
6476a8fd 2842expand_asm_stmt (gasm *stmt)
862d0b35 2843{
7ca35180
RH
2844 class save_input_location
2845 {
2846 location_t old;
6476a8fd 2847
7ca35180
RH
2848 public:
2849 explicit save_input_location(location_t where)
6476a8fd 2850 {
7ca35180
RH
2851 old = input_location;
2852 input_location = where;
6476a8fd
RH
2853 }
2854
7ca35180 2855 ~save_input_location()
6476a8fd 2856 {
7ca35180 2857 input_location = old;
6476a8fd 2858 }
7ca35180 2859 };
6476a8fd 2860
7ca35180 2861 location_t locus = gimple_location (stmt);
6476a8fd 2862
7ca35180 2863 if (gimple_asm_input_p (stmt))
6476a8fd 2864 {
7ca35180
RH
2865 const char *s = gimple_asm_string (stmt);
2866 tree string = build_string (strlen (s), s);
2867 expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2868 return;
6476a8fd
RH
2869 }
2870
7ca35180
RH
 2871 /* There are some legacy diagnostics in here, and this also avoids a
 2872 sixth parameter to targetm.md_asm_adjust. */
2873 save_input_location s_i_l(locus);
6476a8fd 2874
7ca35180
RH
2875 unsigned noutputs = gimple_asm_noutputs (stmt);
2876 unsigned ninputs = gimple_asm_ninputs (stmt);
2877 unsigned nlabels = gimple_asm_nlabels (stmt);
2878 unsigned i;
2879
2880 /* ??? Diagnose during gimplification? */
2881 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
6476a8fd 2882 {
7ca35180 2883 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
6476a8fd
RH
2884 return;
2885 }
2886
7ca35180
RH
2887 auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2888 auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2889 auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
6476a8fd 2890
7ca35180 2891 /* Copy the gimple vectors into new vectors that we can manipulate. */
862d0b35 2892
7ca35180
RH
2893 output_tvec.safe_grow (noutputs);
2894 input_tvec.safe_grow (ninputs);
2895 constraints.safe_grow (noutputs + ninputs);
862d0b35 2896
7ca35180
RH
2897 for (i = 0; i < noutputs; ++i)
2898 {
2899 tree t = gimple_asm_output_op (stmt, i);
2900 output_tvec[i] = TREE_VALUE (t);
2901 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2902 }
2903 for (i = 0; i < ninputs; i++)
2904 {
2905 tree t = gimple_asm_input_op (stmt, i);
2906 input_tvec[i] = TREE_VALUE (t);
2907 constraints[i + noutputs]
2908 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2909 }
862d0b35 2910
7ca35180
RH
2911 /* ??? Diagnose during gimplification? */
2912 if (! check_operand_nalternatives (constraints))
2913 return;
862d0b35
DN
2914
2915 /* Count the number of meaningful clobbered registers, ignoring what
2916 we would ignore later. */
7ca35180
RH
2917 auto_vec<rtx> clobber_rvec;
2918 HARD_REG_SET clobbered_regs;
862d0b35 2919 CLEAR_HARD_REG_SET (clobbered_regs);
862d0b35 2920
7ca35180
RH
2921 if (unsigned n = gimple_asm_nclobbers (stmt))
2922 {
2923 clobber_rvec.reserve (n);
2924 for (i = 0; i < n; i++)
2925 {
2926 tree t = gimple_asm_clobber_op (stmt, i);
2927 const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2928 int nregs, j;
862d0b35 2929
7ca35180
RH
2930 j = decode_reg_name_and_count (regname, &nregs);
2931 if (j < 0)
862d0b35 2932 {
7ca35180 2933 if (j == -2)
862d0b35 2934 {
7ca35180
RH
2935 /* ??? Diagnose during gimplification? */
2936 error ("unknown register name %qs in %<asm%>", regname);
2937 }
2938 else if (j == -4)
2939 {
2940 rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2941 clobber_rvec.safe_push (x);
2942 }
2943 else
2944 {
2945 /* Otherwise we should have -1 == empty string
2946 or -3 == cc, which is not a register. */
2947 gcc_assert (j == -1 || j == -3);
862d0b35 2948 }
862d0b35 2949 }
7ca35180
RH
2950 else
2951 for (int reg = j; reg < j + nregs; reg++)
2952 {
2953 /* Clobbering the PIC register is an error. */
2954 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2955 {
2956 /* ??? Diagnose during gimplification? */
2957 error ("PIC register clobbered by %qs in %<asm%>",
2958 regname);
2959 return;
2960 }
2961
2962 SET_HARD_REG_BIT (clobbered_regs, reg);
2963 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
2964 clobber_rvec.safe_push (x);
2965 }
862d0b35
DN
2966 }
2967 }
7ca35180 2968 unsigned nclobbers = clobber_rvec.length();
862d0b35
DN
2969
2970 /* First pass over inputs and outputs checks validity and sets
2971 mark_addressable if needed. */
7ca35180 2972 /* ??? Diagnose during gimplification? */
862d0b35 2973
7ca35180 2974 for (i = 0; i < noutputs; ++i)
862d0b35 2975 {
7ca35180 2976 tree val = output_tvec[i];
862d0b35
DN
2977 tree type = TREE_TYPE (val);
2978 const char *constraint;
2979 bool is_inout;
2980 bool allows_reg;
2981 bool allows_mem;
2982
862d0b35
DN
2983 /* Try to parse the output constraint. If that fails, there's
2984 no point in going further. */
2985 constraint = constraints[i];
2986 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2987 &allows_mem, &allows_reg, &is_inout))
2988 return;
2989
2990 if (! allows_reg
2991 && (allows_mem
2992 || is_inout
2993 || (DECL_P (val)
2994 && REG_P (DECL_RTL (val))
2995 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2996 mark_addressable (val);
862d0b35
DN
2997 }
2998
7ca35180 2999 for (i = 0; i < ninputs; ++i)
862d0b35
DN
3000 {
3001 bool allows_reg, allows_mem;
3002 const char *constraint;
3003
862d0b35 3004 constraint = constraints[i + noutputs];
7ca35180
RH
3005 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3006 constraints.address (),
3007 &allows_mem, &allows_reg))
862d0b35
DN
3008 return;
3009
3010 if (! allows_reg && allows_mem)
7ca35180 3011 mark_addressable (input_tvec[i]);
862d0b35
DN
3012 }
3013
3014 /* Second pass evaluates arguments. */
3015
3016 /* Make sure stack is consistent for asm goto. */
3017 if (nlabels > 0)
3018 do_pending_stack_adjust ();
7ca35180
RH
3019 int old_generating_concat_p = generating_concat_p;
3020
3021 /* Vector of RTX's of evaluated output operands. */
3022 auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
3023 auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
3024 rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
862d0b35 3025
7ca35180
RH
3026 output_rvec.safe_grow (noutputs);
3027
3028 for (i = 0; i < noutputs; ++i)
862d0b35 3029 {
7ca35180 3030 tree val = output_tvec[i];
862d0b35 3031 tree type = TREE_TYPE (val);
7ca35180 3032 bool is_inout, allows_reg, allows_mem, ok;
862d0b35 3033 rtx op;
862d0b35
DN
3034
3035 ok = parse_output_constraint (&constraints[i], i, ninputs,
3036 noutputs, &allows_mem, &allows_reg,
3037 &is_inout);
3038 gcc_assert (ok);
3039
3040 /* If an output operand is not a decl or indirect ref and our constraint
3041 allows a register, make a temporary to act as an intermediate.
7ca35180 3042 Make the asm insn write into that, then we will copy it to
862d0b35
DN
3043 the real output operand. Likewise for promoted variables. */
3044
3045 generating_concat_p = 0;
3046
862d0b35
DN
3047 if ((TREE_CODE (val) == INDIRECT_REF
3048 && allows_mem)
3049 || (DECL_P (val)
3050 && (allows_mem || REG_P (DECL_RTL (val)))
3051 && ! (REG_P (DECL_RTL (val))
3052 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3053 || ! allows_reg
3054 || is_inout)
3055 {
3056 op = expand_expr (val, NULL_RTX, VOIDmode,
3057 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3058 if (MEM_P (op))
3059 op = validize_mem (op);
3060
3061 if (! allows_reg && !MEM_P (op))
3062 error ("output number %d not directly addressable", i);
3063 if ((! allows_mem && MEM_P (op))
3064 || GET_CODE (op) == CONCAT)
3065 {
7ca35180 3066 rtx old_op = op;
862d0b35 3067 op = gen_reg_rtx (GET_MODE (op));
7ca35180
RH
3068
3069 generating_concat_p = old_generating_concat_p;
3070
862d0b35 3071 if (is_inout)
7ca35180
RH
3072 emit_move_insn (op, old_op);
3073
3074 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3075 emit_move_insn (old_op, op);
3076 after_rtl_seq = get_insns ();
3077 after_rtl_end = get_last_insn ();
3078 end_sequence ();
862d0b35
DN
3079 }
3080 }
3081 else
3082 {
3083 op = assign_temp (type, 0, 1);
3084 op = validize_mem (op);
7ca35180
RH
3085 if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3086 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
862d0b35 3087
7ca35180 3088 generating_concat_p = old_generating_concat_p;
862d0b35 3089
7ca35180
RH
3090 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3091 expand_assignment (val, make_tree (type, op), false);
3092 after_rtl_seq = get_insns ();
3093 after_rtl_end = get_last_insn ();
3094 end_sequence ();
862d0b35 3095 }
7ca35180 3096 output_rvec[i] = op;
862d0b35 3097
7ca35180
RH
3098 if (is_inout)
3099 inout_opnum.safe_push (i);
862d0b35
DN
3100 }
3101
7ca35180
RH
3102 auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3103 auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
862d0b35 3104
7ca35180
RH
3105 input_rvec.safe_grow (ninputs);
3106 input_mode.safe_grow (ninputs);
862d0b35 3107
7ca35180 3108 generating_concat_p = 0;
862d0b35 3109
7ca35180 3110 for (i = 0; i < ninputs; ++i)
862d0b35 3111 {
7ca35180
RH
3112 tree val = input_tvec[i];
3113 tree type = TREE_TYPE (val);
3114 bool allows_reg, allows_mem, ok;
862d0b35 3115 const char *constraint;
862d0b35 3116 rtx op;
862d0b35
DN
3117
3118 constraint = constraints[i + noutputs];
7ca35180
RH
3119 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3120 constraints.address (),
3121 &allows_mem, &allows_reg);
862d0b35
DN
3122 gcc_assert (ok);
3123
862d0b35
DN
3124 /* EXPAND_INITIALIZER will not generate code for valid initializer
3125 constants, but will still generate code for other types of operand.
3126 This is the behavior we want for constant constraints. */
3127 op = expand_expr (val, NULL_RTX, VOIDmode,
3128 allows_reg ? EXPAND_NORMAL
3129 : allows_mem ? EXPAND_MEMORY
3130 : EXPAND_INITIALIZER);
3131
3132 /* Never pass a CONCAT to an ASM. */
3133 if (GET_CODE (op) == CONCAT)
3134 op = force_reg (GET_MODE (op), op);
3135 else if (MEM_P (op))
3136 op = validize_mem (op);
3137
3138 if (asm_operand_ok (op, constraint, NULL) <= 0)
3139 {
3140 if (allows_reg && TYPE_MODE (type) != BLKmode)
3141 op = force_reg (TYPE_MODE (type), op);
3142 else if (!allows_mem)
3143 warning (0, "asm operand %d probably doesn%'t match constraints",
3144 i + noutputs);
3145 else if (MEM_P (op))
3146 {
 3147 /* We won't recognize either volatile memory or memory
 3148 with a queued address as available as a memory_operand
 3149 at this point. Ignore it: clearly this *is* a memory. */
3150 }
3151 else
3152 gcc_unreachable ();
3153 }
7ca35180
RH
3154 input_rvec[i] = op;
3155 input_mode[i] = TYPE_MODE (type);
862d0b35
DN
3156 }
3157
862d0b35 3158 /* For in-out operands, copy output rtx to input rtx. */
7ca35180 3159 unsigned ninout = inout_opnum.length();
862d0b35
DN
3160 for (i = 0; i < ninout; i++)
3161 {
3162 int j = inout_opnum[i];
7ca35180 3163 rtx o = output_rvec[j];
862d0b35 3164
7ca35180
RH
3165 input_rvec.safe_push (o);
3166 input_mode.safe_push (GET_MODE (o));
862d0b35 3167
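      /* The added input uses a matching constraint ("0", "1", ...) naming
	 the corresponding output operand, which ties the two to the same
	 location.  */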
7ca35180 3168 char buffer[16];
862d0b35 3169 sprintf (buffer, "%d", j);
7ca35180
RH
3170 constraints.safe_push (ggc_strdup (buffer));
3171 }
3172 ninputs += ninout;
3173
3174 /* Sometimes we wish to automatically clobber registers across an asm.
3175 Case in point is when the i386 backend moved from cc0 to a hard reg --
3176 maintaining source-level compatibility means automatically clobbering
3177 the flags register. */
3178 rtx_insn *after_md_seq = NULL;
3179 if (targetm.md_asm_adjust)
3180 after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3181 constraints, clobber_rvec,
3182 clobbered_regs);
3183
3184 /* Do not allow the hook to change the output and input count,
3185 lest it mess up the operand numbering. */
3186 gcc_assert (output_rvec.length() == noutputs);
3187 gcc_assert (input_rvec.length() == ninputs);
3188 gcc_assert (constraints.length() == noutputs + ninputs);
3189
3190 /* But it certainly can adjust the clobbers. */
3191 nclobbers = clobber_rvec.length();
3192
3193 /* Third pass checks for easy conflicts. */
3194 /* ??? Why are we doing this on trees instead of rtx. */
3195
3196 bool clobber_conflict_found = 0;
3197 for (i = 0; i < noutputs; ++i)
3198 if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3199 clobber_conflict_found = 1;
3200 for (i = 0; i < ninputs - ninout; ++i)
3201 if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3202 clobber_conflict_found = 1;
3203
3204 /* Make vectors for the expression-rtx, constraint strings,
3205 and named operands. */
3206
3207 rtvec argvec = rtvec_alloc (ninputs);
3208 rtvec constraintvec = rtvec_alloc (ninputs);
3209 rtvec labelvec = rtvec_alloc (nlabels);
3210
3211 rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3212 : GET_MODE (output_rvec[0])),
3213 ggc_strdup (gimple_asm_string (stmt)),
618400bc 3214 "", 0, argvec, constraintvec,
7ca35180
RH
3215 labelvec, locus);
3216 MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3217
3218 for (i = 0; i < ninputs; ++i)
3219 {
3220 ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3221 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3222 = gen_rtx_ASM_INPUT_loc (input_mode[i],
3223 constraints[i + noutputs],
3224 locus);
862d0b35
DN
3225 }
3226
3227 /* Copy labels to the vector. */
7ca35180
RH
3228 rtx_code_label *fallthru_label = NULL;
3229 if (nlabels > 0)
3230 {
3231 basic_block fallthru_bb = NULL;
3232 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3233 if (fallthru)
3234 fallthru_bb = fallthru->dest;
3235
3236 for (i = 0; i < nlabels; ++i)
862d0b35 3237 {
7ca35180 3238 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
e67d1102 3239 rtx_insn *r;
7ca35180
RH
3240 /* If asm goto has any labels in the fallthru basic block, use
3241 a label that we emit immediately after the asm goto. Expansion
3242 may insert further instructions into the same basic block after
3243 asm goto and if we don't do this, insertion of instructions on
3244 the fallthru edge might misbehave. See PR58670. */
3245 if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
3246 {
3247 if (fallthru_label == NULL_RTX)
3248 fallthru_label = gen_label_rtx ();
3249 r = fallthru_label;
3250 }
3251 else
3252 r = label_rtx (label);
3253 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
862d0b35 3254 }
862d0b35
DN
3255 }
3256
862d0b35
DN
3257 /* Now, for each output, construct an rtx
3258 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3259 ARGVEC CONSTRAINTS OPNAMES))
3260 If there is more than one, put them inside a PARALLEL. */
3261
3262 if (nlabels > 0 && nclobbers == 0)
3263 {
3264 gcc_assert (noutputs == 0);
3265 emit_jump_insn (body);
3266 }
3267 else if (noutputs == 0 && nclobbers == 0)
3268 {
3269 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3270 emit_insn (body);
3271 }
3272 else if (noutputs == 1 && nclobbers == 0)
3273 {
7ca35180
RH
3274 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3275 emit_insn (gen_rtx_SET (output_rvec[0], body));
862d0b35
DN
3276 }
3277 else
3278 {
3279 rtx obody = body;
3280 int num = noutputs;
3281
3282 if (num == 0)
3283 num = 1;
3284
3285 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3286
3287 /* For each output operand, store a SET. */
7ca35180 3288 for (i = 0; i < noutputs; ++i)
862d0b35 3289 {
7ca35180
RH
3290 rtx src, o = output_rvec[i];
3291 if (i == 0)
3292 {
3293 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3294 src = obody;
3295 }
3296 else
3297 {
3298 src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3299 ASM_OPERANDS_TEMPLATE (obody),
3300 constraints[i], i, argvec,
3301 constraintvec, labelvec, locus);
3302 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3303 }
3304 XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
862d0b35
DN
3305 }
3306
3307 /* If there are no outputs (but there are some clobbers)
3308 store the bare ASM_OPERANDS into the PARALLEL. */
862d0b35
DN
3309 if (i == 0)
3310 XVECEXP (body, 0, i++) = obody;
3311
3312 /* Store (clobber REG) for each clobbered register specified. */
7ca35180 3313 for (unsigned j = 0; j < nclobbers; ++j)
862d0b35 3314 {
7ca35180 3315 rtx clobbered_reg = clobber_rvec[j];
862d0b35 3316
7ca35180
RH
 3317	  /* Do a sanity check for overlap between clobbers and,
 3318	     respectively, inputs and outputs that has not been handled.
 3319	     Such overlap should have been detected and reported above.  */
3320 if (!clobber_conflict_found && REG_P (clobbered_reg))
862d0b35 3321 {
7ca35180
RH
3322 /* We test the old body (obody) contents to avoid
3323 tripping over the under-construction body. */
3324 for (unsigned k = 0; k < noutputs; ++k)
3325 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3326 internal_error ("asm clobber conflict with output operand");
3327
3328 for (unsigned k = 0; k < ninputs - ninout; ++k)
3329 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3330 internal_error ("asm clobber conflict with input operand");
862d0b35
DN
3331 }
3332
7ca35180 3333 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
862d0b35
DN
3334 }
3335
3336 if (nlabels > 0)
3337 emit_jump_insn (body);
3338 else
3339 emit_insn (body);
3340 }
3341
7ca35180
RH
3342 generating_concat_p = old_generating_concat_p;
3343
862d0b35
DN
3344 if (fallthru_label)
3345 emit_label (fallthru_label);
3346
7ca35180
RH
3347 if (after_md_seq)
3348 emit_insn (after_md_seq);
3349 if (after_rtl_seq)
3350 emit_insn (after_rtl_seq);
862d0b35 3351
6476a8fd 3352 free_temp_slots ();
7ca35180 3353 crtl->has_asm_statement = 1;
862d0b35
DN
3354}
3355
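/* Illustration only, not part of the original source: a user-level
   inline asm of the kind expanded above.  The "+r" operand is an in/out
   operand, so it lands in both output_rvec and input_rvec with a
   matching numeric constraint, and on i386 the md_asm_adjust hook adds
   the implicit flags clobber even though none is written.  The template,
   constraint and function name are assumptions chosen for illustration.

     static inline int
     double_it (int x)
     {
       __asm__ volatile ("addl %0, %0" : "+r" (x));
       return x;
     }
*/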
3356/* Emit code to jump to the address
3357 specified by the pointer expression EXP. */
3358
3359static void
3360expand_computed_goto (tree exp)
3361{
3362 rtx x = expand_normal (exp);
3363
862d0b35
DN
3364 do_pending_stack_adjust ();
3365 emit_indirect_jump (x);
3366}
3367
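/* Illustration only: the GNU C computed-goto construct that reaches
   expand_computed_goto -- EXP is the dereferenced label address.  The
   function and table names below are made up for the example.

     int
     dispatch (int i)
     {
       static void *tab[] = { &&lab_a, &&lab_b };
       goto *tab[i];
     lab_a:
       return 1;
     lab_b:
       return 2;
     }
*/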
3368/* Generate RTL code for a `goto' statement with target label LABEL.
3369 LABEL should be a LABEL_DECL tree node that was or will later be
3370 defined with `expand_label'. */
3371
3372static void
3373expand_goto (tree label)
3374{
b2b29377
MM
3375 if (flag_checking)
3376 {
3377 /* Check for a nonlocal goto to a containing function. Should have
3378 gotten translated to __builtin_nonlocal_goto. */
3379 tree context = decl_function_context (label);
3380 gcc_assert (!context || context == current_function_decl);
3381 }
862d0b35 3382
1476d1bd 3383 emit_jump (jump_target_rtx (label));
862d0b35
DN
3384}
3385
3386/* Output a return with no value. */
3387
3388static void
3389expand_null_return_1 (void)
3390{
3391 clear_pending_stack_adjust ();
3392 do_pending_stack_adjust ();
3393 emit_jump (return_label);
3394}
3395
3396/* Generate RTL to return from the current function, with no value.
3397 (That is, we do not do anything about returning any value.) */
3398
3399void
3400expand_null_return (void)
3401{
3402 /* If this function was declared to return a value, but we
3403 didn't, clobber the return registers so that they are not
3404 propagated live to the rest of the function. */
3405 clobber_return_register ();
3406
3407 expand_null_return_1 ();
3408}
3409
3410/* Generate RTL to return from the current function, with value VAL. */
3411
3412static void
3413expand_value_return (rtx val)
3414{
3415 /* Copy the value to the return location unless it's already there. */
3416
3417 tree decl = DECL_RESULT (current_function_decl);
3418 rtx return_reg = DECL_RTL (decl);
3419 if (return_reg != val)
3420 {
3421 tree funtype = TREE_TYPE (current_function_decl);
3422 tree type = TREE_TYPE (decl);
3423 int unsignedp = TYPE_UNSIGNED (type);
ef4bddc2
RS
3424 machine_mode old_mode = DECL_MODE (decl);
3425 machine_mode mode;
862d0b35
DN
3426 if (DECL_BY_REFERENCE (decl))
3427 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3428 else
3429 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3430
3431 if (mode != old_mode)
3432 val = convert_modes (mode, old_mode, val, unsignedp);
3433
3434 if (GET_CODE (return_reg) == PARALLEL)
3435 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3436 else
3437 emit_move_insn (return_reg, val);
3438 }
3439
3440 expand_null_return_1 ();
3441}
3442
3443/* Generate RTL to evaluate the expression RETVAL and return it
3444 from the current function. */
3445
3446static void
d5e254e1 3447expand_return (tree retval, tree bounds)
862d0b35
DN
3448{
3449 rtx result_rtl;
3450 rtx val = 0;
3451 tree retval_rhs;
d5e254e1 3452 rtx bounds_rtl;
862d0b35
DN
3453
3454 /* If function wants no value, give it none. */
3455 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3456 {
3457 expand_normal (retval);
3458 expand_null_return ();
3459 return;
3460 }
3461
3462 if (retval == error_mark_node)
3463 {
3464 /* Treat this like a return of no value from a function that
3465 returns a value. */
3466 expand_null_return ();
3467 return;
3468 }
3469 else if ((TREE_CODE (retval) == MODIFY_EXPR
3470 || TREE_CODE (retval) == INIT_EXPR)
3471 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3472 retval_rhs = TREE_OPERAND (retval, 1);
3473 else
3474 retval_rhs = retval;
3475
3476 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3477
d5e254e1
IE
3478 /* Put returned bounds to the right place. */
3479 bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3480 if (bounds_rtl)
3481 {
855f036d
IE
3482 rtx addr = NULL;
3483 rtx bnd = NULL;
d5e254e1 3484
855f036d 3485 if (bounds && bounds != error_mark_node)
d5e254e1
IE
3486 {
3487 bnd = expand_normal (bounds);
3488 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3489 }
3490 else if (REG_P (bounds_rtl))
3491 {
855f036d
IE
3492 if (bounds)
3493 bnd = chkp_expand_zero_bounds ();
3494 else
3495 {
3496 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3497 addr = gen_rtx_MEM (Pmode, addr);
3498 bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3499 }
3500
d5e254e1
IE
3501 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3502 }
3503 else
3504 {
3505 int n;
3506
3507 gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3508
855f036d
IE
3509 if (bounds)
3510 bnd = chkp_expand_zero_bounds ();
3511 else
3512 {
3513 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3514 addr = gen_rtx_MEM (Pmode, addr);
3515 }
d5e254e1
IE
3516
3517 for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3518 {
d5e254e1 3519 rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
855f036d
IE
3520 if (!bounds)
3521 {
3522 rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3523 rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3524 bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3525 }
d5e254e1
IE
3526 targetm.calls.store_returned_bounds (slot, bnd);
3527 }
3528 }
3529 }
3530 else if (chkp_function_instrumented_p (current_function_decl)
3531 && !BOUNDED_P (retval_rhs)
3532 && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3533 && TREE_CODE (retval_rhs) != RESULT_DECL)
3534 {
3535 rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3536 addr = gen_rtx_MEM (Pmode, addr);
3537
3538 gcc_assert (MEM_P (result_rtl));
3539
3540 chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3541 }
3542
862d0b35
DN
3543 /* If we are returning the RESULT_DECL, then the value has already
3544 been stored into it, so we don't have to do anything special. */
3545 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3546 expand_value_return (result_rtl);
3547
3548 /* If the result is an aggregate that is being returned in one (or more)
3549 registers, load the registers here. */
3550
3551 else if (retval_rhs != 0
3552 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3553 && REG_P (result_rtl))
3554 {
3555 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3556 if (val)
3557 {
3558 /* Use the mode of the result value on the return register. */
3559 PUT_MODE (result_rtl, GET_MODE (val));
3560 expand_value_return (val);
3561 }
3562 else
3563 expand_null_return ();
3564 }
3565 else if (retval_rhs != 0
3566 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3567 && (REG_P (result_rtl)
3568 || (GET_CODE (result_rtl) == PARALLEL)))
3569 {
9ee5337d
EB
3570 /* Compute the return value into a temporary (usually a pseudo reg). */
3571 val
3572 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
862d0b35
DN
3573 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3574 val = force_not_mem (val);
862d0b35
DN
3575 expand_value_return (val);
3576 }
3577 else
3578 {
3579 /* No hard reg used; calculate value into hard return reg. */
3580 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3581 expand_value_return (result_rtl);
3582 }
3583}
3584
28ed065e
MM
3585/* A subroutine of expand_gimple_stmt, expanding one gimple statement
3586 STMT that doesn't require special handling for outgoing edges. That
3587 is no tailcalls and no GIMPLE_COND. */
3588
3589static void
355fe088 3590expand_gimple_stmt_1 (gimple *stmt)
28ed065e
MM
3591{
3592 tree op0;
c82fee88 3593
5368224f 3594 set_curr_insn_location (gimple_location (stmt));
c82fee88 3595
28ed065e
MM
3596 switch (gimple_code (stmt))
3597 {
3598 case GIMPLE_GOTO:
3599 op0 = gimple_goto_dest (stmt);
3600 if (TREE_CODE (op0) == LABEL_DECL)
3601 expand_goto (op0);
3602 else
3603 expand_computed_goto (op0);
3604 break;
3605 case GIMPLE_LABEL:
538dd0b7 3606 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
28ed065e
MM
3607 break;
3608 case GIMPLE_NOP:
3609 case GIMPLE_PREDICT:
3610 break;
28ed065e 3611 case GIMPLE_SWITCH:
f66459c1
PB
3612 {
3613 gswitch *swtch = as_a <gswitch *> (stmt);
3614 if (gimple_switch_num_labels (swtch) == 1)
3615 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3616 else
3617 expand_case (swtch);
3618 }
28ed065e
MM
3619 break;
3620 case GIMPLE_ASM:
538dd0b7 3621 expand_asm_stmt (as_a <gasm *> (stmt));
28ed065e
MM
3622 break;
3623 case GIMPLE_CALL:
538dd0b7 3624 expand_call_stmt (as_a <gcall *> (stmt));
28ed065e
MM
3625 break;
3626
3627 case GIMPLE_RETURN:
855f036d
IE
3628 {
3629 tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3630 op0 = gimple_return_retval (as_a <greturn *> (stmt));
28ed065e 3631
855f036d
IE
3632 if (op0 && op0 != error_mark_node)
3633 {
3634 tree result = DECL_RESULT (current_function_decl);
28ed065e 3635
b5be36b1
IE
3636 /* Mark we have return statement with missing bounds. */
3637 if (!bnd
3638 && chkp_function_instrumented_p (cfun->decl)
3639 && !DECL_P (op0))
3640 bnd = error_mark_node;
3641
855f036d
IE
3642 /* If we are not returning the current function's RESULT_DECL,
3643 build an assignment to it. */
3644 if (op0 != result)
3645 {
3646 /* I believe that a function's RESULT_DECL is unique. */
3647 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3648
3649 /* ??? We'd like to use simply expand_assignment here,
3650 but this fails if the value is of BLKmode but the return
3651 decl is a register. expand_return has special handling
3652 for this combination, which eventually should move
3653 to common code. See comments there. Until then, let's
3654 build a modify expression :-/ */
3655 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3656 result, op0);
3657 }
855f036d
IE
3658 }
3659
3660 if (!op0)
3661 expand_null_return ();
3662 else
3663 expand_return (op0, bnd);
3664 }
28ed065e
MM
3665 break;
3666
3667 case GIMPLE_ASSIGN:
3668 {
538dd0b7
DM
3669 gassign *assign_stmt = as_a <gassign *> (stmt);
3670 tree lhs = gimple_assign_lhs (assign_stmt);
28ed065e
MM
3671
3672 /* Tree expand used to fiddle with |= and &= of two bitfield
 3673	    COMPONENT_REFs here.  This can't happen with gimple; the LHS
3674 of binary assigns must be a gimple reg. */
3675
3676 if (TREE_CODE (lhs) != SSA_NAME
3677 || get_gimple_rhs_class (gimple_expr_code (stmt))
3678 == GIMPLE_SINGLE_RHS)
3679 {
538dd0b7 3680 tree rhs = gimple_assign_rhs1 (assign_stmt);
28ed065e
MM
3681 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3682 == GIMPLE_SINGLE_RHS);
ae2ffe2a
RB
3683 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3684 /* Do not put locations on possibly shared trees. */
3685 && !is_gimple_min_invariant (rhs))
28ed065e 3686 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
47598145
MM
3687 if (TREE_CLOBBER_P (rhs))
3688 /* This is a clobber to mark the going out of scope for
3689 this LHS. */
3690 ;
3691 else
3692 expand_assignment (lhs, rhs,
538dd0b7
DM
3693 gimple_assign_nontemporal_move_p (
3694 assign_stmt));
28ed065e
MM
3695 }
3696 else
3697 {
3698 rtx target, temp;
538dd0b7 3699 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
28ed065e
MM
3700 struct separate_ops ops;
3701 bool promoted = false;
3702
3703 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3704 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3705 promoted = true;
3706
538dd0b7 3707 ops.code = gimple_assign_rhs_code (assign_stmt);
28ed065e 3708 ops.type = TREE_TYPE (lhs);
b0dd8c90 3709 switch (get_gimple_rhs_class (ops.code))
28ed065e 3710 {
0354c0c7 3711 case GIMPLE_TERNARY_RHS:
538dd0b7 3712 ops.op2 = gimple_assign_rhs3 (assign_stmt);
0354c0c7 3713 /* Fallthru */
28ed065e 3714 case GIMPLE_BINARY_RHS:
538dd0b7 3715 ops.op1 = gimple_assign_rhs2 (assign_stmt);
28ed065e
MM
3716 /* Fallthru */
3717 case GIMPLE_UNARY_RHS:
538dd0b7 3718 ops.op0 = gimple_assign_rhs1 (assign_stmt);
28ed065e
MM
3719 break;
3720 default:
3721 gcc_unreachable ();
3722 }
3723 ops.location = gimple_location (stmt);
3724
3725 /* If we want to use a nontemporal store, force the value to
3726 register first. If we store into a promoted register,
3727 don't directly expand to target. */
3728 temp = nontemporal || promoted ? NULL_RTX : target;
3729 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3730 EXPAND_NORMAL);
3731
3732 if (temp == target)
3733 ;
3734 else if (promoted)
3735 {
362d42dc 3736 int unsignedp = SUBREG_PROMOTED_SIGN (target);
28ed065e
MM
3737 /* If TEMP is a VOIDmode constant, use convert_modes to make
3738 sure that we properly convert it. */
3739 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3740 {
3741 temp = convert_modes (GET_MODE (target),
3742 TYPE_MODE (ops.type),
4e18a7d4 3743 temp, unsignedp);
28ed065e 3744 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4e18a7d4 3745 GET_MODE (target), temp, unsignedp);
28ed065e
MM
3746 }
3747
27be0c32 3748 convert_move (SUBREG_REG (target), temp, unsignedp);
28ed065e
MM
3749 }
3750 else if (nontemporal && emit_storent_insn (target, temp))
3751 ;
3752 else
3753 {
3754 temp = force_operand (temp, target);
3755 if (temp != target)
3756 emit_move_insn (target, temp);
3757 }
3758 }
3759 }
3760 break;
3761
3762 default:
3763 gcc_unreachable ();
3764 }
3765}
3766
3767/* Expand one gimple statement STMT and return the last RTL instruction
3768 before any of the newly generated ones.
3769
3770 In addition to generating the necessary RTL instructions this also
3771 sets REG_EH_REGION notes if necessary and sets the current source
3772 location for diagnostics. */
3773
b47aae36 3774static rtx_insn *
355fe088 3775expand_gimple_stmt (gimple *stmt)
28ed065e 3776{
28ed065e 3777 location_t saved_location = input_location;
b47aae36 3778 rtx_insn *last = get_last_insn ();
c82fee88 3779 int lp_nr;
28ed065e 3780
28ed065e
MM
3781 gcc_assert (cfun);
3782
c82fee88
EB
3783 /* We need to save and restore the current source location so that errors
3784 discovered during expansion are emitted with the right location. But
3785 it would be better if the diagnostic routines used the source location
3786 embedded in the tree nodes rather than globals. */
28ed065e 3787 if (gimple_has_location (stmt))
c82fee88 3788 input_location = gimple_location (stmt);
28ed065e
MM
3789
3790 expand_gimple_stmt_1 (stmt);
c82fee88 3791
28ed065e
MM
3792 /* Free any temporaries used to evaluate this statement. */
3793 free_temp_slots ();
3794
3795 input_location = saved_location;
3796
3797 /* Mark all insns that may trap. */
1d65f45c
RH
3798 lp_nr = lookup_stmt_eh_lp (stmt);
3799 if (lp_nr)
28ed065e 3800 {
b47aae36 3801 rtx_insn *insn;
28ed065e
MM
3802 for (insn = next_real_insn (last); insn;
3803 insn = next_real_insn (insn))
3804 {
3805 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3806 /* If we want exceptions for non-call insns, any
3807 may_trap_p instruction may throw. */
3808 && GET_CODE (PATTERN (insn)) != CLOBBER
3809 && GET_CODE (PATTERN (insn)) != USE
1d65f45c
RH
3810 && insn_could_throw_p (insn))
3811 make_reg_eh_region_note (insn, 0, lp_nr);
28ed065e
MM
3812 }
3813 }
3814
3815 return last;
3816}
3817
726a989a 3818/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
224e770b
RH
3819 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3820 generated a tail call (something that might be denied by the ABI
cea49550
RH
3821 rules governing the call; see calls.c).
3822
3823 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3824 can still reach the rest of BB. The case here is __builtin_sqrt,
3825 where the NaN result goes through the external function (with a
3826 tailcall) and the normal result happens via a sqrt instruction. */
80c7a9eb
RH
3827
3828static basic_block
538dd0b7 3829expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
80c7a9eb 3830{
b47aae36 3831 rtx_insn *last2, *last;
224e770b 3832 edge e;
628f6a4e 3833 edge_iterator ei;
357067f2 3834 profile_probability probability;
80c7a9eb 3835
28ed065e 3836 last2 = last = expand_gimple_stmt (stmt);
80c7a9eb
RH
3837
3838 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
3839 if (CALL_P (last) && SIBLING_CALL_P (last))
3840 goto found;
80c7a9eb 3841
726a989a 3842 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 3843
cea49550 3844 *can_fallthru = true;
224e770b 3845 return NULL;
80c7a9eb 3846
224e770b
RH
3847 found:
3848 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3849 Any instructions emitted here are about to be deleted. */
3850 do_pending_stack_adjust ();
3851
3852 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3853 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3854 EH or abnormal edges, we shouldn't have created a tail call in
3855 the first place. So it seems to me we should just be removing
3856 all edges here, or redirecting the existing fallthru edge to
3857 the exit block. */
3858
357067f2 3859 probability = profile_probability::never ();
224e770b 3860
628f6a4e
BE
3861 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3862 {
224e770b
RH
3863 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3864 {
fefa31b5 3865 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
e7a74006 3866 e->dest->count -= e->count ();
224e770b
RH
3867 probability += e->probability;
3868 remove_edge (e);
80c7a9eb 3869 }
628f6a4e
BE
3870 else
3871 ei_next (&ei);
80c7a9eb
RH
3872 }
3873
224e770b
RH
3874 /* This is somewhat ugly: the call_expr expander often emits instructions
3875 after the sibcall (to perform the function return). These confuse the
12eff7b7 3876 find_many_sub_basic_blocks code, so we need to get rid of these. */
224e770b 3877 last = NEXT_INSN (last);
341c100f 3878 gcc_assert (BARRIER_P (last));
cea49550
RH
3879
3880 *can_fallthru = false;
224e770b
RH
3881 while (NEXT_INSN (last))
3882 {
 3883	  /* For instance, an sqrt builtin expander expands an if with a
 3884	     sibcall in the then-branch and a label for the `else` branch.  */
3885 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
3886 {
3887 *can_fallthru = true;
3888 break;
3889 }
224e770b
RH
3890 delete_insn (NEXT_INSN (last));
3891 }
3892
fefa31b5
DM
3893 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3894 | EDGE_SIBCALL);
aea5e79a 3895 e->probability = probability;
1130d5e3 3896 BB_END (bb) = last;
224e770b
RH
3897 update_bb_for_insn (bb);
3898
3899 if (NEXT_INSN (last))
3900 {
3901 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3902
3903 last = BB_END (bb);
3904 if (BARRIER_P (last))
1130d5e3 3905 BB_END (bb) = PREV_INSN (last);
224e770b
RH
3906 }
3907
726a989a 3908 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 3909
224e770b 3910 return bb;
80c7a9eb
RH
3911}
3912
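/* Illustration only: a call in tail position of the kind that may reach
   expand_gimple_tailcall once the tailcall machinery has marked it with
   CALL_EXPR_TAILCALL.  Whether a SIBLING_CALL_P insn is really emitted
   depends on the target ABI, as noted above; the names are made up.

     extern int callee (int);

     int
     caller (int x)
     {
       return callee (x + 1);
     }
*/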
b5b8b0ac
AO
3913/* Return the difference between the floor and the truncated result of
3914 a signed division by OP1 with remainder MOD. */
3915static rtx
ef4bddc2 3916floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
b5b8b0ac
AO
3917{
3918 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3919 return gen_rtx_IF_THEN_ELSE
3920 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3921 gen_rtx_IF_THEN_ELSE
3922 (mode, gen_rtx_LT (BImode,
3923 gen_rtx_DIV (mode, op1, mod),
3924 const0_rtx),
3925 constm1_rtx, const0_rtx),
3926 const0_rtx);
3927}
3928
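/* Worked example (added for illustration): for -7 / 2 truncation gives
   quotient -3 with remainder MOD = -1, while the floor result is -4, so
   the required adjustment is -1.  The expression above produces exactly
   that: MOD != 0 and OP1 / MOD = 2 / -1 = -2 < 0, hence -1.  When OP1
   and MOD have the same sign, or MOD is 0, the adjustment is 0.  */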
3929/* Return the difference between the ceil and the truncated result of
3930 a signed division by OP1 with remainder MOD. */
3931static rtx
ef4bddc2 3932ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
b5b8b0ac
AO
3933{
3934 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3935 return gen_rtx_IF_THEN_ELSE
3936 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3937 gen_rtx_IF_THEN_ELSE
3938 (mode, gen_rtx_GT (BImode,
3939 gen_rtx_DIV (mode, op1, mod),
3940 const0_rtx),
3941 const1_rtx, const0_rtx),
3942 const0_rtx);
3943}
3944
3945/* Return the difference between the ceil and the truncated result of
3946 an unsigned division by OP1 with remainder MOD. */
3947static rtx
ef4bddc2 3948ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
b5b8b0ac
AO
3949{
3950 /* (mod != 0 ? 1 : 0) */
3951 return gen_rtx_IF_THEN_ELSE
3952 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3953 const1_rtx, const0_rtx);
3954}
3955
3956/* Return the difference between the rounded and the truncated result
3957 of a signed division by OP1 with remainder MOD. Halfway cases are
3958 rounded away from zero, rather than to the nearest even number. */
3959static rtx
ef4bddc2 3960round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
b5b8b0ac
AO
3961{
3962 /* (abs (mod) >= abs (op1) - abs (mod)
3963 ? (op1 / mod > 0 ? 1 : -1)
3964 : 0) */
3965 return gen_rtx_IF_THEN_ELSE
3966 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3967 gen_rtx_MINUS (mode,
3968 gen_rtx_ABS (mode, op1),
3969 gen_rtx_ABS (mode, mod))),
3970 gen_rtx_IF_THEN_ELSE
3971 (mode, gen_rtx_GT (BImode,
3972 gen_rtx_DIV (mode, op1, mod),
3973 const0_rtx),
3974 const1_rtx, constm1_rtx),
3975 const0_rtx);
3976}
3977
3978/* Return the difference between the rounded and the truncated result
 3979	   of an unsigned division by OP1 with remainder MOD.  Halfway cases
3980 are rounded away from zero, rather than to the nearest even
3981 number. */
3982static rtx
ef4bddc2 3983round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
b5b8b0ac
AO
3984{
3985 /* (mod >= op1 - mod ? 1 : 0) */
3986 return gen_rtx_IF_THEN_ELSE
3987 (mode, gen_rtx_GE (BImode, mod,
3988 gen_rtx_MINUS (mode, op1, mod)),
3989 const1_rtx, const0_rtx);
3990}
3991
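/* Worked examples for the ceil and round variants (added for
   illustration): for 7 / 2 truncation gives quotient 3 with MOD = 1.
   ceil_sdiv_adjust yields +1 (MOD != 0 and OP1 / MOD = 2 > 0), giving
   the ceiling result 4.  round_udiv_adjust also yields +1 because
   MOD >= OP1 - MOD (1 >= 1), rounding 3.5 away from zero to 4.  For an
   exact division such as 6 / 2, MOD is 0 and every adjustment is 0.  */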
dda2da58
AO
3992/* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
3993 any rtl. */
3994
3995static rtx
095a2d76 3996convert_debug_memory_address (scalar_int_mode mode, rtx x,
f61c6f34 3997 addr_space_t as)
dda2da58 3998{
dda2da58 3999#ifndef POINTERS_EXTEND_UNSIGNED
f61c6f34
JJ
4000 gcc_assert (mode == Pmode
4001 || mode == targetm.addr_space.address_mode (as));
c7ad039d 4002 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
dda2da58 4003#else
f61c6f34 4004 rtx temp;
f61c6f34 4005
639d4bb8 4006 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
dda2da58
AO
4007
4008 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
4009 return x;
4010
c7ad039d
RS
4011 /* X must have some form of address mode already. */
4012 scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
69660a70 4013 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3403a1a9 4014 x = lowpart_subreg (mode, x, xmode);
dda2da58
AO
4015 else if (POINTERS_EXTEND_UNSIGNED > 0)
4016 x = gen_rtx_ZERO_EXTEND (mode, x);
4017 else if (!POINTERS_EXTEND_UNSIGNED)
4018 x = gen_rtx_SIGN_EXTEND (mode, x);
4019 else
f61c6f34
JJ
4020 {
4021 switch (GET_CODE (x))
4022 {
4023 case SUBREG:
4024 if ((SUBREG_PROMOTED_VAR_P (x)
4025 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
4026 || (GET_CODE (SUBREG_REG (x)) == PLUS
4027 && REG_P (XEXP (SUBREG_REG (x), 0))
4028 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
4029 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
4030 && GET_MODE (SUBREG_REG (x)) == mode)
4031 return SUBREG_REG (x);
4032 break;
4033 case LABEL_REF:
04a121a7 4034 temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
f61c6f34
JJ
4035 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
4036 return temp;
4037 case SYMBOL_REF:
4038 temp = shallow_copy_rtx (x);
4039 PUT_MODE (temp, mode);
4040 return temp;
4041 case CONST:
4042 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4043 if (temp)
4044 temp = gen_rtx_CONST (mode, temp);
4045 return temp;
4046 case PLUS:
4047 case MINUS:
4048 if (CONST_INT_P (XEXP (x, 1)))
4049 {
4050 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4051 if (temp)
4052 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4053 }
4054 break;
4055 default:
4056 break;
4057 }
4058 /* Don't know how to express ptr_extend as operation in debug info. */
4059 return NULL;
4060 }
dda2da58
AO
4061#endif /* POINTERS_EXTEND_UNSIGNED */
4062
4063 return x;
4064}
4065
dfde35b3
JJ
4066/* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4067 by avoid_deep_ter_for_debug. */
4068
4069static hash_map<tree, tree> *deep_ter_debug_map;
4070
4071/* Split too deep TER chains for debug stmts using debug temporaries. */
4072
4073static void
355fe088 4074avoid_deep_ter_for_debug (gimple *stmt, int depth)
dfde35b3
JJ
4075{
4076 use_operand_p use_p;
4077 ssa_op_iter iter;
4078 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4079 {
4080 tree use = USE_FROM_PTR (use_p);
4081 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4082 continue;
355fe088 4083 gimple *g = get_gimple_for_ssa_name (use);
dfde35b3
JJ
4084 if (g == NULL)
4085 continue;
4086 if (depth > 6 && !stmt_ends_bb_p (g))
4087 {
4088 if (deep_ter_debug_map == NULL)
4089 deep_ter_debug_map = new hash_map<tree, tree>;
4090
4091 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4092 if (vexpr != NULL)
4093 continue;
4094 vexpr = make_node (DEBUG_EXPR_DECL);
355fe088 4095 gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
dfde35b3
JJ
4096 DECL_ARTIFICIAL (vexpr) = 1;
4097 TREE_TYPE (vexpr) = TREE_TYPE (use);
899ca90e 4098 SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
dfde35b3
JJ
4099 gimple_stmt_iterator gsi = gsi_for_stmt (g);
4100 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4101 avoid_deep_ter_for_debug (def_temp, 0);
4102 }
4103 else
4104 avoid_deep_ter_for_debug (g, depth + 1);
4105 }
4106}
4107
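/* Illustration only (hypothetical GIMPLE): with TER, a debug use of a_9
   can drag in its whole chain of single-use definitions,
       a_9 = b_8 + 1;   b_8 = c_7 * 2;   c_7 = d_6 - 3;   ...
   Once the recursion above sees a chain deeper than 6 statements, it
   introduces a debug temporary for an intermediate SSA name,
       # DEBUG D#1 => b_8
   so that later debug expansion can refer to D#1 instead of
   re-expanding the rest of the chain.  */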
12c5ffe5
EB
4108/* Return an RTX equivalent to the value of the parameter DECL. */
4109
4110static rtx
4111expand_debug_parm_decl (tree decl)
4112{
4113 rtx incoming = DECL_INCOMING_RTL (decl);
4114
4115 if (incoming
4116 && GET_MODE (incoming) != BLKmode
4117 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4118 || (MEM_P (incoming)
4119 && REG_P (XEXP (incoming, 0))
4120 && HARD_REGISTER_P (XEXP (incoming, 0)))))
4121 {
4122 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4123
4124#ifdef HAVE_window_save
4125 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4126 If the target machine has an explicit window save instruction, the
4127 actual entry value is the corresponding OUTGOING_REGNO instead. */
4128 if (REG_P (incoming)
4129 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4130 incoming
4131 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4132 OUTGOING_REGNO (REGNO (incoming)), 0);
4133 else if (MEM_P (incoming))
4134 {
4135 rtx reg = XEXP (incoming, 0);
4136 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4137 {
4138 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4139 incoming = replace_equiv_address_nv (incoming, reg);
4140 }
6cfa417f
JJ
4141 else
4142 incoming = copy_rtx (incoming);
12c5ffe5
EB
4143 }
4144#endif
4145
4146 ENTRY_VALUE_EXP (rtl) = incoming;
4147 return rtl;
4148 }
4149
4150 if (incoming
4151 && GET_MODE (incoming) != BLKmode
4152 && !TREE_ADDRESSABLE (decl)
4153 && MEM_P (incoming)
4154 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4155 || (GET_CODE (XEXP (incoming, 0)) == PLUS
4156 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4157 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
6cfa417f 4158 return copy_rtx (incoming);
12c5ffe5
EB
4159
4160 return NULL_RTX;
4161}
4162
4163/* Return an RTX equivalent to the value of the tree expression EXP. */
b5b8b0ac
AO
4164
4165static rtx
4166expand_debug_expr (tree exp)
4167{
4168 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
ef4bddc2
RS
4169 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4170 machine_mode inner_mode = VOIDmode;
b5b8b0ac 4171 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
09e881c9 4172 addr_space_t as;
7a504f33 4173 scalar_int_mode op0_mode, op1_mode, addr_mode;
b5b8b0ac
AO
4174
4175 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4176 {
4177 case tcc_expression:
4178 switch (TREE_CODE (exp))
4179 {
4180 case COND_EXPR:
7ece48b1 4181 case DOT_PROD_EXPR:
79d652a5 4182 case SAD_EXPR:
0354c0c7
BS
4183 case WIDEN_MULT_PLUS_EXPR:
4184 case WIDEN_MULT_MINUS_EXPR:
0f59b812 4185 case FMA_EXPR:
b5b8b0ac
AO
4186 goto ternary;
4187
4188 case TRUTH_ANDIF_EXPR:
4189 case TRUTH_ORIF_EXPR:
4190 case TRUTH_AND_EXPR:
4191 case TRUTH_OR_EXPR:
4192 case TRUTH_XOR_EXPR:
4193 goto binary;
4194
4195 case TRUTH_NOT_EXPR:
4196 goto unary;
4197
4198 default:
4199 break;
4200 }
4201 break;
4202
4203 ternary:
4204 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4205 if (!op2)
4206 return NULL_RTX;
4207 /* Fall through. */
4208
4209 binary:
4210 case tcc_binary:
e3bd1763
JJ
4211 if (mode == BLKmode)
4212 return NULL_RTX;
b5b8b0ac
AO
4213 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4214 if (!op1)
4215 return NULL_RTX;
26d83bcc
JJ
4216 switch (TREE_CODE (exp))
4217 {
4218 case LSHIFT_EXPR:
4219 case RSHIFT_EXPR:
4220 case LROTATE_EXPR:
4221 case RROTATE_EXPR:
4222 case WIDEN_LSHIFT_EXPR:
4223 /* Ensure second operand isn't wider than the first one. */
4224 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
b0567726
RS
4225 if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4226 && (GET_MODE_UNIT_PRECISION (mode)
4227 < GET_MODE_PRECISION (op1_mode)))
4228 op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
26d83bcc
JJ
4229 break;
4230 default:
4231 break;
4232 }
b5b8b0ac
AO
4233 /* Fall through. */
4234
4235 unary:
4236 case tcc_unary:
e3bd1763
JJ
4237 if (mode == BLKmode)
4238 return NULL_RTX;
2ba172e0 4239 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
4240 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4241 if (!op0)
4242 return NULL_RTX;
4243 break;
4244
871dae34
AO
4245 case tcc_comparison:
4246 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4247 goto binary;
4248
b5b8b0ac
AO
4249 case tcc_type:
4250 case tcc_statement:
4251 gcc_unreachable ();
4252
4253 case tcc_constant:
4254 case tcc_exceptional:
4255 case tcc_declaration:
4256 case tcc_reference:
4257 case tcc_vl_exp:
4258 break;
4259 }
4260
4261 switch (TREE_CODE (exp))
4262 {
4263 case STRING_CST:
4264 if (!lookup_constant_def (exp))
4265 {
e1b243a8
JJ
4266 if (strlen (TREE_STRING_POINTER (exp)) + 1
4267 != (size_t) TREE_STRING_LENGTH (exp))
4268 return NULL_RTX;
b5b8b0ac
AO
4269 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4270 op0 = gen_rtx_MEM (BLKmode, op0);
4271 set_mem_attributes (op0, exp, 0);
4272 return op0;
4273 }
191816a3 4274 /* Fall through. */
b5b8b0ac
AO
4275
4276 case INTEGER_CST:
4277 case REAL_CST:
4278 case FIXED_CST:
4279 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4280 return op0;
4281
36fd6408
RS
4282 case POLY_INT_CST:
4283 return immed_wide_int_const (poly_int_cst_value (exp), mode);
4284
b5b8b0ac
AO
4285 case COMPLEX_CST:
4286 gcc_assert (COMPLEX_MODE_P (mode));
4287 op0 = expand_debug_expr (TREE_REALPART (exp));
b5b8b0ac 4288 op1 = expand_debug_expr (TREE_IMAGPART (exp));
b5b8b0ac
AO
4289 return gen_rtx_CONCAT (mode, op0, op1);
4290
0ca5af51
AO
4291 case DEBUG_EXPR_DECL:
4292 op0 = DECL_RTL_IF_SET (exp);
4293
4294 if (op0)
4295 return op0;
4296
4297 op0 = gen_rtx_DEBUG_EXPR (mode);
e4fb38bd 4298 DEBUG_EXPR_TREE_DECL (op0) = exp;
0ca5af51
AO
4299 SET_DECL_RTL (exp, op0);
4300
4301 return op0;
4302
b5b8b0ac
AO
4303 case VAR_DECL:
4304 case PARM_DECL:
4305 case FUNCTION_DECL:
4306 case LABEL_DECL:
4307 case CONST_DECL:
4308 case RESULT_DECL:
4309 op0 = DECL_RTL_IF_SET (exp);
4310
4311 /* This decl was probably optimized away. */
4312 if (!op0)
e1b243a8 4313 {
8813a647 4314 if (!VAR_P (exp)
e1b243a8
JJ
4315 || DECL_EXTERNAL (exp)
4316 || !TREE_STATIC (exp)
4317 || !DECL_NAME (exp)
0fba566c 4318 || DECL_HARD_REGISTER (exp)
7d5fc814 4319 || DECL_IN_CONSTANT_POOL (exp)
0fba566c 4320 || mode == VOIDmode)
e1b243a8
JJ
4321 return NULL;
4322
b1aa0655 4323 op0 = make_decl_rtl_for_debug (exp);
e1b243a8
JJ
4324 if (!MEM_P (op0)
4325 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4326 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4327 return NULL;
4328 }
4329 else
4330 op0 = copy_rtx (op0);
b5b8b0ac 4331
06796564 4332 if (GET_MODE (op0) == BLKmode
871dae34 4333 /* If op0 is not BLKmode, but mode is, adjust_mode
06796564
JJ
4334 below would ICE. While it is likely a FE bug,
4335 try to be robust here. See PR43166. */
132b4e82
JJ
4336 || mode == BLKmode
4337 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
b5b8b0ac
AO
4338 {
4339 gcc_assert (MEM_P (op0));
4340 op0 = adjust_address_nv (op0, mode, 0);
4341 return op0;
4342 }
4343
4344 /* Fall through. */
4345
4346 adjust_mode:
4347 case PAREN_EXPR:
625a9766 4348 CASE_CONVERT:
b5b8b0ac 4349 {
2ba172e0 4350 inner_mode = GET_MODE (op0);
b5b8b0ac
AO
4351
4352 if (mode == inner_mode)
4353 return op0;
4354
4355 if (inner_mode == VOIDmode)
4356 {
2a8e30fb
MM
4357 if (TREE_CODE (exp) == SSA_NAME)
4358 inner_mode = TYPE_MODE (TREE_TYPE (exp));
4359 else
4360 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
4361 if (mode == inner_mode)
4362 return op0;
4363 }
4364
4365 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4366 {
250a60f3
RS
4367 if (GET_MODE_UNIT_BITSIZE (mode)
4368 == GET_MODE_UNIT_BITSIZE (inner_mode))
b5b8b0ac 4369 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
250a60f3
RS
4370 else if (GET_MODE_UNIT_BITSIZE (mode)
4371 < GET_MODE_UNIT_BITSIZE (inner_mode))
b5b8b0ac
AO
4372 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4373 else
4374 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4375 }
4376 else if (FLOAT_MODE_P (mode))
4377 {
2a8e30fb 4378 gcc_assert (TREE_CODE (exp) != SSA_NAME);
b5b8b0ac
AO
4379 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4380 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4381 else
4382 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4383 }
4384 else if (FLOAT_MODE_P (inner_mode))
4385 {
4386 if (unsignedp)
4387 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4388 else
4389 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4390 }
bb06a2d8
RS
4391 else if (GET_MODE_UNIT_PRECISION (mode)
4392 == GET_MODE_UNIT_PRECISION (inner_mode))
3403a1a9 4393 op0 = lowpart_subreg (mode, op0, inner_mode);
bb06a2d8
RS
4394 else if (GET_MODE_UNIT_PRECISION (mode)
4395 < GET_MODE_UNIT_PRECISION (inner_mode))
4396 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
cf4ef6f7 4397 else if (UNARY_CLASS_P (exp)
1b47fe3f
JJ
4398 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4399 : unsignedp)
2ba172e0 4400 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
b5b8b0ac 4401 else
2ba172e0 4402 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
b5b8b0ac
AO
4403
4404 return op0;
4405 }
4406
70f34814 4407 case MEM_REF:
71f3a3f5
JJ
4408 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4409 {
4410 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4411 TREE_OPERAND (exp, 0),
4412 TREE_OPERAND (exp, 1));
4413 if (newexp)
4414 return expand_debug_expr (newexp);
4415 }
4416 /* FALLTHROUGH */
b5b8b0ac 4417 case INDIRECT_REF:
0a81f074 4418 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
4419 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4420 if (!op0)
4421 return NULL;
4422
cb115041
JJ
4423 if (TREE_CODE (exp) == MEM_REF)
4424 {
583ac69c
JJ
4425 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4426 || (GET_CODE (op0) == PLUS
4427 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4428 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4429 Instead just use get_inner_reference. */
4430 goto component_ref;
4431
cb115041
JJ
4432 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4433 if (!op1 || !CONST_INT_P (op1))
4434 return NULL;
4435
0a81f074 4436 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
cb115041
JJ
4437 }
4438
a148c4b2 4439 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
b5b8b0ac 4440
f61c6f34
JJ
4441 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4442 op0, as);
4443 if (op0 == NULL_RTX)
4444 return NULL;
b5b8b0ac 4445
f61c6f34 4446 op0 = gen_rtx_MEM (mode, op0);
b5b8b0ac 4447 set_mem_attributes (op0, exp, 0);
71f3a3f5
JJ
4448 if (TREE_CODE (exp) == MEM_REF
4449 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4450 set_mem_expr (op0, NULL_TREE);
09e881c9 4451 set_mem_addr_space (op0, as);
b5b8b0ac
AO
4452
4453 return op0;
4454
4455 case TARGET_MEM_REF:
4d948885
RG
4456 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4457 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
b5b8b0ac
AO
4458 return NULL;
4459
4460 op0 = expand_debug_expr
4e25ca6b 4461 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
b5b8b0ac
AO
4462 if (!op0)
4463 return NULL;
4464
c168f180 4465 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
f61c6f34
JJ
4466 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4467 op0, as);
4468 if (op0 == NULL_RTX)
4469 return NULL;
b5b8b0ac
AO
4470
4471 op0 = gen_rtx_MEM (mode, op0);
4472
4473 set_mem_attributes (op0, exp, 0);
09e881c9 4474 set_mem_addr_space (op0, as);
b5b8b0ac
AO
4475
4476 return op0;
4477
583ac69c 4478 component_ref:
b5b8b0ac
AO
4479 case ARRAY_REF:
4480 case ARRAY_RANGE_REF:
4481 case COMPONENT_REF:
4482 case BIT_FIELD_REF:
4483 case REALPART_EXPR:
4484 case IMAGPART_EXPR:
4485 case VIEW_CONVERT_EXPR:
4486 {
ef4bddc2 4487 machine_mode mode1;
06889da8 4488 poly_int64 bitsize, bitpos;
b5b8b0ac 4489 tree offset;
ee45a32d
EB
4490 int reversep, volatilep = 0;
4491 tree tem
4492 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
25b75a48 4493 &unsignedp, &reversep, &volatilep);
b5b8b0ac
AO
4494 rtx orig_op0;
4495
06889da8 4496 if (known_eq (bitsize, 0))
4f2a9af8
JJ
4497 return NULL;
4498
b5b8b0ac
AO
4499 orig_op0 = op0 = expand_debug_expr (tem);
4500
4501 if (!op0)
4502 return NULL;
4503
4504 if (offset)
4505 {
ef4bddc2 4506 machine_mode addrmode, offmode;
dda2da58 4507
aa847cc8
JJ
4508 if (!MEM_P (op0))
4509 return NULL;
b5b8b0ac 4510
dda2da58
AO
4511 op0 = XEXP (op0, 0);
4512 addrmode = GET_MODE (op0);
4513 if (addrmode == VOIDmode)
4514 addrmode = Pmode;
4515
b5b8b0ac
AO
4516 op1 = expand_debug_expr (offset);
4517 if (!op1)
4518 return NULL;
4519
dda2da58
AO
4520 offmode = GET_MODE (op1);
4521 if (offmode == VOIDmode)
4522 offmode = TYPE_MODE (TREE_TYPE (offset));
4523
4524 if (addrmode != offmode)
3403a1a9 4525 op1 = lowpart_subreg (addrmode, op1, offmode);
dda2da58
AO
4526
4527 /* Don't use offset_address here, we don't need a
4528 recognizable address, and we don't want to generate
4529 code. */
2ba172e0
JJ
4530 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4531 op0, op1));
b5b8b0ac
AO
4532 }
4533
4534 if (MEM_P (op0))
4535 {
4f2a9af8
JJ
4536 if (mode1 == VOIDmode)
4537 /* Bitfield. */
f67f4dff 4538 mode1 = smallest_int_mode_for_size (bitsize);
06889da8
RS
4539 poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
4540 if (maybe_ne (bytepos, 0))
b5b8b0ac 4541 {
06889da8
RS
4542 op0 = adjust_address_nv (op0, mode1, bytepos);
4543 bitpos = num_trailing_bits (bitpos);
b5b8b0ac 4544 }
06889da8
RS
4545 else if (known_eq (bitpos, 0)
4546 && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
b5b8b0ac
AO
4547 op0 = adjust_address_nv (op0, mode, 0);
4548 else if (GET_MODE (op0) != mode1)
4549 op0 = adjust_address_nv (op0, mode1, 0);
4550 else
4551 op0 = copy_rtx (op0);
4552 if (op0 == orig_op0)
4553 op0 = shallow_copy_rtx (op0);
4554 set_mem_attributes (op0, exp, 0);
4555 }
4556
06889da8 4557 if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
b5b8b0ac
AO
4558 return op0;
4559
06889da8 4560 if (maybe_lt (bitpos, 0))
2d3fc6aa
JJ
4561 return NULL;
4562
88c04a5d
JJ
4563 if (GET_MODE (op0) == BLKmode)
4564 return NULL;
4565
06889da8
RS
4566 poly_int64 bytepos;
4567 if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
4568 && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
b5b8b0ac 4569 {
ef4bddc2 4570 machine_mode opmode = GET_MODE (op0);
b5b8b0ac 4571
b5b8b0ac 4572 if (opmode == VOIDmode)
9712cba0 4573 opmode = TYPE_MODE (TREE_TYPE (tem));
b5b8b0ac
AO
4574
4575 /* This condition may hold if we're expanding the address
4576 right past the end of an array that turned out not to
4577 be addressable (i.e., the address was only computed in
4578 debug stmts). The gen_subreg below would rightfully
4579 crash, and the address doesn't really exist, so just
4580 drop it. */
06889da8 4581 if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
b5b8b0ac
AO
4582 return NULL;
4583
06889da8
RS
4584 if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
4585 return simplify_gen_subreg (mode, op0, opmode, bytepos);
b5b8b0ac
AO
4586 }
4587
4588 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4589 && TYPE_UNSIGNED (TREE_TYPE (exp))
4590 ? SIGN_EXTRACT
4591 : ZERO_EXTRACT, mode,
4592 GET_MODE (op0) != VOIDmode
9712cba0
JJ
4593 ? GET_MODE (op0)
4594 : TYPE_MODE (TREE_TYPE (tem)),
06889da8
RS
4595 op0, gen_int_mode (bitsize, word_mode),
4596 gen_int_mode (bitpos, word_mode));
b5b8b0ac
AO
4597 }
4598
b5b8b0ac 4599 case ABS_EXPR:
2ba172e0 4600 return simplify_gen_unary (ABS, mode, op0, mode);
b5b8b0ac
AO
4601
4602 case NEGATE_EXPR:
2ba172e0 4603 return simplify_gen_unary (NEG, mode, op0, mode);
b5b8b0ac
AO
4604
4605 case BIT_NOT_EXPR:
2ba172e0 4606 return simplify_gen_unary (NOT, mode, op0, mode);
b5b8b0ac
AO
4607
4608 case FLOAT_EXPR:
2ba172e0
JJ
4609 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4610 0)))
4611 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4612 inner_mode);
b5b8b0ac
AO
4613
4614 case FIX_TRUNC_EXPR:
2ba172e0
JJ
4615 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4616 inner_mode);
b5b8b0ac
AO
4617
4618 case POINTER_PLUS_EXPR:
576319a7
DD
4619 /* For the rare target where pointers are not the same size as
4620 size_t, we need to check for mis-matched modes and correct
4621 the addend. */
4622 if (op0 && op1
673bf5a6
RS
4623 && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4624 && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4625 && op0_mode != op1_mode)
576319a7 4626 {
673bf5a6
RS
4627 if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4628 /* If OP0 is a partial mode, then we must truncate, even
4629 if it has the same bitsize as OP1 as GCC's
4630 representation of partial modes is opaque. */
4631 || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4632 && (GET_MODE_BITSIZE (op0_mode)
4633 == GET_MODE_BITSIZE (op1_mode))))
4634 op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
576319a7
DD
4635 else
4636 /* We always sign-extend, regardless of the signedness of
4637 the operand, because the operand is always unsigned
4638 here even if the original C expression is signed. */
673bf5a6 4639 op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
576319a7
DD
4640 }
4641 /* Fall through. */
b5b8b0ac 4642 case PLUS_EXPR:
2ba172e0 4643 return simplify_gen_binary (PLUS, mode, op0, op1);
b5b8b0ac
AO
4644
4645 case MINUS_EXPR:
1af4ebf5 4646 case POINTER_DIFF_EXPR:
2ba172e0 4647 return simplify_gen_binary (MINUS, mode, op0, op1);
b5b8b0ac
AO
4648
4649 case MULT_EXPR:
2ba172e0 4650 return simplify_gen_binary (MULT, mode, op0, op1);
b5b8b0ac
AO
4651
4652 case RDIV_EXPR:
4653 case TRUNC_DIV_EXPR:
4654 case EXACT_DIV_EXPR:
4655 if (unsignedp)
2ba172e0 4656 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac 4657 else
2ba172e0 4658 return simplify_gen_binary (DIV, mode, op0, op1);
b5b8b0ac
AO
4659
4660 case TRUNC_MOD_EXPR:
2ba172e0 4661 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
b5b8b0ac
AO
4662
4663 case FLOOR_DIV_EXPR:
4664 if (unsignedp)
2ba172e0 4665 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac
AO
4666 else
4667 {
2ba172e0
JJ
4668 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4669 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4670 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0 4671 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4672 }
4673
4674 case FLOOR_MOD_EXPR:
4675 if (unsignedp)
2ba172e0 4676 return simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac
AO
4677 else
4678 {
2ba172e0 4679 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4680 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4681 adj = simplify_gen_unary (NEG, mode,
4682 simplify_gen_binary (MULT, mode, adj, op1),
4683 mode);
4684 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4685 }
4686
4687 case CEIL_DIV_EXPR:
4688 if (unsignedp)
4689 {
2ba172e0
JJ
4690 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4691 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4692 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0 4693 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4694 }
4695 else
4696 {
2ba172e0
JJ
4697 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4698 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4699 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0 4700 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4701 }
4702
4703 case CEIL_MOD_EXPR:
4704 if (unsignedp)
4705 {
2ba172e0 4706 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4707 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
4708 adj = simplify_gen_unary (NEG, mode,
4709 simplify_gen_binary (MULT, mode, adj, op1),
4710 mode);
4711 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4712 }
4713 else
4714 {
2ba172e0 4715 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4716 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4717 adj = simplify_gen_unary (NEG, mode,
4718 simplify_gen_binary (MULT, mode, adj, op1),
4719 mode);
4720 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4721 }
4722
4723 case ROUND_DIV_EXPR:
4724 if (unsignedp)
4725 {
2ba172e0
JJ
4726 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4727 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4728 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0 4729 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4730 }
4731 else
4732 {
2ba172e0
JJ
4733 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4734 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4735 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0 4736 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4737 }
4738
4739 case ROUND_MOD_EXPR:
4740 if (unsignedp)
4741 {
2ba172e0 4742 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4743 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
4744 adj = simplify_gen_unary (NEG, mode,
4745 simplify_gen_binary (MULT, mode, adj, op1),
4746 mode);
4747 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4748 }
4749 else
4750 {
2ba172e0 4751 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4752 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4753 adj = simplify_gen_unary (NEG, mode,
4754 simplify_gen_binary (MULT, mode, adj, op1),
4755 mode);
4756 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4757 }
4758
4759 case LSHIFT_EXPR:
2ba172e0 4760 return simplify_gen_binary (ASHIFT, mode, op0, op1);
b5b8b0ac
AO
4761
4762 case RSHIFT_EXPR:
4763 if (unsignedp)
2ba172e0 4764 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
b5b8b0ac 4765 else
2ba172e0 4766 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
b5b8b0ac
AO
4767
4768 case LROTATE_EXPR:
2ba172e0 4769 return simplify_gen_binary (ROTATE, mode, op0, op1);
b5b8b0ac
AO
4770
4771 case RROTATE_EXPR:
2ba172e0 4772 return simplify_gen_binary (ROTATERT, mode, op0, op1);
b5b8b0ac
AO
4773
4774 case MIN_EXPR:
2ba172e0 4775 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
b5b8b0ac
AO
4776
4777 case MAX_EXPR:
2ba172e0 4778 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
b5b8b0ac
AO
4779
4780 case BIT_AND_EXPR:
4781 case TRUTH_AND_EXPR:
2ba172e0 4782 return simplify_gen_binary (AND, mode, op0, op1);
b5b8b0ac
AO
4783
4784 case BIT_IOR_EXPR:
4785 case TRUTH_OR_EXPR:
2ba172e0 4786 return simplify_gen_binary (IOR, mode, op0, op1);
b5b8b0ac
AO
4787
4788 case BIT_XOR_EXPR:
4789 case TRUTH_XOR_EXPR:
2ba172e0 4790 return simplify_gen_binary (XOR, mode, op0, op1);
b5b8b0ac
AO
4791
4792 case TRUTH_ANDIF_EXPR:
4793 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4794
4795 case TRUTH_ORIF_EXPR:
4796 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4797
4798 case TRUTH_NOT_EXPR:
2ba172e0 4799 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
b5b8b0ac
AO
4800
4801 case LT_EXPR:
2ba172e0
JJ
4802 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4803 op0, op1);
b5b8b0ac
AO
4804
4805 case LE_EXPR:
2ba172e0
JJ
4806 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4807 op0, op1);
b5b8b0ac
AO
4808
4809 case GT_EXPR:
2ba172e0
JJ
4810 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4811 op0, op1);
b5b8b0ac
AO
4812
4813 case GE_EXPR:
2ba172e0
JJ
4814 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4815 op0, op1);
b5b8b0ac
AO
4816
4817 case EQ_EXPR:
2ba172e0 4818 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4819
4820 case NE_EXPR:
2ba172e0 4821 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4822
4823 case UNORDERED_EXPR:
2ba172e0 4824 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4825
4826 case ORDERED_EXPR:
2ba172e0 4827 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4828
4829 case UNLT_EXPR:
2ba172e0 4830 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4831
4832 case UNLE_EXPR:
2ba172e0 4833 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4834
4835 case UNGT_EXPR:
2ba172e0 4836 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4837
4838 case UNGE_EXPR:
2ba172e0 4839 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4840
4841 case UNEQ_EXPR:
2ba172e0 4842 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4843
4844 case LTGT_EXPR:
2ba172e0 4845 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4846
4847 case COND_EXPR:
4848 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4849
4850 case COMPLEX_EXPR:
4851 gcc_assert (COMPLEX_MODE_P (mode));
4852 if (GET_MODE (op0) == VOIDmode)
4853 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4854 if (GET_MODE (op1) == VOIDmode)
4855 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4856 return gen_rtx_CONCAT (mode, op0, op1);
4857
d02a5a4b
JJ
4858 case CONJ_EXPR:
4859 if (GET_CODE (op0) == CONCAT)
4860 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2ba172e0
JJ
4861 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4862 XEXP (op0, 1),
4863 GET_MODE_INNER (mode)));
d02a5a4b
JJ
4864 else
4865 {
d21cefc2 4866 scalar_mode imode = GET_MODE_INNER (mode);
d02a5a4b
JJ
4867 rtx re, im;
4868
4869 if (MEM_P (op0))
4870 {
4871 re = adjust_address_nv (op0, imode, 0);
4872 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4873 }
4874 else
4875 {
304b9962
RS
4876 scalar_int_mode ifmode;
4877 scalar_int_mode ihmode;
d02a5a4b 4878 rtx halfsize;
304b9962
RS
4879 if (!int_mode_for_mode (mode).exists (&ifmode)
4880 || !int_mode_for_mode (imode).exists (&ihmode))
d02a5a4b
JJ
4881 return NULL;
4882 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4883 re = op0;
4884 if (mode != ifmode)
4885 re = gen_rtx_SUBREG (ifmode, re, 0);
4886 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4887 if (imode != ihmode)
4888 re = gen_rtx_SUBREG (imode, re, 0);
4889 im = copy_rtx (op0);
4890 if (mode != ifmode)
4891 im = gen_rtx_SUBREG (ifmode, im, 0);
4892 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4893 if (imode != ihmode)
4894 im = gen_rtx_SUBREG (imode, im, 0);
4895 }
4896 im = gen_rtx_NEG (imode, im);
4897 return gen_rtx_CONCAT (mode, re, im);
4898 }
4899
b5b8b0ac
AO
4900 case ADDR_EXPR:
4901 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4902 if (!op0 || !MEM_P (op0))
c8a27c40
JJ
4903 {
4904 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4905 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4906 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
f8cca67b
JJ
4907 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4908 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
c8a27c40
JJ
4909 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4910
4911 if (handled_component_p (TREE_OPERAND (exp, 0)))
4912 {
588db50c 4913 poly_int64 bitoffset, bitsize, maxsize, byteoffset;
ee45a32d 4914 bool reverse;
c8a27c40 4915 tree decl
ee45a32d
EB
4916 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4917 &bitsize, &maxsize, &reverse);
8813a647 4918 if ((VAR_P (decl)
c8a27c40
JJ
4919 || TREE_CODE (decl) == PARM_DECL
4920 || TREE_CODE (decl) == RESULT_DECL)
f8cca67b
JJ
4921 && (!TREE_ADDRESSABLE (decl)
4922 || target_for_debug_bind (decl))
588db50c
RS
4923 && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
4924 && known_gt (bitsize, 0)
4925 && known_eq (bitsize, maxsize))
0a81f074
RS
4926 {
4927 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
588db50c 4928 return plus_constant (mode, base, byteoffset);
0a81f074 4929 }
c8a27c40
JJ
4930 }
4931
9430b7ba
JJ
4932 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4933 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4934 == ADDR_EXPR)
4935 {
4936 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4937 0));
4938 if (op0 != NULL
4939 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4940 || (GET_CODE (op0) == PLUS
4941 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4942 && CONST_INT_P (XEXP (op0, 1)))))
4943 {
4944 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4945 1));
4946 if (!op1 || !CONST_INT_P (op1))
4947 return NULL;
4948
4949 return plus_constant (mode, op0, INTVAL (op1));
4950 }
4951 }
4952
c8a27c40
JJ
4953 return NULL;
4954 }
b5b8b0ac 4955
a148c4b2 4956 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7a504f33
RS
4957 addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
4958 op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
dda2da58
AO
4959
4960 return op0;
b5b8b0ac
AO
4961
4962 case VECTOR_CST:
d2a12ae7 4963 {
9e822269 4964 unsigned i, nelts;
d2a12ae7 4965
9e822269
RS
4966 nelts = VECTOR_CST_NELTS (exp);
4967 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
d2a12ae7 4968
9e822269 4969 for (i = 0; i < nelts; ++i)
d2a12ae7
RG
4970 {
4971 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4972 if (!op1)
4973 return NULL;
4974 XVECEXP (op0, 0, i) = op1;
4975 }
4976
4977 return op0;
4978 }
b5b8b0ac
AO
4979
4980 case CONSTRUCTOR:
47598145
MM
4981 if (TREE_CLOBBER_P (exp))
4982 return NULL;
4983 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
b5b8b0ac
AO
4984 {
4985 unsigned i;
4986 tree val;
4987
4988 op0 = gen_rtx_CONCATN
4989 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4990
4991 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4992 {
4993 op1 = expand_debug_expr (val);
4994 if (!op1)
4995 return NULL;
4996 XVECEXP (op0, 0, i) = op1;
4997 }
4998
4999 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
5000 {
5001 op1 = expand_debug_expr
e8160c9a 5002 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
b5b8b0ac
AO
5003
5004 if (!op1)
5005 return NULL;
5006
5007 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
5008 XVECEXP (op0, 0, i) = op1;
5009 }
5010
5011 return op0;
5012 }
5013 else
5014 goto flag_unsupported;
5015
5016 case CALL_EXPR:
5017 /* ??? Maybe handle some builtins? */
5018 return NULL;
5019
5020 case SSA_NAME:
5021 {
355fe088 5022 gimple *g = get_gimple_for_ssa_name (exp);
2a8e30fb
MM
5023 if (g)
5024 {
dfde35b3
JJ
5025 tree t = NULL_TREE;
5026 if (deep_ter_debug_map)
5027 {
5028 tree *slot = deep_ter_debug_map->get (exp);
5029 if (slot)
5030 t = *slot;
5031 }
5032 if (t == NULL_TREE)
5033 t = gimple_assign_rhs_to_tree (g);
5034 op0 = expand_debug_expr (t);
2a8e30fb
MM
5035 if (!op0)
5036 return NULL;
5037 }
5038 else
5039 {
f11a7b6d
AO
5040 /* If this is a reference to an incoming value of
5041 parameter that is never used in the code or where the
5042 incoming value is never used in the code, use
5043 PARM_DECL's DECL_RTL if set. */
5044 if (SSA_NAME_IS_DEFAULT_DEF (exp)
5045 && SSA_NAME_VAR (exp)
5046 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5047 && has_zero_uses (exp))
5048 {
5049 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5050 if (op0)
5051 goto adjust_mode;
5052 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5053 if (op0)
5054 goto adjust_mode;
5055 }
5056
2a8e30fb 5057 int part = var_to_partition (SA.map, exp);
b5b8b0ac 5058
2a8e30fb 5059 if (part == NO_PARTITION)
f11a7b6d 5060 return NULL;
b5b8b0ac 5061
2a8e30fb 5062 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
b5b8b0ac 5063
abfea58d 5064 op0 = copy_rtx (SA.partition_to_pseudo[part]);
2a8e30fb 5065 }
b5b8b0ac
AO
5066 goto adjust_mode;
5067 }
5068
5069 case ERROR_MARK:
5070 return NULL;
5071
7ece48b1
JJ
5072 /* Vector stuff. For most of the codes we don't have rtl codes. */
5073 case REALIGN_LOAD_EXPR:
7ece48b1 5074 case VEC_COND_EXPR:
7ece48b1
JJ
5075 case VEC_PACK_FIX_TRUNC_EXPR:
5076 case VEC_PACK_SAT_EXPR:
5077 case VEC_PACK_TRUNC_EXPR:
7ece48b1
JJ
5078 case VEC_UNPACK_FLOAT_HI_EXPR:
5079 case VEC_UNPACK_FLOAT_LO_EXPR:
5080 case VEC_UNPACK_HI_EXPR:
5081 case VEC_UNPACK_LO_EXPR:
5082 case VEC_WIDEN_MULT_HI_EXPR:
5083 case VEC_WIDEN_MULT_LO_EXPR:
3f30a9a6
RH
5084 case VEC_WIDEN_MULT_EVEN_EXPR:
5085 case VEC_WIDEN_MULT_ODD_EXPR:
36ba4aae
IR
5086 case VEC_WIDEN_LSHIFT_HI_EXPR:
5087 case VEC_WIDEN_LSHIFT_LO_EXPR:
3f3af9df 5088 case VEC_PERM_EXPR:
be4c1d4a 5089 case VEC_DUPLICATE_EXPR:
9adab579 5090 case VEC_SERIES_EXPR:
7ece48b1
JJ
5091 return NULL;
5092
98449720 5093 /* Misc codes. */
7ece48b1
JJ
5094 case ADDR_SPACE_CONVERT_EXPR:
5095 case FIXED_CONVERT_EXPR:
5096 case OBJ_TYPE_REF:
5097 case WITH_SIZE_EXPR:
483c6429 5098 case BIT_INSERT_EXPR:
7ece48b1
JJ
5099 return NULL;
5100
5101 case DOT_PROD_EXPR:
5102 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5103 && SCALAR_INT_MODE_P (mode))
5104 {
2ba172e0
JJ
5105 op0
5106 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5107 0)))
5108 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5109 inner_mode);
5110 op1
5111 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5112 1)))
5113 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5114 inner_mode);
5115 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5116 return simplify_gen_binary (PLUS, mode, op0, op2);
7ece48b1
JJ
5117 }
5118 return NULL;
5119
5120 case WIDEN_MULT_EXPR:
0354c0c7
BS
5121 case WIDEN_MULT_PLUS_EXPR:
5122 case WIDEN_MULT_MINUS_EXPR:
7ece48b1
JJ
5123 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5124 && SCALAR_INT_MODE_P (mode))
5125 {
2ba172e0 5126 inner_mode = GET_MODE (op0);
7ece48b1 5127 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5b58b39b 5128 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
7ece48b1 5129 else
5b58b39b 5130 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
7ece48b1 5131 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5b58b39b 5132 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
7ece48b1 5133 else
5b58b39b 5134 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
2ba172e0 5135 op0 = simplify_gen_binary (MULT, mode, op0, op1);
0354c0c7
BS
5136 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5137 return op0;
5138 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
2ba172e0 5139 return simplify_gen_binary (PLUS, mode, op0, op2);
0354c0c7 5140 else
2ba172e0 5141 return simplify_gen_binary (MINUS, mode, op2, op0);
7ece48b1
JJ
5142 }
5143 return NULL;
5144
98449720
RH
5145 case MULT_HIGHPART_EXPR:
5146 /* ??? Similar to the above. */
5147 return NULL;
5148
7ece48b1 5149 case WIDEN_SUM_EXPR:
3f3af9df 5150 case WIDEN_LSHIFT_EXPR:
7ece48b1
JJ
5151 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5152 && SCALAR_INT_MODE_P (mode))
5153 {
2ba172e0
JJ
5154 op0
5155 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5156 0)))
5157 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5158 inner_mode);
3f3af9df
JJ
5159 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5160 ? ASHIFT : PLUS, mode, op0, op1);
7ece48b1
JJ
5161 }
5162 return NULL;
5163
0f59b812 5164 case FMA_EXPR:
2ba172e0 5165 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
0f59b812 5166
b5b8b0ac
AO
5167 default:
5168 flag_unsupported:
b2b29377
MM
5169 if (flag_checking)
5170 {
5171 debug_tree (exp);
5172 gcc_unreachable ();
5173 }
b5b8b0ac 5174 return NULL;
b5b8b0ac
AO
5175 }
5176}
5177
ddb555ed
JJ
5178/* Return an RTX equivalent to the source bind value of the tree expression
5179 EXP. */
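/* Roughly, a source bind records the value a parameter had on entry to
   the function; e.g. even when the incoming value of a PARM_DECL has been
   completely optimized away, the PARM_DECL case below can still emit a
   DEBUG_PARAMETER_REF referring to the declaration in the abstract
   origin.  */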
5180
5181static rtx
5182expand_debug_source_expr (tree exp)
5183{
5184 rtx op0 = NULL_RTX;
ef4bddc2 5185 machine_mode mode = VOIDmode, inner_mode;
ddb555ed
JJ
5186
5187 switch (TREE_CODE (exp))
5188 {
5189 case PARM_DECL:
5190 {
ddb555ed 5191 mode = DECL_MODE (exp);
12c5ffe5
EB
5192 op0 = expand_debug_parm_decl (exp);
5193 if (op0)
5194 break;
ddb555ed
JJ
5195 /* See if this isn't an argument that has been completely
5196 optimized out. */
5197 if (!DECL_RTL_SET_P (exp)
12c5ffe5 5198 && !DECL_INCOMING_RTL (exp)
ddb555ed
JJ
5199 && DECL_ABSTRACT_ORIGIN (current_function_decl))
5200 {
7b575cfa 5201 tree aexp = DECL_ORIGIN (exp);
ddb555ed
JJ
5202 if (DECL_CONTEXT (aexp)
5203 == DECL_ABSTRACT_ORIGIN (current_function_decl))
5204 {
9771b263 5205 vec<tree, va_gc> **debug_args;
ddb555ed
JJ
5206 unsigned int ix;
5207 tree ddecl;
ddb555ed
JJ
5208 debug_args = decl_debug_args_lookup (current_function_decl);
5209 if (debug_args != NULL)
5210 {
9771b263 5211 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
ddb555ed
JJ
5212 ix += 2)
5213 if (ddecl == aexp)
5214 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5215 }
5216 }
5217 }
5218 break;
5219 }
5220 default:
5221 break;
5222 }
5223
5224 if (op0 == NULL_RTX)
5225 return NULL_RTX;
5226
5227 inner_mode = GET_MODE (op0);
5228 if (mode == inner_mode)
5229 return op0;
5230
5231 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5232 {
250a60f3
RS
5233 if (GET_MODE_UNIT_BITSIZE (mode)
5234 == GET_MODE_UNIT_BITSIZE (inner_mode))
ddb555ed 5235 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
250a60f3
RS
5236 else if (GET_MODE_UNIT_BITSIZE (mode)
5237 < GET_MODE_UNIT_BITSIZE (inner_mode))
ddb555ed
JJ
5238 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5239 else
5240 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5241 }
5242 else if (FLOAT_MODE_P (mode))
5243 gcc_unreachable ();
5244 else if (FLOAT_MODE_P (inner_mode))
5245 {
5246 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5247 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5248 else
5249 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5250 }
bb06a2d8
RS
5251 else if (GET_MODE_UNIT_PRECISION (mode)
5252 == GET_MODE_UNIT_PRECISION (inner_mode))
3403a1a9 5253 op0 = lowpart_subreg (mode, op0, inner_mode);
bb06a2d8
RS
5254 else if (GET_MODE_UNIT_PRECISION (mode)
5255 < GET_MODE_UNIT_PRECISION (inner_mode))
5256 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
ddb555ed
JJ
5257 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5258 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5259 else
5260 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5261
5262 return op0;
5263}
5264
6cfa417f
JJ
5265/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5266 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5267 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
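/* E.g. once the rtl nesting of the location reaches four levels, any
   deeper non-leaf sub-expression is split out into a DEBUG_EXPR decl
   bound by its own debug bind insn emitted just before INSN, and the
   original location then refers to that DEBUG_EXPR instead.  */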
5268
5269static void
b47aae36 5270avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
6cfa417f
JJ
5271{
5272 rtx exp = *exp_p;
5273
5274 if (exp == NULL_RTX)
5275 return;
5276
5277 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5278 return;
5279
5280 if (depth == 4)
5281 {
5282 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
5283 rtx dval = make_debug_expr_from_rtl (exp);
5284
5285 /* Emit a debug bind insn before INSN. */
5286 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5287 DEBUG_EXPR_TREE_DECL (dval), exp,
5288 VAR_INIT_STATUS_INITIALIZED);
5289
5290 emit_debug_insn_before (bind, insn);
5291 *exp_p = dval;
5292 return;
5293 }
5294
5295 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5296 int i, j;
5297 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5298 switch (*format_ptr++)
5299 {
5300 case 'e':
5301 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5302 break;
5303
5304 case 'E':
5305 case 'V':
5306 for (j = 0; j < XVECLEN (exp, i); j++)
5307 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5308 break;
5309
5310 default:
5311 break;
5312 }
5313}
5314
b5b8b0ac
AO
5315/* Expand the _LOCs in debug insns. We run this after expanding all
5316 regular insns, so that any variables referenced in the function
5317 will have their DECL_RTLs set. */
5318
5319static void
5320expand_debug_locations (void)
5321{
b47aae36
DM
5322 rtx_insn *insn;
5323 rtx_insn *last = get_last_insn ();
b5b8b0ac
AO
5324 int save_strict_alias = flag_strict_aliasing;
5325
5326 /* New alias sets while setting up memory attributes cause
5327 -fcompare-debug failures, even though they don't bring about any
5328 codegen changes. */
5329 flag_strict_aliasing = 0;
5330
5331 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
36f52e8f 5332 if (DEBUG_BIND_INSN_P (insn))
b5b8b0ac
AO
5333 {
5334 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
b47aae36
DM
5335 rtx val;
5336 rtx_insn *prev_insn, *insn2;
ef4bddc2 5337 machine_mode mode;
b5b8b0ac
AO
5338
5339 if (value == NULL_TREE)
5340 val = NULL_RTX;
5341 else
5342 {
ddb555ed
JJ
5343 if (INSN_VAR_LOCATION_STATUS (insn)
5344 == VAR_INIT_STATUS_UNINITIALIZED)
5345 val = expand_debug_source_expr (value);
dfde35b3
JJ
5346 /* The avoid_deep_ter_for_debug function inserts
5347 debug bind stmts after SSA_NAME definition, with the
5348 SSA_NAME as the whole bind location. Disable temporarily
5349 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5350 being defined in this DEBUG_INSN. */
5351 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5352 {
5353 tree *slot = deep_ter_debug_map->get (value);
5354 if (slot)
5355 {
5356 if (*slot == INSN_VAR_LOCATION_DECL (insn))
5357 *slot = NULL_TREE;
5358 else
5359 slot = NULL;
5360 }
5361 val = expand_debug_expr (value);
5362 if (slot)
5363 *slot = INSN_VAR_LOCATION_DECL (insn);
5364 }
ddb555ed
JJ
5365 else
5366 val = expand_debug_expr (value);
b5b8b0ac
AO
5367 gcc_assert (last == get_last_insn ());
5368 }
5369
5370 if (!val)
5371 val = gen_rtx_UNKNOWN_VAR_LOC ();
5372 else
5373 {
5374 mode = GET_MODE (INSN_VAR_LOCATION (insn));
5375
5376 gcc_assert (mode == GET_MODE (val)
5377 || (GET_MODE (val) == VOIDmode
33ffb5c5 5378 && (CONST_SCALAR_INT_P (val)
b5b8b0ac 5379 || GET_CODE (val) == CONST_FIXED
b5b8b0ac
AO
5380 || GET_CODE (val) == LABEL_REF)));
5381 }
5382
5383 INSN_VAR_LOCATION_LOC (insn) = val;
6cfa417f
JJ
5384 prev_insn = PREV_INSN (insn);
5385 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5386 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
b5b8b0ac
AO
5387 }
5388
5389 flag_strict_aliasing = save_strict_alias;
5390}
5391
d2626c0b
YR
5392/* Swap the operands of commutative operations so that the more
5393 expensive one is expanded first. */
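/* E.g. for a statement like x_3 = a_1 + b_2 where the (TERed) definition
   of b_2 is estimated to be costlier than that of a_1, the operands are
   swapped so that the more expensive subtree is expanded first.  */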
5394
5395static void
5396reorder_operands (basic_block bb)
5397{
5398 unsigned int *lattice; /* Hold cost of each statement. */
5399 unsigned int i = 0, n = 0;
5400 gimple_stmt_iterator gsi;
5401 gimple_seq stmts;
355fe088 5402 gimple *stmt;
d2626c0b
YR
5403 bool swap;
5404 tree op0, op1;
5405 ssa_op_iter iter;
5406 use_operand_p use_p;
355fe088 5407 gimple *def0, *def1;
d2626c0b
YR
5408
5409 /* Compute cost of each statement using estimate_num_insns. */
5410 stmts = bb_seq (bb);
5411 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5412 {
5413 stmt = gsi_stmt (gsi);
090238ee
YR
5414 if (!is_gimple_debug (stmt))
5415 gimple_set_uid (stmt, n++);
d2626c0b
YR
5416 }
5417 lattice = XNEWVEC (unsigned int, n);
5418 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5419 {
5420 unsigned cost;
5421 stmt = gsi_stmt (gsi);
090238ee
YR
5422 if (is_gimple_debug (stmt))
5423 continue;
d2626c0b
YR
5424 cost = estimate_num_insns (stmt, &eni_size_weights);
5425 lattice[i] = cost;
5426 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5427 {
5428 tree use = USE_FROM_PTR (use_p);
355fe088 5429 gimple *def_stmt;
d2626c0b
YR
5430 if (TREE_CODE (use) != SSA_NAME)
5431 continue;
5432 def_stmt = get_gimple_for_ssa_name (use);
5433 if (!def_stmt)
5434 continue;
5435 lattice[i] += lattice[gimple_uid (def_stmt)];
5436 }
5437 i++;
5438 if (!is_gimple_assign (stmt)
5439 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5440 continue;
5441 op0 = gimple_op (stmt, 1);
5442 op1 = gimple_op (stmt, 2);
5443 if (TREE_CODE (op0) != SSA_NAME
5444 || TREE_CODE (op1) != SSA_NAME)
5445 continue;
5446 /* Swap operands if the second one is more expensive. */
5447 def0 = get_gimple_for_ssa_name (op0);
d2626c0b
YR
5448 def1 = get_gimple_for_ssa_name (op1);
5449 if (!def1)
5450 continue;
5451 swap = false;
68ca4ac9 5452 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
d2626c0b
YR
5453 swap = true;
5454 if (swap)
5455 {
5456 if (dump_file && (dump_flags & TDF_DETAILS))
5457 {
5458 fprintf (dump_file, "Swap operands in stmt:\n");
5459 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5460 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
68ca4ac9 5461 def0 ? lattice[gimple_uid (def0)] : 0,
d2626c0b
YR
5462 lattice[gimple_uid (def1)]);
5463 }
5464 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5465 gimple_assign_rhs2_ptr (stmt));
5466 }
5467 }
5468 XDELETE (lattice);
5469}
5470
242229bb
JH
5471/* Expand basic block BB from GIMPLE trees to RTL. */
5472
5473static basic_block
f3ddd692 5474expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
242229bb 5475{
726a989a
RB
5476 gimple_stmt_iterator gsi;
5477 gimple_seq stmts;
355fe088 5478 gimple *stmt = NULL;
65f4b875 5479 rtx_note *note = NULL;
b47aae36 5480 rtx_insn *last;
242229bb 5481 edge e;
628f6a4e 5482 edge_iterator ei;
242229bb
JH
5483
5484 if (dump_file)
726a989a
RB
5485 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5486 bb->index);
5487
5488 /* Note that since we are now transitioning from GIMPLE to RTL, we
5489 cannot use the gsi_*_bb() routines because they expect the basic
5490 block to be in GIMPLE, instead of RTL. Therefore, we need to
5491 access the BB sequence directly. */
d2626c0b
YR
5492 if (optimize)
5493 reorder_operands (bb);
726a989a 5494 stmts = bb_seq (bb);
3e8b732e
MM
5495 bb->il.gimple.seq = NULL;
5496 bb->il.gimple.phi_nodes = NULL;
bf08ebeb 5497 rtl_profile_for_bb (bb);
5e2d947c
JH
5498 init_rtl_bb_info (bb);
5499 bb->flags |= BB_RTL;
5500
a9b77cd1
ZD
5501 /* Remove the RETURN_EXPR if we may fall through to the exit
5502 instead. */
726a989a
RB
5503 gsi = gsi_last (stmts);
5504 if (!gsi_end_p (gsi)
5505 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
a9b77cd1 5506 {
538dd0b7 5507 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
a9b77cd1
ZD
5508
5509 gcc_assert (single_succ_p (bb));
fefa31b5 5510 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
a9b77cd1 5511
fefa31b5 5512 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
726a989a 5513 && !gimple_return_retval (ret_stmt))
a9b77cd1 5514 {
726a989a 5515 gsi_remove (&gsi, false);
a9b77cd1
ZD
5516 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5517 }
5518 }
5519
67a8d719 5520 gsi = gsi_start (stmts);
726a989a 5521 if (!gsi_end_p (gsi))
8b11009b 5522 {
726a989a
RB
5523 stmt = gsi_stmt (gsi);
5524 if (gimple_code (stmt) != GIMPLE_LABEL)
5525 stmt = NULL;
8b11009b 5526 }
242229bb 5527
134aa83c 5528 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
8b11009b 5529
afa7c903 5530 if (stmt || elt)
242229bb 5531 {
65f4b875 5532 gcc_checking_assert (!note);
242229bb
JH
5533 last = get_last_insn ();
5534
8b11009b
ZD
5535 if (stmt)
5536 {
28ed065e 5537 expand_gimple_stmt (stmt);
67a8d719 5538 gsi_next (&gsi);
8b11009b
ZD
5539 }
5540
5541 if (elt)
39c8aaa4 5542 emit_label (*elt);
242229bb 5543
1130d5e3 5544 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 5545 if (NOTE_P (BB_HEAD (bb)))
1130d5e3 5546 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
65f4b875 5547 gcc_assert (LABEL_P (BB_HEAD (bb)));
242229bb 5548 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528 5549
726a989a 5550 maybe_dump_rtl_for_gimple_stmt (stmt, last);
242229bb
JH
5551 }
5552 else
1130d5e3 5553 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
242229bb 5554
65f4b875
AO
5555 if (note)
5556 NOTE_BASIC_BLOCK (note) = bb;
242229bb 5557
726a989a 5558 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 5559 {
cea49550 5560 basic_block new_bb;
242229bb 5561
b5b8b0ac 5562 stmt = gsi_stmt (gsi);
2a8e30fb
MM
5563
5564 /* If this statement is a non-debug one, and we generate debug
5565 insns, then this one might be the last real use of a TERed
5566 SSA_NAME, but where there are still some debug uses further
5567 down. Expanding the current SSA name in such further debug
5568 uses by their RHS might lead to wrong debug info, as coalescing
5569 might make the operands of such RHS be placed into the same
5570 pseudo as something else. Like so:
5571 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5572 use(a_1);
5573 a_2 = ...
5574 #DEBUG ... => a_1
5575 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5576 If we now would expand a_1 by its RHS (a_0 + 1) in the debug use,
5577 the write to a_2 would actually have clobbered the place which
5578 formerly held a_0.
5579
5580 So, instead of that, we recognize the situation, and generate
5581 debug temporaries at the last real use of TERed SSA names:
5582 a_1 = a_0 + 1;
5583 #DEBUG #D1 => a_1
5584 use(a_1);
5585 a_2 = ...
5586 #DEBUG ... => #D1
5587 */
36f52e8f 5588 if (MAY_HAVE_DEBUG_BIND_INSNS
2a8e30fb
MM
5589 && SA.values
5590 && !is_gimple_debug (stmt))
5591 {
5592 ssa_op_iter iter;
5593 tree op;
355fe088 5594 gimple *def;
2a8e30fb 5595
5368224f 5596 location_t sloc = curr_insn_location ();
2a8e30fb
MM
5597
5598 /* Look for SSA names that have their last use here (TERed
5599 names always have only one real use). */
5600 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5601 if ((def = get_gimple_for_ssa_name (op)))
5602 {
5603 imm_use_iterator imm_iter;
5604 use_operand_p use_p;
5605 bool have_debug_uses = false;
5606
5607 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5608 {
5609 if (gimple_debug_bind_p (USE_STMT (use_p)))
5610 {
5611 have_debug_uses = true;
5612 break;
5613 }
5614 }
5615
5616 if (have_debug_uses)
5617 {
871dae34 5618 /* OP is a TERed SSA name, with DEF its defining
2a8e30fb
MM
5619 statement, and where OP is used in further debug
5620 instructions. Generate a debug temporary, and
5621 replace all uses of OP in debug insns with that
5622 temporary. */
355fe088 5623 gimple *debugstmt;
2a8e30fb
MM
5624 tree value = gimple_assign_rhs_to_tree (def);
5625 tree vexpr = make_node (DEBUG_EXPR_DECL);
5626 rtx val;
ef4bddc2 5627 machine_mode mode;
2a8e30fb 5628
5368224f 5629 set_curr_insn_location (gimple_location (def));
2a8e30fb
MM
5630
5631 DECL_ARTIFICIAL (vexpr) = 1;
5632 TREE_TYPE (vexpr) = TREE_TYPE (value);
5633 if (DECL_P (value))
5634 mode = DECL_MODE (value);
5635 else
5636 mode = TYPE_MODE (TREE_TYPE (value));
899ca90e 5637 SET_DECL_MODE (vexpr, mode);
2a8e30fb
MM
5638
5639 val = gen_rtx_VAR_LOCATION
5640 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5641
e8c6bb74 5642 emit_debug_insn (val);
2a8e30fb
MM
5643
5644 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5645 {
5646 if (!gimple_debug_bind_p (debugstmt))
5647 continue;
5648
5649 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5650 SET_USE (use_p, vexpr);
5651
5652 update_stmt (debugstmt);
5653 }
5654 }
5655 }
5368224f 5656 set_curr_insn_location (sloc);
2a8e30fb
MM
5657 }
5658
a5883ba0 5659 currently_expanding_gimple_stmt = stmt;
b5b8b0ac 5660
242229bb
JH
5661 /* Expand this statement, then evaluate the resulting RTL and
5662 fixup the CFG accordingly. */
726a989a 5663 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 5664 {
538dd0b7 5665 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
cea49550
RH
5666 if (new_bb)
5667 return new_bb;
5668 }
96a95ac1 5669 else if (is_gimple_debug (stmt))
b5b8b0ac 5670 {
5368224f 5671 location_t sloc = curr_insn_location ();
b5b8b0ac
AO
5672 gimple_stmt_iterator nsi = gsi;
5673
5674 for (;;)
5675 {
96a95ac1
AO
5676 tree var;
5677 tree value = NULL_TREE;
5678 rtx val = NULL_RTX;
ef4bddc2 5679 machine_mode mode;
b5b8b0ac 5680
96a95ac1
AO
5681 if (!gimple_debug_nonbind_marker_p (stmt))
5682 {
5683 if (gimple_debug_bind_p (stmt))
5684 {
5685 var = gimple_debug_bind_get_var (stmt);
ec8c1492 5686
96a95ac1
AO
5687 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5688 && TREE_CODE (var) != LABEL_DECL
5689 && !target_for_debug_bind (var))
5690 goto delink_debug_stmt;
b5b8b0ac 5691
96a95ac1
AO
5692 if (DECL_P (var))
5693 mode = DECL_MODE (var);
5694 else
5695 mode = TYPE_MODE (TREE_TYPE (var));
b5b8b0ac 5696
96a95ac1
AO
5697 if (gimple_debug_bind_has_value_p (stmt))
5698 value = gimple_debug_bind_get_value (stmt);
5699
5700 val = gen_rtx_VAR_LOCATION
5701 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5702 }
5703 else if (gimple_debug_source_bind_p (stmt))
5704 {
5705 var = gimple_debug_source_bind_get_var (stmt);
5706
5707 value = gimple_debug_source_bind_get_value (stmt);
5708
5709 mode = DECL_MODE (var);
b5b8b0ac 5710
96a95ac1
AO
5711 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5712 VAR_INIT_STATUS_UNINITIALIZED);
5713 }
5714 else
5715 gcc_unreachable ();
5716 }
5717 /* If this function was first compiled with markers
5718 enabled, but they're now disabled (e.g. LTO), drop
5719 them on the floor. */
5720 else if (gimple_debug_nonbind_marker_p (stmt)
5721 && !MAY_HAVE_DEBUG_MARKER_INSNS)
5722 goto delink_debug_stmt;
5723 else if (gimple_debug_begin_stmt_p (stmt))
5724 val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
b5b8b0ac 5725 else
96a95ac1 5726 gcc_unreachable ();
b5b8b0ac 5727
96a95ac1
AO
5728 last = get_last_insn ();
5729
5730 set_curr_insn_location (gimple_location (stmt));
b5b8b0ac 5731
e16b6fd0 5732 emit_debug_insn (val);
b5b8b0ac
AO
5733
5734 if (dump_file && (dump_flags & TDF_DETAILS))
5735 {
5736 /* We can't dump the insn with a TREE where an RTX
5737 is expected. */
96a95ac1
AO
5738 if (GET_CODE (val) == VAR_LOCATION)
5739 {
5740 gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
5741 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5742 }
b5b8b0ac 5743 maybe_dump_rtl_for_gimple_stmt (stmt, last);
96a95ac1
AO
5744 if (GET_CODE (val) == VAR_LOCATION)
5745 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
b5b8b0ac
AO
5746 }
5747
ec8c1492 5748 delink_debug_stmt:
2a8e30fb
MM
5749 /* In order not to generate too many debug temporaries,
5750 we delink all uses of debug statements we already expanded.
5751 Therefore debug statements between definition and real
5752 use of TERed SSA names will continue to use the SSA name,
5753 and not be replaced with debug temps. */
5754 delink_stmt_imm_use (stmt);
5755
b5b8b0ac
AO
5756 gsi = nsi;
5757 gsi_next (&nsi);
5758 if (gsi_end_p (nsi))
5759 break;
5760 stmt = gsi_stmt (nsi);
96a95ac1 5761 if (!is_gimple_debug (stmt))
b5b8b0ac
AO
5762 break;
5763 }
5764
5368224f 5765 set_curr_insn_location (sloc);
b5b8b0ac 5766 }
80c7a9eb 5767 else
242229bb 5768 {
538dd0b7
DM
5769 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5770 if (call_stmt
5771 && gimple_call_tail_p (call_stmt)
f3ddd692 5772 && disable_tail_calls)
538dd0b7 5773 gimple_call_set_tail (call_stmt, false);
f3ddd692 5774
538dd0b7 5775 if (call_stmt && gimple_call_tail_p (call_stmt))
cea49550
RH
5776 {
5777 bool can_fallthru;
538dd0b7 5778 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
cea49550
RH
5779 if (new_bb)
5780 {
5781 if (can_fallthru)
5782 bb = new_bb;
5783 else
5784 return new_bb;
5785 }
5786 }
4d7a65ea 5787 else
b7211528 5788 {
4e3825db 5789 def_operand_p def_p;
4e3825db
MM
5790 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5791
5792 if (def_p != NULL)
5793 {
5794 /* Ignore this stmt if it is in the list of
5795 replaceable expressions. */
5796 if (SA.values
b8698a0f 5797 && bitmap_bit_p (SA.values,
e97809c6 5798 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4e3825db
MM
5799 continue;
5800 }
28ed065e 5801 last = expand_gimple_stmt (stmt);
726a989a 5802 maybe_dump_rtl_for_gimple_stmt (stmt, last);
b7211528 5803 }
242229bb
JH
5804 }
5805 }
5806
a5883ba0
MM
5807 currently_expanding_gimple_stmt = NULL;
5808
7241571e 5809 /* Expand implicit goto and convert goto_locus. */
a9b77cd1
ZD
5810 FOR_EACH_EDGE (e, ei, bb->succs)
5811 {
2f13f2de 5812 if (e->goto_locus != UNKNOWN_LOCATION)
5368224f 5813 set_curr_insn_location (e->goto_locus);
7241571e
JJ
5814 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5815 {
5816 emit_jump (label_rtx_for_bb (e->dest));
5817 e->flags &= ~EDGE_FALLTHRU;
5818 }
a9b77cd1
ZD
5819 }
5820
ae761c45
AH
5821 /* Expanded RTL can create a jump in the last instruction of the block.
5822 This later might be assumed to be a jump to the successor and break edge insertion.
5823 We need to insert a dummy move to prevent this. PR41440. */
5824 if (single_succ_p (bb)
5825 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5826 && (last = get_last_insn ())
4dbebf6f
AO
5827 && (JUMP_P (last)
5828 || (DEBUG_INSN_P (last)
5829 && JUMP_P (prev_nondebug_insn (last)))))
ae761c45
AH
5830 {
5831 rtx dummy = gen_reg_rtx (SImode);
5832 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5833 }
5834
242229bb
JH
5835 do_pending_stack_adjust ();
5836
3f117656 5837 /* Find the block tail. The last insn in the block is the insn
242229bb
JH
5838 before a barrier and/or table jump insn. */
5839 last = get_last_insn ();
4b4bf941 5840 if (BARRIER_P (last))
242229bb
JH
5841 last = PREV_INSN (last);
5842 if (JUMP_TABLE_DATA_P (last))
5843 last = PREV_INSN (PREV_INSN (last));
1130d5e3 5844 BB_END (bb) = last;
caf93cb0 5845
242229bb 5846 update_bb_for_insn (bb);
80c7a9eb 5847
242229bb
JH
5848 return bb;
5849}
5850
5851
5852/* Create a basic block for initialization code. */
5853
5854static basic_block
5855construct_init_block (void)
5856{
5857 basic_block init_block, first_block;
fd44f634
JH
5858 edge e = NULL;
5859 int flags;
275a4187 5860
fd44f634 5861 /* Multiple entry points not supported yet. */
fefa31b5
DM
5862 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5863 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5864 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5865 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5866 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
242229bb 5867
fefa31b5 5868 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
275a4187 5869
fd44f634
JH
5870 /* When entry edge points to first basic block, we don't need jump,
5871 otherwise we have to jump into proper target. */
fefa31b5 5872 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
fd44f634 5873 {
726a989a 5874 tree label = gimple_block_label (e->dest);
fd44f634 5875
1476d1bd 5876 emit_jump (jump_target_rtx (label));
fd44f634 5877 flags = 0;
275a4187 5878 }
fd44f634
JH
5879 else
5880 flags = EDGE_FALLTHRU;
242229bb
JH
5881
5882 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5883 get_last_insn (),
fefa31b5 5884 ENTRY_BLOCK_PTR_FOR_FN (cfun));
fefa31b5 5885 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
726338f4 5886 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
242229bb
JH
5887 if (e)
5888 {
5889 first_block = e->dest;
5890 redirect_edge_succ (e, init_block);
357067f2 5891 e = make_single_succ_edge (init_block, first_block, flags);
242229bb
JH
5892 }
5893 else
357067f2
JH
5894 e = make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5895 EDGE_FALLTHRU);
242229bb
JH
5896
5897 update_bb_for_insn (init_block);
5898 return init_block;
5899}
5900
55e092c4
JH
5901/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5902 found in the block tree. */
5903
5904static void
5905set_block_levels (tree block, int level)
5906{
5907 while (block)
5908 {
5909 BLOCK_NUMBER (block) = level;
5910 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5911 block = BLOCK_CHAIN (block);
5912 }
5913}
242229bb
JH
5914
5915/* Create a block containing landing pads and similar stuff. */
5916
5917static void
5918construct_exit_block (void)
5919{
b47aae36
DM
5920 rtx_insn *head = get_last_insn ();
5921 rtx_insn *end;
242229bb 5922 basic_block exit_block;
628f6a4e
BE
5923 edge e, e2;
5924 unsigned ix;
5925 edge_iterator ei;
79c7fda6 5926 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
b47aae36 5927 rtx_insn *orig_end = BB_END (prev_bb);
242229bb 5928
fefa31b5 5929 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
bf08ebeb 5930
caf93cb0 5931 /* Make sure the locus is set to the end of the function, so that
242229bb 5932 epilogue line numbers and warnings are set properly. */
2f13f2de 5933 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
242229bb
JH
5934 input_location = cfun->function_end_locus;
5935
242229bb
JH
5936 /* Generate rtl for function exit. */
5937 expand_function_end ();
5938
5939 end = get_last_insn ();
5940 if (head == end)
5941 return;
79c7fda6
JJ
5942 /* While emitting the function end we could move the end of the last basic
5943 block. */
1130d5e3 5944 BB_END (prev_bb) = orig_end;
4b4bf941 5945 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 5946 head = NEXT_INSN (head);
79c7fda6 5947 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
e7a74006 5948 bb count bookkeeping will be confused. Any instructions before that
79c7fda6
JJ
5949 label are emitted for the case where PREV_BB falls through into the
5950 exit block, so append those instructions to prev_bb in that case. */
5951 if (NEXT_INSN (head) != return_label)
5952 {
5953 while (NEXT_INSN (head) != return_label)
5954 {
5955 if (!NOTE_P (NEXT_INSN (head)))
1130d5e3 5956 BB_END (prev_bb) = NEXT_INSN (head);
79c7fda6
JJ
5957 head = NEXT_INSN (head);
5958 }
5959 }
5960 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
fefa31b5 5961 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
726338f4 5962 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
628f6a4e
BE
5963
5964 ix = 0;
fefa31b5 5965 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
242229bb 5966 {
fefa31b5 5967 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
242229bb 5968 if (!(e->flags & EDGE_ABNORMAL))
628f6a4e
BE
5969 redirect_edge_succ (e, exit_block);
5970 else
5971 ix++;
242229bb 5972 }
628f6a4e 5973
357067f2
JH
5974 e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5975 EDGE_FALLTHRU);
fefa31b5 5976 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
242229bb
JH
5977 if (e2 != e)
5978 {
ef30ab83 5979 exit_block->count -= e2->count ();
242229bb 5980 }
242229bb
JH
5981 update_bb_for_insn (exit_block);
5982}
5983
c22cacf3 5984/* Helper function for discover_nonconstant_array_refs.
a1b23b2f
UW
5985 Look for ARRAY_REF nodes with non-constant indexes and mark them
5986 addressable. */
5987
5988static tree
5989discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5990 void *data ATTRIBUTE_UNUSED)
5991{
5992 tree t = *tp;
5993
5994 if (IS_TYPE_OR_DECL_P (t))
5995 *walk_subtrees = 0;
5996 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5997 {
5998 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5999 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
6000 && (!TREE_OPERAND (t, 2)
6001 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6002 || (TREE_CODE (t) == COMPONENT_REF
6003 && (!TREE_OPERAND (t, 2)
6004 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6005 || TREE_CODE (t) == BIT_FIELD_REF
6006 || TREE_CODE (t) == REALPART_EXPR
6007 || TREE_CODE (t) == IMAGPART_EXPR
6008 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1043771b 6009 || CONVERT_EXPR_P (t))
a1b23b2f
UW
6010 t = TREE_OPERAND (t, 0);
6011
6012 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6013 {
6014 t = get_base_address (t);
6f11d690
RG
6015 if (t && DECL_P (t)
6016 && DECL_MODE (t) != BLKmode)
a1b23b2f
UW
6017 TREE_ADDRESSABLE (t) = 1;
6018 }
6019
6020 *walk_subtrees = 0;
6021 }
6022
6023 return NULL_TREE;
6024}
6025
6026/* RTL expansion is not able to compile array references with variable
6027 offsets for arrays stored in a single register. Discover such
6028 expressions and mark variables as addressable to avoid this
6029 scenario. */
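/* For instance, a small array or vector-typed local that would otherwise
   be allocated to a single register cannot support an access like a[i]
   with a non-constant i; marking the base TREE_ADDRESSABLE forces it into
   memory, where the variable offset can be applied.  */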
6030
6031static void
6032discover_nonconstant_array_refs (void)
6033{
6034 basic_block bb;
726a989a 6035 gimple_stmt_iterator gsi;
a1b23b2f 6036
11cd3bed 6037 FOR_EACH_BB_FN (bb, cfun)
726a989a
RB
6038 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6039 {
355fe088 6040 gimple *stmt = gsi_stmt (gsi);
aa847cc8
JJ
6041 if (!is_gimple_debug (stmt))
6042 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
726a989a 6043 }
a1b23b2f
UW
6044}
6045
2e3f842f
L
6046/* This function sets crtl->args.internal_arg_pointer to a virtual
6047 register if DRAP is needed. The local register allocator will replace
6048 virtual_incoming_args_rtx with the virtual register. */
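/* DRAP here is the dynamic realign argument pointer: a register used to
   address incoming arguments when the stack has to be realigned at
   runtime, since after realignment they can no longer be reached at a
   fixed offset from the aligned frame.  */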
6049
6050static void
6051expand_stack_alignment (void)
6052{
6053 rtx drap_rtx;
e939805b 6054 unsigned int preferred_stack_boundary;
2e3f842f
L
6055
6056 if (! SUPPORTS_STACK_ALIGNMENT)
6057 return;
b8698a0f 6058
2e3f842f
L
6059 if (cfun->calls_alloca
6060 || cfun->has_nonlocal_label
6061 || crtl->has_nonlocal_goto)
6062 crtl->need_drap = true;
6063
890b9b96
L
6064 /* Call update_stack_boundary here again to update incoming stack
6065 boundary. It may set incoming stack alignment to a different
6066 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
6067 use the minimum incoming stack alignment to check if it is OK
6068 to perform sibcall optimization since sibcall optimization will
6069 only align the outgoing stack to incoming stack boundary. */
6070 if (targetm.calls.update_stack_boundary)
6071 targetm.calls.update_stack_boundary ();
6072
6073 /* The incoming stack frame has to be aligned at least at
6074 parm_stack_boundary. */
6075 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
2e3f842f 6076
2e3f842f
L
6077 /* Update crtl->stack_alignment_estimated and use it later to align
6078 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
6079 exceptions since callgraph doesn't collect incoming stack alignment
6080 in this case. */
8f4f502f 6081 if (cfun->can_throw_non_call_exceptions
2e3f842f
L
6082 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6083 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6084 else
6085 preferred_stack_boundary = crtl->preferred_stack_boundary;
6086 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6087 crtl->stack_alignment_estimated = preferred_stack_boundary;
6088 if (preferred_stack_boundary > crtl->stack_alignment_needed)
6089 crtl->stack_alignment_needed = preferred_stack_boundary;
6090
890b9b96
L
6091 gcc_assert (crtl->stack_alignment_needed
6092 <= crtl->stack_alignment_estimated);
6093
2e3f842f 6094 crtl->stack_realign_needed
e939805b 6095 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
d2d93c32 6096 crtl->stack_realign_tried = crtl->stack_realign_needed;
2e3f842f
L
6097
6098 crtl->stack_realign_processed = true;
6099
6100 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6101 alignment. */
6102 gcc_assert (targetm.calls.get_drap_rtx != NULL);
b8698a0f 6103 drap_rtx = targetm.calls.get_drap_rtx ();
2e3f842f 6104
d015f7cc
L
6105 /* stack_realign_drap and drap_rtx must match. */
6106 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6107
2e3f842f 6108 /* Do nothing if NULL is returned, which means DRAP is not needed. */
01512446 6109 if (drap_rtx != NULL)
2e3f842f
L
6110 {
6111 crtl->args.internal_arg_pointer = drap_rtx;
6112
6113 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6114 needed. */
6115 fixup_tail_calls ();
6116 }
6117}
862d0b35
DN
6118\f
6119
6120static void
6121expand_main_function (void)
6122{
6123#if (defined(INVOKE__main) \
6124 || (!defined(HAS_INIT_SECTION) \
6125 && !defined(INIT_SECTION_ASM_OP) \
6126 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
db69559b 6127 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
862d0b35
DN
6128#endif
6129}
6130\f
6131
6132/* Expand code to initialize the stack_protect_guard. This is invoked at
6133 the beginning of a function to be protected. */
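/* Conceptually this stores the guard value (typically the contents of a
   global or TLS canary such as __stack_chk_guard) into the guard slot in
   the frame, using the target's stack_protect_set pattern when available
   so the value does not linger in a register.  */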
6134
862d0b35
DN
6135static void
6136stack_protect_prologue (void)
6137{
6138 tree guard_decl = targetm.stack_protect_guard ();
6139 rtx x, y;
6140
6141 x = expand_normal (crtl->stack_protect_guard);
1202f33e
JJ
6142 if (guard_decl)
6143 y = expand_normal (guard_decl);
6144 else
6145 y = const0_rtx;
862d0b35
DN
6146
6147 /* Allow the target to copy from Y to X without leaking Y into a
6148 register. */
c65aa042
RS
6149 if (targetm.have_stack_protect_set ())
6150 if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6151 {
6152 emit_insn (insn);
6153 return;
6154 }
862d0b35
DN
6155
6156 /* Otherwise do a straight move. */
6157 emit_move_insn (x, y);
6158}
2e3f842f 6159
242229bb
JH
6160/* Translate the intermediate representation contained in the CFG
6161 from GIMPLE trees to RTL.
6162
6163 We do conversion per basic block and preserve/update the tree CFG.
6164 This implies we have to do some magic as the CFG can simultaneously
6165 consist of basic blocks containing RTL and GIMPLE trees. This can
61ada8ae 6166 confuse the CFG hooks, so be careful not to manipulate the CFG during
242229bb
JH
6167 the expansion. */
6168
be55bfe6
TS
6169namespace {
6170
6171const pass_data pass_data_expand =
6172{
6173 RTL_PASS, /* type */
6174 "expand", /* name */
6175 OPTGROUP_NONE, /* optinfo_flags */
be55bfe6
TS
6176 TV_EXPAND, /* tv_id */
6177 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6178 | PROP_gimple_lcx
f8e89441
TV
6179 | PROP_gimple_lvec
6180 | PROP_gimple_lva), /* properties_required */
be55bfe6
TS
6181 PROP_rtl, /* properties_provided */
6182 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
3bea341f 6183 0, /* todo_flags_start */
be55bfe6
TS
6184 0, /* todo_flags_finish */
6185};
6186
6187class pass_expand : public rtl_opt_pass
6188{
6189public:
6190 pass_expand (gcc::context *ctxt)
6191 : rtl_opt_pass (pass_data_expand, ctxt)
6192 {}
6193
6194 /* opt_pass methods: */
6195 virtual unsigned int execute (function *);
6196
6197}; // class pass_expand
6198
6199unsigned int
6200pass_expand::execute (function *fun)
242229bb
JH
6201{
6202 basic_block bb, init_block;
0ef90296
ZD
6203 edge_iterator ei;
6204 edge e;
b47aae36 6205 rtx_insn *var_seq, *var_ret_seq;
4e3825db
MM
6206 unsigned i;
6207
f029db69 6208 timevar_push (TV_OUT_OF_SSA);
4e3825db 6209 rewrite_out_of_ssa (&SA);
f029db69 6210 timevar_pop (TV_OUT_OF_SSA);
c302207e 6211 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
242229bb 6212
36f52e8f 6213 if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
dfde35b3
JJ
6214 {
6215 gimple_stmt_iterator gsi;
6216 FOR_EACH_BB_FN (bb, cfun)
6217 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6218 if (gimple_debug_bind_p (gsi_stmt (gsi)))
6219 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6220 }
6221
be147e84
RG
6222 /* Make sure all values used by the optimization passes have sane
6223 defaults. */
6224 reg_renumber = 0;
6225
4586b4ca
SB
6226 /* Some backends want to know that we are expanding to RTL. */
6227 currently_expanding_to_rtl = 1;
cd7d9fd7
RG
6228 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6229 free_dominance_info (CDI_DOMINATORS);
4586b4ca 6230
be55bfe6 6231 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
bf08ebeb 6232
d5e254e1
IE
6233 if (chkp_function_instrumented_p (current_function_decl))
6234 chkp_reset_rtl_bounds ();
6235
5368224f 6236 insn_locations_init ();
fe8a7779 6237 if (!DECL_IS_BUILTIN (current_function_decl))
1751ecd6
AH
6238 {
6239 /* Eventually, all FEs should explicitly set function_start_locus. */
be55bfe6
TS
6240 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6241 set_curr_insn_location
6242 (DECL_SOURCE_LOCATION (current_function_decl));
1751ecd6 6243 else
be55bfe6 6244 set_curr_insn_location (fun->function_start_locus);
1751ecd6 6245 }
9ff70652 6246 else
5368224f
DC
6247 set_curr_insn_location (UNKNOWN_LOCATION);
6248 prologue_location = curr_insn_location ();
55e092c4 6249
2b21299c
JJ
6250#ifdef INSN_SCHEDULING
6251 init_sched_attrs ();
6252#endif
6253
55e092c4
JH
6254 /* Make sure first insn is a note even if we don't want linenums.
6255 This makes sure the first insn will never be deleted.
6256 Also, final expects a note to appear there. */
6257 emit_note (NOTE_INSN_DELETED);
6429e3be 6258
a1b23b2f
UW
6259 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6260 discover_nonconstant_array_refs ();
6261
e41b2a33 6262 targetm.expand_to_rtl_hook ();
8194c537 6263 crtl->init_stack_alignment ();
be55bfe6 6264 fun->cfg->max_jumptable_ents = 0;
cb91fab0 6265
ae9fd6b7
JH
6266 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
6267 of the function section at expansion time to predict the distance of calls. */
6268 resolve_unique_section (current_function_decl, 0, flag_function_sections);
6269
727a31fa 6270 /* Expand the variables recorded during gimple lowering. */
f029db69 6271 timevar_push (TV_VAR_EXPAND);
3a42502d
RH
6272 start_sequence ();
6273
f3ddd692 6274 var_ret_seq = expand_used_vars ();
3a42502d
RH
6275
6276 var_seq = get_insns ();
6277 end_sequence ();
f029db69 6278 timevar_pop (TV_VAR_EXPAND);
242229bb 6279
7d69de61
RH
6280 /* Honor stack protection warnings. */
6281 if (warn_stack_protect)
6282 {
be55bfe6 6283 if (fun->calls_alloca)
b8698a0f 6284 warning (OPT_Wstack_protector,
3b123595 6285 "stack protector not protecting local variables: "
be55bfe6 6286 "variable length buffer");
cb91fab0 6287 if (has_short_buffer && !crtl->stack_protect_guard)
b8698a0f 6288 warning (OPT_Wstack_protector,
3b123595 6289 "stack protector not protecting function: "
be55bfe6 6290 "all local arrays are less than %d bytes long",
7d69de61
RH
6291 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6292 }
6293
242229bb 6294 /* Set up parameters and prepare for return, for the function. */
b79c5284 6295 expand_function_start (current_function_decl);
242229bb 6296
3a42502d
RH
6297 /* If we emitted any instructions for setting up the variables,
6298 emit them before the FUNCTION_START note. */
6299 if (var_seq)
6300 {
6301 emit_insn_before (var_seq, parm_birth_insn);
6302
6303 /* In expand_function_end we'll insert the alloca save/restore
6304 before parm_birth_insn. We've just inserted an alloca call.
6305 Adjust the pointer to match. */
6306 parm_birth_insn = var_seq;
6307 }
6308
f11a7b6d
AO
6309 /* Now propagate the RTL assignment of each partition to the
6310 underlying var of each SSA_NAME. */
46aa019a
KV
6311 tree name;
6312
6313 FOR_EACH_SSA_NAME (i, name, cfun)
f11a7b6d 6314 {
46aa019a
KV
6315 /* We might have generated new SSA names in
6316 update_alias_info_with_stack_vars. They will have NULL
6317 defining statements, and won't be part of the partitioning,
6318 so ignore those. */
6319 if (!SSA_NAME_DEF_STMT (name))
f11a7b6d
AO
6320 continue;
6321
6322 adjust_one_expanded_partition_var (name);
6323 }
6324
6325 /* Clean up RTL of variables that straddle across multiple
6326 partitions, and check that the rtl of any PARM_DECLs that are not
6327 cleaned up is that of their default defs. */
46aa019a 6328 FOR_EACH_SSA_NAME (i, name, cfun)
d466b407 6329 {
d466b407 6330 int part;
d466b407 6331
46aa019a
KV
6332 /* We might have generated new SSA names in
6333 update_alias_info_with_stack_vars. They will have NULL
6334 defining statements, and won't be part of the partitioning,
6335 so ignore those. */
6336 if (!SSA_NAME_DEF_STMT (name))
d466b407
MM
6337 continue;
6338 part = var_to_partition (SA.map, name);
6339 if (part == NO_PARTITION)
6340 continue;
70b5e7dc 6341
1f9ceff1
AO
6342 /* If this decl was marked as living in multiple places, reset
6343 this now to NULL. */
6344 tree var = SSA_NAME_VAR (name);
6345 if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6346 SET_DECL_RTL (var, NULL);
6347 /* Check that the pseudos chosen by assign_parms are those of
6348 the corresponding default defs. */
6349 else if (SSA_NAME_IS_DEFAULT_DEF (name)
6350 && (TREE_CODE (var) == PARM_DECL
6351 || TREE_CODE (var) == RESULT_DECL))
70b5e7dc 6352 {
1f9ceff1
AO
6353 rtx in = DECL_RTL_IF_SET (var);
6354 gcc_assert (in);
6355 rtx out = SA.partition_to_pseudo[part];
f11a7b6d
AO
6356 gcc_assert (in == out);
6357
6358 /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6359 those expected by debug backends for each parm and for
6360 the result. This is particularly important for stabs,
6361 whose register elimination from parm's DECL_RTL may cause
6362 -fcompare-debug differences as SET_DECL_RTL changes reg's
6363 attrs. So, make sure the RTL already has the parm as the
6364 EXPR, so that it won't change. */
6365 SET_DECL_RTL (var, NULL_RTX);
6366 if (MEM_P (in))
6367 set_mem_attributes (in, var, true);
6368 SET_DECL_RTL (var, in);
70b5e7dc 6369 }
d466b407
MM
6370 }
6371
242229bb
JH
6372 /* If this function is `main', emit a call to `__main'
6373 to run global initializers, etc. */
6374 if (DECL_NAME (current_function_decl)
6375 && MAIN_NAME_P (DECL_NAME (current_function_decl))
6376 && DECL_FILE_SCOPE_P (current_function_decl))
6377 expand_main_function ();
6378
7d69de61
RH
6379 /* Initialize the stack_protect_guard field. This must happen after the
6380 call to __main (if any) so that the external decl is initialized. */
87a5dc2d 6381 if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
7d69de61
RH
6382 stack_protect_prologue ();
6383
4e3825db
MM
6384 expand_phi_nodes (&SA);
6385
0d334e37 6386 /* Release any stale SSA redirection data. */
b3e46655 6387 redirect_edge_var_map_empty ();
0d334e37 6388
3fbd86b1 6389 /* Register rtl specific functions for cfg. */
242229bb
JH
6390 rtl_register_cfg_hooks ();
6391
6392 init_block = construct_init_block ();
6393
0ef90296 6394 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4e3825db 6395 remaining edges later. */
be55bfe6 6396 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
0ef90296
ZD
6397 e->flags &= ~EDGE_EXECUTABLE;
6398
96a95ac1
AO
6399 /* If the function has too many markers, drop them while expanding. */
6400 if (cfun->debug_marker_count
6401 >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
6402 cfun->debug_nonbind_markers = false;
6403
134aa83c 6404 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
be55bfe6 6405 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
fefa31b5 6406 next_bb)
f3ddd692 6407 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
bf08ebeb 6408
36f52e8f 6409 if (MAY_HAVE_DEBUG_BIND_INSNS)
b5b8b0ac
AO
6410 expand_debug_locations ();
6411
dfde35b3
JJ
6412 if (deep_ter_debug_map)
6413 {
6414 delete deep_ter_debug_map;
6415 deep_ter_debug_map = NULL;
6416 }
6417
452aa9c5
RG
6418 /* Free stuff we no longer need after GIMPLE optimizations. */
6419 free_dominance_info (CDI_DOMINATORS);
6420 free_dominance_info (CDI_POST_DOMINATORS);
61183076 6421 delete_tree_cfg_annotations (fun);
452aa9c5 6422
f029db69 6423 timevar_push (TV_OUT_OF_SSA);
4e3825db 6424 finish_out_of_ssa (&SA);
f029db69 6425 timevar_pop (TV_OUT_OF_SSA);
4e3825db 6426
f029db69 6427 timevar_push (TV_POST_EXPAND);
91753e21 6428 /* We are no longer in SSA form. */
be55bfe6 6429 fun->gimple_df->in_ssa_p = false;
726338f4 6430 loops_state_clear (LOOP_CLOSED_SSA);
91753e21 6431
bf08ebeb
JH
6432 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6433 conservatively to true until they are all profile aware. */
39c8aaa4 6434 delete lab_rtx_for_bb;
61183076 6435 free_histograms (fun);
242229bb
JH
6436
6437 construct_exit_block ();
5368224f 6438 insn_locations_finalize ();
242229bb 6439
f3ddd692
JJ
6440 if (var_ret_seq)
6441 {
dc01c3d1 6442 rtx_insn *after = return_label;
b47aae36 6443 rtx_insn *next = NEXT_INSN (after);
f3ddd692
JJ
6444 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6445 after = next;
6446 emit_insn_after (var_ret_seq, after);
6447 }
6448
1d65f45c 6449 /* Zap the tree EH table. */
be55bfe6 6450 set_eh_throw_stmt_table (fun, NULL);
242229bb 6451
42821aff
MM
6452 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6453 split edges which edge insertions might do. */
242229bb 6454 rebuild_jump_labels (get_insns ());
242229bb 6455
be55bfe6
TS
6456 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6457 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
4e3825db
MM
6458 {
6459 edge e;
6460 edge_iterator ei;
6461 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6462 {
6463 if (e->insns.r)
bc470c24 6464 {
3ffa95c2 6465 rebuild_jump_labels_chain (e->insns.r);
e40191f1
TV
6466 /* Put insns after parm birth, but before
6467 NOTE_INSN_FUNCTION_BEG. */
be55bfe6
TS
6468 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6469 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
bc470c24 6470 {
3ffa95c2
DM
6471 rtx_insn *insns = e->insns.r;
6472 e->insns.r = NULL;
e40191f1
TV
6473 if (NOTE_P (parm_birth_insn)
6474 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6475 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6476 else
6477 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
bc470c24
JJ
6478 }
6479 else
6480 commit_one_edge_insertion (e);
6481 }
4e3825db
MM
6482 else
6483 ei_next (&ei);
6484 }
6485 }
6486
6487 /* We're done expanding trees to RTL. */
6488 currently_expanding_to_rtl = 0;
6489
1b223a9f
AO
6490 flush_mark_addressable_queue ();
6491
be55bfe6
TS
6492 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6493 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
4e3825db
MM
6494 {
6495 edge e;
6496 edge_iterator ei;
6497 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6498 {
6499 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6500 e->flags &= ~EDGE_EXECUTABLE;
6501
6502 /* At the moment not all abnormal edges match the RTL
6503 representation. It is safe to remove them here as
6504 find_many_sub_basic_blocks will rediscover them.
6505 In the future we should get this fixed properly. */
6506 if ((e->flags & EDGE_ABNORMAL)
6507 && !(e->flags & EDGE_SIBCALL))
6508 remove_edge (e);
6509 else
6510 ei_next (&ei);
6511 }
6512 }
6513
7ba9e72d 6514 auto_sbitmap blocks (last_basic_block_for_fn (fun));
f61e445a 6515 bitmap_ones (blocks);
242229bb 6516 find_many_sub_basic_blocks (blocks);
4e3825db 6517 purge_all_dead_edges ();
242229bb 6518
2e3f842f
L
6519 expand_stack_alignment ();
6520
be147e84
RG
6521 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6522 function. */
6523 if (crtl->tail_call_emit)
6524 fixup_tail_calls ();
6525
dac1fbf8
RG
6526 /* After initial rtl generation, call back to finish generating
6527 exception support code. We need to do this before cleaning up
6528 the CFG as the code does not expect dead landing pads. */
be55bfe6 6529 if (fun->eh->region_tree != NULL)
dac1fbf8
RG
6530 finish_eh_generation ();
6531
8b5d71cd
JH
6532 /* BB subdivision may have created basic blocks that are only reachable
6533 from unlikely bbs but not marked as such in the profile. */
6534 if (optimize)
6535 propagate_unlikely_bbs_forward ();
6536
dac1fbf8
RG
6537 /* Remove unreachable blocks, otherwise we cannot compute dominators
6538 which are needed for loop state verification. As a side-effect
6539 this also compacts blocks.
6540 ??? We cannot remove trivially dead insns here as for example
6541 the DRAP reg on i?86 is not magically live at this point.
6542 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6543 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6544
b2b29377 6545 checking_verify_flow_info ();
9f8628ba 6546
be147e84
RG
6547 /* Initialize pseudos allocated for hard registers. */
6548 emit_initial_value_sets ();
6549
6550 /* And finally unshare all RTL. */
6551 unshare_all_rtl ();
6552
9f8628ba
PB
6553 /* There's no need to defer outputting this function any more; we
6554 know we want to output it. */
6555 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6556
6557 /* Now that we're done expanding trees to RTL, we shouldn't have any
6558 more CONCATs anywhere. */
6559 generating_concat_p = 0;
6560
b7211528
SB
6561 if (dump_file)
6562 {
6563 fprintf (dump_file,
6564 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6565 /* And the pass manager will dump RTL for us. */
6566 }
ef330312
PB
6567
6568 /* If we're emitting a nested function, make sure its parent gets
6569 emitted as well. Doing otherwise confuses debug info. */
be55bfe6
TS
6570 {
6571 tree parent;
6572 for (parent = DECL_CONTEXT (current_function_decl);
6573 parent != NULL_TREE;
6574 parent = get_containing_scope (parent))
6575 if (TREE_CODE (parent) == FUNCTION_DECL)
6576 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6577 }
c22cacf3 6578
ef330312 6579 TREE_ASM_WRITTEN (current_function_decl) = 1;
4bb1e037
AP
6580
6581 /* After expanding, the return labels are no longer needed. */
6582 return_label = NULL;
6583 naked_return_label = NULL;
0a35513e
AH
6584
6585 /* After expanding, the tm_restart map is no longer needed. */
be55bfe6 6586 if (fun->gimple_df->tm_restart)
50979347 6587 fun->gimple_df->tm_restart = NULL;
0a35513e 6588
55e092c4
JH
6589 /* Tag the blocks with a depth number so that change_scope can find
6590 the common parent easily. */
be55bfe6 6591 set_block_levels (DECL_INITIAL (fun->decl), 0);
bf08ebeb 6592 default_rtl_profile ();
be147e84 6593
687aed9c
RB
6594 /* For -dx discard loops now, otherwise IL verify in clean_state will
6595 ICE. */
6596 if (rtl_dump_and_exit)
6597 {
6598 cfun->curr_properties &= ~PROP_loops;
6599 loop_optimizer_finalize ();
6600 }
6601
f029db69 6602 timevar_pop (TV_POST_EXPAND);
be147e84 6603
c2924966 6604 return 0;
242229bb
JH
6605}
6606
27a4cd48
DM
6607} // anon namespace
6608
6609rtl_opt_pass *
6610make_pass_expand (gcc::context *ctxt)
6611{
6612 return new pass_expand (ctxt);
6613}