/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "ssa.h"
#include "optabs.h"
#include "regs.h" /* For reg_renumber.  */
#include "emit-rtl.h"
#include "recog.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "varasm.h"
#include "stor-layout.h"
#include "stmt.h"
#include "print-tree.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-expr.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "cfgloop.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "asan.h"
#include "tree-ssa-address.h"
#include "output.h"
#include "builtins.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple *currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);

static bool defer_stack_allocation (tree, bool);

static void record_alignment_for_reg_var (unsigned int);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple *stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}

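/* For example (illustrative, not tied to any particular caller): for a
   GIMPLE assignment "a_1 = b_2 + c_3" the expression code is PLUS_EXPR,
   the RHS class is GIMPLE_BINARY_RHS, and the result is
   build2 (PLUS_EXPR, TREE_TYPE (a_1), b_2, c_3).  */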

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Choose either CUR or NEXT as the leader DECL for a partition.
   Prefer ignored decls, to simplify debug dumps and reduce ambiguity
   out of the same user variable being in multiple partitions (this is
   less likely for compiler-introduced temps).  */

static tree
leader_merge (tree cur, tree next)
{
  if (cur == NULL || cur == next)
    return next;

  if (DECL_P (cur) && DECL_IGNORED_P (cur))
    return cur;

  if (DECL_P (next) && DECL_IGNORED_P (next))
    return next;

  return cur;
}

/* Associate declaration T with storage space X.  If T is not an SSA
   name this is exactly SET_DECL_RTL, otherwise make the partition of
   T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  gcc_checking_assert (!x
                       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
                       || (use_register_for_decl (t)
                           ? (REG_P (x)
                              || (GET_CODE (x) == CONCAT
                                  && (REG_P (XEXP (x, 0))
                                      || SUBREG_P (XEXP (x, 0)))
                                  && (REG_P (XEXP (x, 1))
                                      || SUBREG_P (XEXP (x, 1))))
                              /* We need to accept PARALLELs for RESULT_DECLs
                                 because of vector types with BLKmode returned
                                 in multiple registers, but they are supposed
                                 to be uncoalesced.  */
                              || (GET_CODE (x) == PARALLEL
                                  && SSAVAR (t)
                                  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                                  && (GET_MODE (x) == BLKmode
                                      || !flag_tree_coalesce_vars)))
                           : (MEM_P (x) || x == pc_rtx
                              || (GET_CODE (x) == CONCAT
                                  && MEM_P (XEXP (x, 0))
                                  && MEM_P (XEXP (x, 1))))));
  /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
     RESULT_DECLs has the expected mode.  For memory, we accept
     unpromoted modes, since that's what we're likely to get.  For
     PARM_DECLs and RESULT_DECLs, we'll have been called by
     set_parm_rtl, which will give us the default def, so we don't
     have to compute it ourselves.  For RESULT_DECLs, we accept mode
     mismatches too, as long as we have BLKmode or are not coalescing
     across variables, so that we don't reject BLKmode PARALLELs or
     unpromoted REGs.  */
  gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
                       || (SSAVAR (t)
                           && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                           && (promote_ssa_mode (t, NULL) == BLKmode
                               || !flag_tree_coalesce_vars))
                       || !use_register_for_decl (t)
                       || GET_MODE (x) == promote_ssa_mode (t, NULL));

  if (x)
    {
      bool skip = false;
      tree cur = NULL_TREE;
      rtx xm = x;

    retry:
      if (MEM_P (xm))
        cur = MEM_EXPR (xm);
      else if (REG_P (xm))
        cur = REG_EXPR (xm);
      else if (SUBREG_P (xm))
        {
          gcc_assert (subreg_lowpart_p (xm));
          xm = SUBREG_REG (xm);
          goto retry;
        }
      else if (GET_CODE (xm) == CONCAT)
        {
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (GET_CODE (xm) == PARALLEL)
        {
          xm = XVECEXP (xm, 0, 0);
          gcc_assert (GET_CODE (xm) == EXPR_LIST);
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (xm == pc_rtx)
        skip = true;
      else
        gcc_unreachable ();

      tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);

      if (cur != next)
        {
          if (MEM_P (x))
            set_mem_attributes (x,
                                next && TREE_CODE (next) == SSA_NAME
                                ? TREE_TYPE (next)
                                : next, true);
          else
            set_reg_attrs_for_decl_rtl (next, x);
        }
    }

  if (TREE_CODE (t) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, t);
      if (part != NO_PARTITION)
        {
          if (SA.partition_to_pseudo[part])
            gcc_assert (SA.partition_to_pseudo[part] == x);
          else if (x != pc_rtx)
            SA.partition_to_pseudo[part] = x;
        }
      /* For the benefit of debug information at -O0 (where
         vartracking doesn't run) record the place also in the base
         DECL.  For PARMs and RESULTs, do so only when setting the
         default def.  */
      if (x && x != pc_rtx && SSA_NAME_VAR (t)
          && (VAR_P (SSA_NAME_VAR (t))
              || SSA_NAME_IS_DEFAULT_DEF (t)))
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices ordered by stack_var_cmp: variables requiring
   "large" alignment come first, then the rest by decreasing size.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
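
/* Illustrative example: with PREFERRED_STACK_BOUNDARY of 128 bits
   (16 bytes) and a frame base that sits 8 bytes past a 16-byte
   boundary, frame_phase is 8; alloc_stack_frame_space then aligns
   offsets relative to this phase rather than to absolute zero.  */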

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align;

  if (TREE_CODE (decl) == SSA_NAME)
    align = TYPE_ALIGN (TREE_TYPE (decl));
  else
    {
      align = LOCAL_DECL_ALIGNMENT (decl);
      SET_DECL_ALIGN (decl, align);
    }
  return align / BITS_PER_UNIT;
}

/* Align given offset BASE with ALIGN.  Round up if ALIGN_UP is true,
   down otherwise.  Return the aligned BASE value.  */

static inline unsigned HOST_WIDE_INT
align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
{
  return align_up ? (base + align - 1) & -align : base & -align;
}
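
/* For example (illustrative values): with ALIGN == 16,
   align_base (37, 16, true) == 48 and align_base (37, 16, false) == 32.
   The bit trick relies on ALIGN being a power of two, so that -ALIGN
   acts as a mask clearing the low log2(ALIGN) bits.  */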

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset
        = align_base (frame_offset - frame_phase - size,
                      align, false) + frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset
        = align_base (frame_offset - frame_phase, align, true) + frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
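
/* A worked example (illustrative values): on a downward-growing frame
   with frame_phase == 0, frame_offset == -20, SIZE == 8 and ALIGN == 8,
   new_frame_offset becomes align_base (-28, 8, false) == -32, which is
   returned as the offset of the new 8-byte slot.  */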

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  tree size = TREE_CODE (decl) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
    : DECL_SIZE_UNIT (decl);
  v->size = tree_to_uhwi (size);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (decl);
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}

/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (!VAR_P (lhs))
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = decl_to_stack_part->get (lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}

/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we for instance moved an address-taken
     operation upward, without also moving a dereference to it upwards.
     But it's conservatively correct as a variable never can hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
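
/* The resulting order is therefore: all "large"-alignment variables
   first, then the remaining variables by decreasing size, with ties
   broken by decreasing alignment and finally by UID for stability.
   E.g. a 32-byte and an 8-byte local of equal alignment sort with the
   32-byte one first, which lets partition_stack_vars seed partitions
   with the biggest objects.  */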

struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
typedef hash_map<size_t, bitmap, part_traits> part_hashmap;

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               part_hashmap *decls_to_partitions,
                               hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = new part_hashmap;
          cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          decls_to_partitions->put (uid, part);
          cfun->gimple_df->decls_to_pointers->put (decl, name);
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      tree name;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      FOR_EACH_SSA_NAME (i, name, cfun)
        {
          struct ptr_info_def *pi;

          if (POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           &visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          HOST_WIDE_INT jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if ((asan_sanitize_stack_p ())
              && isize != jsize
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
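
/* An illustrative scenario: three locals of sizes 32, 16 and 8 bytes
   whose live ranges never overlap are merged into one partition here.
   The 32-byte object sorts first and becomes the representative, and
   expand_stack_vars later places all three members at the same frame
   offset, so the partition occupies a single 32-byte slot.  */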

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
                   ? TYPE_MODE (TREE_TYPE (decl))
                   : DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = least_bit_hwi (offset);
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      SET_DECL_ALIGN (decl, align);
      DECL_USER_ALIGN (decl) = 0;
    }

  set_rtl (decl, x);
}

struct stack_vars_data
{
  /* Vector of offset pairs, always the end of some padding followed
     by the start of the padding that needs Address Sanitizer protection.
     The vector is in reverse order: the highest-offset pairs come first.  */
  auto_vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  auto_vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};
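
/* A sketch of how asan_vec is filled in (see expand_stack_vars below):
   for each protected variable the pass pushes first the offset where
   the previous padding ends and then the offset just past the
   variable's data, so consecutive entries delimit the red zones that
   are poisoned when the asan prologue is emitted.  */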

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  bool large_allocation_done = false;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* All "large" alignment decls come before all "small" alignment
             decls, but "large" alignment decls are not sorted based on
             their alignment.  Increase large_align to track the largest
             required alignment.  */
          if ((alignb * BITS_PER_UNIT) > large_align)
            large_align = alignb * BITS_PER_UNIT;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if (TREE_CODE (decl) == SSA_NAME
              ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
              : DECL_RTL (decl) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if (TREE_CODE (decl) == SSA_NAME
          ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
          : DECL_RTL (decl) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          base = virtual_stack_vars_rtx;
          if ((asan_sanitize_stack_p ())
              && pred)
            {
              HOST_WIDE_INT prev_offset
                = align_base (frame_offset,
                              MAX (alignb, ASAN_RED_ZONE_SIZE),
                              !FRAME_GROWS_DOWNWARD);
              tree repr_decl = NULL_TREE;
              offset
                = alloc_stack_frame_space (stack_vars[i].size
                                           + ASAN_RED_ZONE_SIZE,
                                           MAX (alignb, ASAN_RED_ZONE_SIZE));

              data->asan_vec.safe_push (prev_offset);
              data->asan_vec.safe_push (offset + stack_vars[i].size);
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);
              data->asan_alignb = MAX (data->asan_alignb, alignb);
              if (data->asan_base == NULL)
                data->asan_base = gen_reg_rtx (Pmode);
              base = data->asan_base;

              if (!STRICT_ALIGNMENT)
                base_align = crtl->max_used_stack_slot_alignment;
              else
                base_align = MAX (crtl->max_used_stack_slot_alignment,
                                  GET_MODE_ALIGNMENT (SImode)
                                  << ASAN_SHADOW_SHIFT);
            }
          else
            {
              offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
              base_align = crtl->max_used_stack_slot_alignment;
            }
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;

          /* If there were any variables requiring "large" alignment, allocate
             space.  */
          if (large_size > 0 && ! large_allocation_done)
            {
              HOST_WIDE_INT loffset;
              rtx large_allocsize;

              large_allocsize = GEN_INT (large_size);
              get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
              loffset = alloc_stack_frame_space
                (INTVAL (large_allocsize),
                 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
              large_base = get_dynamic_stack_base (loffset, large_align);
              large_allocation_done = true;
            }
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* Record the RTL assignment X for the default def of PARM.  */

extern void
set_parm_rtl (tree parm, rtx x)
{
  gcc_assert (TREE_CODE (parm) == PARM_DECL
              || TREE_CODE (parm) == RESULT_DECL);

  if (x && !MEM_P (x))
    {
      unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
                                              TYPE_MODE (TREE_TYPE (parm)),
                                              TYPE_ALIGN (TREE_TYPE (parm)));

      /* If the variable alignment is very large we'll dynamically
         allocate it, which means that in-frame portion is just a
         pointer.  ??? We've got a pseudo for sure here, do we
         actually dynamically allocate its spilling area if needed?
         ??? Isn't it a problem when POINTER_SIZE also exceeds
         MAX_SUPPORTED_STACK_ALIGNMENT, as on cris and lm32?  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;

      record_alignment_for_reg_var (align);
    }

  tree ssa = ssa_default_def (cfun, parm);
  if (!ssa)
    return set_rtl (parm, x);

  int part = var_to_partition (SA.map, ssa);
  gcc_assert (part != NO_PARTITION);

  bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
  gcc_assert (changed);

  set_rtl (ssa, x);
  gcc_assert (DECL_RTL (parm) == x);
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var_1 (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  if (TREE_CODE (var) == SSA_NAME)
    {
      tree type = TREE_TYPE (var);
      size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
      byte_align = TYPE_ALIGN_UNIT (type);
    }
  else
    {
      size = tree_to_uhwi (DECL_SIZE_UNIT (var));
      byte_align = align_local_variable (var);
    }

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
   already assigned some MEM.  */

static void
expand_one_stack_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (MEM_P (x));
          return;
        }
    }

  return expand_one_stack_var_1 (var);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* Record the alignment requirements of some variable assigned to a
   pseudo.  */

static void
record_alignment_for_reg_var (unsigned int align)
{
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
         realign decision is made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;
}

/* Create RTL for an SSA partition.  */

static void
expand_one_ssa_partition (tree var)
{
  int part = var_to_partition (SA.map, var);
  gcc_assert (part != NO_PARTITION);

  if (SA.partition_to_pseudo[part])
    return;

  unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                          TYPE_MODE (TREE_TYPE (var)),
                                          TYPE_ALIGN (TREE_TYPE (var)));

  /* If the variable alignment is very large we'll dynamically allocate
     it, which means that in-frame portion is just a pointer.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = POINTER_SIZE;

  record_alignment_for_reg_var (align);

  if (!use_register_for_decl (var))
    {
      if (defer_stack_allocation (var, true))
        add_stack_var (var);
      else
        expand_one_stack_var_1 (var);
      return;
    }

  machine_mode reg_mode = promote_ssa_mode (var, NULL);

  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);
}

/* Record the association between the RTL generated for partition PART
   and the underlying variable of the SSA_NAME VAR.  */

static void
adjust_one_expanded_partition_var (tree var)
{
  if (!var)
    return;

  tree decl = SSA_NAME_VAR (var);

  int part = var_to_partition (SA.map, var);
  if (part == NO_PARTITION)
    return;

  rtx x = SA.partition_to_pseudo[part];

  gcc_assert (x);

  set_rtl (var, x);

  if (!REG_P (x))
    return;

  /* Note if the object is a user variable.  */
  if (decl && !DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (REG_P (x));
          return;
        }
      gcc_unreachable ();
    }

  tree decl = var;
  tree type = TREE_TYPE (decl);
  machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  tree size_unit = TREE_CODE (var) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (var))
    : DECL_SIZE_UNIT (var);

  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = ((HOST_WIDE_INT) tree_to_uhwi (size_unit)
       < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || asan_sanitize_stack_p ())
    return true;

  unsigned int align = TREE_CODE (var) == SSA_NAME
    ? TYPE_ALIGN (TREE_TYPE (var))
    : DECL_ALIGN (var);

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  bool ignored = TREE_CODE (var) == SSA_NAME
    ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
    : DECL_IGNORED_P (var);

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && ignored && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}
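
/* Summarizing the checks above (not separate logic): everything is
   deferred under -fstack-protector or ASan, as is anything with
   "large" alignment; detached DECL_IGNORED_P toplevel variables are
   deferred when big enough to matter; remaining toplevel variables
   are allocated immediately below -O2, and small variables are
   allocated immediately when not optimizing.  */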

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.
*/

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
    {
      if (is_global_var (var))
        return 0;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  Similarly for
         SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
          || DECL_EXTERNAL (var)
          || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  record_alignment_for_reg_var (align);

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (!VAR_P (var)
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (VAR_P (var) && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        {
          expand_one_hard_reg_var (var);
          if (!DECL_HARD_REGISTER (var))
            /* Invalid register specification.  */
            expand_one_error_var (var);
        }
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        {
          if (lookup_attribute ("naked",
                                DECL_ATTRIBUTES (current_function_decl)))
            error ("cannot allocate stack for variable %q+D, naked function.",
                   var);

          expand_one_stack_var (origvar);
        }

      return tree_to_uhwi (DECL_SIZE_UNIT (var));
    }
  return 0;
}

1669/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1670 expanding variables. Those variables that can be put into registers
1671 are allocated pseudos; those that can't are put on the stack.
1672
1673 TOPLEVEL is true if this is the outermost BLOCK. */
1674
1675static void
1676expand_used_vars_for_block (tree block, bool toplevel)
1677{
1678	 tree t;
1679	
1680	 /* Expand all variables at this level. */
1681	 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1682	 if (TREE_USED (t)
1683	 && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1684	 || !DECL_NONSHAREABLE (t)))
1685	 expand_one_var (t, toplevel, true);
1686	
1687	 /* Expand all variables at containing levels. */
1688	 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1689	 expand_used_vars_for_block (t, false);
1690}
1691
1692/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1693 and clear TREE_USED on all local variables. */
1694
1695static void
1696clear_tree_used (tree block)
1697{
1698 tree t;
1699
1700	 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1701	 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1702	 if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1703	 || !DECL_NONSHAREABLE (t))
1704 TREE_USED (t) = 0;
1705
1706 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1707 clear_tree_used (t);
1708}
1709
1710	enum {
1711	 SPCT_FLAG_DEFAULT = 1,
1712	 SPCT_FLAG_ALL = 2,
1713	 SPCT_FLAG_STRONG = 3,
1714	 SPCT_FLAG_EXPLICIT = 4
1715};
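
Editor's note: these values track the -fstack-protector family of options — SPCT_FLAG_DEFAULT for -fstack-protector, SPCT_FLAG_ALL for -fstack-protector-all, SPCT_FLAG_STRONG for -fstack-protector-strong, and SPCT_FLAG_EXPLICIT for -fstack-protector-explicit, in which only functions carrying the stack_protect attribute (looked up below) are instrumented. A hedged user-level sketch of the explicit mode; the function names are illustrative only:

/* Compile with -fstack-protector-explicit.  Only the attributed
   function is expected to receive a guard.  */
__attribute__ ((stack_protect))
void guarded (const char *s)
{
  char buf[64];                 /* guard requested via the attribute */
  __builtin_strcpy (buf, s);
}

void unguarded (const char *s)
{
  char buf[64];                 /* left alone under the explicit mode */
  __builtin_strcpy (buf, s);
}
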
1716
1717/* Examine TYPE and determine a bit mask of the following features. */
1718
1719#define SPCT_HAS_LARGE_CHAR_ARRAY 1
1720#define SPCT_HAS_SMALL_CHAR_ARRAY 2
1721#define SPCT_HAS_ARRAY 4
1722#define SPCT_HAS_AGGREGATE 8
1723
1724static unsigned int
1725stack_protect_classify_type (tree type)
1726{
1727 unsigned int ret = 0;
1728 tree t;
1729
1730 switch (TREE_CODE (type))
1731 {
1732 case ARRAY_TYPE:
1733 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1734 if (t == char_type_node
1735 || t == signed_char_type_node
1736 || t == unsigned_char_type_node)
1737 {
1738	 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1739	 unsigned HOST_WIDE_INT len;
1740	
1741	 if (!TYPE_SIZE_UNIT (type)
1742	 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1743	 len = max;
1744	 else
1745	 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1746
1747 if (len < max)
1748 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1749 else
1750 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1751 }
1752 else
1753 ret = SPCT_HAS_ARRAY;
1754 break;
1755
1756 case UNION_TYPE:
1757 case QUAL_UNION_TYPE:
1758 case RECORD_TYPE:
1759 ret = SPCT_HAS_AGGREGATE;
1760 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1761 if (TREE_CODE (t) == FIELD_DECL)
1762 ret |= stack_protect_classify_type (TREE_TYPE (t));
1763 break;
1764
1765 default:
1766 break;
1767 }
1768
1769 return ret;
1770}
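
The small/large split above is driven by --param ssp-buffer-size (PARAM_SSP_BUFFER_SIZE, default 8). A hedged user-level sketch of how locals would classify under that default; the names are illustrative:

void classify_demo (void)
{
  char tiny[4];    /* len < 8: SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY */
  char big[64];    /* len >= 8: SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY */
  int nums[16];    /* non-char array: SPCT_HAS_ARRAY only */
  struct { char c[16]; } agg;  /* SPCT_HAS_AGGREGATE plus the bits of c[16] */
  (void) tiny; (void) big; (void) nums; (void) agg;
}
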
1771
1772/* Return nonzero if DECL should be segregated into the "vulnerable" upper
1773	 part of the local stack frame. Remember if we ever return nonzero for
1774	 any variable in this function. The return value is the phase number in
1775	 which the variable should be allocated. */
1776
1777static int
1778stack_protect_decl_phase (tree decl)
1779{
1780 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1781 int ret = 0;
1782
1783 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1784 has_short_buffer = true;
1785
1786	 if (flag_stack_protect == SPCT_FLAG_ALL
1787	 || flag_stack_protect == SPCT_FLAG_STRONG
1788	 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1789	 && lookup_attribute ("stack_protect",
1790	 DECL_ATTRIBUTES (current_function_decl))))
1791 {
1792 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1793 && !(bits & SPCT_HAS_AGGREGATE))
1794 ret = 1;
1795 else if (bits & SPCT_HAS_ARRAY)
1796 ret = 2;
1797 }
1798 else
1799 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1800
1801 if (ret)
1802 has_protected_decls = true;
1803
1804 return ret;
1805}
1806
1807/* Two helper routines that check for phase 1 and phase 2. These are used
1808 as callbacks for expand_stack_vars. */
1809
1810static bool
1811	stack_protect_decl_phase_1 (size_t i)
1812	{
1813	 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1814	}
1815	
1816	static bool
1817	stack_protect_decl_phase_2 (size_t i)
1818	{
1819	 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1820}
1821
1822/* A helper function that checks for the asan phase (with stack protector
1823	 it is phase 3). This is used as a callback for expand_stack_vars.
1824	 Returns true if any of the vars in the partition need to be protected. */
1825	
1826	static bool
1827	asan_decl_phase_3 (size_t i)
1828	{
1829	 while (i != EOC)
1830	 {
1831	 if (asan_protect_stack_decl (stack_vars[i].decl))
1832	 return true;
1833	 i = stack_vars[i].next;
1834	 }
1835	 return false;
1836}
1837
1838/* Ensure that variables in different stack protection phases conflict,
1839	 so that they are not merged into the same stack slot. */
1840
1841static void
1842add_stack_protection_conflicts (void)
1843{
1844 size_t i, j, n = stack_vars_num;
1845 unsigned char *phase;
1846
1847 phase = XNEWVEC (unsigned char, n);
1848 for (i = 0; i < n; ++i)
1849 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1850
1851 for (i = 0; i < n; ++i)
1852 {
1853 unsigned char ph_i = phase[i];
1854	 for (j = i + 1; j < n; ++j)
1855 if (ph_i != phase[j])
1856 add_stack_var_conflict (i, j);
1857 }
1858
1859 XDELETEVEC (phase);
1860}
1861
1862/* Create a decl for the guard at the top of the stack frame. */
1863
1864static void
1865create_stack_guard (void)
1866{
1867	 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1868	 VAR_DECL, NULL, ptr_type_node);
1869	 TREE_THIS_VOLATILE (guard) = 1;
1870	 TREE_USED (guard) = 1;
1871	 expand_one_stack_var (guard);
1872	 crtl->stack_protect_guard = guard;
1873}
1874
1875/* Prepare for expanding variables. */
1876	static void
1877	init_vars_expansion (void)
1878	{
1879	 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1880	 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1881	
1882	 /* A map from decl to stack partition. */
1883	 decl_to_stack_part = new hash_map<tree, size_t>;
1884
1885 /* Initialize local stack smashing state. */
1886 has_protected_decls = false;
1887 has_short_buffer = false;
1888}
1889
1890/* Free up stack variable graph data. */
1891static void
1892fini_vars_expansion (void)
1893{
1894	 bitmap_obstack_release (&stack_var_bitmap_obstack);
1895	 if (stack_vars)
1896	 XDELETEVEC (stack_vars);
1897	 if (stack_vars_sorted)
1898	 XDELETEVEC (stack_vars_sorted);
1899	 stack_vars = NULL;
1900	 stack_vars_sorted = NULL;
1901	 stack_vars_alloc = stack_vars_num = 0;
1902	 delete decl_to_stack_part;
1903	 decl_to_stack_part = NULL;
1904}
1905
1906/* Make a fair guess for the size of the stack frame of the function
1907	 in NODE. This doesn't have to be exact, the result is only used in
1908	 the inline heuristics. So we don't want to run the full stack var
1909	 packing algorithm (which is quadratic in the number of stack vars).
1910	 Instead, we calculate the total size of all stack vars. This turns
1911	 out to be a pretty fair estimate -- packing of stack vars doesn't
1912	 happen very often. */
1913	
1914	HOST_WIDE_INT
1915	estimated_stack_frame_size (struct cgraph_node *node)
1916	{
1917	 HOST_WIDE_INT size = 0;
1918	 size_t i;
1919	 tree var;
1920	 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1921	
1922	 push_cfun (fn);
1923	
1924	 init_vars_expansion ();
1925	
1926	 FOR_EACH_LOCAL_DECL (fn, i, var)
1927	 if (auto_var_in_fn_p (var, fn->decl))
1928	 size += expand_one_var (var, true, false);
1929	
1930	 if (stack_vars_num > 0)
1931	 {
1932	 /* Fake sorting the stack vars for account_stack_vars (). */
1933	 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1934	 for (i = 0; i < stack_vars_num; ++i)
1935	 stack_vars_sorted[i] = i;
1936	 size += account_stack_vars ();
1937	 }
1938	
1939	 fini_vars_expansion ();
1940	 pop_cfun ();
1941 return size;
1942}
1943
1944/* Helper routine to check if a record or union contains an array field. */
1945
1946static int
1947record_or_union_type_has_array_p (const_tree tree_type)
1948{
1949 tree fields = TYPE_FIELDS (tree_type);
1950 tree f;
1951
1952 for (f = fields; f; f = DECL_CHAIN (f))
1953 if (TREE_CODE (f) == FIELD_DECL)
1954 {
1955 tree field_type = TREE_TYPE (f);
1956 if (RECORD_OR_UNION_TYPE_P (field_type)
1957 && record_or_union_type_has_array_p (field_type))
1958 return 1;
1959 if (TREE_CODE (field_type) == ARRAY_TYPE)
1960 return 1;
1961 }
1962 return 0;
1963}
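
A hedged illustration of the predicate above; the recursion is what catches the nested cases:

struct no_array { int a; double b; };     /* -> 0 */
struct direct { int a; char tail[8]; };   /* -> 1, direct array field */
struct nested { struct direct d; };       /* -> 1, found recursively */
union mixed { int i; struct nested n; };  /* -> 1, via the union member */
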
1964
1965/* Check if the current function has local referenced variables that
1966 have their addresses taken, contain an array, or are arrays. */
1967
1968static bool
1969stack_protect_decl_p ()
1970{
1971 unsigned i;
1972 tree var;
1973
1974 FOR_EACH_LOCAL_DECL (cfun, i, var)
1975 if (!is_global_var (var))
1976 {
1977 tree var_type = TREE_TYPE (var);
1978	 if (VAR_P (var)
1979 && (TREE_CODE (var_type) == ARRAY_TYPE
1980 || TREE_ADDRESSABLE (var)
1981 || (RECORD_OR_UNION_TYPE_P (var_type)
1982 && record_or_union_type_has_array_p (var_type))))
1983 return true;
1984 }
1985 return false;
1986}
1987
1988/* Check if the current function has calls that use a return slot. */
1989
1990static bool
1991stack_protect_return_slot_p ()
1992{
1993 basic_block bb;
1994
1995 FOR_ALL_BB_FN (bb, cfun)
1996 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
1997 !gsi_end_p (gsi); gsi_next (&gsi))
1998 {
1999	 gimple *stmt = gsi_stmt (gsi);
2000 /* This assumes that calls to internal-only functions never
2001 use a return slot. */
2002 if (is_gimple_call (stmt)
2003 && !gimple_call_internal_p (stmt)
2004 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2005 gimple_call_fndecl (stmt)))
2006 return true;
2007 }
2008 return false;
2009}
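
Together, stack_protect_decl_p and stack_protect_return_slot_p implement the -fstack-protector-strong trigger consulted in expand_used_vars below. A hedged user-level sketch (names are illustrative; the return-slot case assumes a target that returns struct big in memory):

struct big { long v[8]; };
struct big make_big (void);

int addr_taken (void)
{
  int x = 0;
  int *p = &x;                 /* address of a local: decl predicate fires */
  return *p;
}

long return_slot (void)
{
  struct big b = make_big (); /* aggregate return value: slot predicate */
  return b.v[0];
}
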
2010
2011	 /* Expand all variables used in the function. */
2012	
2013	 static rtx_insn *
2014	expand_used_vars (void)
2015	 {
2016	 tree var, outer_block = DECL_INITIAL (current_function_decl);
2017	 auto_vec<tree> maybe_local_decls;
2018	 rtx_insn *var_end_seq = NULL;
2019	 unsigned i;
2020	 unsigned len;
2021	 bool gen_stack_protect_signal = false;
2022	
2023	 /* Compute the phase of the stack frame for this function. */
2024	 {
2025	 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2026	 int off = STARTING_FRAME_OFFSET % align;
2027	 frame_phase = off ? align - off : 0;
2028	 }
2029	
2030	 /* Set TREE_USED on all variables in the local_decls. */
2031	 FOR_EACH_LOCAL_DECL (cfun, i, var)
2032	 TREE_USED (var) = 1;
2033	 /* Clear TREE_USED on all variables associated with a block scope. */
2034	 clear_tree_used (DECL_INITIAL (current_function_decl));
2035	
2036	 init_vars_expansion ();
2037	
2038	 if (targetm.use_pseudo_pic_reg ())
2039	 pic_offset_table_rtx = gen_reg_rtx (Pmode);
2040	
2041	 for (i = 0; i < SA.map->num_partitions; i++)
2042	 {
2043	 if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2044	 continue;
2045	
2046	 tree var = partition_to_var (SA.map, i);
2047	
2048	 gcc_assert (!virtual_operand_p (var));
2049	
2050	 expand_one_ssa_partition (var);
2051	 }
2052	
2053	 if (flag_stack_protect == SPCT_FLAG_STRONG)
2054	 gen_stack_protect_signal
2055	 = stack_protect_decl_p () || stack_protect_return_slot_p ();
2056	
2057	 /* At this point all variables on the local_decls with TREE_USED
2058	 set are not associated with any block scope. Lay them out. */
2059	
2060	 len = vec_safe_length (cfun->local_decls);
2061	 FOR_EACH_LOCAL_DECL (cfun, i, var)
2062	 {
2063	 bool expand_now = false;
2064	
2065	 /* Expanded above already. */
2066	 if (is_gimple_reg (var))
2067	 {
2068	 TREE_USED (var) = 0;
2069	 goto next;
2070	 }
2071	 /* We didn't set a block for static or extern because it's hard
2072	 to tell the difference between a global variable (re)declared
2073	 in a local scope, and one that's really declared there to
2074	 begin with. And it doesn't really matter much, since we're
2075	 not giving them stack space. Expand them now. */
2076	 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2077	 expand_now = true;
2078	
2079	 /* Expand variables not associated with any block now. Those created by
2080	 the optimizers could be live anywhere in the function. Those that
2081	 could possibly have been scoped originally and detached from their
2082	 block will have their allocation deferred so we coalesce them with
2083	 others when optimization is enabled. */
2084	 else if (TREE_USED (var))
2085	 expand_now = true;
2086	
2087	 /* Finally, mark all variables on the list as used. We'll use
2088	 this in a moment when we expand those associated with scopes. */
2089	 TREE_USED (var) = 1;
2090	
2091	 if (expand_now)
2092	 expand_one_var (var, true, true);
2093	
2094	 next:
2095	 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2096	 {
2097	 rtx rtl = DECL_RTL_IF_SET (var);
2098	
2099	 /* Keep artificial non-ignored vars in cfun->local_decls
2100	 chain until instantiate_decls. */
2101	 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2102	 add_local_decl (cfun, var);
2103	 else if (rtl == NULL_RTX)
2104	 /* If rtl isn't set yet, which can happen e.g. with
2105	 -fstack-protector, retry before returning from this
2106	 function. */
2107	 maybe_local_decls.safe_push (var);
2108	 }
2109	 }
2110	
2111	 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2112	
2113	 +-----------------+-----------------+
2114	 | ...processed... | ...duplicates...|
2115	 +-----------------+-----------------+
2116	 ^
2117	 +-- LEN points here.
2118	
2119	 We just want the duplicates, as those are the artificial
2120	 non-ignored vars that we want to keep until instantiate_decls.
2121	 Move them down and truncate the array. */
2122	 if (!vec_safe_is_empty (cfun->local_decls))
2123	 cfun->local_decls->block_remove (0, len);
2124	
2125	 /* At this point, all variables within the block tree with TREE_USED
2126	 set are actually used by the optimized function. Lay them out. */
2127	 expand_used_vars_for_block (outer_block, true);
2128	
2129	 if (stack_vars_num > 0)
2130	 {
2131	 add_scope_conflicts ();
2132	
2133	 /* If stack protection is enabled, we don't share space between
2134	 vulnerable data and non-vulnerable data. */
2135	 if (flag_stack_protect != 0
2136	 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2137	 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2138	 && lookup_attribute ("stack_protect",
2139	 DECL_ATTRIBUTES (current_function_decl)))))
2140	 add_stack_protection_conflicts ();
2141	
2142	 /* Now that we have collected all stack variables, and have computed a
2143	 minimal interference graph, attempt to save some stack space. */
2144	 partition_stack_vars ();
2145	 if (dump_file)
2146	 dump_stack_var_partition ();
2147	 }
2148	
2149	 switch (flag_stack_protect)
2150	 {
2151	 case SPCT_FLAG_ALL:
2152	 create_stack_guard ();
2153	 break;
2154	
2155	 case SPCT_FLAG_STRONG:
2156	 if (gen_stack_protect_signal
2157	 || cfun->calls_alloca || has_protected_decls
2158	 || lookup_attribute ("stack_protect",
2159	 DECL_ATTRIBUTES (current_function_decl)))
2160	 create_stack_guard ();
2161	 break;
2162	
2163	 case SPCT_FLAG_DEFAULT:
2164	 if (cfun->calls_alloca || has_protected_decls
2165	 || lookup_attribute ("stack_protect",
2166	 DECL_ATTRIBUTES (current_function_decl)))
2167	 create_stack_guard ();
2168	 break;
2169	
2170	 case SPCT_FLAG_EXPLICIT:
2171	 if (lookup_attribute ("stack_protect",
2172	 DECL_ATTRIBUTES (current_function_decl)))
2173	 create_stack_guard ();
2174	 break;
2175	 default:
2176	 ;
2177	 }
2178	
2179	 /* Assign rtl to each variable based on these partitions. */
2180	 if (stack_vars_num > 0)
2181	 {
2182	 struct stack_vars_data data;
2183	
2184	 data.asan_base = NULL_RTX;
2185	 data.asan_alignb = 0;
2186	
2187	 /* Reorder decls to be protected by iterating over the variables
2188	 array multiple times, and allocating out of each phase in turn. */
2189	 /* ??? We could probably integrate this into the qsort we did
2190	 earlier, such that we naturally see these variables first,
2191	 and thus naturally allocate things in the right order. */
2192	 if (has_protected_decls)
2193	 {
2194	 /* Phase 1 contains only character arrays. */
2195	 expand_stack_vars (stack_protect_decl_phase_1, &data);
2196	
2197	 /* Phase 2 contains other kinds of arrays. */
2198	 if (flag_stack_protect == SPCT_FLAG_ALL
2199	 || flag_stack_protect == SPCT_FLAG_STRONG
2200	 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2201	 && lookup_attribute ("stack_protect",
2202	 DECL_ATTRIBUTES (current_function_decl))))
2203	 expand_stack_vars (stack_protect_decl_phase_2, &data);
2204	 }
2205	
2206	 if (asan_sanitize_stack_p ())
2207	 /* Phase 3, any partitions that need asan protection
2208	 in addition to phase 1 and 2. */
2209	 expand_stack_vars (asan_decl_phase_3, &data);
2210	
2211	 if (!data.asan_vec.is_empty ())
2212	 {
2213	 HOST_WIDE_INT prev_offset = frame_offset;
2214	 HOST_WIDE_INT offset, sz, redzonesz;
2215	 redzonesz = ASAN_RED_ZONE_SIZE;
2216	 sz = data.asan_vec[0] - prev_offset;
2217	 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2218	 && data.asan_alignb <= 4096
2219	 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2220	 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2221	 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
2222	 offset
2223	 = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
2224	 data.asan_vec.safe_push (prev_offset);
2225	 data.asan_vec.safe_push (offset);
2226	 /* Leave space for alignment if STRICT_ALIGNMENT. */
2227	 if (STRICT_ALIGNMENT)
2228	 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2229	 << ASAN_SHADOW_SHIFT)
2230	 / BITS_PER_UNIT, 1);
2231	
2232	 var_end_seq
2233	 = asan_emit_stack_protection (virtual_stack_vars_rtx,
2234	 data.asan_base,
2235	 data.asan_alignb,
2236	 data.asan_vec.address (),
2237	 data.asan_decl_vec.address (),
2238	 data.asan_vec.length ());
2239	 }
2240	
2241	 expand_stack_vars (NULL, &data);
2242	 }
2243	
2244	 fini_vars_expansion ();
2245	
2246	 /* If there were any artificial non-ignored vars without rtl
2247	 found earlier, see if deferred stack allocation hasn't assigned
2248	 rtl to them. */
2249	 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2250	 {
2251	 rtx rtl = DECL_RTL_IF_SET (var);
2252	
2253	 /* Keep artificial non-ignored vars in cfun->local_decls
2254	 chain until instantiate_decls. */
2255	 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2256	 add_local_decl (cfun, var);
2257	 }
2258	
2259	 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2260	 if (STACK_ALIGNMENT_NEEDED)
2261	 {
2262	 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2263	 if (!FRAME_GROWS_DOWNWARD)
2264	 frame_offset += align - 1;
2265	 frame_offset &= -align;
2266	 }
2267	
2268	 return var_end_seq;
2269}
2270
2271
2272/* If we need to produce a detailed dump, print the tree representation
2273	 for STMT to the dump file. SINCE is the last RTX after which the RTL
2274	 generated for STMT should have been appended. */
2275	
2276	static void
2277	maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2278	{
2279	 if (dump_file && (dump_flags & TDF_DETAILS))
2280	 {
2281	 fprintf (dump_file, "\n;; ");
2282	 print_gimple_stmt (dump_file, stmt, 0,
2283	 TDF_SLIM | (dump_flags & TDF_LINENO));
2284 fprintf (dump_file, "\n");
2285
2286 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2287 }
2288}
2289
2290/* Maps the blocks that do not contain tree labels to rtx labels. */
2291	
2292	static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2293	
2294/* Returns the label_rtx expression for a label starting basic block BB. */
2295	
2296	static rtx_code_label *
2297	label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2298	{
2299	 gimple_stmt_iterator gsi;
2300	 tree lab;
2301	
2302	 if (bb->flags & BB_RTL)
2303	 return block_label (bb);
2304	
2305	 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2306	 if (elt)
2307	 return *elt;
2308	
2309	 /* Find the tree label if it is present. */
2310	
2311	 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2312	 {
2313	 glabel *lab_stmt;
2314	
2315	 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2316	 if (!lab_stmt)
2317	 break;
2318	
2319	 lab = gimple_label_label (lab_stmt);
2320	 if (DECL_NONLOCAL (lab))
2321	 break;
2322	
2323	 return jump_target_rtx (lab);
2324	 }
2325	
2326	 rtx_code_label *l = gen_label_rtx ();
2327	 lab_rtx_for_bb->put (bb, l);
2328	 return l;
2329}
2330
2331	
2332/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2333	 of a basic block where we just expanded the conditional at the end,
2334	 possibly clean up the CFG and instruction sequence. LAST is the
2335	 last instruction before the just emitted jump sequence. */
2336	
2337	static void
2338	 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2339{
2340 /* Special case: when jumpif decides that the condition is
2341 trivial it emits an unconditional jump (and the necessary
2342 barrier). But we still have two edges, the fallthru one is
2343 wrong. purge_dead_edges would clean this up later. Unfortunately
2344 we have to insert insns (and split edges) before
2345 find_many_sub_basic_blocks and hence before purge_dead_edges.
2346 But splitting edges might create new blocks which depend on the
2347 fact that if there are two edges there's no barrier. So the
2348 barrier would get lost and verify_flow_info would ICE. Instead
2349 of auditing all edge splitters to care for the barrier (which
2350 normally isn't there in a cleaned CFG), fix it here. */
2351 if (BARRIER_P (get_last_insn ()))
2352 {
2353	 rtx_insn *insn;
2354 remove_edge (e);
2355 /* Now, we have a single successor block, if we have insns to
2356 insert on the remaining edge we potentially will insert
2357 it at the end of this block (if the dest block isn't feasible)
2358 in order to avoid splitting the edge. This insertion will take
2359 place in front of the last jump. But we might have emitted
2360 multiple jumps (conditional and one unconditional) to the
2361 same destination. Inserting in front of the last one then
2362 is a problem. See PR 40021. We fix this by deleting all
2363 jumps except the last unconditional one. */
2364 insn = PREV_INSN (get_last_insn ());
2365 /* Make sure we have an unconditional jump. Otherwise we're
2366 confused. */
2367 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2368	 for (insn = PREV_INSN (insn); insn != last;)
2369 {
2370 insn = PREV_INSN (insn);
2371 if (JUMP_P (NEXT_INSN (insn)))
2372	 {
2373	 if (!any_condjump_p (NEXT_INSN (insn)))
2374 {
2375 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2376 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2377 }
2378 delete_insn (NEXT_INSN (insn));
2379 }
2380 }
2381 }
2382}
2383
2384	 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2385	 Returns a new basic block if we've terminated the current basic
2386	 block and created a new one. */
2387	
2388	static basic_block
2389	 expand_gimple_cond (basic_block bb, gcond *stmt)
2390	 {
2391	 basic_block new_bb, dest;
2392	 edge true_edge;
2393	 edge false_edge;
2394	 rtx_insn *last2, *last;
2395	 enum tree_code code;
2396	 tree op0, op1;
2397	
2398	 code = gimple_cond_code (stmt);
2399	 op0 = gimple_cond_lhs (stmt);
2400	 op1 = gimple_cond_rhs (stmt);
2401	 /* We're sometimes presented with such code:
2402	 D.123_1 = x < y;
2403	 if (D.123_1 != 0)
2404	 ...
2405	 This would expand to two comparisons which then later might
2406	 be cleaned up by combine. But some pattern matchers like if-conversion
2407	 work better when there's only one compare, so make up for this
2408	 here as a special exception if TER would have made the same change. */
2409	 if (SA.values
2410	 && TREE_CODE (op0) == SSA_NAME
2411	 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2412	 && TREE_CODE (op1) == INTEGER_CST
2413	 && ((gimple_cond_code (stmt) == NE_EXPR
2414	 && integer_zerop (op1))
2415	 || (gimple_cond_code (stmt) == EQ_EXPR
2416	 && integer_onep (op1)))
2417	 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2418	 {
2419	 gimple *second = SSA_NAME_DEF_STMT (op0);
2420	 if (gimple_code (second) == GIMPLE_ASSIGN)
2421	 {
2422	 enum tree_code code2 = gimple_assign_rhs_code (second);
2423	 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2424	 {
2425	 code = code2;
2426	 op0 = gimple_assign_rhs1 (second);
2427	 op1 = gimple_assign_rhs2 (second);
2428	 }
2429	 /* If jumps are cheap and the target does not support conditional
2430	 compare, turn some more codes into jumpy sequences. */
2431	 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2432	 && targetm.gen_ccmp_first == NULL)
2433	 {
2434	 if ((code2 == BIT_AND_EXPR
2435	 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2436	 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2437	 || code2 == TRUTH_AND_EXPR)
2438	 {
2439	 code = TRUTH_ANDIF_EXPR;
2440	 op0 = gimple_assign_rhs1 (second);
2441	 op1 = gimple_assign_rhs2 (second);
2442	 }
2443	 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2444	 {
2445	 code = TRUTH_ORIF_EXPR;
2446	 op0 = gimple_assign_rhs1 (second);
2447	 op1 = gimple_assign_rhs2 (second);
2448	 }
2449	 }
2450	 }
2451	 }
2452	
2453	 last2 = last = get_last_insn ();
2454	
2455	 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2456	 set_curr_insn_location (gimple_location (stmt));
2457	
2458	 /* These flags have no purpose in RTL land. */
2459	 true_edge->flags &= ~EDGE_TRUE_VALUE;
2460	 false_edge->flags &= ~EDGE_FALSE_VALUE;
2461	
2462	 /* We can either have a pure conditional jump with one fallthru edge or
2463	 two-way jump that needs to be decomposed into two basic blocks. */
2464	 if (false_edge->dest == bb->next_bb)
2465	 {
2466	 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2467	 true_edge->probability);
2468	 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2469	 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2470	 set_curr_insn_location (true_edge->goto_locus);
2471	 false_edge->flags |= EDGE_FALLTHRU;
2472	 maybe_cleanup_end_of_block (false_edge, last);
2473	 return NULL;
2474	 }
2475	 if (true_edge->dest == bb->next_bb)
2476	 {
2477	 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2478	 false_edge->probability);
2479	 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2480	 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2481	 set_curr_insn_location (false_edge->goto_locus);
2482	 true_edge->flags |= EDGE_FALLTHRU;
2483	 maybe_cleanup_end_of_block (true_edge, last);
2484	 return NULL;
2485	 }
2486	
2487	 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2488	 true_edge->probability);
2489	 last = get_last_insn ();
2490	 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2491	 set_curr_insn_location (false_edge->goto_locus);
2492	 emit_jump (label_rtx_for_bb (false_edge->dest));
2493	
2494	 BB_END (bb) = last;
2495	 if (BARRIER_P (BB_END (bb)))
2496	 BB_END (bb) = PREV_INSN (BB_END (bb));
2497	 update_bb_for_insn (bb);
2498	
2499	 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2500	 dest = false_edge->dest;
2501	 redirect_edge_succ (false_edge, new_bb);
2502	 false_edge->flags |= EDGE_FALLTHRU;
2503	 new_bb->count = false_edge->count;
2504	 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2505	 loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2506	 add_bb_to_loop (new_bb, loop);
2507	 if (loop->latch == bb
2508	 && loop->header == dest)
2509	 loop->latch = new_bb;
2510	 make_single_succ_edge (new_bb, dest, 0);
2511	 if (BARRIER_P (BB_END (new_bb)))
2512	 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2513	 update_bb_for_insn (new_bb);
2514	
2515	 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2516	
2517	 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2518	 {
2519	 set_curr_insn_location (true_edge->goto_locus);
2520	 true_edge->goto_locus = curr_insn_location ();
2521	 }
2522	
2523 return new_bb;
2524}
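
A hedged source-level view of the special case documented above: gimplification materializes the comparison into a boolean temporary, and expansion folds it back into a single compare-and-branch when TER would have done the same:

int fused_compare (int x, int y)
{
  int t = x < y;  /* gimplifies to D.123_1 = x < y; */
  if (t)          /* and if (D.123_1 != 0) ... — expanded as one compare */
    return 1;
  return 0;
}
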
2525
2526/* Mark all calls that can have a transaction restart. */
2527	
2528	static void
2529	 mark_transaction_restart_calls (gimple *stmt)
2530	 {
2531	 struct tm_restart_node dummy;
2532	 tm_restart_node **slot;
2533	
2534	 if (!cfun->gimple_df->tm_restart)
2535	 return;
2536	
2537	 dummy.stmt = stmt;
2538	 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2539	 if (slot)
2540	 {
2541	 struct tm_restart_node *n = *slot;
2542	 tree list = n->label_or_list;
2543	 rtx_insn *insn;
2544
2545 for (insn = next_real_insn (get_last_insn ());
2546 !CALL_P (insn);
2547 insn = next_real_insn (insn))
2548 continue;
2549
2550 if (TREE_CODE (list) == LABEL_DECL)
2551 add_reg_note (insn, REG_TM, label_rtx (list));
2552 else
2553 for (; list ; list = TREE_CHAIN (list))
2554 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2555 }
2556}
2557
2558/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2559	 statement STMT. */
2560	
2561	static void
2562	 expand_call_stmt (gcall *stmt)
2563	 {
2564	 tree exp, decl, lhs;
2565	 bool builtin_p;
2566	 size_t i;
2567	
2568	 if (gimple_call_internal_p (stmt))
2569	 {
2570	 expand_internal_call (stmt);
2571	 return;
2572	 }
2573	
2574	 /* If this is a call to a built-in function and it has no effect other
2575	 than setting the lhs, try to implement it using an internal function
2576	 instead. */
2577	 decl = gimple_call_fndecl (stmt);
2578	 if (gimple_call_lhs (stmt)
2579	 && !gimple_has_side_effects (stmt)
2580	 && (optimize || (decl && called_as_built_in (decl))))
2581	 {
2582	 internal_fn ifn = replacement_internal_fn (stmt);
2583	 if (ifn != IFN_LAST)
2584	 {
2585	 expand_internal_call (ifn, stmt);
2586	 return;
2587	 }
2588	 }
2589	
2590	 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2591	
2592	 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2593	 builtin_p = decl && DECL_BUILT_IN (decl);
2594	
2595	 /* If this is not a builtin function, the function type through which the
2596	 call is made may be different from the type of the function. */
2597	 if (!builtin_p)
2598	 CALL_EXPR_FN (exp)
2599	 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2600	 CALL_EXPR_FN (exp));
2601	
2602	 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2603	 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2604	
2605	 for (i = 0; i < gimple_call_num_args (stmt); i++)
2606	 {
2607	 tree arg = gimple_call_arg (stmt, i);
2608	 gimple *def;
2609	 /* TER addresses into arguments of builtin functions so we have a
2610	 chance to infer more correct alignment information. See PR39954. */
2611	 if (builtin_p
2612	 && TREE_CODE (arg) == SSA_NAME
2613	 && (def = get_gimple_for_ssa_name (arg))
2614	 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2615	 arg = gimple_assign_rhs1 (def);
2616	 CALL_EXPR_ARG (exp, i) = arg;
2617	 }
2618	
2619	 if (gimple_has_side_effects (stmt))
2620	 TREE_SIDE_EFFECTS (exp) = 1;
2621	
2622	 if (gimple_call_nothrow_p (stmt))
2623	 TREE_NOTHROW (exp) = 1;
2624	
2625	 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2626	 CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
2627	 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2628	 if (decl
2629	 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2630	 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2631	 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2632	 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2633	 else
2634	 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2635	 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2636	 CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
2637	 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2638	 CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2639	
2640	 /* Ensure RTL is created for debug args. */
2641	 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2642	 {
2643	 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2644	 unsigned int ix;
2645	 tree dtemp;
2646	
2647	 if (debug_args)
2648	 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2649	 {
2650	 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2651	 expand_debug_expr (dtemp);
2652	 }
2653	 }
2654	
2655	 lhs = gimple_call_lhs (stmt);
2656	 if (lhs)
2657	 expand_assignment (lhs, exp, false);
2658	 else
2659	 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2660	
2661	 mark_transaction_restart_calls (stmt);
2662}
2663
2664
2665/* Generate RTL for an asm statement (explicit assembler code).
2666 STRING is a STRING_CST node containing the assembler code text,
2667 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2668 insn is volatile; don't optimize it. */
2669
2670static void
2671expand_asm_loc (tree string, int vol, location_t locus)
2672{
2673 rtx body;
2674
2675 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2676 ggc_strdup (TREE_STRING_POINTER (string)),
2677 locus);
2678
2679 MEM_VOLATILE_P (body) = vol;
2680
2681 /* Non-empty basic ASM implicitly clobbers memory. */
2682 if (TREE_STRING_LENGTH (string) != 0)
2683 {
2684 rtx asm_op, clob;
2685 unsigned i, nclobbers;
2686 auto_vec<rtx> input_rvec, output_rvec;
2687 auto_vec<const char *> constraints;
2688 auto_vec<rtx> clobber_rvec;
2689 HARD_REG_SET clobbered_regs;
2690 CLEAR_HARD_REG_SET (clobbered_regs);
2691
2692 clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2693 clobber_rvec.safe_push (clob);
2694
2695 if (targetm.md_asm_adjust)
2696 targetm.md_asm_adjust (output_rvec, input_rvec,
2697 constraints, clobber_rvec,
2698 clobbered_regs);
2699
2700 asm_op = body;
2701 nclobbers = clobber_rvec.length ();
2702 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2703
2704 XVECEXP (body, 0, 0) = asm_op;
2705 for (i = 0; i < nclobbers; i++)
2706 XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2707 }
2708
2709 emit_insn (body);
2710}
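
A hedged example of the two kinds of basic asm handled by expand_asm_loc; per the code above, only the non-empty template is also treated as clobbering memory (the nop assumes a target that has such an instruction):

void basic_asm_demo (void)
{
  asm ("");              /* empty: no implicit memory clobber */
  asm volatile ("nop");  /* non-empty: memory implicitly clobbered */
}
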
2711
2712/* Return the number of times character C occurs in string S. */
2713static int
2714n_occurrences (int c, const char *s)
2715{
2716 int n = 0;
2717 while (*s)
2718 n += (*s++ == c);
2719 return n;
2720}
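
A usage sketch (hypothetical caller): constraint alternatives are separated by commas, so counting ',' tells how many alternatives a constraint string carries:

static void n_occurrences_demo (void)
{
  gcc_assert (n_occurrences (',', "r,m") == 1);  /* two alternatives */
  gcc_assert (n_occurrences (',', "rm") == 0);   /* a single alternative */
}
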
2721
2722/* A subroutine of expand_asm_operands. Check that all operands have
2723 the same number of alternatives. Return true if so. */
2724
2725static bool
2726	check_operand_nalternatives (const vec<const char *> &constraints)
2727	 {
2728 unsigned len = constraints.length();
2729 if (len > 0)
2730	 {
2731	 int nalternatives = n_occurrences (',', constraints[0]);
2732
2733 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2734 {
2735 error ("too many alternatives in %<asm%>");
2736 return false;
2737 }
2738
2739 for (unsigned i = 1; i < len; ++i)
2740 if (n_occurrences (',', constraints[i]) != nalternatives)
2741 {
2742 error ("operand constraints for %<asm%> differ "
2743 "in number of alternatives");
2744 return false;
2745 }
2746	 }
2747 return true;
2748}
2749
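A hedged user-level example (x86 AT&T syntax assumed): the first asm passes the check above because both operands list two alternatives; the commented-out variant would be rejected with the mismatch diagnostic:

void alternatives_demo (int x)
{
  int y;
  asm ("mov %1, %0" : "=r,m" (y) : "r,m" (x));  /* 2 vs 2: accepted */
#if 0
  asm ("mov %1, %0" : "=r,m" (y) : "r" (x));    /* 2 vs 1: rejected */
#endif
  (void) y;
}
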
2750/* Check for overlap between registers marked in CLOBBERED_REGS and
2751	 anything inappropriate in T. Emit an error and return true if there
2752	 is an overlap, false if ok. */
2753
2754static bool
2755tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2756{
2757 /* Conflicts between asm-declared register variables and the clobber
2758 list are not allowed. */
2759 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2760
2761 if (overlap)
2762 {
2763 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2764 DECL_NAME (overlap));
2765
2766 /* Reset registerness to stop multiple errors emitted for a single
2767 variable. */
2768 DECL_REGISTER (overlap) = 0;
2769 return true;
2770 }
2771
2772 return false;
2773}
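
A hedged x86 example of the conflict diagnosed above: the operand lives in a user-chosen hard register that also appears in the clobber list:

void clobber_conflict_demo (void)
{
  register int counter asm ("ebx") = 0;
#if 0
  /* Rejected: operand register %ebx is also in the clobber list.  */
  asm volatile ("" : "+r" (counter) : : "ebx");
#endif
  (void) counter;
}
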
2774
2775/* Generate RTL for an asm statement with arguments.
2776 STRING is the instruction template.
2777 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2778 Each output or input has an expression in the TREE_VALUE and
2779 a tree list in TREE_PURPOSE which in turn contains a constraint
2780 name in TREE_VALUE (or NULL_TREE) and a constraint string
2781 in TREE_PURPOSE.
2782 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2783 that is clobbered by this insn.
2784
2785 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2786 should be the fallthru basic block of the asm goto.
2787
2788 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2789 Some elements of OUTPUTS may be replaced with trees representing temporary
2790 values. The caller should copy those temporary values to the originally
2791 specified lvalues.
2792
2793 VOL nonzero means the insn is volatile; don't optimize it. */
2794
2795static void
2796	 expand_asm_stmt (gasm *stmt)
2797	 {
2798	 class save_input_location
2799	 {
2800	 location_t old;
2801	
2802	 public:
2803	 explicit save_input_location(location_t where)
2804	 {
2805	 old = input_location;
2806	 input_location = where;
2807	 }
2808	
2809	 ~save_input_location()
2810	 {
2811	 input_location = old;
2812	 }
2813	 };
2814	
2815	 location_t locus = gimple_location (stmt);
2816	
2817	 if (gimple_asm_input_p (stmt))
2818	 {
2819	 const char *s = gimple_asm_string (stmt);
2820	 tree string = build_string (strlen (s), s);
2821	 expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2822	 return;
2823	 }
2824	
2825	 /* There are some legacy diagnostics in here, and this also avoids a
2826	 sixth parameter to targetm.md_asm_adjust. */
2827	 save_input_location s_i_l(locus);
2828	
2829	 unsigned noutputs = gimple_asm_noutputs (stmt);
2830	 unsigned ninputs = gimple_asm_ninputs (stmt);
2831	 unsigned nlabels = gimple_asm_nlabels (stmt);
2832	 unsigned i;
2833
2834 /* ??? Diagnose during gimplification? */
2835 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2836	 {
2837	 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2838 return;
2839 }
2840
2841	 auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2842	 auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2843	 auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2844	
2845	 /* Copy the gimple vectors into new vectors that we can manipulate. */
2846	
2847	 output_tvec.safe_grow (noutputs);
2848	 input_tvec.safe_grow (ninputs);
2849	 constraints.safe_grow (noutputs + ninputs);
2850	
2851	 for (i = 0; i < noutputs; ++i)
2852	 {
2853	 tree t = gimple_asm_output_op (stmt, i);
2854	 output_tvec[i] = TREE_VALUE (t);
2855	 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2856	 }
2857	 for (i = 0; i < ninputs; i++)
2858	 {
2859	 tree t = gimple_asm_input_op (stmt, i);
2860	 input_tvec[i] = TREE_VALUE (t);
2861	 constraints[i + noutputs]
2862	 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2863	 }
2864	
2865	 /* ??? Diagnose during gimplification? */
2866	 if (! check_operand_nalternatives (constraints))
2867	 return;
2868	
2869	 /* Count the number of meaningful clobbered registers, ignoring what
2870	 we would ignore later. */
2871	 auto_vec<rtx> clobber_rvec;
2872	 HARD_REG_SET clobbered_regs;
2873	 CLEAR_HARD_REG_SET (clobbered_regs);
2874	
2875 if (unsigned n = gimple_asm_nclobbers (stmt))
2876 {
2877 clobber_rvec.reserve (n);
2878 for (i = 0; i < n; i++)
2879 {
2880 tree t = gimple_asm_clobber_op (stmt, i);
2881 const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2882 int nregs, j;
862d0b35 2883
2884	 j = decode_reg_name_and_count (regname, &nregs);
2885	 if (j < 0)
2886	 {
2887	 if (j == -2)
2888	 {
2889	 /* ??? Diagnose during gimplification? */
2890	 error ("unknown register name %qs in %<asm%>", regname);
2891	 }
2892	 else if (j == -4)
2893	 {
2894	 rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2895	 clobber_rvec.safe_push (x);
2896	 }
2897	 else
2898	 {
2899	 /* Otherwise we should have -1 == empty string
2900	 or -3 == cc, which is not a register. */
2901	 gcc_assert (j == -1 || j == -3);
2902	 }
2903	 }
2904	 else
2905	 for (int reg = j; reg < j + nregs; reg++)
2906	 {
2907	 /* Clobbering the PIC register is an error. */
2908	 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2909	 {
2910	 /* ??? Diagnose during gimplification? */
2911	 error ("PIC register clobbered by %qs in %<asm%>",
2912	 regname);
2913	 return;
2914	 }
2915	
2916	 SET_HARD_REG_BIT (clobbered_regs, reg);
2917	 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
2918	 clobber_rvec.safe_push (x);
2919	 }
2920	 }
2921	 }
2922	 unsigned nclobbers = clobber_rvec.length();
2923
2924 /* First pass over inputs and outputs checks validity and sets
2925 mark_addressable if needed. */
2926	 /* ??? Diagnose during gimplification? */
2927	
2928	 for (i = 0; i < noutputs; ++i)
2929	 {
2930	 tree val = output_tvec[i];
2931	 tree type = TREE_TYPE (val);
2932	 const char *constraint;
2933	 bool is_inout;
2934	 bool allows_reg;
2935	 bool allows_mem;
2936	
2937	 /* Try to parse the output constraint. If that fails, there's
2938	 no point in going further. */
2939	 constraint = constraints[i];
2940	 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2941	 &allows_mem, &allows_reg, &is_inout))
2942	 return;
2943	
2944	 if (! allows_reg
2945	 && (allows_mem
2946	 || is_inout
2947	 || (DECL_P (val)
2948	 && REG_P (DECL_RTL (val))
2949	 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2950	 mark_addressable (val);
2951	 }
2952	
2953	 for (i = 0; i < ninputs; ++i)
2954	 {
2955	 bool allows_reg, allows_mem;
2956	 const char *constraint;
2957	
2958	 constraint = constraints[i + noutputs];
2959	 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
2960	 constraints.address (),
2961	 &allows_mem, &allows_reg))
2962	 return;
2963	
2964	 if (! allows_reg && allows_mem)
2965	 mark_addressable (input_tvec[i]);
2966	 }
2967	
2968	 /* Second pass evaluates arguments. */
2969	
2970	 /* Make sure stack is consistent for asm goto. */
2971	 if (nlabels > 0)
2972	 do_pending_stack_adjust ();
2973	 int old_generating_concat_p = generating_concat_p;
2974	
2975	 /* Vector of RTX's of evaluated output operands. */
2976	 auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
2977	 auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
2978	 rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
2979	
2980	 output_rvec.safe_grow (noutputs);
2981	
2982	 for (i = 0; i < noutputs; ++i)
2983	 {
2984	 tree val = output_tvec[i];
2985	 tree type = TREE_TYPE (val);
2986	 bool is_inout, allows_reg, allows_mem, ok;
2987	 rtx op;
2988	
2989	 ok = parse_output_constraint (&constraints[i], i, ninputs,
2990	 noutputs, &allows_mem, &allows_reg,
2991	 &is_inout);
2992	 gcc_assert (ok);
2993	
2994	 /* If an output operand is not a decl or indirect ref and our constraint
2995	 allows a register, make a temporary to act as an intermediate.
2996	 Make the asm insn write into that, then we will copy it to
2997	 the real output operand. Likewise for promoted variables. */
2998	
2999	 generating_concat_p = 0;
3000	
3001	 if ((TREE_CODE (val) == INDIRECT_REF
3002	 && allows_mem)
3003	 || (DECL_P (val)
3004	 && (allows_mem || REG_P (DECL_RTL (val)))
3005	 && ! (REG_P (DECL_RTL (val))
3006	 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3007	 || ! allows_reg
3008	 || is_inout)
3009	 {
3010	 op = expand_expr (val, NULL_RTX, VOIDmode,
3011	 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3012	 if (MEM_P (op))
3013	 op = validize_mem (op);
3014	
3015	 if (! allows_reg && !MEM_P (op))
3016	 error ("output number %d not directly addressable", i);
3017	 if ((! allows_mem && MEM_P (op))
3018	 || GET_CODE (op) == CONCAT)
3019	 {
3020	 rtx old_op = op;
3021	 op = gen_reg_rtx (GET_MODE (op));
3022	
3023	 generating_concat_p = old_generating_concat_p;
3024	
3025	 if (is_inout)
3026	 emit_move_insn (op, old_op);
3027	
3028	 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3029	 emit_move_insn (old_op, op);
3030	 after_rtl_seq = get_insns ();
3031	 after_rtl_end = get_last_insn ();
3032	 end_sequence ();
3033	 }
3034	 }
3035	 else
3036	 {
3037	 op = assign_temp (type, 0, 1);
3038	 op = validize_mem (op);
3039	 if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3040	 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3041	
3042	 generating_concat_p = old_generating_concat_p;
3043	
3044	 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3045	 expand_assignment (val, make_tree (type, op), false);
3046	 after_rtl_seq = get_insns ();
3047	 after_rtl_end = get_last_insn ();
3048	 end_sequence ();
3049	 }
3050	 output_rvec[i] = op;
3051	
3052	 if (is_inout)
3053	 inout_opnum.safe_push (i);
3054	 }
3055	
3056	 auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3057	 auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3058	
3059	 input_rvec.safe_grow (ninputs);
3060	 input_mode.safe_grow (ninputs);
3061	
3062	 generating_concat_p = 0;
3063	
3064	 for (i = 0; i < ninputs; ++i)
3065	 {
3066	 tree val = input_tvec[i];
3067	 tree type = TREE_TYPE (val);
3068	 bool allows_reg, allows_mem, ok;
3069	 const char *constraint;
3070	 rtx op;
3071	
3072	 constraint = constraints[i + noutputs];
3073	 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3074	 constraints.address (),
3075	 &allows_mem, &allows_reg);
3076	 gcc_assert (ok);
3077	
3078	 /* EXPAND_INITIALIZER will not generate code for valid initializer
3079	 constants, but will still generate code for other types of operand.
3080	 This is the behavior we want for constant constraints. */
3081	 op = expand_expr (val, NULL_RTX, VOIDmode,
3082	 allows_reg ? EXPAND_NORMAL
3083	 : allows_mem ? EXPAND_MEMORY
3084	 : EXPAND_INITIALIZER);
3085	
3086	 /* Never pass a CONCAT to an ASM. */
3087	 if (GET_CODE (op) == CONCAT)
3088	 op = force_reg (GET_MODE (op), op);
3089	 else if (MEM_P (op))
3090	 op = validize_mem (op);
3091	
3092	 if (asm_operand_ok (op, constraint, NULL) <= 0)
3093	 {
3094	 if (allows_reg && TYPE_MODE (type) != BLKmode)
3095	 op = force_reg (TYPE_MODE (type), op);
3096	 else if (!allows_mem)
3097	 warning (0, "asm operand %d probably doesn%'t match constraints",
3098	 i + noutputs);
3099	 else if (MEM_P (op))
3100	 {
3101	 /* We won't recognize either volatile memory or memory
3102	 with a queued address as a valid memory_operand
3103	 at this point. Ignore it: clearly this *is* a memory. */
3104	 }
3105	 else
3106	 gcc_unreachable ();
3107	 }
3108	 input_rvec[i] = op;
3109	 input_mode[i] = TYPE_MODE (type);
3110	 }
3111	
3112	 /* For in-out operands, copy output rtx to input rtx. */
3113	 unsigned ninout = inout_opnum.length();
3114	 for (i = 0; i < ninout; i++)
3115	 {
3116	 int j = inout_opnum[i];
3117	 rtx o = output_rvec[j];
3118	
3119	 input_rvec.safe_push (o);
3120	 input_mode.safe_push (GET_MODE (o));
3121	
3122	 char buffer[16];
3123	 sprintf (buffer, "%d", j);
3124	 constraints.safe_push (ggc_strdup (buffer));
3125	 }
3126	 ninputs += ninout;
3127	
3128	 /* Sometimes we wish to automatically clobber registers across an asm.
3129	 Case in point is when the i386 backend moved from cc0 to a hard reg --
3130	 maintaining source-level compatibility means automatically clobbering
3131	 the flags register. */
3132	 rtx_insn *after_md_seq = NULL;
3133	 if (targetm.md_asm_adjust)
3134	 after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3135	 constraints, clobber_rvec,
3136	 clobbered_regs);
3137	
3138	 /* Do not allow the hook to change the output and input count,
3139	 lest it mess up the operand numbering. */
3140	 gcc_assert (output_rvec.length() == noutputs);
3141	 gcc_assert (input_rvec.length() == ninputs);
3142	 gcc_assert (constraints.length() == noutputs + ninputs);
3143	
3144	 /* But it certainly can adjust the clobbers. */
3145	 nclobbers = clobber_rvec.length();
3146	
3147	 /* Third pass checks for easy conflicts. */
3148	 /* ??? Why are we doing this on trees instead of rtx? */
3149	
3150	 bool clobber_conflict_found = 0;
3151	 for (i = 0; i < noutputs; ++i)
3152	 if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3153	 clobber_conflict_found = 1;
3154	 for (i = 0; i < ninputs - ninout; ++i)
3155	 if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3156	 clobber_conflict_found = 1;
3157	
3158	 /* Make vectors for the expression-rtx, constraint strings,
3159	 and named operands. */
3160	
3161	 rtvec argvec = rtvec_alloc (ninputs);
3162	 rtvec constraintvec = rtvec_alloc (ninputs);
3163	 rtvec labelvec = rtvec_alloc (nlabels);
3164	
3165	 rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3166	 : GET_MODE (output_rvec[0])),
3167	 ggc_strdup (gimple_asm_string (stmt)),
3168	 "", 0, argvec, constraintvec,
3169	 labelvec, locus);
3170	 MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3171	
3172	 for (i = 0; i < ninputs; ++i)
3173	 {
3174	 ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3175	 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3176	 = gen_rtx_ASM_INPUT_loc (input_mode[i],
3177	 constraints[i + noutputs],
3178	 locus);
3179	 }
3180	
3181	 /* Copy labels to the vector. */
3182	 rtx_code_label *fallthru_label = NULL;
3183	 if (nlabels > 0)
3184	 {
3185	 basic_block fallthru_bb = NULL;
3186	 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3187	 if (fallthru)
3188	 fallthru_bb = fallthru->dest;
3189	
3190	 for (i = 0; i < nlabels; ++i)
3191	 {
3192	 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3193	 rtx_insn *r;
3194	 /* If asm goto has any labels in the fallthru basic block, use
3195	 a label that we emit immediately after the asm goto. Expansion
3196	 may insert further instructions into the same basic block after
3197	 asm goto and if we don't do this, insertion of instructions on
3198	 the fallthru edge might misbehave. See PR58670. */
3199	 if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
3200	 {
3201	 if (fallthru_label == NULL_RTX)
3202	 fallthru_label = gen_label_rtx ();
3203	 r = fallthru_label;
3204	 }
3205	 else
3206	 r = label_rtx (label);
3207	 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3208	 }
3209	 }
3210	
3211	 /* Now, for each output, construct an rtx
3212	 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3213	 ARGVEC CONSTRAINTS OPNAMES))
3214	 If there is more than one, put them inside a PARALLEL. */
3215	
3216	 if (nlabels > 0 && nclobbers == 0)
3217	 {
3218	 gcc_assert (noutputs == 0);
3219	 emit_jump_insn (body);
3220	 }
3221	 else if (noutputs == 0 && nclobbers == 0)
3222	 {
3223	 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3224	 emit_insn (body);
3225	 }
3226	 else if (noutputs == 1 && nclobbers == 0)
3227	 {
3228	 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3229	 emit_insn (gen_rtx_SET (output_rvec[0], body));
3230	 }
3231	 else
3232	 {
3233	 rtx obody = body;
3234	 int num = noutputs;
3235	
3236	 if (num == 0)
3237	 num = 1;
3238	
3239	 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3240	
3241	 /* For each output operand, store a SET. */
3242	 for (i = 0; i < noutputs; ++i)
3243	 {
3244	 rtx src, o = output_rvec[i];
3245	 if (i == 0)
3246	 {
3247	 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3248	 src = obody;
3249	 }
3250	 else
3251	 {
3252	 src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3253	 ASM_OPERANDS_TEMPLATE (obody),
3254	 constraints[i], i, argvec,
3255	 constraintvec, labelvec, locus);
3256	 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3257	 }
3258	 XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3259	 }
3260	
3261	 /* If there are no outputs (but there are some clobbers)
3262	 store the bare ASM_OPERANDS into the PARALLEL. */
3263	 if (i == 0)
3264	 XVECEXP (body, 0, i++) = obody;
3265	
3266	 /* Store (clobber REG) for each clobbered register specified. */
3267	 for (unsigned j = 0; j < nclobbers; ++j)
3268	 {
3269	 rtx clobbered_reg = clobber_rvec[j];
3270	
3271	 /* Do a sanity check for overlap between clobbers and the
3272	 inputs and outputs that haven't been handled. Such overlap
3273	 should have been detected and reported above. */
3274	 if (!clobber_conflict_found && REG_P (clobbered_reg))
3275	 {
3276	 /* We test the old body (obody) contents to avoid
3277	 tripping over the under-construction body. */
3278	 for (unsigned k = 0; k < noutputs; ++k)
3279	 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3280	 internal_error ("asm clobber conflict with output operand");
3281	
3282	 for (unsigned k = 0; k < ninputs - ninout; ++k)
3283	 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3284	 internal_error ("asm clobber conflict with input operand");
3285	 }
3286	
3287	 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3288	 }
3289	
3290	 if (nlabels > 0)
3291	 emit_jump_insn (body);
3292	 else
3293	 emit_insn (body);
3294	 }
3295	
3296	 generating_concat_p = old_generating_concat_p;
3297	
3298	 if (fallthru_label)
3299	 emit_label (fallthru_label);
3300	
3301	 if (after_md_seq)
3302	 emit_insn (after_md_seq);
3303	 if (after_rtl_seq)
3304	 emit_insn (after_rtl_seq);
3305	
3306	 free_temp_slots ();
3307	 crtl->has_asm_statement = 1;
3308}
3309
3310/* Emit code to jump to the address
3311 specified by the pointer expression EXP. */
3312
3313static void
3314expand_computed_goto (tree exp)
3315{
3316 rtx x = expand_normal (exp);
3317
862d0b35
DN
3318 do_pending_stack_adjust ();
3319 emit_indirect_jump (x);
3320}
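
/* Illustrative example (not from the original source): with the
   labels-as-values extension,

     void *p = &&lab;
     goto *p;
   lab: ;

   the `goto *p' has a non-LABEL_DECL destination, so it is routed
   here and we emit an indirect jump through the value of P.  */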
3321
3322/* Generate RTL code for a `goto' statement with target label LABEL.
3323 LABEL should be a LABEL_DECL tree node that was or will later be
3324 defined with `expand_label'. */
3325
3326static void
3327expand_goto (tree label)
3328{
b2b29377
MM
3329 if (flag_checking)
3330 {
3331 /* Check for a nonlocal goto to a containing function. Should have
3332 gotten translated to __builtin_nonlocal_goto. */
3333 tree context = decl_function_context (label);
3334 gcc_assert (!context || context == current_function_decl);
3335 }
862d0b35 3336
1476d1bd 3337 emit_jump (jump_target_rtx (label));
862d0b35
DN
3338}
3339
3340/* Output a return with no value. */
3341
3342static void
3343expand_null_return_1 (void)
3344{
3345 clear_pending_stack_adjust ();
3346 do_pending_stack_adjust ();
3347 emit_jump (return_label);
3348}
3349
3350/* Generate RTL to return from the current function, with no value.
3351 (That is, we do not do anything about returning any value.) */
3352
3353void
3354expand_null_return (void)
3355{
3356 /* If this function was declared to return a value, but we
3357 didn't, clobber the return registers so that they are not
3358 propagated live to the rest of the function. */
3359 clobber_return_register ();
3360
3361 expand_null_return_1 ();
3362}
3363
3364/* Generate RTL to return from the current function, with value VAL. */
3365
3366static void
3367expand_value_return (rtx val)
3368{
3369 /* Copy the value to the return location unless it's already there. */
3370
3371 tree decl = DECL_RESULT (current_function_decl);
3372 rtx return_reg = DECL_RTL (decl);
3373 if (return_reg != val)
3374 {
3375 tree funtype = TREE_TYPE (current_function_decl);
3376 tree type = TREE_TYPE (decl);
3377 int unsignedp = TYPE_UNSIGNED (type);
ef4bddc2
RS
3378 machine_mode old_mode = DECL_MODE (decl);
3379 machine_mode mode;
862d0b35
DN
3380 if (DECL_BY_REFERENCE (decl))
3381 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3382 else
3383 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3384
3385 if (mode != old_mode)
3386 val = convert_modes (mode, old_mode, val, unsignedp);
3387
3388 if (GET_CODE (return_reg) == PARALLEL)
3389 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3390 else
3391 emit_move_insn (return_reg, val);
3392 }
3393
3394 expand_null_return_1 ();
3395}
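
/* Worked sketch (assuming a target that promotes small integer return
   values to word mode): for `short f (void)' the result decl has
   HImode while promote_function_mode yields SImode, so VAL is widened
   by convert_modes above before the move into the return register.  */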
3396
3397/* Generate RTL to evaluate the expression RETVAL and return it
3398 from the current function. */
3399
3400static void
d5e254e1 3401expand_return (tree retval, tree bounds)
862d0b35
DN
3402{
3403 rtx result_rtl;
3404 rtx val = 0;
3405 tree retval_rhs;
d5e254e1 3406 rtx bounds_rtl;
862d0b35
DN
3407
3408 /* If function wants no value, give it none. */
3409 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3410 {
3411 expand_normal (retval);
3412 expand_null_return ();
3413 return;
3414 }
3415
3416 if (retval == error_mark_node)
3417 {
3418 /* Treat this like a return of no value from a function that
3419 returns a value. */
3420 expand_null_return ();
3421 return;
3422 }
3423 else if ((TREE_CODE (retval) == MODIFY_EXPR
3424 || TREE_CODE (retval) == INIT_EXPR)
3425 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3426 retval_rhs = TREE_OPERAND (retval, 1);
3427 else
3428 retval_rhs = retval;
3429
3430 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3431
d5e254e1
IE
3432 /* Put returned bounds to the right place. */
3433 bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3434 if (bounds_rtl)
3435 {
855f036d
IE
3436 rtx addr = NULL;
3437 rtx bnd = NULL;
d5e254e1 3438
855f036d 3439 if (bounds && bounds != error_mark_node)
d5e254e1
IE
3440 {
3441 bnd = expand_normal (bounds);
3442 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3443 }
3444 else if (REG_P (bounds_rtl))
3445 {
855f036d
IE
3446 if (bounds)
3447 bnd = chkp_expand_zero_bounds ();
3448 else
3449 {
3450 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3451 addr = gen_rtx_MEM (Pmode, addr);
3452 bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3453 }
3454
d5e254e1
IE
3455 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3456 }
3457 else
3458 {
3459 int n;
3460
3461 gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3462
855f036d
IE
3463 if (bounds)
3464 bnd = chkp_expand_zero_bounds ();
3465 else
3466 {
3467 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3468 addr = gen_rtx_MEM (Pmode, addr);
3469 }
d5e254e1
IE
3470
3471 for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3472 {
d5e254e1 3473 rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
855f036d
IE
3474 if (!bounds)
3475 {
3476 rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3477 rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3478 bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3479 }
d5e254e1
IE
3480 targetm.calls.store_returned_bounds (slot, bnd);
3481 }
3482 }
3483 }
3484 else if (chkp_function_instrumented_p (current_function_decl)
3485 && !BOUNDED_P (retval_rhs)
3486 && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3487 && TREE_CODE (retval_rhs) != RESULT_DECL)
3488 {
3489 rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3490 addr = gen_rtx_MEM (Pmode, addr);
3491
3492 gcc_assert (MEM_P (result_rtl));
3493
3494 chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3495 }
3496
862d0b35
DN
3497 /* If we are returning the RESULT_DECL, then the value has already
3498 been stored into it, so we don't have to do anything special. */
3499 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3500 expand_value_return (result_rtl);
3501
3502 /* If the result is an aggregate that is being returned in one (or more)
3503 registers, load the registers here. */
3504
3505 else if (retval_rhs != 0
3506 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3507 && REG_P (result_rtl))
3508 {
3509 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3510 if (val)
3511 {
3512 /* Use the mode of the result value on the return register. */
3513 PUT_MODE (result_rtl, GET_MODE (val));
3514 expand_value_return (val);
3515 }
3516 else
3517 expand_null_return ();
3518 }
3519 else if (retval_rhs != 0
3520 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3521 && (REG_P (result_rtl)
3522 || (GET_CODE (result_rtl) == PARALLEL)))
3523 {
9ee5337d
EB
3524 /* Compute the return value into a temporary (usually a pseudo reg). */
3525 val
3526 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
862d0b35
DN
3527 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3528 val = force_not_mem (val);
862d0b35
DN
3529 expand_value_return (val);
3530 }
3531 else
3532 {
3533 /* No hard reg used; calculate value into hard return reg. */
3534 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3535 expand_value_return (result_rtl);
3536 }
3537}
3538
28ed065e
MM
3539/* A subroutine of expand_gimple_stmt, expanding one gimple statement
3540 STMT that doesn't require special handling for outgoing edges. That
3541 is no tailcalls and no GIMPLE_COND. */
3542
3543static void
355fe088 3544expand_gimple_stmt_1 (gimple *stmt)
28ed065e
MM
3545{
3546 tree op0;
c82fee88 3547
5368224f 3548 set_curr_insn_location (gimple_location (stmt));
c82fee88 3549
28ed065e
MM
3550 switch (gimple_code (stmt))
3551 {
3552 case GIMPLE_GOTO:
3553 op0 = gimple_goto_dest (stmt);
3554 if (TREE_CODE (op0) == LABEL_DECL)
3555 expand_goto (op0);
3556 else
3557 expand_computed_goto (op0);
3558 break;
3559 case GIMPLE_LABEL:
538dd0b7 3560 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
28ed065e
MM
3561 break;
3562 case GIMPLE_NOP:
3563 case GIMPLE_PREDICT:
3564 break;
28ed065e 3565 case GIMPLE_SWITCH:
f66459c1
PB
3566 {
3567 gswitch *swtch = as_a <gswitch *> (stmt);
3568 if (gimple_switch_num_labels (swtch) == 1)
3569 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3570 else
3571 expand_case (swtch);
3572 }
28ed065e
MM
3573 break;
3574 case GIMPLE_ASM:
538dd0b7 3575 expand_asm_stmt (as_a <gasm *> (stmt));
28ed065e
MM
3576 break;
3577 case GIMPLE_CALL:
538dd0b7 3578 expand_call_stmt (as_a <gcall *> (stmt));
28ed065e
MM
3579 break;
3580
3581 case GIMPLE_RETURN:
855f036d
IE
3582 {
3583 tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3584 op0 = gimple_return_retval (as_a <greturn *> (stmt));
28ed065e 3585
855f036d
IE
3586 if (op0 && op0 != error_mark_node)
3587 {
3588 tree result = DECL_RESULT (current_function_decl);
28ed065e 3589
b5be36b1
IE
3590 /* Mark we have return statement with missing bounds. */
3591 if (!bnd
3592 && chkp_function_instrumented_p (cfun->decl)
3593 && !DECL_P (op0))
3594 bnd = error_mark_node;
3595
855f036d
IE
3596 /* If we are not returning the current function's RESULT_DECL,
3597 build an assignment to it. */
3598 if (op0 != result)
3599 {
3600 /* I believe that a function's RESULT_DECL is unique. */
3601 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3602
3603 /* ??? We'd like to use simply expand_assignment here,
3604 but this fails if the value is of BLKmode but the return
3605 decl is a register. expand_return has special handling
3606 for this combination, which eventually should move
3607 to common code. See comments there. Until then, let's
3608 build a modify expression :-/ */
3609 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3610 result, op0);
3611 }
855f036d
IE
3612 }
3613
3614 if (!op0)
3615 expand_null_return ();
3616 else
3617 expand_return (op0, bnd);
3618 }
28ed065e
MM
3619 break;
3620
3621 case GIMPLE_ASSIGN:
3622 {
538dd0b7
DM
3623 gassign *assign_stmt = as_a <gassign *> (stmt);
3624 tree lhs = gimple_assign_lhs (assign_stmt);
28ed065e
MM
3625
3626 /* Tree expand used to fiddle with |= and &= of two bitfield
3627 COMPONENT_REFs here. This can't happen with gimple, the LHS
3628 of binary assigns must be a gimple reg. */
3629
3630 if (TREE_CODE (lhs) != SSA_NAME
3631 || get_gimple_rhs_class (gimple_expr_code (stmt))
3632 == GIMPLE_SINGLE_RHS)
3633 {
538dd0b7 3634 tree rhs = gimple_assign_rhs1 (assign_stmt);
28ed065e
MM
3635 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3636 == GIMPLE_SINGLE_RHS);
ae2ffe2a
RB
3637 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3638 /* Do not put locations on possibly shared trees. */
3639 && !is_gimple_min_invariant (rhs))
28ed065e 3640 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
47598145
MM
3641 if (TREE_CLOBBER_P (rhs))
3642 /* This is a clobber to mark the going out of scope for
3643 this LHS. */
3644 ;
3645 else
3646 expand_assignment (lhs, rhs,
538dd0b7
DM
3647 gimple_assign_nontemporal_move_p (
3648 assign_stmt));
28ed065e
MM
3649 }
3650 else
3651 {
3652 rtx target, temp;
538dd0b7 3653 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
28ed065e
MM
3654 struct separate_ops ops;
3655 bool promoted = false;
3656
3657 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3658 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3659 promoted = true;
3660
538dd0b7 3661 ops.code = gimple_assign_rhs_code (assign_stmt);
28ed065e 3662 ops.type = TREE_TYPE (lhs);
b0dd8c90 3663 switch (get_gimple_rhs_class (ops.code))
28ed065e 3664 {
0354c0c7 3665 case GIMPLE_TERNARY_RHS:
538dd0b7 3666 ops.op2 = gimple_assign_rhs3 (assign_stmt);
0354c0c7 3667 /* Fallthru */
28ed065e 3668 case GIMPLE_BINARY_RHS:
538dd0b7 3669 ops.op1 = gimple_assign_rhs2 (assign_stmt);
28ed065e
MM
3670 /* Fallthru */
3671 case GIMPLE_UNARY_RHS:
538dd0b7 3672 ops.op0 = gimple_assign_rhs1 (assign_stmt);
28ed065e
MM
3673 break;
3674 default:
3675 gcc_unreachable ();
3676 }
3677 ops.location = gimple_location (stmt);
3678
3679 /* If we want to use a nontemporal store, force the value to
3680 register first. If we store into a promoted register,
3681 don't directly expand to target. */
3682 temp = nontemporal || promoted ? NULL_RTX : target;
3683 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3684 EXPAND_NORMAL);
3685
3686 if (temp == target)
3687 ;
3688 else if (promoted)
3689 {
362d42dc 3690 int unsignedp = SUBREG_PROMOTED_SIGN (target);
28ed065e
MM
3691 /* If TEMP is a VOIDmode constant, use convert_modes to make
3692 sure that we properly convert it. */
3693 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3694 {
3695 temp = convert_modes (GET_MODE (target),
3696 TYPE_MODE (ops.type),
4e18a7d4 3697 temp, unsignedp);
28ed065e 3698 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4e18a7d4 3699 GET_MODE (target), temp, unsignedp);
28ed065e
MM
3700 }
3701
27be0c32 3702 convert_move (SUBREG_REG (target), temp, unsignedp);
28ed065e
MM
3703 }
3704 else if (nontemporal && emit_storent_insn (target, temp))
3705 ;
3706 else
3707 {
3708 temp = force_operand (temp, target);
3709 if (temp != target)
3710 emit_move_insn (target, temp);
3711 }
3712 }
3713 }
3714 break;
3715
3716 default:
3717 gcc_unreachable ();
3718 }
3719}
3720
3721/* Expand one gimple statement STMT and return the last RTL instruction
3722 before any of the newly generated ones.
3723
3724 In addition to generating the necessary RTL instructions this also
3725 sets REG_EH_REGION notes if necessary and sets the current source
3726 location for diagnostics. */
3727
b47aae36 3728static rtx_insn *
355fe088 3729expand_gimple_stmt (gimple *stmt)
28ed065e 3730{
28ed065e 3731 location_t saved_location = input_location;
b47aae36 3732 rtx_insn *last = get_last_insn ();
c82fee88 3733 int lp_nr;
28ed065e 3734
28ed065e
MM
3735 gcc_assert (cfun);
3736
c82fee88
EB
3737 /* We need to save and restore the current source location so that errors
3738 discovered during expansion are emitted with the right location. But
3739 it would be better if the diagnostic routines used the source location
3740 embedded in the tree nodes rather than globals. */
28ed065e 3741 if (gimple_has_location (stmt))
c82fee88 3742 input_location = gimple_location (stmt);
28ed065e
MM
3743
3744 expand_gimple_stmt_1 (stmt);
c82fee88 3745
28ed065e
MM
3746 /* Free any temporaries used to evaluate this statement. */
3747 free_temp_slots ();
3748
3749 input_location = saved_location;
3750
3751 /* Mark all insns that may trap. */
1d65f45c
RH
3752 lp_nr = lookup_stmt_eh_lp (stmt);
3753 if (lp_nr)
28ed065e 3754 {
b47aae36 3755 rtx_insn *insn;
28ed065e
MM
3756 for (insn = next_real_insn (last); insn;
3757 insn = next_real_insn (insn))
3758 {
3759 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3760 /* If we want exceptions for non-call insns, any
3761 may_trap_p instruction may throw. */
3762 && GET_CODE (PATTERN (insn)) != CLOBBER
3763 && GET_CODE (PATTERN (insn)) != USE
1d65f45c
RH
3764 && insn_could_throw_p (insn))
3765 make_reg_eh_region_note (insn, 0, lp_nr);
28ed065e
MM
3766 }
3767 }
3768
3769 return last;
3770}
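
/* Illustrative sketch: with -fnon-call-exceptions, expanding a
   statement such as `x = a / b' inside an EH region yields a division
   insn that may trap; the loop above attaches a REG_EH_REGION note
   with the landing-pad number so later RTL passes keep the insn
   associated with the correct handler.  */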
3771
726a989a 3772/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
224e770b
RH
3773 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3774 generated a tail call (something that might be denied by the ABI
cea49550
RH
3775 rules governing the call; see calls.c).
3776
3777 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3778 can still reach the rest of BB. The case here is __builtin_sqrt,
3779 where the NaN result goes through the external function (with a
3780 tailcall) and the normal result happens via a sqrt instruction. */
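
/* A hedged example of the conditional case (not from the original
   source): expanding

     double f (double x) { return __builtin_sqrt (x); }

   on a target with a hardware sqrt insn may use that insn on the fast
   path and emit a sibcall to sqrt () only for the NaN/errno path, so
   the block ends in a sibcall and yet *CAN_FALLTHRU must be true.  */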
80c7a9eb
RH
3781
3782static basic_block
538dd0b7 3783expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
80c7a9eb 3784{
b47aae36 3785 rtx_insn *last2, *last;
224e770b 3786 edge e;
628f6a4e 3787 edge_iterator ei;
357067f2 3788 profile_probability probability;
80c7a9eb 3789
28ed065e 3790 last2 = last = expand_gimple_stmt (stmt);
80c7a9eb
RH
3791
3792 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
3793 if (CALL_P (last) && SIBLING_CALL_P (last))
3794 goto found;
80c7a9eb 3795
726a989a 3796 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 3797
cea49550 3798 *can_fallthru = true;
224e770b 3799 return NULL;
80c7a9eb 3800
224e770b
RH
3801 found:
3802 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3803 Any instructions emitted here are about to be deleted. */
3804 do_pending_stack_adjust ();
3805
3806 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3807 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3808 EH or abnormal edges, we shouldn't have created a tail call in
3809 the first place. So it seems to me we should just be removing
3810 all edges here, or redirecting the existing fallthru edge to
3811 the exit block. */
3812
357067f2 3813 probability = profile_probability::never ();
3995f3a2 3814 profile_count count = profile_count::zero ();
224e770b 3815
628f6a4e
BE
3816 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3817 {
224e770b
RH
3818 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3819 {
fefa31b5 3820 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
80c7a9eb 3821 {
224e770b
RH
3822 e->dest->count -= e->count;
3823 e->dest->frequency -= EDGE_FREQUENCY (e);
224e770b 3824 if (e->dest->frequency < 0)
c22cacf3 3825 e->dest->frequency = 0;
80c7a9eb 3826 }
224e770b
RH
3827 count += e->count;
3828 probability += e->probability;
3829 remove_edge (e);
80c7a9eb 3830 }
628f6a4e
BE
3831 else
3832 ei_next (&ei);
80c7a9eb
RH
3833 }
3834
224e770b
RH
3835 /* This is somewhat ugly: the call_expr expander often emits instructions
3836 after the sibcall (to perform the function return). These confuse the
12eff7b7 3837 find_many_sub_basic_blocks code, so we need to get rid of these. */
224e770b 3838 last = NEXT_INSN (last);
341c100f 3839 gcc_assert (BARRIER_P (last));
cea49550
RH
3840
3841 *can_fallthru = false;
224e770b
RH
3842 while (NEXT_INSN (last))
3843 {
 3844	 /* For instance, the sqrt builtin expander may expand an if with a
 3845	    sibcall in the then-arm and a label for the else-arm.  */
3846 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
3847 {
3848 *can_fallthru = true;
3849 break;
3850 }
224e770b
RH
3851 delete_insn (NEXT_INSN (last));
3852 }
3853
fefa31b5
DM
3854 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3855 | EDGE_SIBCALL);
aea5e79a
JH
3856 e->probability = probability;
3857 e->count = count;
1130d5e3 3858 BB_END (bb) = last;
224e770b
RH
3859 update_bb_for_insn (bb);
3860
3861 if (NEXT_INSN (last))
3862 {
3863 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3864
3865 last = BB_END (bb);
3866 if (BARRIER_P (last))
1130d5e3 3867 BB_END (bb) = PREV_INSN (last);
224e770b
RH
3868 }
3869
726a989a 3870 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 3871
224e770b 3872 return bb;
80c7a9eb
RH
3873}
3874
b5b8b0ac
AO
3875/* Return the difference between the floor and the truncated result of
3876 a signed division by OP1 with remainder MOD. */
3877static rtx
ef4bddc2 3878floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
b5b8b0ac
AO
3879{
3880 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3881 return gen_rtx_IF_THEN_ELSE
3882 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3883 gen_rtx_IF_THEN_ELSE
3884 (mode, gen_rtx_LT (BImode,
3885 gen_rtx_DIV (mode, op1, mod),
3886 const0_rtx),
3887 constm1_rtx, const0_rtx),
3888 const0_rtx);
3889}
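
/* Worked example (illustrative): for -7 / 2 truncation gives -3 with
   MOD = -1; OP1 / MOD = 2 / -1 < 0, so the adjustment above is -1 and
   -3 + -1 = -4 = floor (-3.5).  */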
3890
3891/* Return the difference between the ceil and the truncated result of
3892 a signed division by OP1 with remainder MOD. */
3893static rtx
ef4bddc2 3894ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
b5b8b0ac
AO
3895{
3896 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3897 return gen_rtx_IF_THEN_ELSE
3898 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3899 gen_rtx_IF_THEN_ELSE
3900 (mode, gen_rtx_GT (BImode,
3901 gen_rtx_DIV (mode, op1, mod),
3902 const0_rtx),
3903 const1_rtx, const0_rtx),
3904 const0_rtx);
3905}
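
/* Worked example (illustrative): for 7 / 2 truncation gives 3 with
   MOD = 1; OP1 / MOD = 2 / 1 > 0, so the adjustment is +1 and
   3 + 1 = 4 = ceil (3.5).  */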
3906
3907/* Return the difference between the ceil and the truncated result of
3908 an unsigned division by OP1 with remainder MOD. */
3909static rtx
ef4bddc2 3910ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
b5b8b0ac
AO
3911{
3912 /* (mod != 0 ? 1 : 0) */
3913 return gen_rtx_IF_THEN_ELSE
3914 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3915 const1_rtx, const0_rtx);
3916}
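
/* Worked example (illustrative): for 7u / 2u truncation gives 3 with
   MOD = 1 != 0, so the adjustment is +1 and 3 + 1 = 4 = ceil (3.5).  */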
3917
3918/* Return the difference between the rounded and the truncated result
3919 of a signed division by OP1 with remainder MOD. Halfway cases are
3920 rounded away from zero, rather than to the nearest even number. */
3921static rtx
ef4bddc2 3922round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
b5b8b0ac
AO
3923{
3924 /* (abs (mod) >= abs (op1) - abs (mod)
3925 ? (op1 / mod > 0 ? 1 : -1)
3926 : 0) */
3927 return gen_rtx_IF_THEN_ELSE
3928 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3929 gen_rtx_MINUS (mode,
3930 gen_rtx_ABS (mode, op1),
3931 gen_rtx_ABS (mode, mod))),
3932 gen_rtx_IF_THEN_ELSE
3933 (mode, gen_rtx_GT (BImode,
3934 gen_rtx_DIV (mode, op1, mod),
3935 const0_rtx),
3936 const1_rtx, constm1_rtx),
3937 const0_rtx);
3938}
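
/* Worked example (illustrative): for 7 / 2 we have MOD = 1 and
   abs (OP1) - abs (MOD) = 1, so the guard holds; OP1 / MOD = 2 > 0
   gives +1, and 3 + 1 = 4, i.e. 3.5 rounded away from zero.  */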
3939
3940/* Return the difference between the rounded and the truncated result
 3941   of an unsigned division by OP1 with remainder MOD.  Halfway cases
3942 are rounded away from zero, rather than to the nearest even
3943 number. */
3944static rtx
ef4bddc2 3945round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
b5b8b0ac
AO
3946{
3947 /* (mod >= op1 - mod ? 1 : 0) */
3948 return gen_rtx_IF_THEN_ELSE
3949 (mode, gen_rtx_GE (BImode, mod,
3950 gen_rtx_MINUS (mode, op1, mod)),
3951 const1_rtx, const0_rtx);
3952}
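
/* Worked example (illustrative): for 7u / 2u we have MOD = 1 and
   OP1 - MOD = 1, so MOD >= OP1 - MOD holds; the adjustment is +1 and
   3 + 1 = 4.  */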
3953
dda2da58
AO
3954/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3955   any rtl.  */
3956
3957static rtx
ef4bddc2 3958convert_debug_memory_address (machine_mode mode, rtx x,
f61c6f34 3959 addr_space_t as)
dda2da58 3960{
ef4bddc2 3961 machine_mode xmode = GET_MODE (x);
dda2da58
AO
3962
3963#ifndef POINTERS_EXTEND_UNSIGNED
f61c6f34
JJ
3964 gcc_assert (mode == Pmode
3965 || mode == targetm.addr_space.address_mode (as));
dda2da58
AO
3966 gcc_assert (xmode == mode || xmode == VOIDmode);
3967#else
f61c6f34 3968 rtx temp;
f61c6f34 3969
639d4bb8 3970 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
dda2da58
AO
3971
3972 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3973 return x;
3974
69660a70 3975 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3403a1a9 3976 x = lowpart_subreg (mode, x, xmode);
dda2da58
AO
3977 else if (POINTERS_EXTEND_UNSIGNED > 0)
3978 x = gen_rtx_ZERO_EXTEND (mode, x);
3979 else if (!POINTERS_EXTEND_UNSIGNED)
3980 x = gen_rtx_SIGN_EXTEND (mode, x);
3981 else
f61c6f34
JJ
3982 {
3983 switch (GET_CODE (x))
3984 {
3985 case SUBREG:
3986 if ((SUBREG_PROMOTED_VAR_P (x)
3987 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3988 || (GET_CODE (SUBREG_REG (x)) == PLUS
3989 && REG_P (XEXP (SUBREG_REG (x), 0))
3990 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3991 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3992 && GET_MODE (SUBREG_REG (x)) == mode)
3993 return SUBREG_REG (x);
3994 break;
3995 case LABEL_REF:
04a121a7 3996 temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
f61c6f34
JJ
3997 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3998 return temp;
3999 case SYMBOL_REF:
4000 temp = shallow_copy_rtx (x);
4001 PUT_MODE (temp, mode);
4002 return temp;
4003 case CONST:
4004 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4005 if (temp)
4006 temp = gen_rtx_CONST (mode, temp);
4007 return temp;
4008 case PLUS:
4009 case MINUS:
4010 if (CONST_INT_P (XEXP (x, 1)))
4011 {
4012 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4013 if (temp)
4014 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4015 }
4016 break;
4017 default:
4018 break;
4019 }
 4020      /* Don't know how to express ptr_extend as an operation in debug info.  */
4021 return NULL;
4022 }
dda2da58
AO
4023#endif /* POINTERS_EXTEND_UNSIGNED */
4024
4025 return x;
4026}
4027
dfde35b3
JJ
4028/* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4029 by avoid_deep_ter_for_debug. */
4030
4031static hash_map<tree, tree> *deep_ter_debug_map;
4032
4033/* Split too deep TER chains for debug stmts using debug temporaries. */
4034
4035static void
355fe088 4036avoid_deep_ter_for_debug (gimple *stmt, int depth)
dfde35b3
JJ
4037{
4038 use_operand_p use_p;
4039 ssa_op_iter iter;
4040 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4041 {
4042 tree use = USE_FROM_PTR (use_p);
4043 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4044 continue;
355fe088 4045 gimple *g = get_gimple_for_ssa_name (use);
dfde35b3
JJ
4046 if (g == NULL)
4047 continue;
4048 if (depth > 6 && !stmt_ends_bb_p (g))
4049 {
4050 if (deep_ter_debug_map == NULL)
4051 deep_ter_debug_map = new hash_map<tree, tree>;
4052
4053 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4054 if (vexpr != NULL)
4055 continue;
4056 vexpr = make_node (DEBUG_EXPR_DECL);
355fe088 4057 gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
dfde35b3
JJ
4058 DECL_ARTIFICIAL (vexpr) = 1;
4059 TREE_TYPE (vexpr) = TREE_TYPE (use);
899ca90e 4060 SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
dfde35b3
JJ
4061 gimple_stmt_iterator gsi = gsi_for_stmt (g);
4062 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4063 avoid_deep_ter_for_debug (def_temp, 0);
4064 }
4065 else
4066 avoid_deep_ter_for_debug (g, depth + 1);
4067 }
4068}
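
/* Illustrative sketch (hypothetical SSA names): given a single-use
   chain

     t1 = a + b;  t2 = t1 * c;  ...  t9 = t8 - d;

   replaying it into one debug expression would nest ever deeper.
   Past depth 6 the walk above instead emits a debug bind such as

     # DEBUG D#1 => t7

   just after t7's definition, and deeper debug expressions then refer
   to D#1 instead of re-expanding the whole chain.  */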
4069
12c5ffe5
EB
4070/* Return an RTX equivalent to the value of the parameter DECL. */
4071
4072static rtx
4073expand_debug_parm_decl (tree decl)
4074{
4075 rtx incoming = DECL_INCOMING_RTL (decl);
4076
4077 if (incoming
4078 && GET_MODE (incoming) != BLKmode
4079 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4080 || (MEM_P (incoming)
4081 && REG_P (XEXP (incoming, 0))
4082 && HARD_REGISTER_P (XEXP (incoming, 0)))))
4083 {
4084 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4085
4086#ifdef HAVE_window_save
4087 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4088 If the target machine has an explicit window save instruction, the
4089 actual entry value is the corresponding OUTGOING_REGNO instead. */
4090 if (REG_P (incoming)
4091 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4092 incoming
4093 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4094 OUTGOING_REGNO (REGNO (incoming)), 0);
4095 else if (MEM_P (incoming))
4096 {
4097 rtx reg = XEXP (incoming, 0);
4098 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4099 {
4100 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4101 incoming = replace_equiv_address_nv (incoming, reg);
4102 }
6cfa417f
JJ
4103 else
4104 incoming = copy_rtx (incoming);
12c5ffe5
EB
4105 }
4106#endif
4107
4108 ENTRY_VALUE_EXP (rtl) = incoming;
4109 return rtl;
4110 }
4111
4112 if (incoming
4113 && GET_MODE (incoming) != BLKmode
4114 && !TREE_ADDRESSABLE (decl)
4115 && MEM_P (incoming)
4116 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4117 || (GET_CODE (XEXP (incoming, 0)) == PLUS
4118 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4119 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
6cfa417f 4120 return copy_rtx (incoming);
12c5ffe5
EB
4121
4122 return NULL_RTX;
4123}
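
/* Illustrative note (hedged): for a parameter that arrives in a hard
   register, say %rdi, whose value is dead in the optimized body, the
   ENTRY_VALUE rtx built above lets var-tracking describe the location
   with a DW_OP_entry_value-style expression instead of dropping the
   parameter from the debug info entirely.  */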
4124
4125/* Return an RTX equivalent to the value of the tree expression EXP. */
b5b8b0ac
AO
4126
4127static rtx
4128expand_debug_expr (tree exp)
4129{
4130 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
ef4bddc2
RS
4131 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4132 machine_mode inner_mode = VOIDmode;
b5b8b0ac 4133 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
09e881c9 4134 addr_space_t as;
b5b8b0ac
AO
4135
4136 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4137 {
4138 case tcc_expression:
4139 switch (TREE_CODE (exp))
4140 {
4141 case COND_EXPR:
7ece48b1 4142 case DOT_PROD_EXPR:
79d652a5 4143 case SAD_EXPR:
0354c0c7
BS
4144 case WIDEN_MULT_PLUS_EXPR:
4145 case WIDEN_MULT_MINUS_EXPR:
0f59b812 4146 case FMA_EXPR:
b5b8b0ac
AO
4147 goto ternary;
4148
4149 case TRUTH_ANDIF_EXPR:
4150 case TRUTH_ORIF_EXPR:
4151 case TRUTH_AND_EXPR:
4152 case TRUTH_OR_EXPR:
4153 case TRUTH_XOR_EXPR:
4154 goto binary;
4155
4156 case TRUTH_NOT_EXPR:
4157 goto unary;
4158
4159 default:
4160 break;
4161 }
4162 break;
4163
4164 ternary:
4165 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4166 if (!op2)
4167 return NULL_RTX;
4168 /* Fall through. */
4169
4170 binary:
4171 case tcc_binary:
b5b8b0ac
AO
4172 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4173 if (!op1)
4174 return NULL_RTX;
26d83bcc
JJ
4175 switch (TREE_CODE (exp))
4176 {
4177 case LSHIFT_EXPR:
4178 case RSHIFT_EXPR:
4179 case LROTATE_EXPR:
4180 case RROTATE_EXPR:
4181 case WIDEN_LSHIFT_EXPR:
4182 /* Ensure second operand isn't wider than the first one. */
4183 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4184 if (SCALAR_INT_MODE_P (inner_mode))
4185 {
4186 machine_mode opmode = mode;
4187 if (VECTOR_MODE_P (mode))
4188 opmode = GET_MODE_INNER (mode);
4189 if (SCALAR_INT_MODE_P (opmode)
4190 && (GET_MODE_PRECISION (opmode)
4191 < GET_MODE_PRECISION (inner_mode)))
3403a1a9 4192 op1 = lowpart_subreg (opmode, op1, inner_mode);
26d83bcc
JJ
4193 }
4194 break;
4195 default:
4196 break;
4197 }
b5b8b0ac
AO
4198 /* Fall through. */
4199
4200 unary:
4201 case tcc_unary:
2ba172e0 4202 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
4203 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4204 if (!op0)
4205 return NULL_RTX;
4206 break;
4207
871dae34
AO
4208 case tcc_comparison:
4209 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4210 goto binary;
4211
b5b8b0ac
AO
4212 case tcc_type:
4213 case tcc_statement:
4214 gcc_unreachable ();
4215
4216 case tcc_constant:
4217 case tcc_exceptional:
4218 case tcc_declaration:
4219 case tcc_reference:
4220 case tcc_vl_exp:
4221 break;
4222 }
4223
4224 switch (TREE_CODE (exp))
4225 {
4226 case STRING_CST:
4227 if (!lookup_constant_def (exp))
4228 {
e1b243a8
JJ
4229 if (strlen (TREE_STRING_POINTER (exp)) + 1
4230 != (size_t) TREE_STRING_LENGTH (exp))
4231 return NULL_RTX;
b5b8b0ac
AO
4232 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4233 op0 = gen_rtx_MEM (BLKmode, op0);
4234 set_mem_attributes (op0, exp, 0);
4235 return op0;
4236 }
191816a3 4237 /* Fall through. */
b5b8b0ac
AO
4238
4239 case INTEGER_CST:
4240 case REAL_CST:
4241 case FIXED_CST:
4242 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4243 return op0;
4244
4245 case COMPLEX_CST:
4246 gcc_assert (COMPLEX_MODE_P (mode));
4247 op0 = expand_debug_expr (TREE_REALPART (exp));
b5b8b0ac 4248 op1 = expand_debug_expr (TREE_IMAGPART (exp));
b5b8b0ac
AO
4249 return gen_rtx_CONCAT (mode, op0, op1);
4250
0ca5af51
AO
4251 case DEBUG_EXPR_DECL:
4252 op0 = DECL_RTL_IF_SET (exp);
4253
4254 if (op0)
4255 return op0;
4256
4257 op0 = gen_rtx_DEBUG_EXPR (mode);
e4fb38bd 4258 DEBUG_EXPR_TREE_DECL (op0) = exp;
0ca5af51
AO
4259 SET_DECL_RTL (exp, op0);
4260
4261 return op0;
4262
b5b8b0ac
AO
4263 case VAR_DECL:
4264 case PARM_DECL:
4265 case FUNCTION_DECL:
4266 case LABEL_DECL:
4267 case CONST_DECL:
4268 case RESULT_DECL:
4269 op0 = DECL_RTL_IF_SET (exp);
4270
4271 /* This decl was probably optimized away. */
4272 if (!op0)
e1b243a8 4273 {
8813a647 4274 if (!VAR_P (exp)
e1b243a8
JJ
4275 || DECL_EXTERNAL (exp)
4276 || !TREE_STATIC (exp)
4277 || !DECL_NAME (exp)
0fba566c 4278 || DECL_HARD_REGISTER (exp)
7d5fc814 4279 || DECL_IN_CONSTANT_POOL (exp)
0fba566c 4280 || mode == VOIDmode)
e1b243a8
JJ
4281 return NULL;
4282
b1aa0655 4283 op0 = make_decl_rtl_for_debug (exp);
e1b243a8
JJ
4284 if (!MEM_P (op0)
4285 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4286 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4287 return NULL;
4288 }
4289 else
4290 op0 = copy_rtx (op0);
b5b8b0ac 4291
06796564 4292 if (GET_MODE (op0) == BLKmode
871dae34 4293 /* If op0 is not BLKmode, but mode is, adjust_mode
06796564
JJ
4294 below would ICE. While it is likely a FE bug,
4295 try to be robust here. See PR43166. */
132b4e82
JJ
4296 || mode == BLKmode
4297 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
b5b8b0ac
AO
4298 {
4299 gcc_assert (MEM_P (op0));
4300 op0 = adjust_address_nv (op0, mode, 0);
4301 return op0;
4302 }
4303
4304 /* Fall through. */
4305
4306 adjust_mode:
4307 case PAREN_EXPR:
625a9766 4308 CASE_CONVERT:
b5b8b0ac 4309 {
2ba172e0 4310 inner_mode = GET_MODE (op0);
b5b8b0ac
AO
4311
4312 if (mode == inner_mode)
4313 return op0;
4314
4315 if (inner_mode == VOIDmode)
4316 {
2a8e30fb
MM
4317 if (TREE_CODE (exp) == SSA_NAME)
4318 inner_mode = TYPE_MODE (TREE_TYPE (exp));
4319 else
4320 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
4321 if (mode == inner_mode)
4322 return op0;
4323 }
4324
4325 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4326 {
4327 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4328 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4329 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4330 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4331 else
4332 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4333 }
4334 else if (FLOAT_MODE_P (mode))
4335 {
2a8e30fb 4336 gcc_assert (TREE_CODE (exp) != SSA_NAME);
b5b8b0ac
AO
4337 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4338 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4339 else
4340 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4341 }
4342 else if (FLOAT_MODE_P (inner_mode))
4343 {
4344 if (unsignedp)
4345 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4346 else
4347 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4348 }
4349 else if (CONSTANT_P (op0)
69660a70 4350 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
3403a1a9 4351 op0 = lowpart_subreg (mode, op0, inner_mode);
cf4ef6f7 4352 else if (UNARY_CLASS_P (exp)
1b47fe3f
JJ
4353 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4354 : unsignedp)
2ba172e0 4355 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
b5b8b0ac 4356 else
2ba172e0 4357 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
b5b8b0ac
AO
4358
4359 return op0;
4360 }
4361
70f34814 4362 case MEM_REF:
71f3a3f5
JJ
4363 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4364 {
4365 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4366 TREE_OPERAND (exp, 0),
4367 TREE_OPERAND (exp, 1));
4368 if (newexp)
4369 return expand_debug_expr (newexp);
4370 }
4371 /* FALLTHROUGH */
b5b8b0ac 4372 case INDIRECT_REF:
0a81f074 4373 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
4374 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4375 if (!op0)
4376 return NULL;
4377
cb115041
JJ
4378 if (TREE_CODE (exp) == MEM_REF)
4379 {
583ac69c
JJ
4380 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4381 || (GET_CODE (op0) == PLUS
4382 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4383 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4384 Instead just use get_inner_reference. */
4385 goto component_ref;
4386
cb115041
JJ
4387 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4388 if (!op1 || !CONST_INT_P (op1))
4389 return NULL;
4390
0a81f074 4391 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
cb115041
JJ
4392 }
4393
a148c4b2 4394 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
b5b8b0ac 4395
f61c6f34
JJ
4396 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4397 op0, as);
4398 if (op0 == NULL_RTX)
4399 return NULL;
b5b8b0ac 4400
f61c6f34 4401 op0 = gen_rtx_MEM (mode, op0);
b5b8b0ac 4402 set_mem_attributes (op0, exp, 0);
71f3a3f5
JJ
4403 if (TREE_CODE (exp) == MEM_REF
4404 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4405 set_mem_expr (op0, NULL_TREE);
09e881c9 4406 set_mem_addr_space (op0, as);
b5b8b0ac
AO
4407
4408 return op0;
4409
4410 case TARGET_MEM_REF:
4d948885
RG
4411 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4412 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
b5b8b0ac
AO
4413 return NULL;
4414
4415 op0 = expand_debug_expr
4e25ca6b 4416 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
b5b8b0ac
AO
4417 if (!op0)
4418 return NULL;
4419
c168f180 4420 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
f61c6f34
JJ
4421 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4422 op0, as);
4423 if (op0 == NULL_RTX)
4424 return NULL;
b5b8b0ac
AO
4425
4426 op0 = gen_rtx_MEM (mode, op0);
4427
4428 set_mem_attributes (op0, exp, 0);
09e881c9 4429 set_mem_addr_space (op0, as);
b5b8b0ac
AO
4430
4431 return op0;
4432
583ac69c 4433 component_ref:
b5b8b0ac
AO
4434 case ARRAY_REF:
4435 case ARRAY_RANGE_REF:
4436 case COMPONENT_REF:
4437 case BIT_FIELD_REF:
4438 case REALPART_EXPR:
4439 case IMAGPART_EXPR:
4440 case VIEW_CONVERT_EXPR:
4441 {
ef4bddc2 4442 machine_mode mode1;
b5b8b0ac
AO
4443 HOST_WIDE_INT bitsize, bitpos;
4444 tree offset;
ee45a32d
EB
4445 int reversep, volatilep = 0;
4446 tree tem
4447 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
25b75a48 4448 &unsignedp, &reversep, &volatilep);
b5b8b0ac
AO
4449 rtx orig_op0;
4450
4f2a9af8
JJ
4451 if (bitsize == 0)
4452 return NULL;
4453
b5b8b0ac
AO
4454 orig_op0 = op0 = expand_debug_expr (tem);
4455
4456 if (!op0)
4457 return NULL;
4458
4459 if (offset)
4460 {
ef4bddc2 4461 machine_mode addrmode, offmode;
dda2da58 4462
aa847cc8
JJ
4463 if (!MEM_P (op0))
4464 return NULL;
b5b8b0ac 4465
dda2da58
AO
4466 op0 = XEXP (op0, 0);
4467 addrmode = GET_MODE (op0);
4468 if (addrmode == VOIDmode)
4469 addrmode = Pmode;
4470
b5b8b0ac
AO
4471 op1 = expand_debug_expr (offset);
4472 if (!op1)
4473 return NULL;
4474
dda2da58
AO
4475 offmode = GET_MODE (op1);
4476 if (offmode == VOIDmode)
4477 offmode = TYPE_MODE (TREE_TYPE (offset));
4478
4479 if (addrmode != offmode)
3403a1a9 4480 op1 = lowpart_subreg (addrmode, op1, offmode);
dda2da58
AO
4481
 4482	    /* Don't use offset_address here: we don't need a
 4483	       recognizable address, and we don't want to generate
 4484	       code.  */
2ba172e0
JJ
4485 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4486 op0, op1));
b5b8b0ac
AO
4487 }
4488
4489 if (MEM_P (op0))
4490 {
4f2a9af8
JJ
4491 if (mode1 == VOIDmode)
4492 /* Bitfield. */
4493 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
b5b8b0ac
AO
4494 if (bitpos >= BITS_PER_UNIT)
4495 {
4496 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4497 bitpos %= BITS_PER_UNIT;
4498 }
4499 else if (bitpos < 0)
4500 {
4f2a9af8
JJ
4501 HOST_WIDE_INT units
4502 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
e3abc83e 4503 op0 = adjust_address_nv (op0, mode1, -units);
b5b8b0ac
AO
4504 bitpos += units * BITS_PER_UNIT;
4505 }
4506 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4507 op0 = adjust_address_nv (op0, mode, 0);
4508 else if (GET_MODE (op0) != mode1)
4509 op0 = adjust_address_nv (op0, mode1, 0);
4510 else
4511 op0 = copy_rtx (op0);
4512 if (op0 == orig_op0)
4513 op0 = shallow_copy_rtx (op0);
4514 set_mem_attributes (op0, exp, 0);
4515 }
4516
4517 if (bitpos == 0 && mode == GET_MODE (op0))
4518 return op0;
4519
2d3fc6aa
JJ
4520 if (bitpos < 0)
4521 return NULL;
4522
88c04a5d
JJ
4523 if (GET_MODE (op0) == BLKmode)
4524 return NULL;
4525
b5b8b0ac
AO
4526 if ((bitpos % BITS_PER_UNIT) == 0
4527 && bitsize == GET_MODE_BITSIZE (mode1))
4528 {
ef4bddc2 4529 machine_mode opmode = GET_MODE (op0);
b5b8b0ac 4530
b5b8b0ac 4531 if (opmode == VOIDmode)
9712cba0 4532 opmode = TYPE_MODE (TREE_TYPE (tem));
b5b8b0ac
AO
4533
4534 /* This condition may hold if we're expanding the address
4535 right past the end of an array that turned out not to
4536 be addressable (i.e., the address was only computed in
4537 debug stmts). The gen_subreg below would rightfully
4538 crash, and the address doesn't really exist, so just
4539 drop it. */
4540 if (bitpos >= GET_MODE_BITSIZE (opmode))
4541 return NULL;
4542
7d5d39bb
JJ
4543 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4544 return simplify_gen_subreg (mode, op0, opmode,
4545 bitpos / BITS_PER_UNIT);
b5b8b0ac
AO
4546 }
4547
4548 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4549 && TYPE_UNSIGNED (TREE_TYPE (exp))
4550 ? SIGN_EXTRACT
4551 : ZERO_EXTRACT, mode,
4552 GET_MODE (op0) != VOIDmode
9712cba0
JJ
4553 ? GET_MODE (op0)
4554 : TYPE_MODE (TREE_TYPE (tem)),
b5b8b0ac
AO
4555 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4556 }
4557
b5b8b0ac 4558 case ABS_EXPR:
2ba172e0 4559 return simplify_gen_unary (ABS, mode, op0, mode);
b5b8b0ac
AO
4560
4561 case NEGATE_EXPR:
2ba172e0 4562 return simplify_gen_unary (NEG, mode, op0, mode);
b5b8b0ac
AO
4563
4564 case BIT_NOT_EXPR:
2ba172e0 4565 return simplify_gen_unary (NOT, mode, op0, mode);
b5b8b0ac
AO
4566
4567 case FLOAT_EXPR:
2ba172e0
JJ
4568 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4569 0)))
4570 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4571 inner_mode);
b5b8b0ac
AO
4572
4573 case FIX_TRUNC_EXPR:
2ba172e0
JJ
4574 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4575 inner_mode);
b5b8b0ac
AO
4576
4577 case POINTER_PLUS_EXPR:
576319a7
DD
4578 /* For the rare target where pointers are not the same size as
 4579	 size_t, we need to check for mismatched modes and correct
4580 the addend. */
4581 if (op0 && op1
4582 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4583 && GET_MODE (op0) != GET_MODE (op1))
4584 {
8369f38a
DD
4585 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
 4586	    /* If OP0 has a partial mode, then we must truncate, even if it has
 4587	       the same bitsize as OP1, since GCC's representation of partial
 4588	       modes is opaque.  */
4589 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4590 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
2ba172e0
JJ
4591 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4592 GET_MODE (op1));
576319a7
DD
4593 else
4594 /* We always sign-extend, regardless of the signedness of
4595 the operand, because the operand is always unsigned
4596 here even if the original C expression is signed. */
2ba172e0
JJ
4597 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4598 GET_MODE (op1));
576319a7
DD
4599 }
4600 /* Fall through. */
b5b8b0ac 4601 case PLUS_EXPR:
2ba172e0 4602 return simplify_gen_binary (PLUS, mode, op0, op1);
b5b8b0ac
AO
4603
4604 case MINUS_EXPR:
2ba172e0 4605 return simplify_gen_binary (MINUS, mode, op0, op1);
b5b8b0ac
AO
4606
4607 case MULT_EXPR:
2ba172e0 4608 return simplify_gen_binary (MULT, mode, op0, op1);
b5b8b0ac
AO
4609
4610 case RDIV_EXPR:
4611 case TRUNC_DIV_EXPR:
4612 case EXACT_DIV_EXPR:
4613 if (unsignedp)
2ba172e0 4614 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac 4615 else
2ba172e0 4616 return simplify_gen_binary (DIV, mode, op0, op1);
b5b8b0ac
AO
4617
4618 case TRUNC_MOD_EXPR:
2ba172e0 4619 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
b5b8b0ac
AO
4620
4621 case FLOOR_DIV_EXPR:
4622 if (unsignedp)
2ba172e0 4623 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac
AO
4624 else
4625 {
2ba172e0
JJ
4626 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4627 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4628 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0 4629 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4630 }
4631
4632 case FLOOR_MOD_EXPR:
4633 if (unsignedp)
2ba172e0 4634 return simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac
AO
4635 else
4636 {
2ba172e0 4637 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4638 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4639 adj = simplify_gen_unary (NEG, mode,
4640 simplify_gen_binary (MULT, mode, adj, op1),
4641 mode);
4642 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4643 }
4644
4645 case CEIL_DIV_EXPR:
4646 if (unsignedp)
4647 {
2ba172e0
JJ
4648 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4649 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4650 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0 4651 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4652 }
4653 else
4654 {
2ba172e0
JJ
4655 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4656 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4657 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0 4658 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4659 }
4660
4661 case CEIL_MOD_EXPR:
4662 if (unsignedp)
4663 {
2ba172e0 4664 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4665 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
4666 adj = simplify_gen_unary (NEG, mode,
4667 simplify_gen_binary (MULT, mode, adj, op1),
4668 mode);
4669 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4670 }
4671 else
4672 {
2ba172e0 4673 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4674 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4675 adj = simplify_gen_unary (NEG, mode,
4676 simplify_gen_binary (MULT, mode, adj, op1),
4677 mode);
4678 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4679 }
4680
4681 case ROUND_DIV_EXPR:
4682 if (unsignedp)
4683 {
2ba172e0
JJ
4684 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4685 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4686 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0 4687 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4688 }
4689 else
4690 {
2ba172e0
JJ
4691 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4692 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4693 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0 4694 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4695 }
4696
4697 case ROUND_MOD_EXPR:
4698 if (unsignedp)
4699 {
2ba172e0 4700 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4701 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
4702 adj = simplify_gen_unary (NEG, mode,
4703 simplify_gen_binary (MULT, mode, adj, op1),
4704 mode);
4705 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4706 }
4707 else
4708 {
2ba172e0 4709 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4710 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4711 adj = simplify_gen_unary (NEG, mode,
4712 simplify_gen_binary (MULT, mode, adj, op1),
4713 mode);
4714 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4715 }
4716
4717 case LSHIFT_EXPR:
2ba172e0 4718 return simplify_gen_binary (ASHIFT, mode, op0, op1);
b5b8b0ac
AO
4719
4720 case RSHIFT_EXPR:
4721 if (unsignedp)
2ba172e0 4722 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
b5b8b0ac 4723 else
2ba172e0 4724 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
b5b8b0ac
AO
4725
4726 case LROTATE_EXPR:
2ba172e0 4727 return simplify_gen_binary (ROTATE, mode, op0, op1);
b5b8b0ac
AO
4728
4729 case RROTATE_EXPR:
2ba172e0 4730 return simplify_gen_binary (ROTATERT, mode, op0, op1);
b5b8b0ac
AO
4731
4732 case MIN_EXPR:
2ba172e0 4733 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
b5b8b0ac
AO
4734
4735 case MAX_EXPR:
2ba172e0 4736 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
b5b8b0ac
AO
4737
4738 case BIT_AND_EXPR:
4739 case TRUTH_AND_EXPR:
2ba172e0 4740 return simplify_gen_binary (AND, mode, op0, op1);
b5b8b0ac
AO
4741
4742 case BIT_IOR_EXPR:
4743 case TRUTH_OR_EXPR:
2ba172e0 4744 return simplify_gen_binary (IOR, mode, op0, op1);
b5b8b0ac
AO
4745
4746 case BIT_XOR_EXPR:
4747 case TRUTH_XOR_EXPR:
2ba172e0 4748 return simplify_gen_binary (XOR, mode, op0, op1);
b5b8b0ac
AO
4749
4750 case TRUTH_ANDIF_EXPR:
4751 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4752
4753 case TRUTH_ORIF_EXPR:
4754 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4755
4756 case TRUTH_NOT_EXPR:
2ba172e0 4757 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
b5b8b0ac
AO
4758
4759 case LT_EXPR:
2ba172e0
JJ
4760 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4761 op0, op1);
b5b8b0ac
AO
4762
4763 case LE_EXPR:
2ba172e0
JJ
4764 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4765 op0, op1);
b5b8b0ac
AO
4766
4767 case GT_EXPR:
2ba172e0
JJ
4768 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4769 op0, op1);
b5b8b0ac
AO
4770
4771 case GE_EXPR:
2ba172e0
JJ
4772 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4773 op0, op1);
b5b8b0ac
AO
4774
4775 case EQ_EXPR:
2ba172e0 4776 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4777
4778 case NE_EXPR:
2ba172e0 4779 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4780
4781 case UNORDERED_EXPR:
2ba172e0 4782 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4783
4784 case ORDERED_EXPR:
2ba172e0 4785 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4786
4787 case UNLT_EXPR:
2ba172e0 4788 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4789
4790 case UNLE_EXPR:
2ba172e0 4791 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4792
4793 case UNGT_EXPR:
2ba172e0 4794 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4795
4796 case UNGE_EXPR:
2ba172e0 4797 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4798
4799 case UNEQ_EXPR:
2ba172e0 4800 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4801
4802 case LTGT_EXPR:
2ba172e0 4803 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4804
4805 case COND_EXPR:
4806 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4807
4808 case COMPLEX_EXPR:
4809 gcc_assert (COMPLEX_MODE_P (mode));
4810 if (GET_MODE (op0) == VOIDmode)
4811 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4812 if (GET_MODE (op1) == VOIDmode)
4813 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4814 return gen_rtx_CONCAT (mode, op0, op1);
4815
d02a5a4b
JJ
4816 case CONJ_EXPR:
4817 if (GET_CODE (op0) == CONCAT)
4818 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2ba172e0
JJ
4819 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4820 XEXP (op0, 1),
4821 GET_MODE_INNER (mode)));
d02a5a4b
JJ
4822 else
4823 {
ef4bddc2 4824 machine_mode imode = GET_MODE_INNER (mode);
d02a5a4b
JJ
4825 rtx re, im;
4826
4827 if (MEM_P (op0))
4828 {
4829 re = adjust_address_nv (op0, imode, 0);
4830 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4831 }
4832 else
4833 {
ef4bddc2
RS
4834 machine_mode ifmode = int_mode_for_mode (mode);
4835 machine_mode ihmode = int_mode_for_mode (imode);
d02a5a4b
JJ
4836 rtx halfsize;
4837 if (ifmode == BLKmode || ihmode == BLKmode)
4838 return NULL;
4839 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4840 re = op0;
4841 if (mode != ifmode)
4842 re = gen_rtx_SUBREG (ifmode, re, 0);
4843 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4844 if (imode != ihmode)
4845 re = gen_rtx_SUBREG (imode, re, 0);
4846 im = copy_rtx (op0);
4847 if (mode != ifmode)
4848 im = gen_rtx_SUBREG (ifmode, im, 0);
4849 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4850 if (imode != ihmode)
4851 im = gen_rtx_SUBREG (imode, im, 0);
4852 }
4853 im = gen_rtx_NEG (imode, im);
4854 return gen_rtx_CONCAT (mode, re, im);
4855 }
4856
b5b8b0ac
AO
4857 case ADDR_EXPR:
4858 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4859 if (!op0 || !MEM_P (op0))
c8a27c40
JJ
4860 {
4861 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4862 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4863 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
f8cca67b
JJ
4864 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4865 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
c8a27c40
JJ
4866 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4867
4868 if (handled_component_p (TREE_OPERAND (exp, 0)))
4869 {
4870 HOST_WIDE_INT bitoffset, bitsize, maxsize;
ee45a32d 4871 bool reverse;
c8a27c40 4872 tree decl
ee45a32d
EB
4873 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4874 &bitsize, &maxsize, &reverse);
8813a647 4875 if ((VAR_P (decl)
c8a27c40
JJ
4876 || TREE_CODE (decl) == PARM_DECL
4877 || TREE_CODE (decl) == RESULT_DECL)
f8cca67b
JJ
4878 && (!TREE_ADDRESSABLE (decl)
4879 || target_for_debug_bind (decl))
c8a27c40
JJ
4880 && (bitoffset % BITS_PER_UNIT) == 0
4881 && bitsize > 0
4882 && bitsize == maxsize)
0a81f074
RS
4883 {
4884 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4885 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4886 }
c8a27c40
JJ
4887 }
4888
9430b7ba
JJ
4889 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4890 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4891 == ADDR_EXPR)
4892 {
4893 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4894 0));
4895 if (op0 != NULL
4896 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4897 || (GET_CODE (op0) == PLUS
4898 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4899 && CONST_INT_P (XEXP (op0, 1)))))
4900 {
4901 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4902 1));
4903 if (!op1 || !CONST_INT_P (op1))
4904 return NULL;
4905
4906 return plus_constant (mode, op0, INTVAL (op1));
4907 }
4908 }
4909
c8a27c40
JJ
4910 return NULL;
4911 }
b5b8b0ac 4912
a148c4b2 4913 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
f61c6f34 4914 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
dda2da58
AO
4915
4916 return op0;
b5b8b0ac
AO
4917
4918 case VECTOR_CST:
d2a12ae7
RG
4919 {
4920 unsigned i;
4921
4922 op0 = gen_rtx_CONCATN
4923 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4924
4925 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4926 {
4927 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4928 if (!op1)
4929 return NULL;
4930 XVECEXP (op0, 0, i) = op1;
4931 }
4932
4933 return op0;
4934 }
b5b8b0ac
AO
4935
4936 case CONSTRUCTOR:
47598145
MM
4937 if (TREE_CLOBBER_P (exp))
4938 return NULL;
4939 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
b5b8b0ac
AO
4940 {
4941 unsigned i;
4942 tree val;
4943
4944 op0 = gen_rtx_CONCATN
4945 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4946
4947 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4948 {
4949 op1 = expand_debug_expr (val);
4950 if (!op1)
4951 return NULL;
4952 XVECEXP (op0, 0, i) = op1;
4953 }
4954
4955 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4956 {
4957 op1 = expand_debug_expr
e8160c9a 4958 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
b5b8b0ac
AO
4959
4960 if (!op1)
4961 return NULL;
4962
4963 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4964 XVECEXP (op0, 0, i) = op1;
4965 }
4966
4967 return op0;
4968 }
4969 else
4970 goto flag_unsupported;
4971
4972 case CALL_EXPR:
4973 /* ??? Maybe handle some builtins? */
4974 return NULL;
4975
4976 case SSA_NAME:
4977 {
355fe088 4978 gimple *g = get_gimple_for_ssa_name (exp);
2a8e30fb
MM
4979 if (g)
4980 {
dfde35b3
JJ
4981 tree t = NULL_TREE;
4982 if (deep_ter_debug_map)
4983 {
4984 tree *slot = deep_ter_debug_map->get (exp);
4985 if (slot)
4986 t = *slot;
4987 }
4988 if (t == NULL_TREE)
4989 t = gimple_assign_rhs_to_tree (g);
4990 op0 = expand_debug_expr (t);
2a8e30fb
MM
4991 if (!op0)
4992 return NULL;
4993 }
4994 else
4995 {
f11a7b6d
AO
 4996	    /* If this is a reference to the incoming value of a
 4997	       parameter that is never used in the code, or where that
 4998	       incoming value is never used in the code, use the
 4999	       PARM_DECL's DECL_RTL if set.  */
5000 if (SSA_NAME_IS_DEFAULT_DEF (exp)
5001 && SSA_NAME_VAR (exp)
5002 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5003 && has_zero_uses (exp))
5004 {
5005 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5006 if (op0)
5007 goto adjust_mode;
5008 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5009 if (op0)
5010 goto adjust_mode;
5011 }
5012
2a8e30fb 5013 int part = var_to_partition (SA.map, exp);
b5b8b0ac 5014
2a8e30fb 5015 if (part == NO_PARTITION)
f11a7b6d 5016 return NULL;
b5b8b0ac 5017
2a8e30fb 5018 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
b5b8b0ac 5019
abfea58d 5020 op0 = copy_rtx (SA.partition_to_pseudo[part]);
2a8e30fb 5021 }
b5b8b0ac
AO
5022 goto adjust_mode;
5023 }
5024
5025 case ERROR_MARK:
5026 return NULL;
5027
7ece48b1
JJ
 5028    /* Vector stuff.  For most of these tree codes there are no corresponding rtl codes.  */
5029 case REALIGN_LOAD_EXPR:
5030 case REDUC_MAX_EXPR:
5031 case REDUC_MIN_EXPR:
5032 case REDUC_PLUS_EXPR:
5033 case VEC_COND_EXPR:
7ece48b1
JJ
5034 case VEC_PACK_FIX_TRUNC_EXPR:
5035 case VEC_PACK_SAT_EXPR:
5036 case VEC_PACK_TRUNC_EXPR:
7ece48b1
JJ
5037 case VEC_UNPACK_FLOAT_HI_EXPR:
5038 case VEC_UNPACK_FLOAT_LO_EXPR:
5039 case VEC_UNPACK_HI_EXPR:
5040 case VEC_UNPACK_LO_EXPR:
5041 case VEC_WIDEN_MULT_HI_EXPR:
5042 case VEC_WIDEN_MULT_LO_EXPR:
3f30a9a6
RH
5043 case VEC_WIDEN_MULT_EVEN_EXPR:
5044 case VEC_WIDEN_MULT_ODD_EXPR:
36ba4aae
IR
5045 case VEC_WIDEN_LSHIFT_HI_EXPR:
5046 case VEC_WIDEN_LSHIFT_LO_EXPR:
3f3af9df 5047 case VEC_PERM_EXPR:
7ece48b1
JJ
5048 return NULL;
5049
98449720 5050 /* Misc codes. */
7ece48b1
JJ
5051 case ADDR_SPACE_CONVERT_EXPR:
5052 case FIXED_CONVERT_EXPR:
5053 case OBJ_TYPE_REF:
5054 case WITH_SIZE_EXPR:
483c6429 5055 case BIT_INSERT_EXPR:
7ece48b1
JJ
5056 return NULL;
5057
5058 case DOT_PROD_EXPR:
5059 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5060 && SCALAR_INT_MODE_P (mode))
5061 {
2ba172e0
JJ
5062 op0
5063 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5064 0)))
5065 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5066 inner_mode);
5067 op1
5068 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5069 1)))
5070 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5071 inner_mode);
5072 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5073 return simplify_gen_binary (PLUS, mode, op0, op2);
7ece48b1
JJ
5074 }
5075 return NULL;
5076
5077 case WIDEN_MULT_EXPR:
0354c0c7
BS
5078 case WIDEN_MULT_PLUS_EXPR:
5079 case WIDEN_MULT_MINUS_EXPR:
7ece48b1
JJ
5080 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5081 && SCALAR_INT_MODE_P (mode))
5082 {
2ba172e0 5083 inner_mode = GET_MODE (op0);
7ece48b1 5084 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5b58b39b 5085 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
7ece48b1 5086 else
5b58b39b 5087 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
7ece48b1 5088 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5b58b39b 5089 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
7ece48b1 5090 else
5b58b39b 5091 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
2ba172e0 5092 op0 = simplify_gen_binary (MULT, mode, op0, op1);
0354c0c7
BS
5093 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5094 return op0;
5095 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
2ba172e0 5096 return simplify_gen_binary (PLUS, mode, op0, op2);
0354c0c7 5097 else
2ba172e0 5098 return simplify_gen_binary (MINUS, mode, op2, op0);
7ece48b1
JJ
5099 }
5100 return NULL;
5101
98449720
RH
5102 case MULT_HIGHPART_EXPR:
5103 /* ??? Similar to the above. */
5104 return NULL;
5105
7ece48b1 5106 case WIDEN_SUM_EXPR:
3f3af9df 5107 case WIDEN_LSHIFT_EXPR:
7ece48b1
JJ
5108 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5109 && SCALAR_INT_MODE_P (mode))
5110 {
2ba172e0
JJ
5111 op0
5112 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5113 0)))
5114 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5115 inner_mode);
3f3af9df
JJ
5116 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5117 ? ASHIFT : PLUS, mode, op0, op1);
7ece48b1
JJ
5118 }
5119 return NULL;
5120
0f59b812 5121 case FMA_EXPR:
2ba172e0 5122 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
0f59b812 5123
b5b8b0ac
AO
5124 default:
5125 flag_unsupported:
b2b29377
MM
5126 if (flag_checking)
5127 {
5128 debug_tree (exp);
5129 gcc_unreachable ();
5130 }
b5b8b0ac 5131 return NULL;
b5b8b0ac
AO
5132 }
5133}

/* Return an RTX equivalent to the source bind value of the tree expression
   EXP.  */
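/* E.g. when an optimized clone of a function lost one of its parameters,
   the original PARM_DECL can still be listed in the clone's
   decl_debug_args table; returning a DEBUG_PARAMETER_REF for it lets the
   debug info refer to the otherwise lost incoming value.  */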

static rtx
expand_debug_source_expr (tree exp)
{
  rtx op0 = NULL_RTX;
  machine_mode mode = VOIDmode, inner_mode;

  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
      {
        mode = DECL_MODE (exp);
        op0 = expand_debug_parm_decl (exp);
        if (op0)
          break;
        /* See if this isn't an argument that has been completely
           optimized out.  */
        if (!DECL_RTL_SET_P (exp)
            && !DECL_INCOMING_RTL (exp)
            && DECL_ABSTRACT_ORIGIN (current_function_decl))
          {
            tree aexp = DECL_ORIGIN (exp);
            if (DECL_CONTEXT (aexp)
                == DECL_ABSTRACT_ORIGIN (current_function_decl))
              {
                vec<tree, va_gc> **debug_args;
                unsigned int ix;
                tree ddecl;
                debug_args = decl_debug_args_lookup (current_function_decl);
                if (debug_args != NULL)
                  {
                    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
                         ix += 2)
                      if (ddecl == aexp)
                        return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
                  }
              }
          }
        break;
      }
    default:
      break;
    }

  if (op0 == NULL_RTX)
    return NULL_RTX;

  inner_mode = GET_MODE (op0);
  if (mode == inner_mode)
    return op0;

  if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
    {
      if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
        op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
      else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
        op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
      else
        op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
    }
  else if (FLOAT_MODE_P (mode))
    gcc_unreachable ();
  else if (FLOAT_MODE_P (inner_mode))
    {
      if (TYPE_UNSIGNED (TREE_TYPE (exp)))
        op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
      else
        op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
    }
  else if (CONSTANT_P (op0)
           || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
    op0 = lowpart_subreg (mode, op0, inner_mode);
  else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
  else
    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);

  return op0;
}

/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
   Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
   deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */
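/* For instance, if a bind value nests operations five levels deep, the
   subexpression reached at depth 4 is split out into a fresh DEBUG_EXPR
   D, a bind "#DEBUG D => <subexpression>" is emitted before INSN, and D
   replaces the subexpression in INSN itself, so each individual location
   expression stays shallow.  */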

static void
avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
{
  rtx exp = *exp_p;

  if (exp == NULL_RTX)
    return;

  if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
    return;

  if (depth == 4)
    {
      /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
      rtx dval = make_debug_expr_from_rtl (exp);

      /* Emit a debug bind insn before INSN.  */
      rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
                                       DEBUG_EXPR_TREE_DECL (dval), exp,
                                       VAR_INIT_STATUS_INITIALIZED);

      emit_debug_insn_before (bind, insn);
      *exp_p = dval;
      return;
    }

  const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
  int i, j;
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
        break;

      case 'E':
      case 'V':
        for (j = 0; j < XVECLEN (exp, i); j++)
          avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
        break;

      default:
        break;
      }
}

/* Expand the _LOCs in debug insns.  We run this after expanding all
   regular insns, so that any variables referenced in the function
   will have their DECL_RTLs set.  */
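/* Roughly, a bind such as "#DEBUG x => y_1 + 1" becomes a DEBUG_INSN
   whose pattern is (var_location x (plus (reg y) (const_int 1))); binds
   whose value cannot be expressed in RTL get an unknown-location marker
   instead, so var-tracking knows the value is lost.  */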

static void
expand_debug_locations (void)
{
  rtx_insn *insn;
  rtx_insn *last = get_last_insn ();
  int save_strict_alias = flag_strict_aliasing;

  /* New alias sets while setting up memory attributes cause
     -fcompare-debug failures, even though they don't bring about any
     codegen changes.  */
  flag_strict_aliasing = 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (DEBUG_INSN_P (insn))
      {
        tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
        rtx val;
        rtx_insn *prev_insn, *insn2;
        machine_mode mode;

        if (value == NULL_TREE)
          val = NULL_RTX;
        else
          {
            if (INSN_VAR_LOCATION_STATUS (insn)
                == VAR_INIT_STATUS_UNINITIALIZED)
              val = expand_debug_source_expr (value);
            /* The avoid_deep_ter_for_debug function inserts
               debug bind stmts after SSA_NAME definition, with the
               SSA_NAME as the whole bind location.  Temporarily disable
               expansion of that SSA_NAME into the DEBUG_EXPR_DECL
               being defined in this DEBUG_INSN.  */
            else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
              {
                tree *slot = deep_ter_debug_map->get (value);
                if (slot)
                  {
                    if (*slot == INSN_VAR_LOCATION_DECL (insn))
                      *slot = NULL_TREE;
                    else
                      slot = NULL;
                  }
                val = expand_debug_expr (value);
                if (slot)
                  *slot = INSN_VAR_LOCATION_DECL (insn);
              }
            else
              val = expand_debug_expr (value);
            gcc_assert (last == get_last_insn ());
          }

        if (!val)
          val = gen_rtx_UNKNOWN_VAR_LOC ();
        else
          {
            mode = GET_MODE (INSN_VAR_LOCATION (insn));

            gcc_assert (mode == GET_MODE (val)
                        || (GET_MODE (val) == VOIDmode
                            && (CONST_SCALAR_INT_P (val)
                                || GET_CODE (val) == CONST_FIXED
                                || GET_CODE (val) == LABEL_REF)));
          }

        INSN_VAR_LOCATION_LOC (insn) = val;
        prev_insn = PREV_INSN (insn);
        for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
          avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
      }

  flag_strict_aliasing = save_strict_alias;
}

/* Swap the operands of commutative operations so that the more
   expensive operand is expanded first.  */
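/* For instance, in x_3 = a_1 + b_2 where the TERed definition chain of
   b_2 has a higher cost (per estimate_num_insns, accumulated below in
   LATTICE) than that of a_1, the statement is rewritten as
   x_3 = b_2 + a_1, so the costly subtree is expanded first; this can
   reduce register pressure during expansion.  */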

static void
reorder_operands (basic_block bb)
{
  unsigned int *lattice;  /* Hold cost of each statement.  */
  unsigned int i = 0, n = 0;
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple *stmt;
  bool swap;
  tree op0, op1;
  ssa_op_iter iter;
  use_operand_p use_p;
  gimple *def0, *def1;

  /* Compute cost of each statement using estimate_num_insns.  */
  stmts = bb_seq (bb);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (!is_gimple_debug (stmt))
        gimple_set_uid (stmt, n++);
    }
  lattice = XNEWVEC (unsigned int, n);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      unsigned cost;
      stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
        continue;
      cost = estimate_num_insns (stmt, &eni_size_weights);
      lattice[i] = cost;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
        {
          tree use = USE_FROM_PTR (use_p);
          gimple *def_stmt;
          if (TREE_CODE (use) != SSA_NAME)
            continue;
          def_stmt = get_gimple_for_ssa_name (use);
          if (!def_stmt)
            continue;
          lattice[i] += lattice[gimple_uid (def_stmt)];
        }
      i++;
      if (!is_gimple_assign (stmt)
          || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
        continue;
      op0 = gimple_op (stmt, 1);
      op1 = gimple_op (stmt, 2);
      if (TREE_CODE (op0) != SSA_NAME
          || TREE_CODE (op1) != SSA_NAME)
        continue;
      /* Swap operands if the second one is more expensive.  */
      def0 = get_gimple_for_ssa_name (op0);
      def1 = get_gimple_for_ssa_name (op1);
      if (!def1)
        continue;
      swap = false;
      if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
        swap = true;
      if (swap)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Swap operands in stmt:\n");
              print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
              fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
                       def0 ? lattice[gimple_uid (def0)] : 0,
                       lattice[gimple_uid (def1)]);
            }
          swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
                             gimple_assign_rhs2_ptr (stmt));
        }
    }
  XDELETE (lattice);
}

/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
{
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple *stmt = NULL;
  rtx_note *note;
  rtx_insn *last;
  edge e;
  edge_iterator ei;

  if (dump_file)
    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
             bb->index);

  /* Note that since we are now transitioning from GIMPLE to RTL, we
     cannot use the gsi_*_bb() routines because they expect the basic
     block to be in GIMPLE, instead of RTL.  Therefore, we need to
     access the BB sequence directly.  */
  if (optimize)
    reorder_operands (bb);
  stmts = bb_seq (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
  rtl_profile_for_bb (bb);
  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
  gsi = gsi_last (stmts);
  if (!gsi_end_p (gsi)
      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    {
      greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));

      if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
          && !gimple_return_retval (ret_stmt))
        {
          gsi_remove (&gsi, false);
          single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
        }
    }

  gsi = gsi_start (stmts);
  if (!gsi_end_p (gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        stmt = NULL;
    }

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);

  if (stmt || elt)
    {
      last = get_last_insn ();

      if (stmt)
        {
          expand_gimple_stmt (stmt);
          gsi_next (&gsi);
        }

      if (elt)
        emit_label (*elt);

      /* Java emits line number notes at the top of labels.
         ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
        BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_gimple_stmt (stmt, last);
    }
  else
    BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

5508
726a989a 5509 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 5510 {
cea49550 5511 basic_block new_bb;
242229bb 5512
b5b8b0ac 5513 stmt = gsi_stmt (gsi);
2a8e30fb
MM
5514
5515 /* If this statement is a non-debug one, and we generate debug
5516 insns, then this one might be the last real use of a TERed
5517 SSA_NAME, but where there are still some debug uses further
5518 down. Expanding the current SSA name in such further debug
5519 uses by their RHS might lead to wrong debug info, as coalescing
5520 might make the operands of such RHS be placed into the same
5521 pseudo as something else. Like so:
5522 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5523 use(a_1);
5524 a_2 = ...
5525 #DEBUG ... => a_1
5526 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5527 If we now would expand a_1 by it's RHS (a_0 + 1) in the debug use,
5528 the write to a_2 would actually have clobbered the place which
5529 formerly held a_0.
5530
5531 So, instead of that, we recognize the situation, and generate
5532 debug temporaries at the last real use of TERed SSA names:
5533 a_1 = a_0 + 1;
5534 #DEBUG #D1 => a_1
5535 use(a_1);
5536 a_2 = ...
5537 #DEBUG ... => #D1
5538 */
5539 if (MAY_HAVE_DEBUG_INSNS
5540 && SA.values
5541 && !is_gimple_debug (stmt))
5542 {
5543 ssa_op_iter iter;
5544 tree op;
355fe088 5545 gimple *def;
2a8e30fb 5546
5368224f 5547 location_t sloc = curr_insn_location ();
2a8e30fb
MM
5548
5549 /* Look for SSA names that have their last use here (TERed
5550 names always have only one real use). */
5551 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5552 if ((def = get_gimple_for_ssa_name (op)))
5553 {
5554 imm_use_iterator imm_iter;
5555 use_operand_p use_p;
5556 bool have_debug_uses = false;
5557
5558 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5559 {
5560 if (gimple_debug_bind_p (USE_STMT (use_p)))
5561 {
5562 have_debug_uses = true;
5563 break;
5564 }
5565 }
5566
5567 if (have_debug_uses)
5568 {
871dae34 5569 /* OP is a TERed SSA name, with DEF its defining
2a8e30fb
MM
5570 statement, and where OP is used in further debug
5571 instructions. Generate a debug temporary, and
5572 replace all uses of OP in debug insns with that
5573 temporary. */
355fe088 5574 gimple *debugstmt;
2a8e30fb
MM
5575 tree value = gimple_assign_rhs_to_tree (def);
5576 tree vexpr = make_node (DEBUG_EXPR_DECL);
5577 rtx val;
ef4bddc2 5578 machine_mode mode;
2a8e30fb 5579
5368224f 5580 set_curr_insn_location (gimple_location (def));
2a8e30fb
MM
5581
5582 DECL_ARTIFICIAL (vexpr) = 1;
5583 TREE_TYPE (vexpr) = TREE_TYPE (value);
5584 if (DECL_P (value))
5585 mode = DECL_MODE (value);
5586 else
5587 mode = TYPE_MODE (TREE_TYPE (value));
899ca90e 5588 SET_DECL_MODE (vexpr, mode);
2a8e30fb
MM
5589
5590 val = gen_rtx_VAR_LOCATION
5591 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5592
e8c6bb74 5593 emit_debug_insn (val);
2a8e30fb
MM
5594
5595 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5596 {
5597 if (!gimple_debug_bind_p (debugstmt))
5598 continue;
5599
5600 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5601 SET_USE (use_p, vexpr);
5602
5603 update_stmt (debugstmt);
5604 }
5605 }
5606 }
5368224f 5607 set_curr_insn_location (sloc);
2a8e30fb
MM
5608 }
5609
a5883ba0 5610 currently_expanding_gimple_stmt = stmt;
b5b8b0ac 5611
242229bb
JH
5612 /* Expand this statement, then evaluate the resulting RTL and
5613 fixup the CFG accordingly. */
726a989a 5614 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 5615 {
538dd0b7 5616 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
cea49550
RH
5617 if (new_bb)
5618 return new_bb;
5619 }
b5b8b0ac
AO
5620 else if (gimple_debug_bind_p (stmt))
5621 {
5368224f 5622 location_t sloc = curr_insn_location ();
b5b8b0ac
AO
5623 gimple_stmt_iterator nsi = gsi;
5624
5625 for (;;)
5626 {
5627 tree var = gimple_debug_bind_get_var (stmt);
5628 tree value;
5629 rtx val;
ef4bddc2 5630 machine_mode mode;
b5b8b0ac 5631
ec8c1492
JJ
5632 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5633 && TREE_CODE (var) != LABEL_DECL
5634 && !target_for_debug_bind (var))
5635 goto delink_debug_stmt;
5636
b5b8b0ac
AO
5637 if (gimple_debug_bind_has_value_p (stmt))
5638 value = gimple_debug_bind_get_value (stmt);
5639 else
5640 value = NULL_TREE;
5641
5642 last = get_last_insn ();
5643
5368224f 5644 set_curr_insn_location (gimple_location (stmt));
b5b8b0ac
AO
5645
5646 if (DECL_P (var))
5647 mode = DECL_MODE (var);
5648 else
5649 mode = TYPE_MODE (TREE_TYPE (var));
5650
5651 val = gen_rtx_VAR_LOCATION
5652 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5653
e16b6fd0 5654 emit_debug_insn (val);
b5b8b0ac
AO
5655
5656 if (dump_file && (dump_flags & TDF_DETAILS))
5657 {
5658 /* We can't dump the insn with a TREE where an RTX
5659 is expected. */
e8c6bb74 5660 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
b5b8b0ac 5661 maybe_dump_rtl_for_gimple_stmt (stmt, last);
e8c6bb74 5662 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
b5b8b0ac
AO
5663 }
5664
ec8c1492 5665 delink_debug_stmt:
2a8e30fb
MM
5666 /* In order not to generate too many debug temporaries,
5667 we delink all uses of debug statements we already expanded.
5668 Therefore debug statements between definition and real
5669 use of TERed SSA names will continue to use the SSA name,
5670 and not be replaced with debug temps. */
5671 delink_stmt_imm_use (stmt);
5672
b5b8b0ac
AO
5673 gsi = nsi;
5674 gsi_next (&nsi);
5675 if (gsi_end_p (nsi))
5676 break;
5677 stmt = gsi_stmt (nsi);
5678 if (!gimple_debug_bind_p (stmt))
5679 break;
5680 }
5681
5368224f 5682 set_curr_insn_location (sloc);
ddb555ed
JJ
5683 }
5684 else if (gimple_debug_source_bind_p (stmt))
5685 {
5368224f 5686 location_t sloc = curr_insn_location ();
ddb555ed
JJ
5687 tree var = gimple_debug_source_bind_get_var (stmt);
5688 tree value = gimple_debug_source_bind_get_value (stmt);
5689 rtx val;
ef4bddc2 5690 machine_mode mode;
ddb555ed
JJ
5691
5692 last = get_last_insn ();
5693
5368224f 5694 set_curr_insn_location (gimple_location (stmt));
ddb555ed
JJ
5695
5696 mode = DECL_MODE (var);
5697
5698 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5699 VAR_INIT_STATUS_UNINITIALIZED);
5700
5701 emit_debug_insn (val);
5702
5703 if (dump_file && (dump_flags & TDF_DETAILS))
5704 {
5705 /* We can't dump the insn with a TREE where an RTX
5706 is expected. */
5707 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5708 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5709 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5710 }
5711
5368224f 5712 set_curr_insn_location (sloc);
b5b8b0ac 5713 }
80c7a9eb 5714 else
242229bb 5715 {
538dd0b7
DM
5716 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5717 if (call_stmt
5718 && gimple_call_tail_p (call_stmt)
f3ddd692 5719 && disable_tail_calls)
538dd0b7 5720 gimple_call_set_tail (call_stmt, false);
f3ddd692 5721
538dd0b7 5722 if (call_stmt && gimple_call_tail_p (call_stmt))
cea49550
RH
5723 {
5724 bool can_fallthru;
538dd0b7 5725 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
cea49550
RH
5726 if (new_bb)
5727 {
5728 if (can_fallthru)
5729 bb = new_bb;
5730 else
5731 return new_bb;
5732 }
5733 }
4d7a65ea 5734 else
b7211528 5735 {
4e3825db 5736 def_operand_p def_p;
4e3825db
MM
5737 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5738
5739 if (def_p != NULL)
5740 {
5741 /* Ignore this stmt if it is in the list of
5742 replaceable expressions. */
5743 if (SA.values
b8698a0f 5744 && bitmap_bit_p (SA.values,
e97809c6 5745 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4e3825db
MM
5746 continue;
5747 }
28ed065e 5748 last = expand_gimple_stmt (stmt);
726a989a 5749 maybe_dump_rtl_for_gimple_stmt (stmt, last);
b7211528 5750 }
242229bb
JH
5751 }
5752 }
5753
a5883ba0
MM
5754 currently_expanding_gimple_stmt = NULL;
5755
7241571e 5756 /* Expand implicit goto and convert goto_locus. */
a9b77cd1
ZD
5757 FOR_EACH_EDGE (e, ei, bb->succs)
5758 {
2f13f2de 5759 if (e->goto_locus != UNKNOWN_LOCATION)
5368224f 5760 set_curr_insn_location (e->goto_locus);
7241571e
JJ
5761 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5762 {
5763 emit_jump (label_rtx_for_bb (e->dest));
5764 e->flags &= ~EDGE_FALLTHRU;
5765 }
a9b77cd1
ZD
5766 }
5767
ae761c45
AH
5768 /* Expanded RTL can create a jump in the last instruction of block.
5769 This later might be assumed to be a jump to successor and break edge insertion.
5770 We need to insert dummy move to prevent this. PR41440. */
5771 if (single_succ_p (bb)
5772 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5773 && (last = get_last_insn ())
4dbebf6f
AO
5774 && (JUMP_P (last)
5775 || (DEBUG_INSN_P (last)
5776 && JUMP_P (prev_nondebug_insn (last)))))
ae761c45
AH
5777 {
5778 rtx dummy = gen_reg_rtx (SImode);
5779 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5780 }
5781
242229bb
JH
5782 do_pending_stack_adjust ();
5783
3f117656 5784 /* Find the block tail. The last insn in the block is the insn
242229bb
JH
5785 before a barrier and/or table jump insn. */
5786 last = get_last_insn ();
4b4bf941 5787 if (BARRIER_P (last))
242229bb
JH
5788 last = PREV_INSN (last);
5789 if (JUMP_TABLE_DATA_P (last))
5790 last = PREV_INSN (PREV_INSN (last));
1130d5e3 5791 BB_END (bb) = last;
caf93cb0 5792
242229bb 5793 update_bb_for_insn (bb);
80c7a9eb 5794
242229bb
JH
5795 return bb;
5796}


/* Create a basic block for initialization code.  */
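/* Roughly, the init block collects everything emitted so far, i.e. the
   parameter setup from expand_function_start and the variable expansion
   sequence, and wires it in between the ENTRY block and the first user
   basic block.  */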

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);

  /* When the entry edge points to the first basic block, we don't need a
     jump, otherwise we have to jump into the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
    {
      tree label = gimple_block_label (e->dest);

      emit_jump (jump_target_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
                                   get_last_insn (),
                                   ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
  init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_single_succ_edge (init_block, first_block, flags);
    }
  else
    e = make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
                               EDGE_FALLTHRU);

  update_bb_for_insn (init_block);
  return init_block;
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */
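/* E.g. the function's outermost scope gets depth 0, a scope nested
   directly inside it gets depth 1, and so on; change_scope later uses
   these numbers to find the common ancestor of two scopes cheaply.  */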

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
5863/* Create a block containing landing pads and similar stuff. */
5864
5865static void
5866construct_exit_block (void)
5867{
b47aae36
DM
5868 rtx_insn *head = get_last_insn ();
5869 rtx_insn *end;
242229bb 5870 basic_block exit_block;
628f6a4e
BE
5871 edge e, e2;
5872 unsigned ix;
5873 edge_iterator ei;
79c7fda6 5874 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
b47aae36 5875 rtx_insn *orig_end = BB_END (prev_bb);
242229bb 5876
fefa31b5 5877 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
bf08ebeb 5878
caf93cb0 5879 /* Make sure the locus is set to the end of the function, so that
242229bb 5880 epilogue line numbers and warnings are set properly. */
2f13f2de 5881 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
242229bb
JH
5882 input_location = cfun->function_end_locus;
5883
242229bb
JH
5884 /* Generate rtl for function exit. */
5885 expand_function_end ();
5886
5887 end = get_last_insn ();
5888 if (head == end)
5889 return;
79c7fda6
JJ
5890 /* While emitting the function end we could move end of the last basic
5891 block. */
1130d5e3 5892 BB_END (prev_bb) = orig_end;
4b4bf941 5893 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 5894 head = NEXT_INSN (head);
79c7fda6
JJ
5895 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5896 bb frequency counting will be confused. Any instructions before that
5897 label are emitted for the case where PREV_BB falls through into the
5898 exit block, so append those instructions to prev_bb in that case. */
5899 if (NEXT_INSN (head) != return_label)
5900 {
5901 while (NEXT_INSN (head) != return_label)
5902 {
5903 if (!NOTE_P (NEXT_INSN (head)))
1130d5e3 5904 BB_END (prev_bb) = NEXT_INSN (head);
79c7fda6
JJ
5905 head = NEXT_INSN (head);
5906 }
5907 }
5908 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
fefa31b5
DM
5909 exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5910 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
726338f4 5911 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
628f6a4e
BE
5912
5913 ix = 0;
fefa31b5 5914 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
242229bb 5915 {
fefa31b5 5916 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
242229bb 5917 if (!(e->flags & EDGE_ABNORMAL))
628f6a4e
BE
5918 redirect_edge_succ (e, exit_block);
5919 else
5920 ix++;
242229bb 5921 }
628f6a4e 5922
357067f2
JH
5923 e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5924 EDGE_FALLTHRU);
fefa31b5 5925 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
242229bb
JH
5926 if (e2 != e)
5927 {
c22cacf3 5928 e->count -= e2->count;
242229bb
JH
5929 exit_block->count -= e2->count;
5930 exit_block->frequency -= EDGE_FREQUENCY (e2);
5931 }
242229bb
JH
5932 if (exit_block->frequency < 0)
5933 exit_block->frequency = 0;
5934 update_bb_for_insn (exit_block);
5935}

/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
                                   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
              && is_gimple_min_invariant (TREE_OPERAND (t, 1))
              && (!TREE_OPERAND (t, 2)
                  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || (TREE_CODE (t) == COMPONENT_REF
                 && (!TREE_OPERAND (t, 2)
                     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || TREE_CODE (t) == BIT_FIELD_REF
             || TREE_CODE (t) == REALPART_EXPR
             || TREE_CODE (t) == IMAGPART_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || CONVERT_EXPR_P (t))
        t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          t = get_base_address (t);
          if (t && DECL_P (t)
              && DECL_MODE (t) != BLKmode)
            TREE_ADDRESSABLE (t) = 1;
        }

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */
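/* For instance, a small array like "int v[2]" may otherwise be promoted
   to a single wide pseudo register; an access "v[i]" with non-constant I
   cannot be expressed as RTL on a register, so marking V addressable
   forces it into a stack slot that can be indexed.  */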

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple *stmt = gsi_stmt (gsi);
        if (!is_gimple_debug (stmt))
          walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}

/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  The local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */
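/* In short, DRAP (the dynamic realign argument pointer) is needed when
   the prologue must realign the stack: incoming arguments can then no
   longer be addressed from the realigned stack pointer, so a separate
   register is set aside to point at them.  */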

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (cfun->can_throw_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
              <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
         needed.  */
      fixup_tail_calls ();
    }
}

static void
expand_main_function (void)
{
#if (defined(INVOKE__main)			\
     || (!defined(HAS_INIT_SECTION)		\
	 && !defined(INIT_SECTION_ASM_OP)	\
	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}

/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */
DN
6088static void
6089stack_protect_prologue (void)
6090{
6091 tree guard_decl = targetm.stack_protect_guard ();
6092 rtx x, y;
6093
6094 x = expand_normal (crtl->stack_protect_guard);
1202f33e
JJ
6095 if (guard_decl)
6096 y = expand_normal (guard_decl);
6097 else
6098 y = const0_rtx;
862d0b35
DN
6099
6100 /* Allow the target to copy from Y to X without leaking Y into a
6101 register. */
c65aa042
RS
6102 if (targetm.have_stack_protect_set ())
6103 if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6104 {
6105 emit_insn (insn);
6106 return;
6107 }
862d0b35
DN
6108
6109 /* Otherwise do a straight move. */
6110 emit_move_insn (x, y);
6111}
2e3f842f 6112
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful not to manipulate the CFG during
   expansion.  */
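/* As a rough picture of the transformation, a GIMPLE assignment such as
     a_2 = b_1 + c_3;
   becomes an insn along the lines of
     (set (reg:SI 62 [ a ]) (plus:SI (reg:SI 60 [ b ]) (reg:SI 61 [ c ])))
   with SSA partitions mapped to pseudo registers via
   SA.partition_to_pseudo.  */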

namespace {

const pass_data pass_data_expand =
{
  RTL_PASS, /* type */
  "expand", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_EXPAND, /* tv_id */
  ( PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx
    | PROP_gimple_lvec
    | PROP_gimple_lva), /* properties_required */
  PROP_rtl, /* properties_provided */
  ( PROP_ssa | PROP_trees ), /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_expand : public rtl_opt_pass
{
public:
  pass_expand (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_expand, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_expand

unsigned int
pass_expand::execute (function *fun)
{
  basic_block bb, init_block;
  edge_iterator ei;
  edge e;
  rtx_insn *var_seq, *var_ret_seq;
  unsigned i;

  timevar_push (TV_OUT_OF_SSA);
  rewrite_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);
  SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);

  if (MAY_HAVE_DEBUG_STMTS && flag_tree_ter)
    {
      gimple_stmt_iterator gsi;
      FOR_EACH_BB_FN (bb, cfun)
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          if (gimple_debug_bind_p (gsi_stmt (gsi)))
            avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
    }

  /* Make sure all values used by the optimization passes have sane
     defaults.  */
  reg_renumber = 0;

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;
  /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
  free_dominance_info (CDI_DOMINATORS);

  rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));

  if (chkp_function_instrumented_p (current_function_decl))
    chkp_reset_rtl_bounds ();

  insn_locations_init ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
        set_curr_insn_location
          (DECL_SOURCE_LOCATION (current_function_decl));
      else
        set_curr_insn_location (fun->function_start_locus);
    }
  else
    set_curr_insn_location (UNKNOWN_LOCATION);
  prologue_location = curr_insn_location ();

#ifdef INSN_SCHEDULING
  init_sched_attrs ();
#endif

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->init_stack_alignment ();
  fun->cfg->max_jumptable_ents = 0;

  /* Resolve the function section.  Some targets, like ARM EABI, rely on
     knowledge of the function section at expansion time to predict the
     distance of calls.  */
  resolve_unique_section (current_function_decl, 0, flag_function_sections);

  /* Expand the variables recorded during gimple lowering.  */
  timevar_push (TV_VAR_EXPAND);
  start_sequence ();

  var_ret_seq = expand_used_vars ();

  var_seq = get_insns ();
  end_sequence ();
  timevar_pop (TV_VAR_EXPAND);

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (fun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting local variables: "
                 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting function: "
                 "all local arrays are less than %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
  if (var_seq)
    {
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
         before parm_birth_insn.  We've just inserted an alloca call.
         Adjust the pointer to match.  */
      parm_birth_insn = var_seq;
    }

  /* Now propagate the RTL assignment of each partition to the
     underlying var of each SSA_NAME.  */
  tree name;

  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      /* We might have generated new SSA names in
         update_alias_info_with_stack_vars.  They will have a NULL
         defining statement, and won't be part of the partitioning,
         so ignore those.  */
      if (!SSA_NAME_DEF_STMT (name))
        continue;

      adjust_one_expanded_partition_var (name);
    }

  /* Clean up RTL of variables that straddle across multiple
     partitions, and check that the rtl of any PARM_DECLs that are not
     cleaned up is that of their default defs.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      int part;

      /* We might have generated new SSA names in
         update_alias_info_with_stack_vars.  They will have a NULL
         defining statement, and won't be part of the partitioning,
         so ignore those.  */
      if (!SSA_NAME_DEF_STMT (name))
        continue;
      part = var_to_partition (SA.map, name);
      if (part == NO_PARTITION)
        continue;

      /* If this decl was marked as living in multiple places, reset
         this now to NULL.  */
      tree var = SSA_NAME_VAR (name);
      if (var && DECL_RTL_IF_SET (var) == pc_rtx)
        SET_DECL_RTL (var, NULL);
      /* Check that the pseudos chosen by assign_parms are those of
         the corresponding default defs.  */
      else if (SSA_NAME_IS_DEFAULT_DEF (name)
               && (TREE_CODE (var) == PARM_DECL
                   || TREE_CODE (var) == RESULT_DECL))
        {
          rtx in = DECL_RTL_IF_SET (var);
          gcc_assert (in);
          rtx out = SA.partition_to_pseudo[part];
          gcc_assert (in == out);

          /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
             those expected by debug backends for each parm and for
             the result.  This is particularly important for stabs,
             whose register elimination from parm's DECL_RTL may cause
             -fcompare-debug differences as SET_DECL_RTL changes reg's
             attrs.  So, make sure the RTL already has the parm as the
             EXPR, so that it won't change.  */
          SET_DECL_RTL (var, NULL_RTX);
          if (MEM_P (in))
            set_mem_attributes (in, var, true);
          SET_DECL_RTL (var, in);
        }
    }

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
    stack_protect_prologue ();

  expand_phi_nodes (&SA);

  /* Release any stale SSA redirection data.  */
  redirect_edge_var_map_empty ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
                  next_bb)
    bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  if (deep_ter_debug_map)
    {
      delete deep_ter_debug_map;
      deep_ter_debug_map = NULL;
    }

  /* Free stuff we no longer need after GIMPLE optimizations.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  delete_tree_cfg_annotations (fun);

  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  fun->gimple_df->in_ssa_p = false;
  loops_state_clear (LOOP_CLOSED_SSA);

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  delete lab_rtx_for_bb;
  free_histograms (fun);

  construct_exit_block ();
  insn_locations_finalize ();

  if (var_ret_seq)
    {
      rtx_insn *after = return_label;
      rtx_insn *next = NEXT_INSN (after);
      if (next && NOTE_INSN_BASIC_BLOCK_P (next))
        after = next;
      emit_insn_after (var_ret_seq, after);
    }

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (fun, NULL);

  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
     split edges which edge insertions might do.  */
  rebuild_jump_labels (get_insns ());

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
                  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->insns.r)
            {
              rebuild_jump_labels_chain (e->insns.r);
              /* Put insns after parm birth, but before
                 NOTE_INSN_FUNCTION_BEG.  */
              if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
                  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
                {
                  rtx_insn *insns = e->insns.r;
                  e->insns.r = NULL;
                  if (NOTE_P (parm_birth_insn)
                      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
                    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
                  else
                    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
                }
              else
                commit_one_edge_insertion (e);
            }
          else
            ei_next (&ei);
        }
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  flush_mark_addressable_queue ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
                  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
          e->flags &= ~EDGE_EXECUTABLE;

          /* At the moment not all abnormal edges match the RTL
             representation.  It is safe to remove them here as
             find_many_sub_basic_blocks will rediscover them.
             In the future we should get this fixed properly.  */
          if ((e->flags & EDGE_ABNORMAL)
              && !(e->flags & EDGE_SIBCALL))
            remove_edge (e);
          else
            ei_next (&ei);
        }
    }

  auto_sbitmap blocks (last_basic_block_for_fn (fun));
  bitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();

  expand_stack_alignment ();

  /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
     function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();

  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (fun->eh->region_tree != NULL)
    finish_eh_generation ();

  /* Remove unreachable blocks, otherwise we cannot compute dominators
     which are needed for loop state verification.  As a side-effect
     this also compacts blocks.
     ??? We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);

  checking_verify_flow_info ();

  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();

  /* And finally unshare all RTL.  */
  unshare_all_rtl ();

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (fun->gimple_df->tm_restart)
    fun->gimple_df->tm_restart = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (fun->decl), 0);
  default_rtl_profile ();

  /* For -dx discard loops now, otherwise IL verify in clean_state will
     ICE.  */
  if (rtl_dump_and_exit)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_finalize ();
    }

  timevar_pop (TV_POST_EXPAND);

  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_expand (gcc::context *ctxt)
{
  return new pass_expand (ctxt);
}