/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "tm_p.h"
#include "ssa.h"
#include "optabs.h"
#include "regs.h" /* For reg_renumber.  */
#include "emit-rtl.h"
#include "recog.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "varasm.h"
#include "stor-layout.h"
#include "stmt.h"
#include "print-tree.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-expr.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "cfgloop.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "asan.h"
#include "tree-ssa-address.h"
#include "output.h"
#include "builtins.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple *currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);

static bool defer_stack_allocation (tree, bool);

static void record_alignment_for_reg_var (unsigned int);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple *stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Choose either CUR or NEXT as the leader DECL for a partition.
   Prefer ignored decls, to simplify debug dumps and reduce the
   ambiguity that arises when the same user variable ends up in
   multiple partitions (this is less likely for compiler-introduced
   temps).  */

static tree
leader_merge (tree cur, tree next)
{
  if (cur == NULL || cur == next)
    return next;

  if (DECL_P (cur) && DECL_IGNORED_P (cur))
    return cur;

  if (DECL_P (next) && DECL_IGNORED_P (next))
    return next;

  return cur;
}

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  gcc_checking_assert (!x
                       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
                       || (use_register_for_decl (t)
                           ? (REG_P (x)
                              || (GET_CODE (x) == CONCAT
                                  && (REG_P (XEXP (x, 0))
                                      || SUBREG_P (XEXP (x, 0)))
                                  && (REG_P (XEXP (x, 1))
                                      || SUBREG_P (XEXP (x, 1))))
                              /* We need to accept PARALLELs for RESULT_DECLs
                                 because of vector types with BLKmode returned
                                 in multiple registers, but they are supposed
                                 to be uncoalesced.  */
                              || (GET_CODE (x) == PARALLEL
                                  && SSAVAR (t)
                                  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                                  && (GET_MODE (x) == BLKmode
                                      || !flag_tree_coalesce_vars)))
                           : (MEM_P (x) || x == pc_rtx
                              || (GET_CODE (x) == CONCAT
                                  && MEM_P (XEXP (x, 0))
                                  && MEM_P (XEXP (x, 1))))));
  /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
     RESULT_DECLs has the expected mode.  For memory, we accept
     unpromoted modes, since that's what we're likely to get.  For
     PARM_DECLs and RESULT_DECLs, we'll have been called by
     set_parm_rtl, which will give us the default def, so we don't
     have to compute it ourselves.  For RESULT_DECLs, we accept mode
     mismatches too, as long as we have BLKmode or are not coalescing
     across variables, so that we don't reject BLKmode PARALLELs or
     unpromoted REGs.  */
  gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
                       || (SSAVAR (t)
                           && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                           && (promote_ssa_mode (t, NULL) == BLKmode
                               || !flag_tree_coalesce_vars))
                       || !use_register_for_decl (t)
                       || GET_MODE (x) == promote_ssa_mode (t, NULL));

  if (x)
    {
      bool skip = false;
      tree cur = NULL_TREE;
      rtx xm = x;

    retry:
      if (MEM_P (xm))
        cur = MEM_EXPR (xm);
      else if (REG_P (xm))
        cur = REG_EXPR (xm);
      else if (SUBREG_P (xm))
        {
          gcc_assert (subreg_lowpart_p (xm));
          xm = SUBREG_REG (xm);
          goto retry;
        }
      else if (GET_CODE (xm) == CONCAT)
        {
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (GET_CODE (xm) == PARALLEL)
        {
          xm = XVECEXP (xm, 0, 0);
          gcc_assert (GET_CODE (xm) == EXPR_LIST);
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (xm == pc_rtx)
        skip = true;
      else
        gcc_unreachable ();

      tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);

      if (cur != next)
        {
          if (MEM_P (x))
            set_mem_attributes (x,
                                next && TREE_CODE (next) == SSA_NAME
                                ? TREE_TYPE (next)
                                : next, true);
          else
            set_reg_attrs_for_decl_rtl (next, x);
        }
    }

  if (TREE_CODE (t) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, t);
      if (part != NO_PARTITION)
        {
          if (SA.partition_to_pseudo[part])
            gcc_assert (SA.partition_to_pseudo[part] == x);
          else if (x != pc_rtx)
            SA.partition_to_pseudo[part] = x;
        }
      /* For the benefit of debug information at -O0 (where
         vartracking doesn't run) record the place also in the base
         DECL.  For PARMs and RESULTs, do so only when setting the
         default def.  */
      if (x && x != pc_rtx && SSA_NAME_VAR (t)
          && (VAR_P (SSA_NAME_VAR (t))
              || SSA_NAME_IS_DEFAULT_DEF (t)))
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

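/* Illustrative note (not part of the original sources): for an SSA name
   N in partition P expanded to pseudo R, set_rtl (N, R) records R in
   SA.partition_to_pseudo[P]; at -O0, where var-tracking does not run,
   it also records R in DECL_RTL of N's base variable for debug info,
   falling back to pc_rtx ("multiple places") when different partitions
   of the same base variable end up in different places.  */
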
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align;

  if (TREE_CODE (decl) == SSA_NAME)
    align = TYPE_ALIGN (TREE_TYPE (decl));
  else
    {
      align = LOCAL_DECL_ALIGNMENT (decl);
      SET_DECL_ALIGN (decl, align);
    }
  return align / BITS_PER_UNIT;
}

/* Align given offset BASE with ALIGN.  Round up if ALIGN_UP is true,
   down otherwise.  Return the aligned BASE value.  */

static inline unsigned HOST_WIDE_INT
align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
{
  return align_up ? (base + align - 1) & -align : base & -align;
}

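/* Worked example (illustrative only): for a power-of-two ALIGN the
   mask trick rounds to a multiple of ALIGN, e.g.
     align_base (13, 8, true)  == (13 + 7) & -8 == 16
     align_base (13, 8, false) == 13 & -8       == 8.  */
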
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset
        = align_base (frame_offset - frame_phase - size,
                      align, false) + frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset
        = align_base (frame_offset - frame_phase, align, true) + frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}

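/* Worked example (illustrative only): on a downward-growing frame with
   frame_phase == 0 and frame_offset == -4, allocating 8 bytes at byte
   alignment 8 computes align_base (-4 - 8, 8, false) == -16, so the new
   slot lives at frame offset -16 and frame_offset becomes -16.  */
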
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  tree size = TREE_CODE (decl) == SSA_NAME
              ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
              : DECL_SIZE_UNIT (decl);
  v->size = tree_to_uhwi (size);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (decl);
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}

/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = decl_to_stack_part->get (lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}

/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case where we, for instance, moved an
     address-taken operation upward, without also moving a dereference
     to it upwards.  But it's conservatively correct as a variable never
     can hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}

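/* Sketch of the dataflow above (illustrative, not from the original
   sources): the live set at the end of a block is obtained by taking
   the union of the live-out sets of its predecessors, then applying
   each real statement in order - a gimple clobber kills its variable's
   partition, any other mention of a partition makes it live.  The
   while (changed) loop iterates this in reverse post-order until the
   per-block bitmaps reach a fixed point; only then are conflicts
   recorded, so every pair of simultaneously live partitions is seen.  */
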
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}

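/* Illustrative ordering (not from the original sources): given objects
   with (size, alignment in bits) of (16, 8) and (32, 16), plus one
   over-aligned object whose alignment exceeds
   MAX_SUPPORTED_STACK_ALIGNMENT, qsort with stack_var_cmp places the
   over-aligned object first, then the 32-byte and 16-byte objects,
   matching the "large first, then size decreasing" keys above.  */
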
struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
typedef hash_map<size_t, bitmap, part_traits> part_hashmap;

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               part_hashmap *decls_to_partitions,
                               hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = new part_hashmap;
          cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          decls_to_partitions->put (uid, part);
          cfun->gimple_df->decls_to_pointers->put (decl, name);
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           &visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

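/* Illustrative note (not part of the original sources): after
   union_stack_vars (a, b), B's representative field points to A and B
   is spliced right after A in the singly-linked partition list, so a
   walk "for (j = a; j != EOC; j = stack_vars[j].next)" visits every
   member of the merged partition.  */
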
/* Return true if the current function should have its stack frame
   protected by address sanitizer.  */

static inline bool
asan_sanitize_stack_p (void)
{
  return ((flag_sanitize & SANITIZE_ADDRESS)
          && ASAN_STACK
          && !lookup_attribute ("no_sanitize_address",
                                DECL_ATTRIBUTES (current_function_decl)));
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          HOST_WIDE_INT jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if (asan_sanitize_stack_p () && isize != jsize
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}

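/* Worked example (illustrative only): for variables of sizes
   {32, 16, 8} where only the 32- and 16-byte objects conflict, the
   sorted order is 32, 16, 8; the 32-byte representative unions the
   8-byte object (the 16-byte one is skipped as conflicting), and the
   16-byte object remains alone - two partitions instead of three
   separate stack slots.  */
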
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
                   ? TYPE_MODE (TREE_TYPE (decl))
                   : DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      SET_DECL_ALIGN (decl, align);
      DECL_USER_ALIGN (decl) = 0;
    }

  set_rtl (decl, x);
}

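/* Aside (illustrative, not part of the original sources): the
   expression offset & -offset isolates the lowest set bit of OFFSET,
   e.g. an offset of 24 bytes yields 8, so a slot at that offset from
   an aligned base is known to be (at least) 8-byte aligned.  */
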
struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reverse order: highest-offset pairs come first.  */
  auto_vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  auto_vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  bool large_allocation_done = false;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* All "large" alignment decls come before all "small" alignment
             decls, but "large" alignment decls are not sorted based on
             their alignment.  Increase large_align to track the largest
             required alignment.  */
          if ((alignb * BITS_PER_UNIT) > large_align)
            large_align = alignb * BITS_PER_UNIT;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if (TREE_CODE (decl) == SSA_NAME
              ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
              : DECL_RTL (decl) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if (TREE_CODE (decl) == SSA_NAME
          ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
          : DECL_RTL (decl) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          base = virtual_stack_vars_rtx;
          if (asan_sanitize_stack_p () && pred)
            {
              HOST_WIDE_INT prev_offset
                = align_base (frame_offset,
                              MAX (alignb, ASAN_RED_ZONE_SIZE),
                              !FRAME_GROWS_DOWNWARD);
              tree repr_decl = NULL_TREE;
              offset
                = alloc_stack_frame_space (stack_vars[i].size
                                           + ASAN_RED_ZONE_SIZE,
                                           MAX (alignb, ASAN_RED_ZONE_SIZE));

              data->asan_vec.safe_push (prev_offset);
              data->asan_vec.safe_push (offset + stack_vars[i].size);
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);
              data->asan_alignb = MAX (data->asan_alignb, alignb);
              if (data->asan_base == NULL)
                data->asan_base = gen_reg_rtx (Pmode);
              base = data->asan_base;

              if (!STRICT_ALIGNMENT)
                base_align = crtl->max_used_stack_slot_alignment;
              else
                base_align = MAX (crtl->max_used_stack_slot_alignment,
                                  GET_MODE_ALIGNMENT (SImode)
                                  << ASAN_SHADOW_SHIFT);
            }
          else
            {
              offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
              base_align = crtl->max_used_stack_slot_alignment;
            }
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;

          /* If there were any variables requiring "large" alignment, allocate
             space.  */
          if (large_size > 0 && ! large_allocation_done)
            {
              HOST_WIDE_INT loffset;
              rtx large_allocsize;

              large_allocsize = GEN_INT (large_size);
              get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
              loffset = alloc_stack_frame_space
                (INTVAL (large_allocsize),
                 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
              large_base = get_dynamic_stack_base (loffset, large_align);
              large_allocation_done = true;
            }
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}

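/* Note on the "large" path above (illustrative, not from the original
   sources): partitions whose alignment exceeds
   MAX_SUPPORTED_STACK_ALIGNMENT cannot be placed at a fixed offset from
   virtual_stack_vars_rtx; instead their total size is reserved once,
   get_dynamic_stack_base supplies a suitably realigned base register,
   and each such partition is packed at an aligned offset (large_alloc)
   from that base.  */
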
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* Record the RTL assignment X for the default def of PARM.  */

extern void
set_parm_rtl (tree parm, rtx x)
{
  gcc_assert (TREE_CODE (parm) == PARM_DECL
              || TREE_CODE (parm) == RESULT_DECL);

  if (x && !MEM_P (x))
    {
      unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
                                              TYPE_MODE (TREE_TYPE (parm)),
                                              TYPE_ALIGN (TREE_TYPE (parm)));

      /* If the variable alignment is very large we'll dynamically
         allocate it, which means that in-frame portion is just a
         pointer.  ??? We've got a pseudo for sure here, do we
         actually dynamically allocate its spilling area if needed?
         ??? Isn't it a problem when POINTER_SIZE also exceeds
         MAX_SUPPORTED_STACK_ALIGNMENT, as on cris and lm32?  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;

      record_alignment_for_reg_var (align);
    }

  tree ssa = ssa_default_def (cfun, parm);
  if (!ssa)
    return set_rtl (parm, x);

  int part = var_to_partition (SA.map, ssa);
  gcc_assert (part != NO_PARTITION);

  bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
  gcc_assert (changed);

  set_rtl (ssa, x);
  gcc_assert (DECL_RTL (parm) == x);
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var_1 (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  if (TREE_CODE (var) == SSA_NAME)
    {
      tree type = TREE_TYPE (var);
      size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
      byte_align = TYPE_ALIGN_UNIT (type);
    }
  else
    {
      size = tree_to_uhwi (DECL_SIZE_UNIT (var));
      byte_align = align_local_variable (var);
    }

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
   already assigned some MEM.  */

static void
expand_one_stack_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (MEM_P (x));
          return;
        }
    }

  return expand_one_stack_var_1 (var);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* Record the alignment requirements of some variable assigned to a
   pseudo.  */

static void
record_alignment_for_reg_var (unsigned int align)
{
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;
}

/* Create RTL for an SSA partition.  */

static void
expand_one_ssa_partition (tree var)
{
  int part = var_to_partition (SA.map, var);
  gcc_assert (part != NO_PARTITION);

  if (SA.partition_to_pseudo[part])
    return;

  unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                          TYPE_MODE (TREE_TYPE (var)),
                                          TYPE_ALIGN (TREE_TYPE (var)));

  /* If the variable alignment is very large we'll dynamically allocate
     it, which means that in-frame portion is just a pointer.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = POINTER_SIZE;

  record_alignment_for_reg_var (align);

  if (!use_register_for_decl (var))
    {
      if (defer_stack_allocation (var, true))
        add_stack_var (var);
      else
        expand_one_stack_var_1 (var);
      return;
    }

  machine_mode reg_mode = promote_ssa_mode (var, NULL);

  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);
}

/* Record the association between the RTL generated for partition PART
   and the underlying variable of the SSA_NAME VAR.  */

static void
adjust_one_expanded_partition_var (tree var)
{
  if (!var)
    return;

  tree decl = SSA_NAME_VAR (var);

  int part = var_to_partition (SA.map, var);
  if (part == NO_PARTITION)
    return;

  rtx x = SA.partition_to_pseudo[part];

  gcc_assert (x);

  set_rtl (var, x);

  if (!REG_P (x))
    return;

  /* Note if the object is a user variable.  */
  if (decl && !DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (REG_P (x));
          return;
        }
      gcc_unreachable ();
    }

  tree decl = var;
  tree type = TREE_TYPE (decl);
  machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  tree size_unit = TREE_CODE (var) == SSA_NAME
                   ? TYPE_SIZE_UNIT (TREE_TYPE (var))
                   : DECL_SIZE_UNIT (var);

  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = ((HOST_WIDE_INT) tree_to_uhwi (size_unit)
       < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || asan_sanitize_stack_p ())
    return true;

  unsigned int align = TREE_CODE (var) == SSA_NAME
                       ? TYPE_ALIGN (TREE_TYPE (var))
                       : DECL_ALIGN (var);

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  bool ignored = TREE_CODE (var) == SSA_NAME
                 ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
                 : DECL_IGNORED_P (var);

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && ignored && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}

1560/* A subroutine of expand_used_vars. Expand one variable according to
2a7e31df 1561 its flavor. Variables to be placed on the stack are not actually
b8698a0f 1562 expanded yet, merely recorded.
ff28a94d
JH
1563 When REALLY_EXPAND is false, only add stack values to be allocated.
1564 Return stack usage this variable is supposed to take.
1565*/
1f6d3a08 1566
ff28a94d
JH
1567static HOST_WIDE_INT
1568expand_one_var (tree var, bool toplevel, bool really_expand)
1f6d3a08 1569{
3a42502d 1570 unsigned int align = BITS_PER_UNIT;
4e3825db 1571 tree origvar = var;
3a42502d 1572
4e3825db
MM
1573 var = SSAVAR (var);
1574
3a42502d 1575 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
2e3f842f 1576 {
9d7d6446
JB
1577 if (is_global_var (var))
1578 return 0;
1579
2e3f842f
L
1580 /* Because we don't know if VAR will be in register or on stack,
1581 we conservatively assume it will be on stack even if VAR is
1582 eventually put into register after RA pass. For non-automatic
1583 variables, which won't be on stack, we collect alignment of
3396aba5
JJ
1584 type and ignore user specified alignment. Similarly for
1585 SSA_NAMEs for which use_register_for_decl returns true. */
1586 if (TREE_STATIC (var)
1587 || DECL_EXTERNAL (var)
1588 || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
ae58e548
JJ
1589 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1590 TYPE_MODE (TREE_TYPE (var)),
1591 TYPE_ALIGN (TREE_TYPE (var)));
f3184b4c
JJ
1592 else if (DECL_HAS_VALUE_EXPR_P (var)
1593 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1594 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1595 or variables which were assigned a stack slot already by
1596 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1597 changed from the offset chosen to it. */
1598 align = crtl->stack_alignment_estimated;
2e3f842f 1599 else
ae58e548 1600 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
2e3f842f 1601
3a42502d
RH
1602 /* If the variable alignment is very large we'll dynamicaly allocate
1603 it, which means that in-frame portion is just a pointer. */
1604 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1605 align = POINTER_SIZE;
1606 }
1607
1f9ceff1 1608 record_alignment_for_reg_var (align);
3a42502d 1609
4e3825db
MM
1610 if (TREE_CODE (origvar) == SSA_NAME)
1611 {
1612 gcc_assert (TREE_CODE (var) != VAR_DECL
1613 || (!DECL_EXTERNAL (var)
1614 && !DECL_HAS_VALUE_EXPR_P (var)
1615 && !TREE_STATIC (var)
4e3825db
MM
1616 && TREE_TYPE (var) != error_mark_node
1617 && !DECL_HARD_REGISTER (var)
1618 && really_expand));
1619 }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        {
          expand_one_hard_reg_var (var);
          if (!DECL_HARD_REGISTER (var))
            /* Invalid register specification.  */
            expand_one_error_var (var);
        }
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        {
          if (lookup_attribute ("naked",
                                DECL_ATTRIBUTES (current_function_decl)))
            error ("cannot allocate stack for variable %q+D, naked function.",
                   var);

          expand_one_stack_var (origvar);
        }

      return tree_to_uhwi (DECL_SIZE_UNIT (var));
    }
  return 0;
}
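
/* An illustrative sketch (not from the original source): for

     void
     f (void)
     {
       static int s;    // TREE_STATIC: nothing to do here
       int x;           // use_register_for_decl: expand_one_register_var
       char buf[64];    // stack: add_stack_var or expand_one_stack_var
     }

   only the immediately-expanded stack case reports its size; variables
   deferred with add_stack_var are accounted later (account_stack_vars).  */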

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3,
  SPCT_FLAG_EXPLICIT = 4
};
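
/* These values track the -fstack-protector family of options as recorded
   in flag_stack_protect: SPCT_FLAG_DEFAULT for -fstack-protector,
   SPCT_FLAG_ALL for -fstack-protector-all, SPCT_FLAG_STRONG for
   -fstack-protector-strong and SPCT_FLAG_EXPLICIT for
   -fstack-protector-explicit.  */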

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY  1
#define SPCT_HAS_SMALL_CHAR_ARRAY  2
#define SPCT_HAS_ARRAY             4
#define SPCT_HAS_AGGREGATE         8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
            len = max;
          else
            len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
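
/* For example (a sketch, assuming the default --param ssp-buffer-size=8):

     char small[4];             -> SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY
     char big[64];              -> SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY
     int nums[4];               -> SPCT_HAS_ARRAY
     struct { char c[16]; } s;  -> SPCT_HAS_AGGREGATE plus the bits of the
                                   char field, via the recursive call.  */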

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG
      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
          && lookup_attribute ("stack_protect",
                               DECL_ATTRIBUTES (current_function_decl))))
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
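
/* So, for instance, under -fstack-protector-strong a plain character
   array is phase 1, any other array is phase 2, and everything else is
   phase 0; under plain -fstack-protector only declarations whose type
   contains a large character array are segregated.  */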

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}

/* A helper function that checks for the asan phase (with the stack
   protector it is phase 3).  This is used as a callback for
   expand_stack_vars.  Returns true if any of the vars in the partition
   need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
        return true;
      i = stack_vars[i].next;
    }
  return false;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
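
/* Conceptually this gives the frame a hidden pointer-sized local, as if
   the function declared

     volatile void *__guard_slot;   // illustrative name only

   and allocated it ahead of the protected arrays; the prologue and
   epilogue code emitted elsewhere fill and check it against the global
   canary.  */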

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = new hash_map<tree, size_t>;

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  delete decl_to_stack_part;
  decl_to_stack_part = NULL;
}

/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return size;
}
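
/* E.g. a function whose only locals are `int a[100]; char b[32];' is
   estimated at roughly 400 + 32 bytes plus alignment padding (assuming
   4-byte int), even if packing could later let the arrays share space.  */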

/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
        tree field_type = TREE_TYPE (f);
        if (RECORD_OR_UNION_TYPE_P (field_type)
            && record_or_union_type_has_array_p (field_type))
          return 1;
        if (TREE_CODE (field_type) == ARRAY_TYPE)
          return 1;
      }
  return 0;
}
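
/* For example, this returns 1 for

     struct outer { struct inner { char tail[8]; } in; int n; };

   because the nested field type contains an array.  */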

/* Check if the current function has local referenced variables that
   have their addresses taken, contain an array, or are arrays.  */

static bool
stack_protect_decl_p ()
{
  unsigned i;
  tree var;

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    if (!is_global_var (var))
      {
        tree var_type = TREE_TYPE (var);
        if (TREE_CODE (var) == VAR_DECL
            && (TREE_CODE (var_type) == ARRAY_TYPE
                || TREE_ADDRESSABLE (var)
                || (RECORD_OR_UNION_TYPE_P (var_type)
                    && record_or_union_type_has_array_p (var_type))))
          return true;
      }
  return false;
}

/* Check if the current function has calls that use a return slot.  */

static bool
stack_protect_return_slot_p ()
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
         !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple *stmt = gsi_stmt (gsi);
        /* This assumes that calls to internal-only functions never
           use a return slot.  */
        if (is_gimple_call (stmt)
            && !gimple_call_internal_p (stmt)
            && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
                                  gimple_call_fndecl (stmt)))
          return true;
      }
  return false;
}
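
/* A call such as `big = make_big ();' where the struct is returned in
   memory (aggregate_value_p) passes the callee the address of a stack
   return slot, which is why -fstack-protector-strong treats it as a
   reason to instrument the frame.  */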

/* Expand all variables used in the function.  */

static rtx_insn *
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  auto_vec<tree> maybe_local_decls;
  rtx_insn *var_end_seq = NULL;
  unsigned i;
  unsigned len;
  bool gen_stack_protect_signal = false;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    TREE_USED (var) = 1;
  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  init_vars_expansion ();

  if (targetm.use_pseudo_pic_reg ())
    pic_offset_table_rtx = gen_reg_rtx (Pmode);

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
        continue;

      tree var = partition_to_var (SA.map, i);

      gcc_assert (!virtual_operand_p (var));

      expand_one_ssa_partition (var);
    }

  if (flag_stack_protect == SPCT_FLAG_STRONG)
    gen_stack_protect_signal
      = stack_protect_decl_p () || stack_protect_return_slot_p ();

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = vec_safe_length (cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* Expand variables not associated with any block now.  Those created by
         the optimizers could be live anywhere in the function.  Those that
         could possibly have been scoped originally and detached from their
         block will have their allocation deferred so we coalesce them with
         others when optimization is enabled.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            maybe_local_decls.safe_push (var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!vec_safe_is_empty (cfun->local_decls))
    cfun->local_decls->block_remove (0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect != 0
          && (flag_stack_protect != SPCT_FLAG_EXPLICIT
              || (flag_stack_protect == SPCT_FLAG_EXPLICIT
                  && lookup_attribute ("stack_protect",
                                       DECL_ATTRIBUTES (current_function_decl)))))
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  switch (flag_stack_protect)
    {
    case SPCT_FLAG_ALL:
      create_stack_guard ();
      break;

    case SPCT_FLAG_STRONG:
      if (gen_stack_protect_signal
          || cfun->calls_alloca || has_protected_decls
          || lookup_attribute ("stack_protect",
                               DECL_ATTRIBUTES (current_function_decl)))
        create_stack_guard ();
      break;

    case SPCT_FLAG_DEFAULT:
      if (cfun->calls_alloca || has_protected_decls
          || lookup_attribute ("stack_protect",
                               DECL_ATTRIBUTES (current_function_decl)))
        create_stack_guard ();
      break;

    case SPCT_FLAG_EXPLICIT:
      if (lookup_attribute ("stack_protect",
                            DECL_ATTRIBUTES (current_function_decl)))
        create_stack_guard ();
      break;

    default:
      ;
    }

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      struct stack_vars_data data;

      data.asan_base = NULL_RTX;
      data.asan_alignb = 0;

      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1, &data);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == SPCT_FLAG_ALL
              || flag_stack_protect == SPCT_FLAG_STRONG
              || (flag_stack_protect == SPCT_FLAG_EXPLICIT
                  && lookup_attribute ("stack_protect",
                                       DECL_ATTRIBUTES (current_function_decl))))
            expand_stack_vars (stack_protect_decl_phase_2, &data);
        }

      if (asan_sanitize_stack_p ())
        /* Phase 3, any partitions that need asan protection
           in addition to phase 1 and 2.  */
        expand_stack_vars (asan_decl_phase_3, &data);

      if (!data.asan_vec.is_empty ())
        {
          HOST_WIDE_INT prev_offset = frame_offset;
          HOST_WIDE_INT offset, sz, redzonesz;
          redzonesz = ASAN_RED_ZONE_SIZE;
          sz = data.asan_vec[0] - prev_offset;
          if (data.asan_alignb > ASAN_RED_ZONE_SIZE
              && data.asan_alignb <= 4096
              && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
            redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
                         & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
          offset
            = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
          data.asan_vec.safe_push (prev_offset);
          data.asan_vec.safe_push (offset);
          /* Leave space for alignment if STRICT_ALIGNMENT.  */
          if (STRICT_ALIGNMENT)
            alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
                                      << ASAN_SHADOW_SHIFT)
                                     / BITS_PER_UNIT, 1);

          var_end_seq
            = asan_emit_stack_protection (virtual_stack_vars_rtx,
                                          data.asan_base,
                                          data.asan_alignb,
                                          data.asan_vec.address (),
                                          data.asan_decl_vec.address (),
                                          data.asan_vec.length ());
        }

      expand_stack_vars (NULL, &data);
    }

  fini_vars_expansion ();

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }

  return var_end_seq;
}
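
/* To recap the allocation order established above: the guard (if any)
   is created first, then phase 1 character arrays, then phase 2 arrays,
   then partitions needing asan redzones, and finally everything else
   via the expand_stack_vars (NULL, ...) call.  */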

/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx_code_label *
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);
  if (elt)
    return *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      glabel *lab_stmt;

      lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!lab_stmt)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return jump_target_rtx (lab);
    }

  rtx_code_label *l = gen_label_rtx ();
  lab_rtx_for_bb->put (bb, l);
  return l;
}

/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx_insn *last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx_insn *insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            {
              if (!any_condjump_p (NEXT_INSN (insn)))
                {
                  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
                  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
                }
              delete_insn (NEXT_INSN (insn));
            }
        }
    }
}

/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gcond *stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx_insn *last2, *last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as a special exception if TER would have made the same change.  */
  if (SA.values
      && TREE_CODE (op0) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
      && TREE_CODE (op1) == INTEGER_CST
      && ((gimple_cond_code (stmt) == NE_EXPR
           && integer_zerop (op1))
          || (gimple_cond_code (stmt) == EQ_EXPR
              && integer_onep (op1)))
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple *second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
        {
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
            {
              code = code2;
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
            }
          /* If jumps are cheap and the target does not support conditional
             compare, turn some more codes into jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
                   && targetm.gen_ccmp_first == NULL)
            {
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                {
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                {
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
            }
        }
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  set_curr_insn_location (gimple_location (stmt));

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
                true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus != UNKNOWN_LOCATION)
        set_curr_insn_location (true_edge->goto_locus);
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus != UNKNOWN_LOCATION)
        set_curr_insn_location (false_edge->goto_locus);
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus != UNKNOWN_LOCATION)
    set_curr_insn_location (false_edge->goto_locus);
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  add_bb_to_loop (new_bb, bb->loop_father);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus != UNKNOWN_LOCATION)
    {
      set_curr_insn_location (true_edge->goto_locus);
      true_edge->goto_locus = curr_insn_location ();
    }

  return new_bb;
}
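
/* For instance, for

     if (x < y) goto A; else goto B;

   where B's block immediately follows, a single conditional jump to A
   is emitted and the false edge becomes the fallthru; only when neither
   destination follows does the unconditional jump above open the new
   basic block this function returns.  */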

/* Mark all calls that can have a transaction restart.  */

static void
mark_transaction_restart_calls (gimple *stmt)
{
  struct tm_restart_node dummy;
  tm_restart_node **slot;

  if (!cfun->gimple_df->tm_restart)
    return;

  dummy.stmt = stmt;
  slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
  if (slot)
    {
      struct tm_restart_node *n = *slot;
      tree list = n->label_or_list;
      rtx_insn *insn;

      for (insn = next_real_insn (get_last_insn ());
           !CALL_P (insn);
           insn = next_real_insn (insn))
        continue;

      if (TREE_CODE (list) == LABEL_DECL)
        add_reg_note (insn, REG_TM, label_rtx (list));
      else
        for (; list ; list = TREE_CHAIN (list))
          add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
    }
}

/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

static void
expand_call_stmt (gcall *stmt)
{
  tree exp, decl, lhs;
  bool builtin_p;
  size_t i;

  if (gimple_call_internal_p (stmt))
    {
      expand_internal_call (stmt);
      return;
    }

  /* If this is a call to a built-in function and it has no effect other
     than setting the lhs, try to implement it using an internal function
     instead.  */
  decl = gimple_call_fndecl (stmt);
  if (gimple_call_lhs (stmt)
      && !gimple_has_side_effects (stmt)
      && (optimize || (decl && called_as_built_in (decl))))
    {
      internal_fn ifn = replacement_internal_fn (stmt);
      if (ifn != IFN_LAST)
        {
          expand_internal_call (ifn, stmt);
          return;
        }
    }

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  builtin_p = decl && DECL_BUILT_IN (decl);

  /* If this is not a builtin function, the function type through which the
     call is made may be different from the type of the function.  */
  if (!builtin_p)
    CALL_EXPR_FN (exp)
      = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
                      CALL_EXPR_FN (exp));

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      gimple *def;
      /* TER addresses into arguments of builtin functions so we have a
         chance to infer more correct alignment information.  See PR39954.  */
      if (builtin_p
          && TREE_CODE (arg) == SSA_NAME
          && (def = get_gimple_for_ssa_name (arg))
          && gimple_assign_rhs_code (def) == ADDR_EXPR)
        arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;
    }

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  if (decl
      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
          || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
  else
    CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));
  CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);

  /* Ensure RTL is created for debug args.  */
  if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
    {
      vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
      unsigned int ix;
      tree dtemp;

      if (debug_args)
        for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
          {
            gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
            expand_debug_expr (dtemp);
          }
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  mark_transaction_restart_calls (stmt);
}

/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

static void
expand_asm_loc (tree string, int vol, location_t locus)
{
  rtx body;

  body = gen_rtx_ASM_INPUT_loc (VOIDmode,
                                ggc_strdup (TREE_STRING_POINTER (string)),
                                locus);

  MEM_VOLATILE_P (body) = vol;

  /* Non-empty basic ASM implicitly clobbers memory.  */
  if (TREE_STRING_LENGTH (string) != 0)
    {
      rtx asm_op, clob;
      unsigned i, nclobbers;
      auto_vec<rtx> input_rvec, output_rvec;
      auto_vec<const char *> constraints;
      auto_vec<rtx> clobber_rvec;
      HARD_REG_SET clobbered_regs;
      CLEAR_HARD_REG_SET (clobbered_regs);

      clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
      clobber_rvec.safe_push (clob);

      if (targetm.md_asm_adjust)
        targetm.md_asm_adjust (output_rvec, input_rvec,
                               constraints, clobber_rvec,
                               clobbered_regs);

      asm_op = body;
      nclobbers = clobber_rvec.length ();
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));

      XVECEXP (body, 0, 0) = asm_op;
      for (i = 0; i < nclobbers; i++)
        XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
    }

  emit_insn (body);
}
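
/* A non-empty basic asm such as

     asm ("nop");

   therefore expands to
     (parallel [(asm_input "nop") (clobber (mem:BLK (scratch)))])
   plus whatever clobbers the target's md_asm_adjust hook appends.  */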

/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}

/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (const vec<const char *> &constraints)
{
  unsigned len = constraints.length();
  if (len > 0)
    {
      int nalternatives = n_occurrences (',', constraints[0]);

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
        {
          error ("too many alternatives in %<asm%>");
          return false;
        }

      for (unsigned i = 1; i < len; ++i)
        if (n_occurrences (',', constraints[i]) != nalternatives)
          {
            error ("operand constraints for %<asm%> differ "
                   "in number of alternatives");
            return false;
          }
    }
  return true;
}

/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in T.  Emit an error and return true if an
   overlap is found; return false for ok.  */

static bool
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);

  if (overlap)
    {
      error ("asm-specifier for variable %qE conflicts with asm clobber list",
             DECL_NAME (overlap));

      /* Reset registerness to stop multiple errors emitted for a single
         variable.  */
      DECL_REGISTER (overlap) = 0;
      return true;
    }

  return false;
}

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
   should be the fallthru basic block of the asm goto.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

static void
expand_asm_stmt (gasm *stmt)
{
  class save_input_location
  {
    location_t old;

  public:
    explicit save_input_location(location_t where)
    {
      old = input_location;
      input_location = where;
    }

    ~save_input_location()
    {
      input_location = old;
    }
  };

  location_t locus = gimple_location (stmt);

  if (gimple_asm_input_p (stmt))
    {
      const char *s = gimple_asm_string (stmt);
      tree string = build_string (strlen (s), s);
      expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
      return;
    }

  /* There are some legacy diagnostics in here; this also avoids a
     sixth parameter to targetm.md_asm_adjust.  */
  save_input_location s_i_l(locus);

  unsigned noutputs = gimple_asm_noutputs (stmt);
  unsigned ninputs = gimple_asm_ninputs (stmt);
  unsigned nlabels = gimple_asm_nlabels (stmt);
  unsigned i;

  /* ??? Diagnose during gimplification?  */
  if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
      return;
    }

  auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
  auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
  auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;

  /* Copy the gimple vectors into new vectors that we can manipulate.  */

  output_tvec.safe_grow (noutputs);
  input_tvec.safe_grow (ninputs);
  constraints.safe_grow (noutputs + ninputs);

  for (i = 0; i < noutputs; ++i)
    {
      tree t = gimple_asm_output_op (stmt, i);
      output_tvec[i] = TREE_VALUE (t);
      constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
    }
  for (i = 0; i < ninputs; i++)
    {
      tree t = gimple_asm_input_op (stmt, i);
      input_tvec[i] = TREE_VALUE (t);
      constraints[i + noutputs]
        = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
    }

  /* ??? Diagnose during gimplification?  */
  if (! check_operand_nalternatives (constraints))
    return;

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  auto_vec<rtx> clobber_rvec;
  HARD_REG_SET clobbered_regs;
  CLEAR_HARD_REG_SET (clobbered_regs);

  if (unsigned n = gimple_asm_nclobbers (stmt))
    {
      clobber_rvec.reserve (n);
      for (i = 0; i < n; i++)
        {
          tree t = gimple_asm_clobber_op (stmt, i);
          const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
          int nregs, j;

          j = decode_reg_name_and_count (regname, &nregs);
          if (j < 0)
            {
              if (j == -2)
                {
                  /* ??? Diagnose during gimplification?  */
                  error ("unknown register name %qs in %<asm%>", regname);
                }
              else if (j == -4)
                {
                  rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
                  clobber_rvec.safe_push (x);
                }
              else
                {
                  /* Otherwise we should have -1 == empty string
                     or -3 == cc, which is not a register.  */
                  gcc_assert (j == -1 || j == -3);
                }
            }
          else
            for (int reg = j; reg < j + nregs; reg++)
              {
                /* Clobbering the PIC register is an error.  */
                if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
                  {
                    /* ??? Diagnose during gimplification?  */
                    error ("PIC register clobbered by %qs in %<asm%>",
                           regname);
                    return;
                  }

                SET_HARD_REG_BIT (clobbered_regs, reg);
                rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
                clobber_rvec.safe_push (x);
              }
        }
    }
  unsigned nclobbers = clobber_rvec.length();

  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */
  /* ??? Diagnose during gimplification?  */

  for (i = 0; i < noutputs; ++i)
    {
      tree val = output_tvec[i];
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* Try to parse the output constraint.  If that fails, there's
         no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
                                    &allows_mem, &allows_reg, &is_inout))
        return;

      if (! allows_reg
          && (allows_mem
              || is_inout
              || (DECL_P (val)
                  && REG_P (DECL_RTL (val))
                  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
        mark_addressable (val);
    }

  for (i = 0; i < ninputs; ++i)
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
                                    constraints.address (),
                                    &allows_mem, &allows_reg))
        return;

      if (! allows_reg && allows_mem)
        mark_addressable (input_tvec[i]);
    }

  /* Second pass evaluates arguments.  */

  /* Make sure stack is consistent for asm goto.  */
  if (nlabels > 0)
    do_pending_stack_adjust ();
  int old_generating_concat_p = generating_concat_p;

  /* Vector of RTX's of evaluated output operands.  */
  auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
  auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
  rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;

  output_rvec.safe_grow (noutputs);

  for (i = 0; i < noutputs; ++i)
    {
      tree val = output_tvec[i];
      tree type = TREE_TYPE (val);
      bool is_inout, allows_reg, allows_mem, ok;
      rtx op;

      ok = parse_output_constraint (&constraints[i], i, ninputs,
                                    noutputs, &allows_mem, &allows_reg,
                                    &is_inout);
      gcc_assert (ok);

      /* If an output operand is not a decl or indirect ref and our constraint
         allows a register, make a temporary to act as an intermediate.
         Make the asm insn write into that, then we will copy it to
         the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      if ((TREE_CODE (val) == INDIRECT_REF
           && allows_mem)
          || (DECL_P (val)
              && (allows_mem || REG_P (DECL_RTL (val)))
              && ! (REG_P (DECL_RTL (val))
                    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
          || ! allows_reg
          || is_inout)
        {
          op = expand_expr (val, NULL_RTX, VOIDmode,
                            !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
          if (MEM_P (op))
            op = validize_mem (op);

          if (! allows_reg && !MEM_P (op))
            error ("output number %d not directly addressable", i);
          if ((! allows_mem && MEM_P (op))
              || GET_CODE (op) == CONCAT)
            {
              rtx old_op = op;
              op = gen_reg_rtx (GET_MODE (op));

              generating_concat_p = old_generating_concat_p;

              if (is_inout)
                emit_move_insn (op, old_op);

              push_to_sequence2 (after_rtl_seq, after_rtl_end);
              emit_move_insn (old_op, op);
              after_rtl_seq = get_insns ();
              after_rtl_end = get_last_insn ();
              end_sequence ();
            }
        }
      else
        {
          op = assign_temp (type, 0, 1);
          op = validize_mem (op);
          if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
            set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);

          generating_concat_p = old_generating_concat_p;

          push_to_sequence2 (after_rtl_seq, after_rtl_end);
          expand_assignment (val, make_tree (type, op), false);
          after_rtl_seq = get_insns ();
          after_rtl_end = get_last_insn ();
          end_sequence ();
        }
      output_rvec[i] = op;

      if (is_inout)
        inout_opnum.safe_push (i);
    }

  auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
  auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;

  input_rvec.safe_grow (ninputs);
  input_mode.safe_grow (ninputs);

  generating_concat_p = 0;

  for (i = 0; i < ninputs; ++i)
    {
      tree val = input_tvec[i];
      tree type = TREE_TYPE (val);
      bool allows_reg, allows_mem, ok;
      const char *constraint;
      rtx op;

      constraint = constraints[i + noutputs];
      ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
                                   constraints.address (),
                                   &allows_mem, &allows_reg);
      gcc_assert (ok);

      /* EXPAND_INITIALIZER will not generate code for valid initializer
         constants, but will still generate code for other types of operand.
         This is the behavior we want for constant constraints.  */
      op = expand_expr (val, NULL_RTX, VOIDmode,
                        allows_reg ? EXPAND_NORMAL
                        : allows_mem ? EXPAND_MEMORY
                        : EXPAND_INITIALIZER);

      /* Never pass a CONCAT to an ASM.  */
      if (GET_CODE (op) == CONCAT)
        op = force_reg (GET_MODE (op), op);
      else if (MEM_P (op))
        op = validize_mem (op);

      if (asm_operand_ok (op, constraint, NULL) <= 0)
        {
          if (allows_reg && TYPE_MODE (type) != BLKmode)
            op = force_reg (TYPE_MODE (type), op);
          else if (!allows_mem)
            warning (0, "asm operand %d probably doesn%'t match constraints",
                     i + noutputs);
          else if (MEM_P (op))
            {
              /* We won't recognize either volatile memory or memory
                 with a queued address as a valid memory_operand
                 at this point.  Ignore it: clearly this *is* a memory.  */
            }
          else
            gcc_unreachable ();
        }
      input_rvec[i] = op;
      input_mode[i] = TYPE_MODE (type);
    }

  /* For in-out operands, copy output rtx to input rtx.  */
  unsigned ninout = inout_opnum.length();
  for (i = 0; i < ninout; i++)
    {
      int j = inout_opnum[i];
      rtx o = output_rvec[j];

      input_rvec.safe_push (o);
      input_mode.safe_push (GET_MODE (o));

      char buffer[16];
      sprintf (buffer, "%d", j);
      constraints.safe_push (ggc_strdup (buffer));
    }
  ninputs += ninout;

  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  rtx_insn *after_md_seq = NULL;
  if (targetm.md_asm_adjust)
    after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
                                          constraints, clobber_rvec,
                                          clobbered_regs);

  /* Do not allow the hook to change the output and input count,
     lest it mess up the operand numbering.  */
  gcc_assert (output_rvec.length() == noutputs);
  gcc_assert (input_rvec.length() == ninputs);
  gcc_assert (constraints.length() == noutputs + ninputs);

  /* But it certainly can adjust the clobbers.  */
  nclobbers = clobber_rvec.length();

  /* Third pass checks for easy conflicts.  */
  /* ??? Why are we doing this on trees instead of rtx.  */

  bool clobber_conflict_found = 0;
  for (i = 0; i < noutputs; ++i)
    if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
      clobber_conflict_found = 1;
  for (i = 0; i < ninputs - ninout; ++i)
    if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
      clobber_conflict_found = 1;

  /* Make vectors for the expression-rtx, constraint strings,
     and named operands.  */

  rtvec argvec = rtvec_alloc (ninputs);
  rtvec constraintvec = rtvec_alloc (ninputs);
  rtvec labelvec = rtvec_alloc (nlabels);

  rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
                                    : GET_MODE (output_rvec[0])),
                                   ggc_strdup (gimple_asm_string (stmt)),
                                   empty_string, 0, argvec, constraintvec,
                                   labelvec, locus);
  MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);

  for (i = 0; i < ninputs; ++i)
    {
      ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
        = gen_rtx_ASM_INPUT_loc (input_mode[i],
                                 constraints[i + noutputs],
                                 locus);
    }

  /* Copy labels to the vector.  */
  rtx_code_label *fallthru_label = NULL;
  if (nlabels > 0)
    {
      basic_block fallthru_bb = NULL;
      edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
      if (fallthru)
        fallthru_bb = fallthru->dest;

      for (i = 0; i < nlabels; ++i)
        {
          tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
          rtx_insn *r;
          /* If asm goto has any labels in the fallthru basic block, use
             a label that we emit immediately after the asm goto.  Expansion
             may insert further instructions into the same basic block after
             asm goto and if we don't do this, insertion of instructions on
             the fallthru edge might misbehave.  See PR58670.  */
          if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
            {
              if (fallthru_label == NULL_RTX)
                fallthru_label = gen_label_rtx ();
              r = fallthru_label;
            }
          else
            r = label_rtx (label);
          ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
        }
    }

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
                               ARGVEC CONSTRAINTS OPNAMES))
     If there is more than one, put them inside a PARALLEL.  */

  if (nlabels > 0 && nclobbers == 0)
    {
      gcc_assert (noutputs == 0);
      emit_jump_insn (body);
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      emit_insn (body);
    }
  else if (noutputs == 1 && nclobbers == 0)
    {
      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
      emit_insn (gen_rtx_SET (output_rvec[0], body));
    }
  else
    {
      rtx obody = body;
      int num = noutputs;

      if (num == 0)
        num = 1;

      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */
      for (i = 0; i < noutputs; ++i)
        {
          rtx src, o = output_rvec[i];
          if (i == 0)
            {
              ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
              src = obody;
            }
          else
            {
              src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
                                          ASM_OPERANDS_TEMPLATE (obody),
                                          constraints[i], i, argvec,
                                          constraintvec, labelvec, locus);
              MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
            }
          XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
        }

      /* If there are no outputs (but there are some clobbers)
         store the bare ASM_OPERANDS into the PARALLEL.  */
      if (i == 0)
        XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */
      for (unsigned j = 0; j < nclobbers; ++j)
        {
          rtx clobbered_reg = clobber_rvec[j];

          /* Do a sanity check for overlap between clobbers and the
             inputs and outputs that hasn't been handled.  Such overlap
             should have been detected and reported above.  */
          if (!clobber_conflict_found && REG_P (clobbered_reg))
            {
              /* We test the old body (obody) contents to avoid
                 tripping over the under-construction body.  */
              for (unsigned k = 0; k < noutputs; ++k)
                if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
                  internal_error ("asm clobber conflict with output operand");

              for (unsigned k = 0; k < ninputs - ninout; ++k)
                if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
                  internal_error ("asm clobber conflict with input operand");
            }

          XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
        }

      if (nlabels > 0)
        emit_jump_insn (body);
      else
        emit_insn (body);
    }

  generating_concat_p = old_generating_concat_p;

  if (fallthru_label)
    emit_label (fallthru_label);

  if (after_md_seq)
    emit_insn (after_md_seq);
  if (after_rtl_seq)
    emit_insn (after_rtl_seq);

  free_temp_slots ();
  crtl->has_asm_statement = 1;
}
3317
3318/* Emit code to jump to the address
3319 specified by the pointer expression EXP. */
3320
3321static void
3322expand_computed_goto (tree exp)
3323{
3324 rtx x = expand_normal (exp);
3325
862d0b35
DN
3326 do_pending_stack_adjust ();
3327 emit_indirect_jump (x);
3328}
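
/* For reference, a minimal GNU C fragment that reaches
   expand_computed_goto (illustrative sketch):

     static void *tbl[] = { &&l0, &&l1 };
     goto *tbl[i];    // GIMPLE_GOTO whose destination is not a LABEL_DECL
   l0: ...
   l1: ...  */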

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

static void
expand_goto (tree label)
{
  if (flag_checking)
    {
      /* Check for a nonlocal goto to a containing function.  Should have
         gotten translated to __builtin_nonlocal_goto.  */
      tree context = decl_function_context (label);
      gcc_assert (!context || context == current_function_decl);
    }

  emit_jump (jump_target_rtx (label));
}

/* Output a return with no value.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}

/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}

/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (rtx val)
{
  /* Copy the value to the return location unless it's already there.  */

  tree decl = DECL_RESULT (current_function_decl);
  rtx return_reg = DECL_RTL (decl);
  if (return_reg != val)
    {
      tree funtype = TREE_TYPE (current_function_decl);
      tree type = TREE_TYPE (decl);
      int unsignedp = TYPE_UNSIGNED (type);
      machine_mode old_mode = DECL_MODE (decl);
      machine_mode mode;
      if (DECL_BY_REFERENCE (decl))
        mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
      else
        mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);

      if (mode != old_mode)
        val = convert_modes (mode, old_mode, val, unsignedp);

      if (GET_CODE (return_reg) == PARALLEL)
        emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
        emit_move_insn (return_reg, val);
    }

  expand_null_return_1 ();
}

/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

static void
expand_return (tree retval, tree bounds)
{
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;
  rtx bounds_rtl;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_normal (retval);
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
         returns a value.  */
      expand_null_return ();
      return;
    }
  else if ((TREE_CODE (retval) == MODIFY_EXPR
            || TREE_CODE (retval) == INIT_EXPR)
           && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* Put returned bounds to the right place.  */
  bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
  if (bounds_rtl)
    {
      rtx addr = NULL;
      rtx bnd = NULL;

      if (bounds && bounds != error_mark_node)
        {
          bnd = expand_normal (bounds);
          targetm.calls.store_returned_bounds (bounds_rtl, bnd);
        }
      else if (REG_P (bounds_rtl))
        {
          if (bounds)
            bnd = chkp_expand_zero_bounds ();
          else
            {
              addr = expand_normal (build_fold_addr_expr (retval_rhs));
              addr = gen_rtx_MEM (Pmode, addr);
              bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
            }

          targetm.calls.store_returned_bounds (bounds_rtl, bnd);
        }
      else
        {
          int n;

          gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);

          if (bounds)
            bnd = chkp_expand_zero_bounds ();
          else
            {
              addr = expand_normal (build_fold_addr_expr (retval_rhs));
              addr = gen_rtx_MEM (Pmode, addr);
            }

          for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
            {
              rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
              if (!bounds)
                {
                  rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
                  rtx from = adjust_address (addr, Pmode, INTVAL (offs));
                  bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
                }
              targetm.calls.store_returned_bounds (slot, bnd);
            }
        }
    }
  else if (chkp_function_instrumented_p (current_function_decl)
           && !BOUNDED_P (retval_rhs)
           && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
           && TREE_CODE (retval_rhs) != RESULT_DECL)
    {
      rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
      addr = gen_rtx_MEM (Pmode, addr);

      gcc_assert (MEM_P (result_rtl));

      chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
    }

  /* If we are returning the RESULT_DECL, then the value has already
     been stored into it, so we don't have to do anything special.  */
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
    expand_value_return (result_rtl);

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  */

  else if (retval_rhs != 0
           && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
           && REG_P (result_rtl))
    {
      val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
      if (val)
        {
          /* Use the mode of the result value on the return register.  */
          PUT_MODE (result_rtl, GET_MODE (val));
          expand_value_return (val);
        }
      else
        expand_null_return ();
    }
  else if (retval_rhs != 0
           && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
           && (REG_P (result_rtl)
               || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Compute the return value into a temporary (usually a pseudo reg).  */
      val
        = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
      val = force_not_mem (val);
      expand_value_return (val);
    }
  else
    {
      /* No hard reg used; calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
      expand_value_return (result_rtl);
    }
}

/* A subroutine of expand_gimple_stmt, expanding one gimple statement
   STMT that doesn't require special handling for outgoing edges.  That
   is, no tailcalls and no GIMPLE_COND.  */

static void
expand_gimple_stmt_1 (gimple *stmt)
{
  tree op0;

  set_curr_insn_location (gimple_location (stmt));

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      op0 = gimple_goto_dest (stmt);
      if (TREE_CODE (op0) == LABEL_DECL)
        expand_goto (op0);
      else
        expand_computed_goto (op0);
      break;
    case GIMPLE_LABEL:
      expand_label (gimple_label_label (as_a <glabel *> (stmt)));
      break;
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;
    case GIMPLE_SWITCH:
      expand_case (as_a <gswitch *> (stmt));
      break;
    case GIMPLE_ASM:
      expand_asm_stmt (as_a <gasm *> (stmt));
      break;
    case GIMPLE_CALL:
      expand_call_stmt (as_a <gcall *> (stmt));
      break;

    case GIMPLE_RETURN:
      {
        tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
        op0 = gimple_return_retval (as_a <greturn *> (stmt));

        if (op0 && op0 != error_mark_node)
          {
            tree result = DECL_RESULT (current_function_decl);

            /* Mark that we have a return statement with missing bounds.  */
            if (!bnd
                && chkp_function_instrumented_p (cfun->decl)
                && !DECL_P (op0))
              bnd = error_mark_node;

            /* If we are not returning the current function's RESULT_DECL,
               build an assignment to it.  */
            if (op0 != result)
              {
                /* I believe that a function's RESULT_DECL is unique.  */
                gcc_assert (TREE_CODE (op0) != RESULT_DECL);

                /* ??? We'd like to use simply expand_assignment here,
                   but this fails if the value is of BLKmode but the return
                   decl is a register.  expand_return has special handling
                   for this combination, which eventually should move
                   to common code.  See comments there.  Until then, let's
                   build a modify expression :-/  */
                op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
                              result, op0);
              }
          }

        if (!op0)
          expand_null_return ();
        else
          expand_return (op0, bnd);
      }
      break;

    case GIMPLE_ASSIGN:
      {
        gassign *assign_stmt = as_a <gassign *> (stmt);
        tree lhs = gimple_assign_lhs (assign_stmt);

        /* Tree expand used to fiddle with |= and &= of two bitfield
           COMPONENT_REFs here.  This can't happen with gimple, the LHS
           of binary assigns must be a gimple reg.  */

        if (TREE_CODE (lhs) != SSA_NAME
            || get_gimple_rhs_class (gimple_expr_code (stmt))
               == GIMPLE_SINGLE_RHS)
          {
            tree rhs = gimple_assign_rhs1 (assign_stmt);
            gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
                        == GIMPLE_SINGLE_RHS);
            if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
                /* Do not put locations on possibly shared trees.  */
                && !is_gimple_min_invariant (rhs))
              SET_EXPR_LOCATION (rhs, gimple_location (stmt));
            if (TREE_CLOBBER_P (rhs))
              /* This is a clobber to mark the going out of scope for
                 this LHS.  */
              ;
            else
              expand_assignment (lhs, rhs,
                                 gimple_assign_nontemporal_move_p (
                                   assign_stmt));
          }
        else
          {
            rtx target, temp;
            bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
            struct separate_ops ops;
            bool promoted = false;

            target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
            if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
              promoted = true;

            ops.code = gimple_assign_rhs_code (assign_stmt);
            ops.type = TREE_TYPE (lhs);
            switch (get_gimple_rhs_class (ops.code))
              {
              case GIMPLE_TERNARY_RHS:
                ops.op2 = gimple_assign_rhs3 (assign_stmt);
                /* Fallthru */
              case GIMPLE_BINARY_RHS:
                ops.op1 = gimple_assign_rhs2 (assign_stmt);
                /* Fallthru */
              case GIMPLE_UNARY_RHS:
                ops.op0 = gimple_assign_rhs1 (assign_stmt);
                break;
              default:
                gcc_unreachable ();
              }
            ops.location = gimple_location (stmt);

            /* If we want to use a nontemporal store, force the value to
               register first.  If we store into a promoted register,
               don't directly expand to target.  */
            temp = nontemporal || promoted ? NULL_RTX : target;
            temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
                                       EXPAND_NORMAL);

            if (temp == target)
              ;
            else if (promoted)
              {
                int unsignedp = SUBREG_PROMOTED_SIGN (target);
                /* If TEMP is a VOIDmode constant, use convert_modes to make
                   sure that we properly convert it.  */
                if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
                  {
                    temp = convert_modes (GET_MODE (target),
                                          TYPE_MODE (ops.type),
                                          temp, unsignedp);
                    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                          GET_MODE (target), temp, unsignedp);
                  }

                convert_move (SUBREG_REG (target), temp, unsignedp);
              }
            else if (nontemporal && emit_storent_insn (target, temp))
              ;
            else
              {
                temp = force_operand (temp, target);
                if (temp != target)
                  emit_move_insn (target, temp);
              }
          }
      }
      break;

    default:
      gcc_unreachable ();
    }
}

/* Expand one gimple statement STMT and return the last RTL instruction
   before any of the newly generated ones.

   In addition to generating the necessary RTL instructions this also
   sets REG_EH_REGION notes if necessary and sets the current source
   location for diagnostics.  */

static rtx_insn *
expand_gimple_stmt (gimple *stmt)
{
  location_t saved_location = input_location;
  rtx_insn *last = get_last_insn ();
  int lp_nr;

  gcc_assert (cfun);

  /* We need to save and restore the current source location so that errors
     discovered during expansion are emitted with the right location.  But
     it would be better if the diagnostic routines used the source location
     embedded in the tree nodes rather than globals.  */
  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  expand_gimple_stmt_1 (stmt);

  /* Free any temporaries used to evaluate this statement.  */
  free_temp_slots ();

  input_location = saved_location;

  /* Mark all insns that may trap.  */
  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr)
    {
      rtx_insn *insn;
      for (insn = next_real_insn (last); insn;
           insn = next_real_insn (insn))
        {
          if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
              /* If we want exceptions for non-call insns, any
                 may_trap_p instruction may throw.  */
              && GET_CODE (PATTERN (insn)) != CLOBBER
              && GET_CODE (PATTERN (insn)) != USE
              && insn_could_throw_p (insn))
            make_reg_eh_region_note (insn, 0, lp_nr);
        }
    }

  return last;
}

/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */
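
/* A sketch of that __builtin_sqrt shape (illustrative only; details
   vary by target and math flags):

     t = hardware_sqrt (x);
     if (t != t)          // NaN result: fall back for errno handling
       return sqrt (x);   // conditional sibcall to libm
     // fallthru: the rest of BB consumes t
*/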

static basic_block
expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
{
  rtx_insn *last2, *last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;

  last2 = last = expand_gimple_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
        {
          if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
            {
              e->dest->count -= e->count;
              e->dest->frequency -= EDGE_FREQUENCY (e);
              if (e->dest->count < 0)
                e->dest->count = 0;
              if (e->dest->frequency < 0)
                e->dest->frequency = 0;
            }
          count += e->count;
          probability += e->probability;
          remove_edge (e);
        }
      else
        ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance, a sqrt builtin expander expands an if with
         a sibcall in the "then" arm and a label for the "else" arm.  */
      if (LABEL_P (NEXT_INSN (last)))
        {
          *can_fallthru = true;
          break;
        }
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
                 | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
        BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  return bb;
}

/* Return the difference between the floor and the truncated result of
   a signed division by OP1 with remainder MOD.  */
static rtx
floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_LT (BImode,
                        gen_rtx_DIV (mode, op1, mod),
                        const0_rtx),
      constm1_rtx, const0_rtx),
     const0_rtx);
}
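
/* Worked example: for -7 / 2, truncation gives -3 with MOD = -1;
   OP1 / MOD = 2 / -1 < 0, so the adjustment is -1 and the floor
   result is -3 + -1 = -4.  The sign test works because MOD carries
   the sign of the dividend, so OP1 / MOD < 0 exactly when the
   operands have opposite signs and truncation rounded up.  */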

/* Return the difference between the ceil and the truncated result of
   a signed division by OP1 with remainder MOD.  */
static rtx
ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
                        gen_rtx_DIV (mode, op1, mod),
                        const0_rtx),
      const1_rtx, const0_rtx),
     const0_rtx);
}
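
/* Worked example: for 7 / 2, truncation gives 3 with MOD = 1;
   OP1 / MOD = 2 > 0, so the adjustment is +1 and the ceiling
   result is 3 + 1 = 4.  */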

/* Return the difference between the ceil and the truncated result of
   an unsigned division by OP1 with remainder MOD.  */
static rtx
ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
{
  /* (mod != 0 ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     const1_rtx, const0_rtx);
}
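
/* Worked example: for 7u / 2u, truncation gives 3 with MOD = 1,
   so the adjustment is +1 and the ceiling result is 4; unsigned
   division rounds up exactly when the remainder is nonzero.  */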

/* Return the difference between the rounded and the truncated result
   of a signed division by OP1 with remainder MOD.  Halfway cases are
   rounded away from zero, rather than to the nearest even number.  */
static rtx
round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (abs (mod) >= abs (op1) - abs (mod)
        ? (op1 / mod > 0 ? 1 : -1)
        : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
                       gen_rtx_MINUS (mode,
                                      gen_rtx_ABS (mode, op1),
                                      gen_rtx_ABS (mode, mod))),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
                        gen_rtx_DIV (mode, op1, mod),
                        const0_rtx),
      const1_rtx, constm1_rtx),
     const0_rtx);
}
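
/* Worked example: for 7 / 2, truncation gives 3 with MOD = 1;
   abs (MOD) = 1 >= abs (OP1) - abs (MOD) = 1 and OP1 / MOD > 0,
   so the adjustment is +1 and 3.5 rounds away from zero to 4.
   For -7 / 2 the same test fires with OP1 / MOD < 0, giving -4.  */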

/* Return the difference between the rounded and the truncated result
   of an unsigned division by OP1 with remainder MOD.  Halfway cases
   are rounded away from zero, rather than to the nearest even
   number.  */
static rtx
round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod >= op1 - mod ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, mod,
                       gen_rtx_MINUS (mode, op1, mod)),
     const1_rtx, const0_rtx);
}
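
/* Worked example: for 7u / 2u, MOD = 1 and OP1 - MOD = 1, so
   MOD >= OP1 - MOD holds and the adjustment is +1: 3.5 rounds up
   to 4.  For 7u / 3u, MOD = 1 < OP1 - MOD = 2, so no adjustment.  */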

/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
   any rtl.  */

static rtx
convert_debug_memory_address (machine_mode mode, rtx x,
                              addr_space_t as)
{
  machine_mode xmode = GET_MODE (x);

#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (mode == Pmode
              || mode == targetm.addr_space.address_mode (as));
  gcc_assert (xmode == mode || xmode == VOIDmode);
#else
  rtx temp;

  gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));

  if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
    return x;

  if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
    x = lowpart_subreg (mode, x, xmode);
  else if (POINTERS_EXTEND_UNSIGNED > 0)
    x = gen_rtx_ZERO_EXTEND (mode, x);
  else if (!POINTERS_EXTEND_UNSIGNED)
    x = gen_rtx_SIGN_EXTEND (mode, x);
  else
    {
      switch (GET_CODE (x))
        {
        case SUBREG:
          if ((SUBREG_PROMOTED_VAR_P (x)
               || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
               || (GET_CODE (SUBREG_REG (x)) == PLUS
                   && REG_P (XEXP (SUBREG_REG (x), 0))
                   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
                   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
              && GET_MODE (SUBREG_REG (x)) == mode)
            return SUBREG_REG (x);
          break;
        case LABEL_REF:
          temp = gen_rtx_LABEL_REF (mode, LABEL_REF_LABEL (x));
          LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
          return temp;
        case SYMBOL_REF:
          temp = shallow_copy_rtx (x);
          PUT_MODE (temp, mode);
          return temp;
        case CONST:
          temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
          if (temp)
            temp = gen_rtx_CONST (mode, temp);
          return temp;
        case PLUS:
        case MINUS:
          if (CONST_INT_P (XEXP (x, 1)))
            {
              temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
              if (temp)
                return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
            }
          break;
        default:
          break;
        }
      /* Don't know how to express ptr_extend as an operation in
         debug info.  */
      return NULL;
    }
#endif /* POINTERS_EXTEND_UNSIGNED */

  return x;
}

/* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
   by avoid_deep_ter_for_debug.  */

static hash_map<tree, tree> *deep_ter_debug_map;

/* Split too deep TER chains for debug stmts using debug temporaries.  */

static void
avoid_deep_ter_for_debug (gimple *stmt, int depth)
{
  use_operand_p use_p;
  ssa_op_iter iter;
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree use = USE_FROM_PTR (use_p);
      if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
        continue;
      gimple *g = get_gimple_for_ssa_name (use);
      if (g == NULL)
        continue;
      if (depth > 6 && !stmt_ends_bb_p (g))
        {
          if (deep_ter_debug_map == NULL)
            deep_ter_debug_map = new hash_map<tree, tree>;

          tree &vexpr = deep_ter_debug_map->get_or_insert (use);
          if (vexpr != NULL)
            continue;
          vexpr = make_node (DEBUG_EXPR_DECL);
          gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (use);
          DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (use));
          gimple_stmt_iterator gsi = gsi_for_stmt (g);
          gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
          avoid_deep_ter_for_debug (def_temp, 0);
        }
      else
        avoid_deep_ter_for_debug (g, depth + 1);
    }
}
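
/* Sketch of the effect: given TERed single-use chains such as

     a_1 = x_0 + 1;  b_2 = a_1 * 2;  ...  h_8 = g_7 - 3;

   a debug stmt using h_8 would otherwise be expanded by substituting
   every definition in the chain into one huge tree.  Once the
   recursion exceeds depth 6, a DEBUG_EXPR_DECL (D#1) is bound to the
   offending use and the rest of the chain refers to D#1 instead.  */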

/* Return an RTX equivalent to the value of the parameter DECL.  */

static rtx
expand_debug_parm_decl (tree decl)
{
  rtx incoming = DECL_INCOMING_RTL (decl);

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
          || (MEM_P (incoming)
              && REG_P (XEXP (incoming, 0))
              && HARD_REGISTER_P (XEXP (incoming, 0)))))
    {
      rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));

#ifdef HAVE_window_save
      /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
         If the target machine has an explicit window save instruction, the
         actual entry value is the corresponding OUTGOING_REGNO instead.  */
      if (REG_P (incoming)
          && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
        incoming
          = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
                                OUTGOING_REGNO (REGNO (incoming)), 0);
      else if (MEM_P (incoming))
        {
          rtx reg = XEXP (incoming, 0);
          if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
            {
              reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
              incoming = replace_equiv_address_nv (incoming, reg);
            }
          else
            incoming = copy_rtx (incoming);
        }
#endif

      ENTRY_VALUE_EXP (rtl) = incoming;
      return rtl;
    }

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && !TREE_ADDRESSABLE (decl)
      && MEM_P (incoming)
      && (XEXP (incoming, 0) == virtual_incoming_args_rtx
          || (GET_CODE (XEXP (incoming, 0)) == PLUS
              && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
              && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
    return copy_rtx (incoming);

  return NULL_RTX;
}

/* Return an RTX equivalent to the value of the tree expression EXP.  */

static rtx
expand_debug_expr (tree exp)
{
  rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  machine_mode inner_mode = VOIDmode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
  addr_space_t as;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_expression:
      switch (TREE_CODE (exp))
        {
        case COND_EXPR:
        case DOT_PROD_EXPR:
        case SAD_EXPR:
        case WIDEN_MULT_PLUS_EXPR:
        case WIDEN_MULT_MINUS_EXPR:
        case FMA_EXPR:
          goto ternary;

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          goto binary;

        case TRUTH_NOT_EXPR:
          goto unary;

        default:
          break;
        }
      break;

    ternary:
      op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
      if (!op2)
        return NULL_RTX;
      /* Fall through.  */

    binary:
    case tcc_binary:
      op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
      if (!op1)
        return NULL_RTX;
      switch (TREE_CODE (exp))
        {
        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
        case LROTATE_EXPR:
        case RROTATE_EXPR:
        case WIDEN_LSHIFT_EXPR:
          /* Ensure second operand isn't wider than the first one.  */
          inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
          if (SCALAR_INT_MODE_P (inner_mode))
            {
              machine_mode opmode = mode;
              if (VECTOR_MODE_P (mode))
                opmode = GET_MODE_INNER (mode);
              if (SCALAR_INT_MODE_P (opmode)
                  && (GET_MODE_PRECISION (opmode)
                      < GET_MODE_PRECISION (inner_mode)))
                op1 = lowpart_subreg (opmode, op1, inner_mode);
            }
          break;
        default:
          break;
        }
      /* Fall through.  */

    unary:
    case tcc_unary:
      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0)
        return NULL_RTX;
      break;

    case tcc_comparison:
      unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
      goto binary;

    case tcc_type:
    case tcc_statement:
      gcc_unreachable ();

    case tcc_constant:
    case tcc_exceptional:
    case tcc_declaration:
    case tcc_reference:
    case tcc_vl_exp:
      break;
    }

  switch (TREE_CODE (exp))
    {
    case STRING_CST:
      if (!lookup_constant_def (exp))
        {
          if (strlen (TREE_STRING_POINTER (exp)) + 1
              != (size_t) TREE_STRING_LENGTH (exp))
            return NULL_RTX;
          op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
          op0 = gen_rtx_MEM (BLKmode, op0);
          set_mem_attributes (op0, exp, 0);
          return op0;
        }
      /* Fall through.  */

    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
      op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
      return op0;

    case COMPLEX_CST:
      gcc_assert (COMPLEX_MODE_P (mode));
      op0 = expand_debug_expr (TREE_REALPART (exp));
      op1 = expand_debug_expr (TREE_IMAGPART (exp));
      return gen_rtx_CONCAT (mode, op0, op1);

    case DEBUG_EXPR_DECL:
      op0 = DECL_RTL_IF_SET (exp);

      if (op0)
        return op0;

      op0 = gen_rtx_DEBUG_EXPR (mode);
      DEBUG_EXPR_TREE_DECL (op0) = exp;
      SET_DECL_RTL (exp, op0);

      return op0;

    case VAR_DECL:
    case PARM_DECL:
    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case RESULT_DECL:
      op0 = DECL_RTL_IF_SET (exp);

      /* This decl was probably optimized away.  */
      if (!op0)
        {
          if (TREE_CODE (exp) != VAR_DECL
              || DECL_EXTERNAL (exp)
              || !TREE_STATIC (exp)
              || !DECL_NAME (exp)
              || DECL_HARD_REGISTER (exp)
              || DECL_IN_CONSTANT_POOL (exp)
              || mode == VOIDmode)
            return NULL;

          op0 = make_decl_rtl_for_debug (exp);
          if (!MEM_P (op0)
              || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
              || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
            return NULL;
        }
      else
        op0 = copy_rtx (op0);

      if (GET_MODE (op0) == BLKmode
          /* If op0 is not BLKmode, but mode is, adjust_mode
             below would ICE.  While it is likely a FE bug,
             try to be robust here.  See PR43166.  */
          || mode == BLKmode
          || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
        {
          gcc_assert (MEM_P (op0));
          op0 = adjust_address_nv (op0, mode, 0);
          return op0;
        }

      /* Fall through.  */

    adjust_mode:
    case PAREN_EXPR:
    CASE_CONVERT:
      {
        inner_mode = GET_MODE (op0);

        if (mode == inner_mode)
          return op0;

        if (inner_mode == VOIDmode)
          {
            if (TREE_CODE (exp) == SSA_NAME)
              inner_mode = TYPE_MODE (TREE_TYPE (exp));
            else
              inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
            if (mode == inner_mode)
              return op0;
          }

        if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
          {
            if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
              op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
            else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
              op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
            else
              op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
          }
        else if (FLOAT_MODE_P (mode))
          {
            gcc_assert (TREE_CODE (exp) != SSA_NAME);
            if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
              op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
            else
              op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
          }
        else if (FLOAT_MODE_P (inner_mode))
          {
            if (unsignedp)
              op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
            else
              op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
          }
        else if (CONSTANT_P (op0)
                 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
          op0 = lowpart_subreg (mode, op0, inner_mode);
        else if (UNARY_CLASS_P (exp)
                 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
                 : unsignedp)
          op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
        else
          op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);

        return op0;
      }

    case MEM_REF:
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
        {
          tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
                                     TREE_OPERAND (exp, 0),
                                     TREE_OPERAND (exp, 1));
          if (newexp)
            return expand_debug_expr (newexp);
        }
      /* FALLTHROUGH */
    case INDIRECT_REF:
      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0)
        return NULL;

      if (TREE_CODE (exp) == MEM_REF)
        {
          if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
              || (GET_CODE (op0) == PLUS
                  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
            /* (mem (debug_implicit_ptr)) might confuse aliasing.
               Instead just use get_inner_reference.  */
            goto component_ref;

          op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
          if (!op1 || !CONST_INT_P (op1))
            return NULL;

          op0 = plus_constant (inner_mode, op0, INTVAL (op1));
        }

      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));

      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
                                          op0, as);
      if (op0 == NULL_RTX)
        return NULL;

      op0 = gen_rtx_MEM (mode, op0);
      set_mem_attributes (op0, exp, 0);
      if (TREE_CODE (exp) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
        set_mem_expr (op0, NULL_TREE);
      set_mem_addr_space (op0, as);

      return op0;

    case TARGET_MEM_REF:
      if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
          && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
        return NULL;

      op0 = expand_debug_expr
            (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
      if (!op0)
        return NULL;

      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
                                          op0, as);
      if (op0 == NULL_RTX)
        return NULL;

      op0 = gen_rtx_MEM (mode, op0);

      set_mem_attributes (op0, exp, 0);
      set_mem_addr_space (op0, as);

      return op0;

    component_ref:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
        machine_mode mode1;
        HOST_WIDE_INT bitsize, bitpos;
        tree offset;
        int reversep, volatilep = 0;
        tree tem
          = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &reversep, &volatilep);
        rtx orig_op0;

        if (bitsize == 0)
          return NULL;

        orig_op0 = op0 = expand_debug_expr (tem);

        if (!op0)
          return NULL;

        if (offset)
          {
            machine_mode addrmode, offmode;

            if (!MEM_P (op0))
              return NULL;

            op0 = XEXP (op0, 0);
            addrmode = GET_MODE (op0);
            if (addrmode == VOIDmode)
              addrmode = Pmode;

            op1 = expand_debug_expr (offset);
            if (!op1)
              return NULL;

            offmode = GET_MODE (op1);
            if (offmode == VOIDmode)
              offmode = TYPE_MODE (TREE_TYPE (offset));

            if (addrmode != offmode)
              op1 = lowpart_subreg (addrmode, op1, offmode);

            /* Don't use offset_address here, we don't need a
               recognizable address, and we don't want to generate
               code.  */
            op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
                                                          op0, op1));
          }

        if (MEM_P (op0))
          {
            if (mode1 == VOIDmode)
              /* Bitfield.  */
              mode1 = smallest_mode_for_size (bitsize, MODE_INT);
            if (bitpos >= BITS_PER_UNIT)
              {
                op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
                bitpos %= BITS_PER_UNIT;
              }
            else if (bitpos < 0)
              {
                HOST_WIDE_INT units
                  = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
                op0 = adjust_address_nv (op0, mode1, -units);
                bitpos += units * BITS_PER_UNIT;
              }
            else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
              op0 = adjust_address_nv (op0, mode, 0);
            else if (GET_MODE (op0) != mode1)
              op0 = adjust_address_nv (op0, mode1, 0);
            else
              op0 = copy_rtx (op0);
            if (op0 == orig_op0)
              op0 = shallow_copy_rtx (op0);
            set_mem_attributes (op0, exp, 0);
          }

        if (bitpos == 0 && mode == GET_MODE (op0))
          return op0;

        if (bitpos < 0)
          return NULL;

        if (GET_MODE (op0) == BLKmode)
          return NULL;

        if ((bitpos % BITS_PER_UNIT) == 0
            && bitsize == GET_MODE_BITSIZE (mode1))
          {
            machine_mode opmode = GET_MODE (op0);

            if (opmode == VOIDmode)
              opmode = TYPE_MODE (TREE_TYPE (tem));

            /* This condition may hold if we're expanding the address
               right past the end of an array that turned out not to
               be addressable (i.e., the address was only computed in
               debug stmts).  The gen_subreg below would rightfully
               crash, and the address doesn't really exist, so just
               drop it.  */
            if (bitpos >= GET_MODE_BITSIZE (opmode))
              return NULL;

            if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
              return simplify_gen_subreg (mode, op0, opmode,
                                          bitpos / BITS_PER_UNIT);
          }

        return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
                                     && TYPE_UNSIGNED (TREE_TYPE (exp))
                                     ? SIGN_EXTRACT
                                     : ZERO_EXTRACT, mode,
                                     GET_MODE (op0) != VOIDmode
                                     ? GET_MODE (op0)
                                     : TYPE_MODE (TREE_TYPE (tem)),
                                     op0, GEN_INT (bitsize), GEN_INT (bitpos));
      }

    case ABS_EXPR:
      return simplify_gen_unary (ABS, mode, op0, mode);

    case NEGATE_EXPR:
      return simplify_gen_unary (NEG, mode, op0, mode);

    case BIT_NOT_EXPR:
      return simplify_gen_unary (NOT, mode, op0, mode);

    case FLOAT_EXPR:
      return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
                                                                         0)))
                                 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
                                 inner_mode);

    case FIX_TRUNC_EXPR:
      return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
                                 inner_mode);

    case POINTER_PLUS_EXPR:
      /* For the rare target where pointers are not the same size as
         size_t, we need to check for mis-matched modes and correct
         the addend.  */
      if (op0 && op1
          && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
          && GET_MODE (op0) != GET_MODE (op1))
        {
          if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
              /* If OP0 is a partial mode, then we must truncate, even if it has
                 the same bitsize as OP1 as GCC's representation of partial modes
                 is opaque.  */
              || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
                  && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
            op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
                                      GET_MODE (op1));
          else
            /* We always sign-extend, regardless of the signedness of
               the operand, because the operand is always unsigned
               here even if the original C expression is signed.  */
            op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
                                      GET_MODE (op1));
        }
      /* Fall through.  */
    case PLUS_EXPR:
      return simplify_gen_binary (PLUS, mode, op0, op1);

    case MINUS_EXPR:
      return simplify_gen_binary (MINUS, mode, op0, op1);

    case MULT_EXPR:
      return simplify_gen_binary (MULT, mode, op0, op1);

    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (unsignedp)
        return simplify_gen_binary (UDIV, mode, op0, op1);
      else
        return simplify_gen_binary (DIV, mode, op0, op1);

    case TRUNC_MOD_EXPR:
      return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);

    case FLOOR_DIV_EXPR:
      if (unsignedp)
        return simplify_gen_binary (UDIV, mode, op0, op1);
      else
        {
          rtx div = simplify_gen_binary (DIV, mode, op0, op1);
          rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
          rtx adj = floor_sdiv_adjust (mode, mod, op1);
          return simplify_gen_binary (PLUS, mode, div, adj);
        }

    case FLOOR_MOD_EXPR:
      if (unsignedp)
        return simplify_gen_binary (UMOD, mode, op0, op1);
      else
        {
          rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
          rtx adj = floor_sdiv_adjust (mode, mod, op1);
          adj = simplify_gen_unary (NEG, mode,
                                    simplify_gen_binary (MULT, mode, adj, op1),
                                    mode);
          return simplify_gen_binary (PLUS, mode, mod, adj);
        }

    case CEIL_DIV_EXPR:
      if (unsignedp)
        {
          rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
          rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
          rtx adj = ceil_udiv_adjust (mode, mod, op1);
          return simplify_gen_binary (PLUS, mode, div, adj);
        }
      else
        {
          rtx div = simplify_gen_binary (DIV, mode, op0, op1);
          rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
          rtx adj = ceil_sdiv_adjust (mode, mod, op1);
          return simplify_gen_binary (PLUS, mode, div, adj);
        }

    case CEIL_MOD_EXPR:
      if (unsignedp)
        {
          rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
          rtx adj = ceil_udiv_adjust (mode, mod, op1);
          adj = simplify_gen_unary (NEG, mode,
                                    simplify_gen_binary (MULT, mode, adj, op1),
                                    mode);
          return simplify_gen_binary (PLUS, mode, mod, adj);
        }
      else
        {
          rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
          rtx adj = ceil_sdiv_adjust (mode, mod, op1);
          adj = simplify_gen_unary (NEG, mode,
                                    simplify_gen_binary (MULT, mode, adj, op1),
                                    mode);
          return simplify_gen_binary (PLUS, mode, mod, adj);
        }

    case ROUND_DIV_EXPR:
      if (unsignedp)
        {
          rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
          rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
          rtx adj = round_udiv_adjust (mode, mod, op1);
          return simplify_gen_binary (PLUS, mode, div, adj);
        }
      else
        {
          rtx div = simplify_gen_binary (DIV, mode, op0, op1);
          rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
          rtx adj = round_sdiv_adjust (mode, mod, op1);
          return simplify_gen_binary (PLUS, mode, div, adj);
        }

    case ROUND_MOD_EXPR:
      if (unsignedp)
        {
          rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
          rtx adj = round_udiv_adjust (mode, mod, op1);
          adj = simplify_gen_unary (NEG, mode,
                                    simplify_gen_binary (MULT, mode, adj, op1),
                                    mode);
          return simplify_gen_binary (PLUS, mode, mod, adj);
        }
      else
        {
          rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
          rtx adj = round_sdiv_adjust (mode, mod, op1);
          adj = simplify_gen_unary (NEG, mode,
                                    simplify_gen_binary (MULT, mode, adj, op1),
                                    mode);
          return simplify_gen_binary (PLUS, mode, mod, adj);
        }

    case LSHIFT_EXPR:
      return simplify_gen_binary (ASHIFT, mode, op0, op1);

    case RSHIFT_EXPR:
      if (unsignedp)
        return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
      else
        return simplify_gen_binary (ASHIFTRT, mode, op0, op1);

    case LROTATE_EXPR:
      return simplify_gen_binary (ROTATE, mode, op0, op1);

    case RROTATE_EXPR:
      return simplify_gen_binary (ROTATERT, mode, op0, op1);

    case MIN_EXPR:
      return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);

    case MAX_EXPR:
      return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);

    case BIT_AND_EXPR:
    case TRUTH_AND_EXPR:
      return simplify_gen_binary (AND, mode, op0, op1);

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      return simplify_gen_binary (IOR, mode, op0, op1);

    case BIT_XOR_EXPR:
    case TRUTH_XOR_EXPR:
      return simplify_gen_binary (XOR, mode, op0, op1);

    case TRUTH_ANDIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);

    case TRUTH_ORIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);

    case TRUTH_NOT_EXPR:
      return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);

    case LT_EXPR:
      return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
                                      op0, op1);

    case LE_EXPR:
      return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
                                      op0, op1);

    case GT_EXPR:
      return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
                                      op0, op1);

    case GE_EXPR:
      return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
                                      op0, op1);

    case EQ_EXPR:
      return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);

    case NE_EXPR:
      return simplify_gen_relational (NE, mode, inner_mode, op0, op1);

    case UNORDERED_EXPR:
      return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);

    case ORDERED_EXPR:
      return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);

    case UNLT_EXPR:
      return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);

    case UNLE_EXPR:
      return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);

    case UNGT_EXPR:
      return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);

    case UNGE_EXPR:
      return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);

    case UNEQ_EXPR:
      return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);

    case LTGT_EXPR:
      return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);

    case COND_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);

    case COMPLEX_EXPR:
      gcc_assert (COMPLEX_MODE_P (mode));
      if (GET_MODE (op0) == VOIDmode)
        op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
      if (GET_MODE (op1) == VOIDmode)
        op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
      return gen_rtx_CONCAT (mode, op0, op1);

    case CONJ_EXPR:
      if (GET_CODE (op0) == CONCAT)
        return gen_rtx_CONCAT (mode, XEXP (op0, 0),
                               simplify_gen_unary (NEG, GET_MODE_INNER (mode),
                                                   XEXP (op0, 1),
                                                   GET_MODE_INNER (mode)));
      else
        {
          machine_mode imode = GET_MODE_INNER (mode);
          rtx re, im;

          if (MEM_P (op0))
            {
              re = adjust_address_nv (op0, imode, 0);
              im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
            }
          else
            {
              machine_mode ifmode = int_mode_for_mode (mode);
              machine_mode ihmode = int_mode_for_mode (imode);
              rtx halfsize;
              if (ifmode == BLKmode || ihmode == BLKmode)
                return NULL;
              halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
              re = op0;
              if (mode != ifmode)
                re = gen_rtx_SUBREG (ifmode, re, 0);
              re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
              if (imode != ihmode)
                re = gen_rtx_SUBREG (imode, re, 0);
              im = copy_rtx (op0);
              if (mode != ifmode)
                im = gen_rtx_SUBREG (ifmode, im, 0);
              im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
              if (imode != ihmode)
                im = gen_rtx_SUBREG (imode, im, 0);
            }
          im = gen_rtx_NEG (imode, im);
          return gen_rtx_CONCAT (mode, re, im);
        }

    case ADDR_EXPR:
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0 || !MEM_P (op0))
        {
          if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
               || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
               || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
              && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
                  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
            return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));

          if (handled_component_p (TREE_OPERAND (exp, 0)))
            {
              HOST_WIDE_INT bitoffset, bitsize, maxsize;
              bool reverse;
              tree decl
                = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
                                           &bitsize, &maxsize, &reverse);
              if ((TREE_CODE (decl) == VAR_DECL
                   || TREE_CODE (decl) == PARM_DECL
                   || TREE_CODE (decl) == RESULT_DECL)
                  && (!TREE_ADDRESSABLE (decl)
                      || target_for_debug_bind (decl))
                  && (bitoffset % BITS_PER_UNIT) == 0
                  && bitsize > 0
                  && bitsize == maxsize)
                {
                  rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
                  return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
                }
            }

          if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
              && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
                 == ADDR_EXPR)
            {
              op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0));
              if (op0 != NULL
                  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
                      || (GET_CODE (op0) == PLUS
                          && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
                          && CONST_INT_P (XEXP (op0, 1)))))
                {
                  op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                         1));
                  if (!op1 || !CONST_INT_P (op1))
                    return NULL;

                  return plus_constant (mode, op0, INTVAL (op1));
                }
            }

          return NULL;
        }

      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);

      return op0;

    case VECTOR_CST:
      {
        unsigned i;

        op0 = gen_rtx_CONCATN
              (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));

        for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
          {
            op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
            if (!op1)
              return NULL;
            XVECEXP (op0, 0, i) = op1;
          }

        return op0;
      }

    case CONSTRUCTOR:
      if (TREE_CLOBBER_P (exp))
        return NULL;
      else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
        {
          unsigned i;
          tree val;

          op0 = gen_rtx_CONCATN
                (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));

          FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
            {
              op1 = expand_debug_expr (val);
              if (!op1)
                return NULL;
              XVECEXP (op0, 0, i) = op1;
            }

          if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
            {
              op1 = expand_debug_expr
                    (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));

              if (!op1)
                return NULL;

              for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
                XVECEXP (op0, 0, i) = op1;
            }

          return op0;
        }
      else
        goto flag_unsupported;

    case CALL_EXPR:
      /* ??? Maybe handle some builtins?  */
      return NULL;

    case SSA_NAME:
      {
        gimple *g = get_gimple_for_ssa_name (exp);
        if (g)
          {
            tree t = NULL_TREE;
            if (deep_ter_debug_map)
              {
                tree *slot = deep_ter_debug_map->get (exp);
                if (slot)
                  t = *slot;
              }
            if (t == NULL_TREE)
              t = gimple_assign_rhs_to_tree (g);
            op0 = expand_debug_expr (t);
            if (!op0)
              return NULL;
          }
        else
          {
            /* If this is a reference to the incoming value of a
               parameter that is never used in the code, or whose
               incoming value is never used in the code, use the
               PARM_DECL's DECL_RTL if set.  */
            if (SSA_NAME_IS_DEFAULT_DEF (exp)
                && SSA_NAME_VAR (exp)
                && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
                && has_zero_uses (exp))
              {
                op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
                if (op0)
                  goto adjust_mode;
                op0 = expand_debug_expr (SSA_NAME_VAR (exp));
                if (op0)
                  goto adjust_mode;
              }

            int part = var_to_partition (SA.map, exp);

            if (part == NO_PARTITION)
              return NULL;

            gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);

            op0 = copy_rtx (SA.partition_to_pseudo[part]);
          }
        goto adjust_mode;
      }

    case ERROR_MARK:
      return NULL;

    /* Vector stuff.  For most of these codes we don't have corresponding
       rtl codes.  */
    case REALIGN_LOAD_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case VEC_COND_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
    case VEC_PERM_EXPR:
      return NULL;

98449720 5055 /* Misc codes. */
7ece48b1
JJ
5056 case ADDR_SPACE_CONVERT_EXPR:
5057 case FIXED_CONVERT_EXPR:
5058 case OBJ_TYPE_REF:
5059 case WITH_SIZE_EXPR:
483c6429 5060 case BIT_INSERT_EXPR:
7ece48b1
JJ
5061 return NULL;
5062
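    /* A dot-product can be approximated for debug purposes when both
       the operand mode and the result mode are scalar integers: extend
       the multiplicands to the result mode, multiply, and add the
       accumulator, i.e. (plus (mult (ext op0) (ext op1)) op2).  */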
    case DOT_PROD_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
          && SCALAR_INT_MODE_P (mode))
        {
          op0
            = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
                                                                          0)))
                                  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
                                  inner_mode);
          op1
            = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
                                                                          1)))
                                  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
                                  inner_mode);
          op0 = simplify_gen_binary (MULT, mode, op0, op1);
          return simplify_gen_binary (PLUS, mode, op0, op2);
        }
      return NULL;

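    /* Widening multiplications follow the same scheme: each operand is
       zero- or sign-extended according to its own signedness and the
       extended values are multiplied in the wider mode; for the _PLUS
       and _MINUS variants the product is then added to or subtracted
       from the third operand.  */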
    case WIDEN_MULT_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
          && SCALAR_INT_MODE_P (mode))
        {
          inner_mode = GET_MODE (op0);
          if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
            op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
          else
            op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
          if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
            op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
          else
            op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
          op0 = simplify_gen_binary (MULT, mode, op0, op1);
          if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
            return op0;
          else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
            return simplify_gen_binary (PLUS, mode, op0, op2);
          else
            return simplify_gen_binary (MINUS, mode, op2, op0);
        }
      return NULL;

    case MULT_HIGHPART_EXPR:
      /* ??? Similar to the above.  */
      return NULL;

    case WIDEN_SUM_EXPR:
    case WIDEN_LSHIFT_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
          && SCALAR_INT_MODE_P (mode))
        {
          op0
            = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
                                                                          0)))
                                  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
                                  inner_mode);
          return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
                                      ? ASHIFT : PLUS, mode, op0, op1);
        }
      return NULL;

    case FMA_EXPR:
      return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);

    default:
    flag_unsupported:
      if (flag_checking)
        {
          debug_tree (exp);
          gcc_unreachable ();
        }
      return NULL;
    }
}

/* Return an RTX equivalent to the source bind value of the tree expression
   EXP.  */
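/* Such bindings come from gimple_debug_source_bind statements, whose
   locations are marked VAR_INIT_STATUS_UNINITIALIZED; for a parameter
   whose incoming value was optimized away entirely this can yield a
   DEBUG_PARAMETER_REF pointing at the abstract origin.  */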

static rtx
expand_debug_source_expr (tree exp)
{
  rtx op0 = NULL_RTX;
  machine_mode mode = VOIDmode, inner_mode;

  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
      {
        mode = DECL_MODE (exp);
        op0 = expand_debug_parm_decl (exp);
        if (op0)
          break;
        /* See if this isn't an argument that has been completely
           optimized out.  */
        if (!DECL_RTL_SET_P (exp)
            && !DECL_INCOMING_RTL (exp)
            && DECL_ABSTRACT_ORIGIN (current_function_decl))
          {
            tree aexp = DECL_ORIGIN (exp);
            if (DECL_CONTEXT (aexp)
                == DECL_ABSTRACT_ORIGIN (current_function_decl))
              {
                vec<tree, va_gc> **debug_args;
                unsigned int ix;
                tree ddecl;
                debug_args = decl_debug_args_lookup (current_function_decl);
                if (debug_args != NULL)
                  {
                    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
                         ix += 2)
                      if (ddecl == aexp)
                        return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
                  }
              }
          }
        break;
      }
    default:
      break;
    }

  if (op0 == NULL_RTX)
    return NULL_RTX;

  inner_mode = GET_MODE (op0);
  if (mode == inner_mode)
    return op0;

  if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
    {
      if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
        op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
      else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
        op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
      else
        op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
    }
  else if (FLOAT_MODE_P (mode))
    gcc_unreachable ();
  else if (FLOAT_MODE_P (inner_mode))
    {
      if (TYPE_UNSIGNED (TREE_TYPE (exp)))
        op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
      else
        op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
    }
  else if (CONSTANT_P (op0)
           || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
    op0 = lowpart_subreg (mode, op0, inner_mode);
  else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
  else
    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);

  return op0;
}

/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
   Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
   deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */
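/* For example, when recursion reaches depth 4 inside a location such as
   (plus (mult (plus (mult ...) ...) ...) ...), the offending subtree is
   split out into a DEBUG_EXPR whose value is supplied by a newly
   emitted debug bind insn, keeping every individual location shallow.  */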

static void
avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
{
  rtx exp = *exp_p;

  if (exp == NULL_RTX)
    return;

  if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
    return;

  if (depth == 4)
    {
      /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
      rtx dval = make_debug_expr_from_rtl (exp);

      /* Emit a debug bind insn before INSN.  */
      rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
                                       DEBUG_EXPR_TREE_DECL (dval), exp,
                                       VAR_INIT_STATUS_INITIALIZED);

      emit_debug_insn_before (bind, insn);
      *exp_p = dval;
      return;
    }

  const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
  int i, j;
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
        break;

      case 'E':
      case 'V':
        for (j = 0; j < XVECLEN (exp, i); j++)
          avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
        break;

      default:
        break;
      }
}

/* Expand the _LOCs in debug insns.  We run this after expanding all
   regular insns, so that any variables referenced in the function
   will have their DECL_RTLs set.  */

static void
expand_debug_locations (void)
{
  rtx_insn *insn;
  rtx_insn *last = get_last_insn ();
  int save_strict_alias = flag_strict_aliasing;

  /* New alias sets while setting up memory attributes cause
     -fcompare-debug failures, even though they don't bring about any
     codegen changes.  */
  flag_strict_aliasing = 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (DEBUG_INSN_P (insn))
      {
        tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
        rtx val;
        rtx_insn *prev_insn, *insn2;
        machine_mode mode;

        if (value == NULL_TREE)
          val = NULL_RTX;
        else
          {
            if (INSN_VAR_LOCATION_STATUS (insn)
                == VAR_INIT_STATUS_UNINITIALIZED)
              val = expand_debug_source_expr (value);
            /* The avoid_deep_ter_for_debug function inserts
               debug bind stmts after an SSA_NAME definition, with the
               SSA_NAME as the whole bind location.  Temporarily disable
               expansion of that SSA_NAME into the DEBUG_EXPR_DECL
               being defined in this DEBUG_INSN.  */
            else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
              {
                tree *slot = deep_ter_debug_map->get (value);
                if (slot)
                  {
                    if (*slot == INSN_VAR_LOCATION_DECL (insn))
                      *slot = NULL_TREE;
                    else
                      slot = NULL;
                  }
                val = expand_debug_expr (value);
                if (slot)
                  *slot = INSN_VAR_LOCATION_DECL (insn);
              }
            else
              val = expand_debug_expr (value);
            gcc_assert (last == get_last_insn ());
          }

        if (!val)
          val = gen_rtx_UNKNOWN_VAR_LOC ();
        else
          {
            mode = GET_MODE (INSN_VAR_LOCATION (insn));

            gcc_assert (mode == GET_MODE (val)
                        || (GET_MODE (val) == VOIDmode
                            && (CONST_SCALAR_INT_P (val)
                                || GET_CODE (val) == CONST_FIXED
                                || GET_CODE (val) == LABEL_REF)));
          }

        INSN_VAR_LOCATION_LOC (insn) = val;
        prev_insn = PREV_INSN (insn);
        for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
          avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
      }

  flag_strict_aliasing = save_strict_alias;
}


/* Perform swapping of the operands of commutative operations so that
   the more expensive operand is expanded first.  */
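/* The cost of a statement is its own estimate_num_insns size plus,
   transitively, the cost of the single-use (TERed) defining statements
   of its SSA operands, since those are expanded as part of this
   statement; the operand with the costlier definition chain is moved
   into the first position.  */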

static void
reorder_operands (basic_block bb)
{
  unsigned int *lattice;  /* Hold cost of each statement.  */
  unsigned int i = 0, n = 0;
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple *stmt;
  bool swap;
  tree op0, op1;
  ssa_op_iter iter;
  use_operand_p use_p;
  gimple *def0, *def1;

  /* Compute cost of each statement using estimate_num_insns.  */
  stmts = bb_seq (bb);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (!is_gimple_debug (stmt))
        gimple_set_uid (stmt, n++);
    }
  lattice = XNEWVEC (unsigned int, n);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      unsigned cost;
      stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
        continue;
      cost = estimate_num_insns (stmt, &eni_size_weights);
      lattice[i] = cost;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
        {
          tree use = USE_FROM_PTR (use_p);
          gimple *def_stmt;
          if (TREE_CODE (use) != SSA_NAME)
            continue;
          def_stmt = get_gimple_for_ssa_name (use);
          if (!def_stmt)
            continue;
          lattice[i] += lattice[gimple_uid (def_stmt)];
        }
      i++;
      if (!is_gimple_assign (stmt)
          || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
        continue;
      op0 = gimple_op (stmt, 1);
      op1 = gimple_op (stmt, 2);
      if (TREE_CODE (op0) != SSA_NAME
          || TREE_CODE (op1) != SSA_NAME)
        continue;
      /* Swap operands if the second one is more expensive.  */
      def0 = get_gimple_for_ssa_name (op0);
      def1 = get_gimple_for_ssa_name (op1);
      if (!def1)
        continue;
      swap = false;
      if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
        swap = true;
      if (swap)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Swap operands in stmt:\n");
              print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
              fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
                       def0 ? lattice[gimple_uid (def0)] : 0,
                       lattice[gimple_uid (def1)]);
            }
          swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
                             gimple_assign_rhs2_ptr (stmt));
        }
    }
  XDELETE (lattice);
}

/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
{
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple *stmt = NULL;
  rtx_note *note;
  rtx_insn *last;
  edge e;
  edge_iterator ei;

  if (dump_file)
    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
             bb->index);

  /* Note that since we are now transitioning from GIMPLE to RTL, we
     cannot use the gsi_*_bb() routines because they expect the basic
     block to be in GIMPLE, instead of RTL.  Therefore, we need to
     access the BB sequence directly.  */
  if (optimize)
    reorder_operands (bb);
  stmts = bb_seq (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
  rtl_profile_for_bb (bb);
  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
  gsi = gsi_last (stmts);
  if (!gsi_end_p (gsi)
      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    {
      greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));

      if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
          && !gimple_return_retval (ret_stmt))
        {
          gsi_remove (&gsi, false);
          single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
        }
    }

  gsi = gsi_start (stmts);
  if (!gsi_end_p (gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        stmt = NULL;
    }

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);

  if (stmt || elt)
    {
      last = get_last_insn ();

      if (stmt)
        {
          expand_gimple_stmt (stmt);
          gsi_next (&gsi);
        }

      if (elt)
        emit_label (*elt);

      /* Java emits line number notes at the top of labels.
         ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
        BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_gimple_stmt (stmt, last);
    }
  else
    BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    {
      basic_block new_bb;

      stmt = gsi_stmt (gsi);

      /* If this statement is a non-debug one, and we generate debug
         insns, then this one might be the last real use of a TERed
         SSA_NAME, but where there are still some debug uses further
         down.  Expanding the current SSA name in such further debug
         uses by their RHS might lead to wrong debug info, as coalescing
         might make the operands of such RHS be placed into the same
         pseudo as something else.  Like so:
           a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
           use(a_1);
           a_2 = ...
           #DEBUG ... => a_1
         As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
         If we now would expand a_1 by its RHS (a_0 + 1) in the debug use,
         the write to a_2 would actually have clobbered the place which
         formerly held a_0.

         So, instead of that, we recognize the situation, and generate
         debug temporaries at the last real use of TERed SSA names:
           a_1 = a_0 + 1;
           #DEBUG #D1 => a_1
           use(a_1);
           a_2 = ...
           #DEBUG ... => #D1  */
      if (MAY_HAVE_DEBUG_INSNS
          && SA.values
          && !is_gimple_debug (stmt))
        {
          ssa_op_iter iter;
          tree op;
          gimple *def;

          location_t sloc = curr_insn_location ();

          /* Look for SSA names that have their last use here (TERed
             names always have only one real use).  */
          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
            if ((def = get_gimple_for_ssa_name (op)))
              {
                imm_use_iterator imm_iter;
                use_operand_p use_p;
                bool have_debug_uses = false;

                FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
                  {
                    if (gimple_debug_bind_p (USE_STMT (use_p)))
                      {
                        have_debug_uses = true;
                        break;
                      }
                  }

                if (have_debug_uses)
                  {
                    /* OP is a TERed SSA name, with DEF its defining
                       statement, and where OP is used in further debug
                       instructions.  Generate a debug temporary, and
                       replace all uses of OP in debug insns with that
                       temporary.  */
                    gimple *debugstmt;
                    tree value = gimple_assign_rhs_to_tree (def);
                    tree vexpr = make_node (DEBUG_EXPR_DECL);
                    rtx val;
                    machine_mode mode;

                    set_curr_insn_location (gimple_location (def));

                    DECL_ARTIFICIAL (vexpr) = 1;
                    TREE_TYPE (vexpr) = TREE_TYPE (value);
                    if (DECL_P (value))
                      mode = DECL_MODE (value);
                    else
                      mode = TYPE_MODE (TREE_TYPE (value));
                    DECL_MODE (vexpr) = mode;

                    val = gen_rtx_VAR_LOCATION
                        (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);

                    emit_debug_insn (val);

                    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
                      {
                        if (!gimple_debug_bind_p (debugstmt))
                          continue;

                        FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
                          SET_USE (use_p, vexpr);

                        update_stmt (debugstmt);
                      }
                  }
              }
          set_curr_insn_location (sloc);
        }

      currently_expanding_gimple_stmt = stmt;

      /* Expand this statement, then evaluate the resulting RTL and
         fixup the CFG accordingly.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
          if (new_bb)
            return new_bb;
        }
      else if (gimple_debug_bind_p (stmt))
        {
          location_t sloc = curr_insn_location ();
          gimple_stmt_iterator nsi = gsi;

          for (;;)
            {
              tree var = gimple_debug_bind_get_var (stmt);
              tree value;
              rtx val;
              machine_mode mode;

              if (TREE_CODE (var) != DEBUG_EXPR_DECL
                  && TREE_CODE (var) != LABEL_DECL
                  && !target_for_debug_bind (var))
                goto delink_debug_stmt;

              if (gimple_debug_bind_has_value_p (stmt))
                value = gimple_debug_bind_get_value (stmt);
              else
                value = NULL_TREE;

              last = get_last_insn ();

              set_curr_insn_location (gimple_location (stmt));

              if (DECL_P (var))
                mode = DECL_MODE (var);
              else
                mode = TYPE_MODE (TREE_TYPE (var));

              val = gen_rtx_VAR_LOCATION
                (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);

              emit_debug_insn (val);

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  /* We can't dump the insn with a TREE where an RTX
                     is expected.  */
                  PAT_VAR_LOCATION_LOC (val) = const0_rtx;
                  maybe_dump_rtl_for_gimple_stmt (stmt, last);
                  PAT_VAR_LOCATION_LOC (val) = (rtx)value;
                }

            delink_debug_stmt:
              /* In order not to generate too many debug temporaries,
                 we delink all uses of debug statements we already expanded.
                 Therefore debug statements between definition and real
                 use of TERed SSA names will continue to use the SSA name,
                 and not be replaced with debug temps.  */
              delink_stmt_imm_use (stmt);

              gsi = nsi;
              gsi_next (&nsi);
              if (gsi_end_p (nsi))
                break;
              stmt = gsi_stmt (nsi);
              if (!gimple_debug_bind_p (stmt))
                break;
            }

          set_curr_insn_location (sloc);
        }
      else if (gimple_debug_source_bind_p (stmt))
        {
          location_t sloc = curr_insn_location ();
          tree var = gimple_debug_source_bind_get_var (stmt);
          tree value = gimple_debug_source_bind_get_value (stmt);
          rtx val;
          machine_mode mode;

          last = get_last_insn ();

          set_curr_insn_location (gimple_location (stmt));

          mode = DECL_MODE (var);

          val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
                                      VAR_INIT_STATUS_UNINITIALIZED);

          emit_debug_insn (val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              /* We can't dump the insn with a TREE where an RTX
                 is expected.  */
              PAT_VAR_LOCATION_LOC (val) = const0_rtx;
              maybe_dump_rtl_for_gimple_stmt (stmt, last);
              PAT_VAR_LOCATION_LOC (val) = (rtx)value;
            }

          set_curr_insn_location (sloc);
        }
      else
        {
          gcall *call_stmt = dyn_cast <gcall *> (stmt);
          if (call_stmt
              && gimple_call_tail_p (call_stmt)
              && disable_tail_calls)
            gimple_call_set_tail (call_stmt, false);

          if (call_stmt && gimple_call_tail_p (call_stmt))
            {
              bool can_fallthru;
              new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
              if (new_bb)
                {
                  if (can_fallthru)
                    bb = new_bb;
                  else
                    return new_bb;
                }
            }
          else
            {
              def_operand_p def_p;
              def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);

              if (def_p != NULL)
                {
                  /* Ignore this stmt if it is in the list of
                     replaceable expressions.  */
                  if (SA.values
                      && bitmap_bit_p (SA.values,
                                       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
                    continue;
                }
              last = expand_gimple_stmt (stmt);
              maybe_dump_rtl_for_gimple_stmt (stmt, last);
            }
        }
    }

  currently_expanding_gimple_stmt = NULL;

  /* Expand implicit goto and convert goto_locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->goto_locus != UNKNOWN_LOCATION)
        set_curr_insn_location (e->goto_locus);
      if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
        {
          emit_jump (label_rtx_for_bb (e->dest));
          e->flags &= ~EDGE_FALLTHRU;
        }
    }

  /* Expanded RTL can create a jump in the last instruction of a block.
     This jump might later be assumed to be a jump to the successor and
     break edge insertion.  We need to insert a dummy move to prevent
     this.  PR41440.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
      && (last = get_last_insn ())
      && JUMP_P (last))
    {
      rtx dummy = gen_reg_rtx (SImode);
      emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}


/* Create a basic block for initialization code.  */
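/* All RTL emitted so far (variable setup and the expand_function_start
   output) is swallowed into this block, which is then spliced onto the
   edge from the ENTRY block to the first real basic block.  */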

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);

  /* When the entry edge points to the first basic block, we don't need
     a jump; otherwise we have to jump to the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
    {
      tree label = gimple_block_label (e->dest);

      emit_jump (jump_target_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
                                   get_last_insn (),
                                   ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
  init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;

  update_bb_for_insn (init_block);
  return init_block;
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */
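/* E.g. the outermost scope of the function gets depth 0 and each
   nested lexical scope one more; final's change_scope relies on these
   numbers to find the common ancestor of two scopes quickly.  */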

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}

/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx_insn *head = get_last_insn ();
  rtx_insn *end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;
  basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
  rtx_insn *orig_end = BB_END (prev_bb);

  rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  /* While emitting the function end we could move the end of the last
     basic block.  */
  BB_END (prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  /* But make sure exit_block starts with RETURN_LABEL, otherwise the
     bb frequency counting will be confused.  Any instructions before that
     label are emitted for the case where PREV_BB falls through into the
     exit block, so append those instructions to prev_bb in that case.  */
  if (NEXT_INSN (head) != return_label)
    {
      while (NEXT_INSN (head) != return_label)
        {
          if (!NOTE_P (NEXT_INSN (head)))
            BB_END (prev_bb) = NEXT_INSN (head);
          head = NEXT_INSN (head);
        }
    }
  exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
  exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
      if (!(e->flags & EDGE_ABNORMAL))
        redirect_edge_succ (e, exit_block);
      else
        ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e2 != e)
      {
        e->count -= e2->count;
        exit_block->count -= e2->count;
        exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (e->count < 0)
    e->count = 0;
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}

/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
                                   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
              && is_gimple_min_invariant (TREE_OPERAND (t, 1))
              && (!TREE_OPERAND (t, 2)
                  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || (TREE_CODE (t) == COMPONENT_REF
                 && (!TREE_OPERAND (t,2)
                     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || TREE_CODE (t) == BIT_FIELD_REF
             || TREE_CODE (t) == REALPART_EXPR
             || TREE_CODE (t) == IMAGPART_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || CONVERT_EXPR_P (t))
        t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          t = get_base_address (t);
          if (t && DECL_P (t)
              && DECL_MODE (t) != BLKmode)
            TREE_ADDRESSABLE (t) = 1;
        }

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */
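/* E.g. for "int a[2]; ... a[i]" with a non-constant I, the array might
   otherwise be promoted into a single double-word register; marking it
   TREE_ADDRESSABLE keeps it in memory, where a variable index can be
   applied.  */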

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple *stmt = gsi_stmt (gsi);
        if (!is_gimple_debug (stmt))
          walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}

/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  The local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (cfun->can_throw_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
              <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
         needed.  */
      fixup_tail_calls ();
    }
}
\f

static void
expand_main_function (void)
{
#if (defined(INVOKE__main)				\
     || (!defined(HAS_INIT_SECTION)			\
	 && !defined(INIT_SECTION_ASM_OP)		\
	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}
\f

/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */
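/* It copies the guard value from the target-provided guard location
   (GUARD_DECL below, e.g. a hidden global or a TLS slot) into this
   function's guard slot, preferring a target stack_protect_set pattern
   so the value does not linger in an exposed register.  */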

static void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (targetm.have_stack_protect_set ())
    if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
      {
        emit_insn (insn);
        return;
      }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}

/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate the CFG during
   the expansion.  */

namespace {

const pass_data pass_data_expand =
{
  RTL_PASS, /* type */
  "expand", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_EXPAND, /* tv_id */
  ( PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx
    | PROP_gimple_lvec
    | PROP_gimple_lva), /* properties_required */
  PROP_rtl, /* properties_provided */
  ( PROP_ssa | PROP_trees ), /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_expand : public rtl_opt_pass
{
public:
  pass_expand (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_expand, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_expand

unsigned int
pass_expand::execute (function *fun)
{
  basic_block bb, init_block;
  edge_iterator ei;
  edge e;
  rtx_insn *var_seq, *var_ret_seq;
  unsigned i;

  timevar_push (TV_OUT_OF_SSA);
  rewrite_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);
  SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);

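  /* With TER enabled, the value of one debug bind can chain through
     many TERed definitions; avoid_deep_ter_for_debug introduces debug
     temporaries up front so that expanding a bind value never recurses
     through arbitrarily deep definition chains.  */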
  if (MAY_HAVE_DEBUG_STMTS && flag_tree_ter)
    {
      gimple_stmt_iterator gsi;
      FOR_EACH_BB_FN (bb, cfun)
	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	  if (gimple_debug_bind_p (gsi_stmt (gsi)))
	    avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
    }

  /* Make sure all values used by the optimization passes have sane
     defaults.  */
  reg_renumber = 0;

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;
  /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
  free_dominance_info (CDI_DOMINATORS);

  rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));

  if (chkp_function_instrumented_p (current_function_decl))
    chkp_reset_rtl_bounds ();

  insn_locations_init ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
	set_curr_insn_location
	  (DECL_SOURCE_LOCATION (current_function_decl));
      else
	set_curr_insn_location (fun->function_start_locus);
    }
  else
    set_curr_insn_location (UNKNOWN_LOCATION);
  prologue_location = curr_insn_location ();

#ifdef INSN_SCHEDULING
  init_sched_attrs ();
#endif

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = 0;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  fun->cfg->max_jumptable_ents = 0;

  /* Resolve the function section.  Some targets, like ARM EABI, rely on
     knowledge of the function section at expansion time to predict the
     distance of calls.  */
  resolve_unique_section (current_function_decl, 0, flag_function_sections);

  /* Expand the variables recorded during gimple lowering.  */
  timevar_push (TV_VAR_EXPAND);
  start_sequence ();

  var_ret_seq = expand_used_vars ();

  var_seq = get_insns ();
  end_sequence ();
  timevar_pop (TV_VAR_EXPAND);

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (fun->calls_alloca)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting local variables: "
		 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting function: "
		 "all local arrays are less than %d bytes long",
		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
  if (var_seq)
    {
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
	 before parm_birth_insn.  We've just inserted an alloca call.
	 Adjust the pointer to match.  */
      parm_birth_insn = var_seq;
    }

  /* Now propagate the RTL assignment of each partition to the
     underlying var of each SSA_NAME.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);

      if (!name
	  /* We might have generated new SSA names in
	     update_alias_info_with_stack_vars.  They will have a NULL
	     defining statement, and won't be part of the partitioning,
	     so ignore those.  */
	  || !SSA_NAME_DEF_STMT (name))
	continue;

      adjust_one_expanded_partition_var (name);
    }

  /* Clean up RTL of variables that straddle across multiple
     partitions, and check that the rtl of any PARM_DECLs that are not
     cleaned up is that of their default defs.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      int part;

      if (!name
	  /* We might have generated new SSA names in
	     update_alias_info_with_stack_vars.  They will have a NULL
	     defining statement, and won't be part of the partitioning,
	     so ignore those.  */
	  || !SSA_NAME_DEF_STMT (name))
	continue;
      part = var_to_partition (SA.map, name);
      if (part == NO_PARTITION)
	continue;

      /* If this decl was marked as living in multiple places, reset
	 this now to NULL.  */
      tree var = SSA_NAME_VAR (name);
      if (var && DECL_RTL_IF_SET (var) == pc_rtx)
	SET_DECL_RTL (var, NULL);
      /* Check that the pseudos chosen by assign_parms are those of
	 the corresponding default defs.  */
      else if (SSA_NAME_IS_DEFAULT_DEF (name)
	       && (TREE_CODE (var) == PARM_DECL
		   || TREE_CODE (var) == RESULT_DECL))
	{
	  rtx in = DECL_RTL_IF_SET (var);
	  gcc_assert (in);
	  rtx out = SA.partition_to_pseudo[part];
	  gcc_assert (in == out);

	  /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
	     those expected by debug backends for each parm and for
	     the result.  This is particularly important for stabs,
	     whose register elimination from parm's DECL_RTL may cause
	     -fcompare-debug differences as SET_DECL_RTL changes reg's
	     attrs.  So, make sure the RTL already has the parm as the
	     EXPR, so that it won't change.  */
	  SET_DECL_RTL (var, NULL_RTX);
	  if (MEM_P (in))
	    set_mem_attributes (in, var, true);
	  SET_DECL_RTL (var, in);
	}
    }

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();

  expand_phi_nodes (&SA);

  /* Release any stale SSA redirection data.  */
  redirect_edge_var_map_empty ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
		  next_bb)
    bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  if (deep_ter_debug_map)
    {
      delete deep_ter_debug_map;
      deep_ter_debug_map = NULL;
    }

  /* Free stuff we no longer need after GIMPLE optimizations.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  delete_tree_cfg_annotations (fun);

  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  fun->gimple_df->in_ssa_p = false;
  loops_state_clear (LOOP_CLOSED_SSA);

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  delete lab_rtx_for_bb;
  free_histograms (fun);

  construct_exit_block ();
  insn_locations_finalize ();

  if (var_ret_seq)
    {
      rtx_insn *after = return_label;
      rtx_insn *next = NEXT_INSN (after);
      if (next && NOTE_INSN_BASIC_BLOCK_P (next))
	after = next;
      emit_insn_after (var_ret_seq, after);
    }

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (fun, NULL);

  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
     split edges which edge insertions might do.  */
  rebuild_jump_labels (get_insns ());

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->insns.r)
	    {
	      rebuild_jump_labels_chain (e->insns.r);
	      /* Put insns after parm birth, but before
		 NOTE_INSNS_FUNCTION_BEG.  */
	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
		  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
		{
		  rtx_insn *insns = e->insns.r;
		  e->insns.r = NULL;
		  if (NOTE_P (parm_birth_insn)
		      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
		    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
		  else
		    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
		}
	      else
		commit_one_edge_insertion (e);
	    }
	  else
	    ei_next (&ei);
	}
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  flush_mark_addressable_queue ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the
	     backend.  */
	  e->flags &= ~EDGE_EXECUTABLE;

	  /* At the moment not all abnormal edges match the RTL
	     representation.  It is safe to remove them here as
	     find_many_sub_basic_blocks will rediscover them.
	     In the future we should get this fixed properly.  */
	  if ((e->flags & EDGE_ABNORMAL)
	      && !(e->flags & EDGE_SIBCALL))
	    remove_edge (e);
	  else
	    ei_next (&ei);
	}
    }

  auto_sbitmap blocks (last_basic_block_for_fn (fun));
  bitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();

  expand_stack_alignment ();

  /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
     function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();

  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (fun->eh->region_tree != NULL)
    finish_eh_generation ();

  /* Remove unreachable blocks, otherwise we cannot compute dominators
     which are needed for loop state verification.  As a side-effect
     this also compacts blocks.
     ??? We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);

  checking_verify_flow_info ();

  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();

  /* And finally unshare all RTL.  */
  unshare_all_rtl ();

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (fun->gimple_df->tm_restart)
    fun->gimple_df->tm_restart = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (fun->decl), 0);
  default_rtl_profile ();

  timevar_pop (TV_POST_EXPAND);

  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_expand (gcc::context *ctxt)
{
  return new pass_expand (ctxt);
}