/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "tm_p.h"
#include "ssa.h"
#include "optabs.h"
#include "regs.h" /* For reg_renumber.  */
#include "emit-rtl.h"
#include "recog.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "varasm.h"
#include "stor-layout.h"
#include "stmt.h"
#include "print-tree.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-expr.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "cfgloop.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "asan.h"
#include "tree-ssa-address.h"
#include "output.h"
#include "builtins.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple *currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);

static bool defer_stack_allocation (tree, bool);

static void record_alignment_for_reg_var (unsigned int);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple *stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Choose either CUR or NEXT as the leader DECL for a partition.
   Prefer ignored decls, to simplify debug dumps and reduce the
   ambiguity arising from the same user variable being in multiple
   partitions (this is less likely for compiler-introduced temps).  */

static tree
leader_merge (tree cur, tree next)
{
  if (cur == NULL || cur == next)
    return next;

  if (DECL_P (cur) && DECL_IGNORED_P (cur))
    return cur;

  if (DECL_P (next) && DECL_IGNORED_P (next))
    return next;

  return cur;
}

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  gcc_checking_assert (!x
                       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
                       || (use_register_for_decl (t)
                           ? (REG_P (x)
                              || (GET_CODE (x) == CONCAT
                                  && (REG_P (XEXP (x, 0))
                                      || SUBREG_P (XEXP (x, 0)))
                                  && (REG_P (XEXP (x, 1))
                                      || SUBREG_P (XEXP (x, 1))))
                              /* We need to accept PARALLELs for RESULT_DECLs
                                 because of vector types with BLKmode returned
                                 in multiple registers, but they are supposed
                                 to be uncoalesced.  */
                              || (GET_CODE (x) == PARALLEL
                                  && SSAVAR (t)
                                  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                                  && (GET_MODE (x) == BLKmode
                                      || !flag_tree_coalesce_vars)))
                           : (MEM_P (x) || x == pc_rtx
                              || (GET_CODE (x) == CONCAT
                                  && MEM_P (XEXP (x, 0))
                                  && MEM_P (XEXP (x, 1))))));
  /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
     RESULT_DECLs has the expected mode.  For memory, we accept
     unpromoted modes, since that's what we're likely to get.  For
     PARM_DECLs and RESULT_DECLs, we'll have been called by
     set_parm_rtl, which will give us the default def, so we don't
     have to compute it ourselves.  For RESULT_DECLs, we accept mode
     mismatches too, as long as we have BLKmode or are not coalescing
     across variables, so that we don't reject BLKmode PARALLELs or
     unpromoted REGs.  */
  gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
                       || (SSAVAR (t)
                           && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                           && (promote_ssa_mode (t, NULL) == BLKmode
                               || !flag_tree_coalesce_vars))
                       || !use_register_for_decl (t)
                       || GET_MODE (x) == promote_ssa_mode (t, NULL));

  if (x)
    {
      bool skip = false;
      tree cur = NULL_TREE;
      rtx xm = x;

    retry:
      if (MEM_P (xm))
        cur = MEM_EXPR (xm);
      else if (REG_P (xm))
        cur = REG_EXPR (xm);
      else if (SUBREG_P (xm))
        {
          gcc_assert (subreg_lowpart_p (xm));
          xm = SUBREG_REG (xm);
          goto retry;
        }
      else if (GET_CODE (xm) == CONCAT)
        {
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (GET_CODE (xm) == PARALLEL)
        {
          xm = XVECEXP (xm, 0, 0);
          gcc_assert (GET_CODE (xm) == EXPR_LIST);
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (xm == pc_rtx)
        skip = true;
      else
        gcc_unreachable ();

      tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);

      if (cur != next)
        {
          if (MEM_P (x))
            set_mem_attributes (x,
                                next && TREE_CODE (next) == SSA_NAME
                                ? TREE_TYPE (next)
                                : next, true);
          else
            set_reg_attrs_for_decl_rtl (next, x);
        }
    }

  if (TREE_CODE (t) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, t);
      if (part != NO_PARTITION)
        {
          if (SA.partition_to_pseudo[part])
            gcc_assert (SA.partition_to_pseudo[part] == x);
          else if (x != pc_rtx)
            SA.partition_to_pseudo[part] = x;
        }
      /* For the benefit of debug information at -O0 (where
         vartracking doesn't run) record the place also in the base
         DECL.  For PARMs and RESULTs, do so only when setting the
         default def.  */
      if (x && x != pc_rtx && SSA_NAME_VAR (t)
          && (VAR_P (SSA_NAME_VAR (t))
              || SSA_NAME_IS_DEFAULT_DEF (t)))
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-increasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore any alignment
   we can't satisfy with the expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align;

  if (TREE_CODE (decl) == SSA_NAME)
    align = TYPE_ALIGN (TREE_TYPE (decl));
  else
    {
      align = LOCAL_DECL_ALIGNMENT (decl);
      SET_DECL_ALIGN (decl, align);
    }
  return align / BITS_PER_UNIT;
}

/* Align given offset BASE with ALIGN.  Round up if ALIGN_UP is true,
   down otherwise.  Return the aligned BASE value.  */

static inline unsigned HOST_WIDE_INT
align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
{
  return align_up ? (base + align - 1) & -align : base & -align;
}
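
/* For illustration: with the two's complement bit trick above,

     align_base (37, 16, true)  == (37 + 15) & -16 == 48
     align_base (37, 16, false) == 37 & -16        == 32

   since -16 acts as a mask clearing the low four bits, BASE is rounded
   to the next or previous multiple of ALIGN (a power of two).  */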

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset
        = align_base (frame_offset - frame_phase - size,
                      align, false) + frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset
        = align_base (frame_offset - frame_phase, align, true) + frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
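
/* Worked example, for illustration only, assuming frame_phase == 0 and
   a downward-growing frame: with frame_offset == -16, a request for
   SIZE == 12 at ALIGN == 8 computes

     align_base (-16 - 12, 8, false) == -28 & -8 == -32

   so the new frame_offset and the returned offset are both -32, and the
   4 bytes between the new 12-byte slot and the old frame_offset are
   alignment padding.  */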

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  tree size = TREE_CODE (decl) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
    : DECL_SIZE_UNIT (decl);
  v->size = tree_to_uhwi (size);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (decl);
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with LUIDs X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with LUIDs X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}

/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (!VAR_P (lhs))
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = decl_to_stack_part->get (lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}

/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case where we, for instance, moved an
     address-taken operation upward, without also moving a dereference
     to it upwards.  But it's conservatively correct as a variable can
     never hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
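
/* Illustrative example: given three variables of 4, 32 and 8 bytes with
   equal, supported ("small") alignment, qsort with stack_var_cmp orders
   them 32, 8, 4; any over-aligned ("large") variable would come before
   all of them, and remaining ties are broken by alignment and then by
   SSA version or DECL_UID so the order is stable across runs.  */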

struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
typedef hash_map<size_t, bitmap, part_traits> part_hashmap;

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               part_hashmap *decls_to_partitions,
                               hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = new part_hashmap;
          cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          decls_to_partitions->put (uid, part);
          cfun->gimple_df->decls_to_pointers->put (decl, name);
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      tree name;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      FOR_EACH_SSA_NAME (i, name, cfun)
        {
          struct ptr_info_def *pi;

          if (POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           &visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* Return true if the current function should have its stack frame
   protected by address sanitizer.  */

static inline bool
asan_sanitize_stack_p (void)
{
  return ((flag_sanitize & SANITIZE_ADDRESS)
          && ASAN_STACK
          && !lookup_attribute ("no_sanitize_address",
                                DECL_ATTRIBUTES (current_function_decl)));
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/
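
/* A hypothetical run, for illustration only: variables of sizes 32, 16,
   16 and 8 bytes, where only the two 16-byte variables conflict (are
   live simultaneously).  Sorted order is 32, 16, 16, 8.  The first
   16-byte variable and then the 8-byte one are merged into the 32-byte
   partition, so all three share the same 32-byte slot; the conflicting
   16-byte variable ends up in its own partition, giving two slots
   (48 bytes) instead of four (72 bytes).  */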

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          HOST_WIDE_INT jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if (asan_sanitize_stack_p () && isize != jsize
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
                   ? TYPE_MODE (TREE_TYPE (decl))
                   : DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = least_bit_hwi (offset);
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      SET_DECL_ALIGN (decl, align);
      DECL_USER_ALIGN (decl) = 0;
    }

  set_rtl (decl, x);
}
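
/* For illustration: a slot placed at offset 40 from the frame base has
   least_bit_hwi (40) == 8, i.e. it is 8-byte aligned whenever the base
   itself is sufficiently aligned, so DECL_ALIGN becomes 64 bits unless
   that would exceed BASE_ALIGN.  */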

struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reverse order; highest-offset pairs come first.  */
  auto_vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  auto_vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};

1043
1f6d3a08
RH
1044/* A subroutine of expand_used_vars. Give each partition representative
1045 a unique location within the stack frame. Update each partition member
1046 with that location. */
1047
1048static void
f3ddd692 1049expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
1f6d3a08
RH
1050{
1051 size_t si, i, j, n = stack_vars_num;
3a42502d
RH
1052 HOST_WIDE_INT large_size = 0, large_alloc = 0;
1053 rtx large_base = NULL;
1054 unsigned large_align = 0;
7072df0a 1055 bool large_allocation_done = false;
3a42502d
RH
1056 tree decl;
1057
1058 /* Determine if there are any variables requiring "large" alignment.
1059 Since these are dynamically allocated, we only process these if
1060 no predicate involved. */
1061 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1062 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1063 {
1064 /* Find the total size of these variables. */
1065 for (si = 0; si < n; ++si)
1066 {
1067 unsigned alignb;
1068
1069 i = stack_vars_sorted[si];
1070 alignb = stack_vars[i].alignb;
1071
a8eeec27
SE
1072 /* All "large" alignment decls come before all "small" alignment
1073 decls, but "large" alignment decls are not sorted based on
1074 their alignment. Increase large_align to track the largest
1075 required alignment. */
1076 if ((alignb * BITS_PER_UNIT) > large_align)
1077 large_align = alignb * BITS_PER_UNIT;
1078
3a42502d
RH
1079 /* Stop when we get to the first decl with "small" alignment. */
1080 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1081 break;
1082
1083 /* Skip variables that aren't partition representatives. */
1084 if (stack_vars[i].representative != i)
1085 continue;
1086
1087 /* Skip variables that have already had rtl assigned. See also
1088 add_stack_var where we perpetrate this pc_rtx hack. */
1089 decl = stack_vars[i].decl;
1f9ceff1
AO
1090 if (TREE_CODE (decl) == SSA_NAME
1091 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1092 : DECL_RTL (decl) != pc_rtx)
3a42502d
RH
1093 continue;
1094
1095 large_size += alignb - 1;
1096 large_size &= -(HOST_WIDE_INT)alignb;
1097 large_size += stack_vars[i].size;
1098 }
3a42502d 1099 }
1f6d3a08
RH
1100
1101 for (si = 0; si < n; ++si)
1102 {
3a42502d
RH
1103 rtx base;
1104 unsigned base_align, alignb;
1f6d3a08
RH
1105 HOST_WIDE_INT offset;
1106
1107 i = stack_vars_sorted[si];
1108
1109 /* Skip variables that aren't partition representatives, for now. */
1110 if (stack_vars[i].representative != i)
1111 continue;
1112
7d69de61
RH
1113 /* Skip variables that have already had rtl assigned. See also
1114 add_stack_var where we perpetrate this pc_rtx hack. */
3a42502d 1115 decl = stack_vars[i].decl;
1f9ceff1
AO
1116 if (TREE_CODE (decl) == SSA_NAME
1117 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1118 : DECL_RTL (decl) != pc_rtx)
7d69de61
RH
1119 continue;
1120
c22cacf3 1121 /* Check the predicate to see whether this variable should be
7d69de61 1122 allocated in this pass. */
f3ddd692 1123 if (pred && !pred (i))
7d69de61
RH
1124 continue;
1125
3a42502d
RH
1126 alignb = stack_vars[i].alignb;
1127 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1128 {
e361382f 1129 base = virtual_stack_vars_rtx;
c461d263 1130 if (asan_sanitize_stack_p () && pred)
f3ddd692 1131 {
435be747
MO
1132 HOST_WIDE_INT prev_offset
1133 = align_base (frame_offset,
1134 MAX (alignb, ASAN_RED_ZONE_SIZE),
d6c1a7a7 1135 !FRAME_GROWS_DOWNWARD);
f3ddd692 1136 tree repr_decl = NULL_TREE;
f3ddd692
JJ
1137 offset
1138 = alloc_stack_frame_space (stack_vars[i].size
1139 + ASAN_RED_ZONE_SIZE,
1140 MAX (alignb, ASAN_RED_ZONE_SIZE));
435be747 1141
9771b263
DN
1142 data->asan_vec.safe_push (prev_offset);
1143 data->asan_vec.safe_push (offset + stack_vars[i].size);
f3ddd692
JJ
1144 /* Find best representative of the partition.
1145 Prefer those with DECL_NAME, even better
1146 satisfying asan_protect_stack_decl predicate. */
1147 for (j = i; j != EOC; j = stack_vars[j].next)
1148 if (asan_protect_stack_decl (stack_vars[j].decl)
1149 && DECL_NAME (stack_vars[j].decl))
1150 {
1151 repr_decl = stack_vars[j].decl;
1152 break;
1153 }
1154 else if (repr_decl == NULL_TREE
1155 && DECL_P (stack_vars[j].decl)
1156 && DECL_NAME (stack_vars[j].decl))
1157 repr_decl = stack_vars[j].decl;
1158 if (repr_decl == NULL_TREE)
1159 repr_decl = stack_vars[i].decl;
9771b263 1160 data->asan_decl_vec.safe_push (repr_decl);
e361382f
JJ
1161 data->asan_alignb = MAX (data->asan_alignb, alignb);
1162 if (data->asan_base == NULL)
1163 data->asan_base = gen_reg_rtx (Pmode);
1164 base = data->asan_base;
e5dcd695
LZ
1165
1166 if (!STRICT_ALIGNMENT)
1167 base_align = crtl->max_used_stack_slot_alignment;
1168 else
1169 base_align = MAX (crtl->max_used_stack_slot_alignment,
1170 GET_MODE_ALIGNMENT (SImode)
1171 << ASAN_SHADOW_SHIFT);
f3ddd692
JJ
1172 }
1173 else
e5dcd695
LZ
1174 {
1175 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1176 base_align = crtl->max_used_stack_slot_alignment;
1177 }
3a42502d
RH
1178 }
1179 else
1180 {
1181 /* Large alignment is only processed in the last pass. */
1182 if (pred)
1183 continue;
7072df0a
DV
1184
1185 /* If there were any variables requiring "large" alignment, allocate
1186 space. */
1187 if (large_size > 0 && ! large_allocation_done)
1188 {
1189 HOST_WIDE_INT loffset;
1190 rtx large_allocsize;
1191
1192 large_allocsize = GEN_INT (large_size);
1193 get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
1194 loffset = alloc_stack_frame_space
1195 (INTVAL (large_allocsize),
1196 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
1197 large_base = get_dynamic_stack_base (loffset, large_align);
1198 large_allocation_done = true;
1199 }
533f611a 1200 gcc_assert (large_base != NULL);
3a42502d
RH
1201
1202 large_alloc += alignb - 1;
1203 large_alloc &= -(HOST_WIDE_INT)alignb;
1204 offset = large_alloc;
1205 large_alloc += stack_vars[i].size;
1206
1207 base = large_base;
1208 base_align = large_align;
1209 }
1f6d3a08
RH
1210
1211 /* Create rtl for each variable based on their location within the
1212 partition. */
1213 for (j = i; j != EOC; j = stack_vars[j].next)
f8da8190 1214 {
f8da8190 1215 expand_one_stack_var_at (stack_vars[j].decl,
3a42502d 1216 base, base_align,
6ddfda8a 1217 offset);
f8da8190 1218 }
1f6d3a08 1219 }
3a42502d
RH
1220
1221 gcc_assert (large_alloc == large_size);
1f6d3a08
RH
1222}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */

static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* Record the RTL assignment X for the default def of PARM.  */

extern void
set_parm_rtl (tree parm, rtx x)
{
  gcc_assert (TREE_CODE (parm) == PARM_DECL
              || TREE_CODE (parm) == RESULT_DECL);

  if (x && !MEM_P (x))
    {
      unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
                                              TYPE_MODE (TREE_TYPE (parm)),
                                              TYPE_ALIGN (TREE_TYPE (parm)));

      /* If the variable alignment is very large we'll dynamically
         allocate it, which means that in-frame portion is just a
         pointer.  ??? We've got a pseudo for sure here, do we
         actually dynamically allocate its spilling area if needed?
         ??? Isn't it a problem when POINTER_SIZE also exceeds
         MAX_SUPPORTED_STACK_ALIGNMENT, as on cris and lm32?  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;

      record_alignment_for_reg_var (align);
    }

  tree ssa = ssa_default_def (cfun, parm);
  if (!ssa)
    return set_rtl (parm, x);

  int part = var_to_partition (SA.map, ssa);
  gcc_assert (part != NO_PARTITION);

  bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
  gcc_assert (changed);

  set_rtl (ssa, x);
  gcc_assert (DECL_RTL (parm) == x);
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var_1 (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  if (TREE_CODE (var) == SSA_NAME)
    {
      tree type = TREE_TYPE (var);
      size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
      byte_align = TYPE_ALIGN_UNIT (type);
    }
  else
    {
      size = tree_to_uhwi (DECL_SIZE_UNIT (var));
      byte_align = align_local_variable (var);
    }

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
   already assigned some MEM.  */

static void
expand_one_stack_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (MEM_P (x));
          return;
        }
    }

  return expand_one_stack_var_1 (var);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* Record the alignment requirements of some variable assigned to a
   pseudo.  */

static void
record_alignment_for_reg_var (unsigned int align)
{
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
         realign decision has been made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;
}

/* Create RTL for an SSA partition.  */

static void
expand_one_ssa_partition (tree var)
{
  int part = var_to_partition (SA.map, var);
  gcc_assert (part != NO_PARTITION);

  if (SA.partition_to_pseudo[part])
    return;

  unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                          TYPE_MODE (TREE_TYPE (var)),
                                          TYPE_ALIGN (TREE_TYPE (var)));

  /* If the variable alignment is very large we'll dynamically allocate
     it, which means that in-frame portion is just a pointer.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = POINTER_SIZE;

  record_alignment_for_reg_var (align);

  if (!use_register_for_decl (var))
    {
      if (defer_stack_allocation (var, true))
        add_stack_var (var);
      else
        expand_one_stack_var_1 (var);
      return;
    }

  machine_mode reg_mode = promote_ssa_mode (var, NULL);

  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);
}

/* Record the association between the RTL generated for partition PART
   and the underlying variable of the SSA_NAME VAR.  */

static void
adjust_one_expanded_partition_var (tree var)
{
  if (!var)
    return;

  tree decl = SSA_NAME_VAR (var);

  int part = var_to_partition (SA.map, var);
  if (part == NO_PARTITION)
    return;

  rtx x = SA.partition_to_pseudo[part];

  gcc_assert (x);

  set_rtl (var, x);

  if (!REG_P (x))
    return;

  /* Note if the object is a user variable.  */
  if (decl && !DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (REG_P (x));
          return;
        }
      gcc_unreachable ();
    }

  tree decl = var;
  tree type = TREE_TYPE (decl);
  machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  tree size_unit = TREE_CODE (var) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (var))
    : DECL_SIZE_UNIT (var);

  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = ((HOST_WIDE_INT) tree_to_uhwi (size_unit)
       < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || asan_sanitize_stack_p ())
    return true;

  unsigned int align = TREE_CODE (var) == SSA_NAME
    ? TYPE_ALIGN (TREE_TYPE (var))
    : DECL_ALIGN (var);

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  bool ignored = TREE_CODE (var) == SSA_NAME
    ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
    : DECL_IGNORED_P (var);

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && ignored && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}
1558
1559/* A subroutine of expand_used_vars. Expand one variable according to
2a7e31df 1560 its flavor. Variables to be placed on the stack are not actually
b8698a0f 1561 expanded yet, merely recorded.
ff28a94d
JH
1562 When REALLY_EXPAND is false, only add stack values to be allocated.
1563 Return stack usage this variable is supposed to take.
1564*/
1f6d3a08 1565
ff28a94d
JH
1566static HOST_WIDE_INT
1567expand_one_var (tree var, bool toplevel, bool really_expand)
1f6d3a08 1568{
3a42502d 1569 unsigned int align = BITS_PER_UNIT;
4e3825db 1570 tree origvar = var;
3a42502d 1571
4e3825db
MM
1572 var = SSAVAR (var);
1573
8813a647 1574 if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
2e3f842f 1575 {
9d7d6446
JB
1576 if (is_global_var (var))
1577 return 0;
1578
2e3f842f
L
1579 /* Because we don't know if VAR will be in register or on stack,
1580 we conservatively assume it will be on stack even if VAR is
1581 eventually put into register after RA pass. For non-automatic
1582 variables, which won't be on stack, we collect alignment of
3396aba5
JJ
1583 type and ignore user specified alignment. Similarly for
1584 SSA_NAMEs for which use_register_for_decl returns true. */
1585 if (TREE_STATIC (var)
1586 || DECL_EXTERNAL (var)
1587 || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
ae58e548
JJ
1588 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1589 TYPE_MODE (TREE_TYPE (var)),
1590 TYPE_ALIGN (TREE_TYPE (var)));
f3184b4c
JJ
1591 else if (DECL_HAS_VALUE_EXPR_P (var)
1592 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1593 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1594 or variables which were assigned a stack slot already by
1595 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1596 changed from the offset chosen to it. */
1597 align = crtl->stack_alignment_estimated;
2e3f842f 1598 else
ae58e548 1599 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
2e3f842f 1600
3a42502d
RH
1601 /* If the variable alignment is very large we'll dynamicaly allocate
1602 it, which means that in-frame portion is just a pointer. */
1603 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1604 align = POINTER_SIZE;
1605 }
1606
1f9ceff1 1607 record_alignment_for_reg_var (align);
3a42502d 1608
4e3825db
MM
1609 if (TREE_CODE (origvar) == SSA_NAME)
1610 {
8813a647 1611 gcc_assert (!VAR_P (var)
4e3825db
MM
1612 || (!DECL_EXTERNAL (var)
1613 && !DECL_HAS_VALUE_EXPR_P (var)
1614 && !TREE_STATIC (var)
4e3825db
MM
1615 && TREE_TYPE (var) != error_mark_node
1616 && !DECL_HARD_REGISTER (var)
1617 && really_expand));
1618 }
8813a647 1619 if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
4846b435 1620 ;
1f6d3a08
RH
1621 else if (DECL_EXTERNAL (var))
1622 ;
833b3afe 1623 else if (DECL_HAS_VALUE_EXPR_P (var))
1f6d3a08
RH
1624 ;
1625 else if (TREE_STATIC (var))
7e8b322a 1626 ;
eb7adebc 1627 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1f6d3a08
RH
1628 ;
1629 else if (TREE_TYPE (var) == error_mark_node)
ff28a94d
JH
1630 {
1631 if (really_expand)
1632 expand_one_error_var (var);
1633 }
8813a647 1634 else if (VAR_P (var) && DECL_HARD_REGISTER (var))
ff28a94d
JH
1635 {
1636 if (really_expand)
c218f6e8
JM
1637 {
1638 expand_one_hard_reg_var (var);
1639 if (!DECL_HARD_REGISTER (var))
1640 /* Invalid register specification. */
1641 expand_one_error_var (var);
1642 }
ff28a94d 1643 }
1f6d3a08 1644 else if (use_register_for_decl (var))
ff28a94d
JH
1645 {
1646 if (really_expand)
4e3825db 1647 expand_one_register_var (origvar);
ff28a94d 1648 }
56099f00 1649 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
7604eb4e 1650 {
56099f00 1651 /* Reject variables which cover more than half of the address-space. */
7604eb4e
JJ
1652 if (really_expand)
1653 {
1654 error ("size of variable %q+D is too large", var);
1655 expand_one_error_var (var);
1656 }
1657 }
1f6d3a08 1658 else if (defer_stack_allocation (var, toplevel))
4e3825db 1659 add_stack_var (origvar);
1f6d3a08 1660 else
ff28a94d 1661 {
bd9f1b4b 1662 if (really_expand)
de0fb905
AB
1663 {
1664 if (lookup_attribute ("naked",
1665 DECL_ATTRIBUTES (current_function_decl)))
1666 error ("cannot allocate stack for variable %q+D, naked function",
1667 var);
1668
1669 expand_one_stack_var (origvar);
1670 }
1671
1672
ae7e9ddd 1673 return tree_to_uhwi (DECL_SIZE_UNIT (var));
ff28a94d
JH
1674 }
1675 return 0;
1f6d3a08
RH
1676}
1677
1678/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1679 expanding variables. Those variables that can be put into registers
1680 are allocated pseudos; those that can't are put on the stack.
1681
1682 TOPLEVEL is true if this is the outermost BLOCK. */
1683
1684static void
1685expand_used_vars_for_block (tree block, bool toplevel)
1686{
1f6d3a08
RH
1687 tree t;
1688
1f6d3a08 1689 /* Expand all variables at this level. */
910ad8de 1690 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1ace6185 1691 if (TREE_USED (t)
8813a647 1692 && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1ace6185 1693 || !DECL_NONSHAREABLE (t)))
ff28a94d 1694 expand_one_var (t, toplevel, true);
1f6d3a08 1695
1f6d3a08
RH
1696 /* Expand all variables at contained (sub-block) levels. */
1697 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1698 expand_used_vars_for_block (t, false);
1f6d3a08
RH
1699}
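/* Editor's illustrative sketch (not part of the original source): for a
   function such as

     void
     f (void)
     {
       int a;          // in BLOCK_VARS of the outermost BLOCK
       {
         int b;        // in BLOCK_VARS of a BLOCK_SUBBLOCKS entry
         b = a;
       }
     }

   the walk above expands 'a' with TOPLEVEL true, then recurses into the
   sub-block and expands 'b' with TOPLEVEL false.  */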
1700
1701/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1702 and clear TREE_USED on all local variables. */
1703
1704static void
1705clear_tree_used (tree block)
1706{
1707 tree t;
1708
910ad8de 1709 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1f6d3a08 1710 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
8813a647 1711 if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1ace6185 1712 || !DECL_NONSHAREABLE (t))
1f6d3a08
RH
1713 TREE_USED (t) = 0;
1714
1715 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1716 clear_tree_used (t);
1717}
1718
f6bc1c4a
HS
1719enum {
1720 SPCT_FLAG_DEFAULT = 1,
1721 SPCT_FLAG_ALL = 2,
5434dc07
MD
1722 SPCT_FLAG_STRONG = 3,
1723 SPCT_FLAG_EXPLICIT = 4
f6bc1c4a
HS
1724};
1725
7d69de61
RH
1726/* Examine TYPE and determine a bit mask of the following features. */
1727
1728#define SPCT_HAS_LARGE_CHAR_ARRAY 1
1729#define SPCT_HAS_SMALL_CHAR_ARRAY 2
1730#define SPCT_HAS_ARRAY 4
1731#define SPCT_HAS_AGGREGATE 8
1732
1733static unsigned int
1734stack_protect_classify_type (tree type)
1735{
1736 unsigned int ret = 0;
1737 tree t;
1738
1739 switch (TREE_CODE (type))
1740 {
1741 case ARRAY_TYPE:
1742 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1743 if (t == char_type_node
1744 || t == signed_char_type_node
1745 || t == unsigned_char_type_node)
1746 {
15362b89
JJ
1747 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1748 unsigned HOST_WIDE_INT len;
7d69de61 1749
15362b89 1750 if (!TYPE_SIZE_UNIT (type)
cc269bb6 1751 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
15362b89 1752 len = max;
7d69de61 1753 else
ae7e9ddd 1754 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
7d69de61
RH
1755
1756 if (len < max)
1757 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1758 else
1759 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1760 }
1761 else
1762 ret = SPCT_HAS_ARRAY;
1763 break;
1764
1765 case UNION_TYPE:
1766 case QUAL_UNION_TYPE:
1767 case RECORD_TYPE:
1768 ret = SPCT_HAS_AGGREGATE;
1769 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1770 if (TREE_CODE (t) == FIELD_DECL)
1771 ret |= stack_protect_classify_type (TREE_TYPE (t));
1772 break;
1773
1774 default:
1775 break;
1776 }
1777
1778 return ret;
1779}
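/* Editor's illustrative sketch (not part of the original source): assuming
   the default --param ssp-buffer-size=8, the classification above yields

     char small[4];              // SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY
     char big[64];               // SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY
     int nums[4];                // SPCT_HAS_ARRAY
     struct { char c[64]; } s;   // SPCT_HAS_AGGREGATE plus the bits
                                 // for its char-array field

   since a char array whose size reaches the parameter counts as large.  */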
1780
a4d05547
KH
1781/* Return nonzero if DECL should be segregated into the "vulnerable" upper
1782 part of the local stack frame. Remember if we ever return nonzero for
7d69de61
RH
1783 any variable in this function. The return value is the phase number in
1784 which the variable should be allocated. */
1785
1786static int
1787stack_protect_decl_phase (tree decl)
1788{
1789 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1790 int ret = 0;
1791
1792 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1793 has_short_buffer = true;
1794
f6bc1c4a 1795 if (flag_stack_protect == SPCT_FLAG_ALL
5434dc07
MD
1796 || flag_stack_protect == SPCT_FLAG_STRONG
1797 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1798 && lookup_attribute ("stack_protect",
1799 DECL_ATTRIBUTES (current_function_decl))))
7d69de61
RH
1800 {
1801 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1802 && !(bits & SPCT_HAS_AGGREGATE))
1803 ret = 1;
1804 else if (bits & SPCT_HAS_ARRAY)
1805 ret = 2;
1806 }
1807 else
1808 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1809
1810 if (ret)
1811 has_protected_decls = true;
1812
1813 return ret;
1814}
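/* Editor's illustrative sketch (not part of the original source): under
   -fstack-protector-strong the locals of

     void
     g (void)
     {
       char buf[64];   // phase 1: character array
       int nums[16];   // phase 2: other array
       int i;          // phase 0: not an array
     }

   are segregated by phase, so an overflow out of BUF runs into the guard
   and other phase-1 data before it can reach NUMS or I.  */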
1815
1816/* Two helper routines that check for phase 1 and phase 2. These are used
1817 as callbacks for expand_stack_vars. */
1818
1819static bool
f3ddd692
JJ
1820stack_protect_decl_phase_1 (size_t i)
1821{
1822 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1823}
1824
1825static bool
1826stack_protect_decl_phase_2 (size_t i)
7d69de61 1827{
f3ddd692 1828 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
7d69de61
RH
1829}
1830
f3ddd692
JJ
1831/* And helper function that checks for asan phase (with stack protector
1832 it is phase 3). This is used as callback for expand_stack_vars.
1833 Returns true if any of the vars in the partition need to be protected. */
1834
7d69de61 1835static bool
f3ddd692 1836asan_decl_phase_3 (size_t i)
7d69de61 1837{
f3ddd692
JJ
1838 while (i != EOC)
1839 {
1840 if (asan_protect_stack_decl (stack_vars[i].decl))
1841 return true;
1842 i = stack_vars[i].next;
1843 }
1844 return false;
7d69de61
RH
1845}
1846
1847/* Ensure that variables in different stack protection phases conflict
1848 so that they are not merged and share the same stack slot. */
1849
1850static void
1851add_stack_protection_conflicts (void)
1852{
1853 size_t i, j, n = stack_vars_num;
1854 unsigned char *phase;
1855
1856 phase = XNEWVEC (unsigned char, n);
1857 for (i = 0; i < n; ++i)
1858 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1859
1860 for (i = 0; i < n; ++i)
1861 {
1862 unsigned char ph_i = phase[i];
9b44f5d9 1863 for (j = i + 1; j < n; ++j)
7d69de61
RH
1864 if (ph_i != phase[j])
1865 add_stack_var_conflict (i, j);
1866 }
1867
1868 XDELETEVEC (phase);
1869}
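/* Editor's note (illustrative, not part of the original source): e.g. if
   stack_vars[i] is a phase-1 char array and stack_vars[j] is a phase-2 int
   array, the loop above adds a conflict between I and J, so the partitioner
   below can never coalesce them into a single stack slot, even when their
   lifetimes are disjoint.  */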
1870
1871/* Create a decl for the guard at the top of the stack frame. */
1872
1873static void
1874create_stack_guard (void)
1875{
c2255bc4
AH
1876 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1877 VAR_DECL, NULL, ptr_type_node);
7d69de61
RH
1878 TREE_THIS_VOLATILE (guard) = 1;
1879 TREE_USED (guard) = 1;
1880 expand_one_stack_var (guard);
cb91fab0 1881 crtl->stack_protect_guard = guard;
7d69de61
RH
1882}
1883
ff28a94d 1884/* Prepare for expanding variables. */
b8698a0f 1885static void
ff28a94d
JH
1886init_vars_expansion (void)
1887{
3f9b14ff
SB
1888 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1889 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
ff28a94d 1890
3f9b14ff 1891 /* A map from decl to stack partition. */
39c8aaa4 1892 decl_to_stack_part = new hash_map<tree, size_t>;
ff28a94d
JH
1893
1894 /* Initialize local stack smashing state. */
1895 has_protected_decls = false;
1896 has_short_buffer = false;
1897}
1898
1899/* Free up stack variable graph data. */
1900static void
1901fini_vars_expansion (void)
1902{
3f9b14ff
SB
1903 bitmap_obstack_release (&stack_var_bitmap_obstack);
1904 if (stack_vars)
1905 XDELETEVEC (stack_vars);
1906 if (stack_vars_sorted)
1907 XDELETEVEC (stack_vars_sorted);
ff28a94d 1908 stack_vars = NULL;
9b44f5d9 1909 stack_vars_sorted = NULL;
ff28a94d 1910 stack_vars_alloc = stack_vars_num = 0;
39c8aaa4 1911 delete decl_to_stack_part;
47598145 1912 decl_to_stack_part = NULL;
ff28a94d
JH
1913}
1914
30925d94
AO
1915/* Make a fair guess for the size of the stack frame of the function
1916 in NODE. This doesn't have to be exact; the result is only used in
1917 the inline heuristics. So we don't want to run the full stack var
1918 packing algorithm (which is quadratic in the number of stack vars).
1919 Instead, we calculate the total size of all stack vars. This turns
1920 out to be a pretty fair estimate -- packing of stack vars doesn't
1921 happen very often. */
b5a430f3 1922
ff28a94d 1923HOST_WIDE_INT
30925d94 1924estimated_stack_frame_size (struct cgraph_node *node)
ff28a94d
JH
1925{
1926 HOST_WIDE_INT size = 0;
b5a430f3 1927 size_t i;
bb7e6d55 1928 tree var;
67348ccc 1929 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
30925d94 1930
bb7e6d55 1931 push_cfun (fn);
ff28a94d 1932
3f9b14ff
SB
1933 init_vars_expansion ();
1934
824f71b9
RG
1935 FOR_EACH_LOCAL_DECL (fn, i, var)
1936 if (auto_var_in_fn_p (var, fn->decl))
1937 size += expand_one_var (var, true, false);
b5a430f3 1938
ff28a94d
JH
1939 if (stack_vars_num > 0)
1940 {
b5a430f3
SB
1941 /* Fake sorting the stack vars for account_stack_vars (). */
1942 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1943 for (i = 0; i < stack_vars_num; ++i)
1944 stack_vars_sorted[i] = i;
ff28a94d 1945 size += account_stack_vars ();
ff28a94d 1946 }
3f9b14ff
SB
1947
1948 fini_vars_expansion ();
2e1ec94f 1949 pop_cfun ();
ff28a94d
JH
1950 return size;
1951}
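/* Editor's note (illustrative, not part of the original source): because
   partitioning is skipped, two 32-byte arrays with disjoint lifetimes are
   estimated at 64 bytes even though the packed frame might share a single
   32-byte slot between them; the estimate errs on the large side, which is
   fine for the inline heuristics.  */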
1952
f6bc1c4a
HS
1953/* Helper routine to check if a record or union contains an array field. */
1954
1955static int
1956record_or_union_type_has_array_p (const_tree tree_type)
1957{
1958 tree fields = TYPE_FIELDS (tree_type);
1959 tree f;
1960
1961 for (f = fields; f; f = DECL_CHAIN (f))
1962 if (TREE_CODE (f) == FIELD_DECL)
1963 {
1964 tree field_type = TREE_TYPE (f);
1965 if (RECORD_OR_UNION_TYPE_P (field_type)
1966 && record_or_union_type_has_array_p (field_type))
1967 return 1;
1968 if (TREE_CODE (field_type) == ARRAY_TYPE)
1969 return 1;
1970 }
1971 return 0;
1972}
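/* Editor's illustrative sketch (not part of the original source):

     struct no_array  { int i; double d; };          // returns 0
     struct has_array { int i; char tail[8]; };      // returns 1
     struct nested    { struct has_array inner; };   // returns 1, via the
                                                     // recursive field walk
*/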
1973
6545746e
FW
1974/* Check if the current function has local referenced variables that
1975 have their addresses taken, contain an array, or are arrays. */
1976
1977static bool
1978stack_protect_decl_p ()
1979{
1980 unsigned i;
1981 tree var;
1982
1983 FOR_EACH_LOCAL_DECL (cfun, i, var)
1984 if (!is_global_var (var))
1985 {
1986 tree var_type = TREE_TYPE (var);
8813a647 1987 if (VAR_P (var)
6545746e
FW
1988 && (TREE_CODE (var_type) == ARRAY_TYPE
1989 || TREE_ADDRESSABLE (var)
1990 || (RECORD_OR_UNION_TYPE_P (var_type)
1991 && record_or_union_type_has_array_p (var_type))))
1992 return true;
1993 }
1994 return false;
1995}
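/* Editor's illustrative sketch (not part of the original source): any of
   the locals below makes stack_protect_decl_p return true:

     char name[32];                // array type
     int x; int *p = &x;           // X is TREE_ADDRESSABLE
     struct { char c[4]; } s;      // record containing an array field
*/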
1996
1997/* Check if the current function has calls that use a return slot. */
1998
1999static bool
2000stack_protect_return_slot_p ()
2001{
2002 basic_block bb;
2003
2004 FOR_ALL_BB_FN (bb, cfun)
2005 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2006 !gsi_end_p (gsi); gsi_next (&gsi))
2007 {
355fe088 2008 gimple *stmt = gsi_stmt (gsi);
6545746e
FW
2009 /* This assumes that calls to internal-only functions never
2010 use a return slot. */
2011 if (is_gimple_call (stmt)
2012 && !gimple_call_internal_p (stmt)
2013 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2014 gimple_call_fndecl (stmt)))
2015 return true;
2016 }
2017 return false;
2018}
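/* Editor's illustrative sketch (not part of the original source): on most
   targets a by-value aggregate return such as

     struct big { char data[128]; };
     struct big get_big (void);
     ...
     struct big b = get_big ();    // aggregate_value_p, so a return slot

   is passed through a hidden return slot, which makes this predicate
   return true and feeds the -fstack-protector-strong heuristic above.  */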
2019
1f6d3a08 2020/* Expand all variables used in the function. */
727a31fa 2021
b47aae36 2022static rtx_insn *
727a31fa
RH
2023expand_used_vars (void)
2024{
c021f10b 2025 tree var, outer_block = DECL_INITIAL (current_function_decl);
8c681247 2026 auto_vec<tree> maybe_local_decls;
b47aae36 2027 rtx_insn *var_end_seq = NULL;
4e3825db 2028 unsigned i;
c021f10b 2029 unsigned len;
f6bc1c4a 2030 bool gen_stack_protect_signal = false;
727a31fa 2031
1f6d3a08
RH
2032 /* Compute the phase of the stack frame for this function. */
2033 {
2034 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2035 int off = STARTING_FRAME_OFFSET % align;
2036 frame_phase = off ? align - off : 0;
2037 }
727a31fa 2038
3f9b14ff
SB
2039 /* Set TREE_USED on all variables in the local_decls. */
2040 FOR_EACH_LOCAL_DECL (cfun, i, var)
2041 TREE_USED (var) = 1;
2042 /* Clear TREE_USED on all variables associated with a block scope. */
2043 clear_tree_used (DECL_INITIAL (current_function_decl));
2044
ff28a94d 2045 init_vars_expansion ();
7d69de61 2046
8f51aa6b
IZ
2047 if (targetm.use_pseudo_pic_reg ())
2048 pic_offset_table_rtx = gen_reg_rtx (Pmode);
2049
4e3825db
MM
2050 for (i = 0; i < SA.map->num_partitions; i++)
2051 {
f11a7b6d
AO
2052 if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2053 continue;
2054
4e3825db
MM
2055 tree var = partition_to_var (SA.map, i);
2056
ea057359 2057 gcc_assert (!virtual_operand_p (var));
70b5e7dc 2058
1f9ceff1 2059 expand_one_ssa_partition (var);
64d7fb90 2060 }
7eb9f42e 2061
f6bc1c4a 2062 if (flag_stack_protect == SPCT_FLAG_STRONG)
6545746e
FW
2063 gen_stack_protect_signal
2064 = stack_protect_decl_p () || stack_protect_return_slot_p ();
f6bc1c4a 2065
cb91fab0 2066 /* At this point all variables on the local_decls with TREE_USED
1f6d3a08 2067 set are not associated with any block scope. Lay them out. */
c021f10b 2068
9771b263 2069 len = vec_safe_length (cfun->local_decls);
c021f10b 2070 FOR_EACH_LOCAL_DECL (cfun, i, var)
1f6d3a08 2071 {
1f6d3a08
RH
2072 bool expand_now = false;
2073
4e3825db
MM
2074 /* Expanded above already. */
2075 if (is_gimple_reg (var))
eb7adebc
MM
2076 {
2077 TREE_USED (var) = 0;
3adcf52c 2078 goto next;
eb7adebc 2079 }
1f6d3a08
RH
2080 /* We didn't set a block for static or extern because it's hard
2081 to tell the difference between a global variable (re)declared
2082 in a local scope, and one that's really declared there to
2083 begin with. And it doesn't really matter much, since we're
2084 not giving them stack space. Expand them now. */
4e3825db 2085 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1f6d3a08
RH
2086 expand_now = true;
2087
ee2e8462
EB
2088 /* Expand variables not associated with any block now. Those created by
2089 the optimizers could be live anywhere in the function. Those that
2090 could possibly have been scoped originally and detached from their
2091 block will have their allocation deferred so we coalesce them with
2092 others when optimization is enabled. */
1f6d3a08
RH
2093 else if (TREE_USED (var))
2094 expand_now = true;
2095
2096 /* Finally, mark all variables on the list as used. We'll use
2097 this in a moment when we expand those associated with scopes. */
2098 TREE_USED (var) = 1;
2099
2100 if (expand_now)
3adcf52c
JM
2101 expand_one_var (var, true, true);
2102
2103 next:
2104 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
802e9f8e 2105 {
3adcf52c
JM
2106 rtx rtl = DECL_RTL_IF_SET (var);
2107
2108 /* Keep artificial non-ignored vars in cfun->local_decls
2109 chain until instantiate_decls. */
2110 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 2111 add_local_decl (cfun, var);
6c6366f6 2112 else if (rtl == NULL_RTX)
c021f10b
NF
2113 /* If rtl isn't set yet, which can happen e.g. with
2114 -fstack-protector, retry before returning from this
2115 function. */
9771b263 2116 maybe_local_decls.safe_push (var);
802e9f8e 2117 }
1f6d3a08 2118 }
1f6d3a08 2119
c021f10b
NF
2120 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2121
2122 +-----------------+-----------------+
2123 | ...processed... | ...duplicates...|
2124 +-----------------+-----------------+
2125 ^
2126 +-- LEN points here.
2127
2128 We just want the duplicates, as those are the artificial
2129 non-ignored vars that we want to keep until instantiate_decls.
2130 Move them down and truncate the array. */
9771b263
DN
2131 if (!vec_safe_is_empty (cfun->local_decls))
2132 cfun->local_decls->block_remove (0, len);
c021f10b 2133
1f6d3a08
RH
2134 /* At this point, all variables within the block tree with TREE_USED
2135 set are actually used by the optimized function. Lay them out. */
2136 expand_used_vars_for_block (outer_block, true);
2137
2138 if (stack_vars_num > 0)
2139 {
47598145 2140 add_scope_conflicts ();
1f6d3a08 2141
c22cacf3 2142 /* If stack protection is enabled, we don't share space between
7d69de61 2143 vulnerable data and non-vulnerable data. */
5434dc07
MD
2144 if (flag_stack_protect != 0
2145 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2146 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2147 && lookup_attribute ("stack_protect",
2148 DECL_ATTRIBUTES (current_function_decl)))))
7d69de61
RH
2149 add_stack_protection_conflicts ();
2150
c22cacf3 2151 /* Now that we have collected all stack variables, and have computed a
1f6d3a08
RH
2152 minimal interference graph, attempt to save some stack space. */
2153 partition_stack_vars ();
2154 if (dump_file)
2155 dump_stack_var_partition ();
7d69de61
RH
2156 }
2157
f6bc1c4a
HS
2158 switch (flag_stack_protect)
2159 {
2160 case SPCT_FLAG_ALL:
2161 create_stack_guard ();
2162 break;
2163
2164 case SPCT_FLAG_STRONG:
2165 if (gen_stack_protect_signal
5434dc07
MD
2166 || cfun->calls_alloca || has_protected_decls
2167 || lookup_attribute ("stack_protect",
2168 DECL_ATTRIBUTES (current_function_decl)))
f6bc1c4a
HS
2169 create_stack_guard ();
2170 break;
2171
2172 case SPCT_FLAG_DEFAULT:
5434dc07
MD
2173 if (cfun->calls_alloca || has_protected_decls
2174 || lookup_attribute ("stack_protect",
2175 DECL_ATTRIBUTES (current_function_decl)))
c3284718 2176 create_stack_guard ();
f6bc1c4a
HS
2177 break;
2178
5434dc07
MD
2179 case SPCT_FLAG_EXPLICIT:
2180 if (lookup_attribute ("stack_protect",
2181 DECL_ATTRIBUTES (current_function_decl)))
2182 create_stack_guard ();
2183 break;
f6bc1c4a
HS
2184 default:
2185 ;
2186 }
1f6d3a08 2187
7d69de61
RH
2188 /* Assign rtl to each variable based on these partitions. */
2189 if (stack_vars_num > 0)
2190 {
f3ddd692
JJ
2191 struct stack_vars_data data;
2192
e361382f
JJ
2193 data.asan_base = NULL_RTX;
2194 data.asan_alignb = 0;
f3ddd692 2195
7d69de61
RH
2196 /* Reorder decls to be protected by iterating over the variables
2197 array multiple times, and allocating out of each phase in turn. */
c22cacf3 2198 /* ??? We could probably integrate this into the qsort we did
7d69de61
RH
2199 earlier, such that we naturally see these variables first,
2200 and thus naturally allocate things in the right order. */
2201 if (has_protected_decls)
2202 {
2203 /* Phase 1 contains only character arrays. */
f3ddd692 2204 expand_stack_vars (stack_protect_decl_phase_1, &data);
7d69de61
RH
2205
2206 /* Phase 2 contains other kinds of arrays. */
5434dc07
MD
2207 if (flag_stack_protect == SPCT_FLAG_ALL
2208 || flag_stack_protect == SPCT_FLAG_STRONG
2209 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2210 && lookup_attribute ("stack_protect",
2211 DECL_ATTRIBUTES (current_function_decl))))
f3ddd692 2212 expand_stack_vars (stack_protect_decl_phase_2, &data);
7d69de61
RH
2213 }
2214
c461d263 2215 if (asan_sanitize_stack_p ())
f3ddd692
JJ
2216 /* Phase 3, any partitions that need asan protection
2217 in addition to phase 1 and 2. */
2218 expand_stack_vars (asan_decl_phase_3, &data);
2219
9771b263 2220 if (!data.asan_vec.is_empty ())
f3ddd692
JJ
2221 {
2222 HOST_WIDE_INT prev_offset = frame_offset;
e361382f
JJ
2223 HOST_WIDE_INT offset, sz, redzonesz;
2224 redzonesz = ASAN_RED_ZONE_SIZE;
2225 sz = data.asan_vec[0] - prev_offset;
2226 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2227 && data.asan_alignb <= 4096
3dc87cc0 2228 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
e361382f
JJ
2229 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2230 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
2231 offset
2232 = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
9771b263
DN
2233 data.asan_vec.safe_push (prev_offset);
2234 data.asan_vec.safe_push (offset);
e5dcd695
LZ
2235 /* Leave space for alignment if STRICT_ALIGNMENT. */
2236 if (STRICT_ALIGNMENT)
2237 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2238 << ASAN_SHADOW_SHIFT)
2239 / BITS_PER_UNIT, 1);
f3ddd692
JJ
2240
2241 var_end_seq
2242 = asan_emit_stack_protection (virtual_stack_vars_rtx,
e361382f
JJ
2243 data.asan_base,
2244 data.asan_alignb,
9771b263 2245 data.asan_vec.address (),
e361382f 2246 data.asan_decl_vec.address (),
9771b263 2247 data.asan_vec.length ());
f3ddd692
JJ
2248 }
2249
2250 expand_stack_vars (NULL, &data);
1f6d3a08
RH
2251 }
2252
3f9b14ff
SB
2253 fini_vars_expansion ();
2254
6c6366f6
JJ
2255 /* If there were any artificial non-ignored vars without rtl
2256 found earlier, see if deferred stack allocation hasn't assigned
2257 rtl to them. */
9771b263 2258 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
6c6366f6 2259 {
6c6366f6
JJ
2260 rtx rtl = DECL_RTL_IF_SET (var);
2261
6c6366f6
JJ
2262 /* Keep artificial non-ignored vars in cfun->local_decls
2263 chain until instantiate_decls. */
2264 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 2265 add_local_decl (cfun, var);
6c6366f6
JJ
2266 }
2267
1f6d3a08
RH
2268 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2269 if (STACK_ALIGNMENT_NEEDED)
2270 {
2271 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2272 if (!FRAME_GROWS_DOWNWARD)
2273 frame_offset += align - 1;
2274 frame_offset &= -align;
2275 }
f3ddd692
JJ
2276
2277 return var_end_seq;
727a31fa
RH
2278}
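/* Editor's note (illustrative, not part of the original source): the final
   alignment step above rounds FRAME_OFFSET away from zero to a multiple of
   the preferred boundary.  With align = 16 and an upward-growing frame,
   frame_offset = 40 becomes (40 + 15) & -16 = 48; with a downward-growing
   frame, a negative offset such as -40 is masked directly to -48.  */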
2279
2280
b7211528
SB
2281/* If we need to produce a detailed dump, print the tree representation
2282 for STMT to the dump file. SINCE is the last RTX after which the RTL
2283 generated for STMT should have been appended. */
2284
2285static void
355fe088 2286maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
b7211528
SB
2287{
2288 if (dump_file && (dump_flags & TDF_DETAILS))
2289 {
2290 fprintf (dump_file, "\n;; ");
b5b8b0ac
AO
2291 print_gimple_stmt (dump_file, stmt, 0,
2292 TDF_SLIM | (dump_flags & TDF_LINENO));
b7211528
SB
2293 fprintf (dump_file, "\n");
2294
2295 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2296 }
2297}
2298
8b11009b
ZD
2299/* Maps the blocks that do not contain tree labels to rtx labels. */
2300
134aa83c 2301static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
8b11009b 2302
a9b77cd1
ZD
2303/* Returns the label_rtx expression for a label starting basic block BB. */
2304
1476d1bd 2305static rtx_code_label *
726a989a 2306label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
a9b77cd1 2307{
726a989a
RB
2308 gimple_stmt_iterator gsi;
2309 tree lab;
a9b77cd1
ZD
2310
2311 if (bb->flags & BB_RTL)
2312 return block_label (bb);
2313
134aa83c 2314 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
8b11009b 2315 if (elt)
39c8aaa4 2316 return *elt;
8b11009b
ZD
2317
2318 /* Find the tree label if it is present. */
b8698a0f 2319
726a989a 2320 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
a9b77cd1 2321 {
538dd0b7
DM
2322 glabel *lab_stmt;
2323
2324 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2325 if (!lab_stmt)
a9b77cd1
ZD
2326 break;
2327
726a989a 2328 lab = gimple_label_label (lab_stmt);
a9b77cd1
ZD
2329 if (DECL_NONLOCAL (lab))
2330 break;
2331
1476d1bd 2332 return jump_target_rtx (lab);
a9b77cd1
ZD
2333 }
2334
19f8b229 2335 rtx_code_label *l = gen_label_rtx ();
39c8aaa4
TS
2336 lab_rtx_for_bb->put (bb, l);
2337 return l;
a9b77cd1
ZD
2338}
2339
726a989a 2340
529ff441
MM
2341/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2342 of a basic block where we just expanded the conditional at the end,
315adeda
MM
2343 possibly clean up the CFG and instruction sequence. LAST is the
2344 last instruction before the just emitted jump sequence. */
529ff441
MM
2345
2346static void
b47aae36 2347maybe_cleanup_end_of_block (edge e, rtx_insn *last)
529ff441
MM
2348{
2349 /* Special case: when jumpif decides that the condition is
2350 trivial it emits an unconditional jump (and the necessary
2351 barrier). But we still have two edges, the fallthru one is
2352 wrong. purge_dead_edges would clean this up later. Unfortunately
2353 we have to insert insns (and split edges) before
2354 find_many_sub_basic_blocks and hence before purge_dead_edges.
2355 But splitting edges might create new blocks which depend on the
2356 fact that if there are two edges there's no barrier. So the
2357 barrier would get lost and verify_flow_info would ICE. Instead
2358 of auditing all edge splitters to care for the barrier (which
2359 normally isn't there in a cleaned CFG), fix it here. */
2360 if (BARRIER_P (get_last_insn ()))
2361 {
b47aae36 2362 rtx_insn *insn;
529ff441
MM
2363 remove_edge (e);
2364 /* Now, we have a single successor block, if we have insns to
2365 insert on the remaining edge we potentially will insert
2366 it at the end of this block (if the dest block isn't feasible)
2367 in order to avoid splitting the edge. This insertion will take
2368 place in front of the last jump. But we might have emitted
2369 multiple jumps (conditional and one unconditional) to the
2370 same destination. Inserting in front of the last one then
2371 is a problem. See PR 40021. We fix this by deleting all
2372 jumps except the last unconditional one. */
2373 insn = PREV_INSN (get_last_insn ());
2374 /* Make sure we have an unconditional jump. Otherwise we're
2375 confused. */
2376 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
315adeda 2377 for (insn = PREV_INSN (insn); insn != last;)
529ff441
MM
2378 {
2379 insn = PREV_INSN (insn);
2380 if (JUMP_P (NEXT_INSN (insn)))
90eb3e33 2381 {
8a269cb7 2382 if (!any_condjump_p (NEXT_INSN (insn)))
90eb3e33
JJ
2383 {
2384 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2385 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2386 }
2387 delete_insn (NEXT_INSN (insn));
2388 }
529ff441
MM
2389 }
2390 }
2391}
2392
726a989a 2393/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
80c7a9eb
RH
2394 Returns a new basic block if we've terminated the current basic
2395 block and created a new one. */
2396
2397static basic_block
538dd0b7 2398expand_gimple_cond (basic_block bb, gcond *stmt)
80c7a9eb
RH
2399{
2400 basic_block new_bb, dest;
2401 edge new_edge;
2402 edge true_edge;
2403 edge false_edge;
b47aae36 2404 rtx_insn *last2, *last;
28ed065e
MM
2405 enum tree_code code;
2406 tree op0, op1;
2407
2408 code = gimple_cond_code (stmt);
2409 op0 = gimple_cond_lhs (stmt);
2410 op1 = gimple_cond_rhs (stmt);
2411 /* We're sometimes presented with such code:
2412 D.123_1 = x < y;
2413 if (D.123_1 != 0)
2414 ...
2415 This would expand to two comparisons which then later might
2416 be cleaned up by combine. But some pattern matchers like if-conversion
2417 work better when there's only one compare, so make up for this
2418 here as a special exception if TER would have made the same change. */
31348d52 2419 if (SA.values
28ed065e 2420 && TREE_CODE (op0) == SSA_NAME
31348d52
RB
2421 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2422 && TREE_CODE (op1) == INTEGER_CST
2423 && ((gimple_cond_code (stmt) == NE_EXPR
2424 && integer_zerop (op1))
2425 || (gimple_cond_code (stmt) == EQ_EXPR
2426 && integer_onep (op1)))
28ed065e
MM
2427 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2428 {
355fe088 2429 gimple *second = SSA_NAME_DEF_STMT (op0);
e83f4b68 2430 if (gimple_code (second) == GIMPLE_ASSIGN)
28ed065e 2431 {
e83f4b68
MM
2432 enum tree_code code2 = gimple_assign_rhs_code (second);
2433 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2434 {
2435 code = code2;
2436 op0 = gimple_assign_rhs1 (second);
2437 op1 = gimple_assign_rhs2 (second);
2438 }
2d52a3a1
ZC
2439 /* If jumps are cheap and the target does not support conditional
2440 compare, turn some more codes into jumpy sequences. */
2441 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2442 && targetm.gen_ccmp_first == NULL)
e83f4b68
MM
2443 {
2444 if ((code2 == BIT_AND_EXPR
2445 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2446 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2447 || code2 == TRUTH_AND_EXPR)
2448 {
2449 code = TRUTH_ANDIF_EXPR;
2450 op0 = gimple_assign_rhs1 (second);
2451 op1 = gimple_assign_rhs2 (second);
2452 }
2453 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2454 {
2455 code = TRUTH_ORIF_EXPR;
2456 op0 = gimple_assign_rhs1 (second);
2457 op1 = gimple_assign_rhs2 (second);
2458 }
2459 }
28ed065e
MM
2460 }
2461 }
b7211528
SB
2462
2463 last2 = last = get_last_insn ();
80c7a9eb
RH
2464
2465 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5368224f 2466 set_curr_insn_location (gimple_location (stmt));
80c7a9eb
RH
2467
2468 /* These flags have no purpose in RTL land. */
2469 true_edge->flags &= ~EDGE_TRUE_VALUE;
2470 false_edge->flags &= ~EDGE_FALSE_VALUE;
2471
2472 /* We can either have a pure conditional jump with one fallthru edge or
2473 two-way jump that needs to be decomposed into two basic blocks. */
a9b77cd1 2474 if (false_edge->dest == bb->next_bb)
80c7a9eb 2475 {
40e90eac
JJ
2476 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2477 true_edge->probability);
726a989a 2478 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2f13f2de 2479 if (true_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2480 set_curr_insn_location (true_edge->goto_locus);
a9b77cd1 2481 false_edge->flags |= EDGE_FALLTHRU;
315adeda 2482 maybe_cleanup_end_of_block (false_edge, last);
80c7a9eb
RH
2483 return NULL;
2484 }
a9b77cd1 2485 if (true_edge->dest == bb->next_bb)
80c7a9eb 2486 {
40e90eac
JJ
2487 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2488 false_edge->probability);
726a989a 2489 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2f13f2de 2490 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2491 set_curr_insn_location (false_edge->goto_locus);
a9b77cd1 2492 true_edge->flags |= EDGE_FALLTHRU;
315adeda 2493 maybe_cleanup_end_of_block (true_edge, last);
80c7a9eb
RH
2494 return NULL;
2495 }
80c7a9eb 2496
40e90eac
JJ
2497 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2498 true_edge->probability);
80c7a9eb 2499 last = get_last_insn ();
2f13f2de 2500 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2501 set_curr_insn_location (false_edge->goto_locus);
a9b77cd1 2502 emit_jump (label_rtx_for_bb (false_edge->dest));
80c7a9eb 2503
1130d5e3 2504 BB_END (bb) = last;
80c7a9eb 2505 if (BARRIER_P (BB_END (bb)))
1130d5e3 2506 BB_END (bb) = PREV_INSN (BB_END (bb));
80c7a9eb
RH
2507 update_bb_for_insn (bb);
2508
2509 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2510 dest = false_edge->dest;
2511 redirect_edge_succ (false_edge, new_bb);
2512 false_edge->flags |= EDGE_FALLTHRU;
2513 new_bb->count = false_edge->count;
2514 new_bb->frequency = EDGE_FREQUENCY (false_edge);
726338f4 2515 add_bb_to_loop (new_bb, bb->loop_father);
80c7a9eb
RH
2516 new_edge = make_edge (new_bb, dest, 0);
2517 new_edge->probability = REG_BR_PROB_BASE;
2518 new_edge->count = new_bb->count;
2519 if (BARRIER_P (BB_END (new_bb)))
1130d5e3 2520 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
80c7a9eb
RH
2521 update_bb_for_insn (new_bb);
2522
726a989a 2523 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
c22cacf3 2524
2f13f2de 2525 if (true_edge->goto_locus != UNKNOWN_LOCATION)
7787b4aa 2526 {
5368224f
DC
2527 set_curr_insn_location (true_edge->goto_locus);
2528 true_edge->goto_locus = curr_insn_location ();
7787b4aa 2529 }
7787b4aa 2530
80c7a9eb
RH
2531 return new_bb;
2532}
2533
0a35513e
AH
2534/* Mark all calls that can have a transaction restart. */
2535
2536static void
355fe088 2537mark_transaction_restart_calls (gimple *stmt)
0a35513e
AH
2538{
2539 struct tm_restart_node dummy;
50979347 2540 tm_restart_node **slot;
0a35513e
AH
2541
2542 if (!cfun->gimple_df->tm_restart)
2543 return;
2544
2545 dummy.stmt = stmt;
50979347 2546 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
0a35513e
AH
2547 if (slot)
2548 {
50979347 2549 struct tm_restart_node *n = *slot;
0a35513e 2550 tree list = n->label_or_list;
b47aae36 2551 rtx_insn *insn;
0a35513e
AH
2552
2553 for (insn = next_real_insn (get_last_insn ());
2554 !CALL_P (insn);
2555 insn = next_real_insn (insn))
2556 continue;
2557
2558 if (TREE_CODE (list) == LABEL_DECL)
2559 add_reg_note (insn, REG_TM, label_rtx (list));
2560 else
2561 for (; list ; list = TREE_CHAIN (list))
2562 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2563 }
2564}
2565
28ed065e
MM
2566/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2567 statement STMT. */
2568
2569static void
538dd0b7 2570expand_call_stmt (gcall *stmt)
28ed065e 2571{
25583c4f 2572 tree exp, decl, lhs;
e23817b3 2573 bool builtin_p;
e7925582 2574 size_t i;
28ed065e 2575
25583c4f
RS
2576 if (gimple_call_internal_p (stmt))
2577 {
2578 expand_internal_call (stmt);
2579 return;
2580 }
2581
4cfe7a6c
RS
2582 /* If this is a call to a built-in function and it has no effect other
2583 than setting the lhs, try to implement it using an internal function
2584 instead. */
2585 decl = gimple_call_fndecl (stmt);
2586 if (gimple_call_lhs (stmt)
2587 && !gimple_has_side_effects (stmt)
2588 && (optimize || (decl && called_as_built_in (decl))))
2589 {
2590 internal_fn ifn = replacement_internal_fn (stmt);
2591 if (ifn != IFN_LAST)
2592 {
2593 expand_internal_call (ifn, stmt);
2594 return;
2595 }
2596 }
2597
01156003 2598 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
089d1227 2599
01156003 2600 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
089d1227 2601 builtin_p = decl && DECL_BUILT_IN (decl);
01156003 2602
e7925582
EB
2603 /* If this is not a builtin function, the function type through which the
2604 call is made may be different from the type of the function. */
2605 if (!builtin_p)
2606 CALL_EXPR_FN (exp)
b25aa0e8
EB
2607 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2608 CALL_EXPR_FN (exp));
e7925582 2609
28ed065e
MM
2610 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2611 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2612
2613 for (i = 0; i < gimple_call_num_args (stmt); i++)
e23817b3
RG
2614 {
2615 tree arg = gimple_call_arg (stmt, i);
355fe088 2616 gimple *def;
e23817b3
RG
2617 /* TER addresses into arguments of builtin functions so we have a
2618 chance to infer more correct alignment information. See PR39954. */
2619 if (builtin_p
2620 && TREE_CODE (arg) == SSA_NAME
2621 && (def = get_gimple_for_ssa_name (arg))
2622 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2623 arg = gimple_assign_rhs1 (def);
2624 CALL_EXPR_ARG (exp, i) = arg;
2625 }
28ed065e 2626
93f28ca7 2627 if (gimple_has_side_effects (stmt))
28ed065e
MM
2628 TREE_SIDE_EFFECTS (exp) = 1;
2629
93f28ca7 2630 if (gimple_call_nothrow_p (stmt))
28ed065e
MM
2631 TREE_NOTHROW (exp) = 1;
2632
2633 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
9a385c2d 2634 CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
28ed065e 2635 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
63d2a353
MM
2636 if (decl
2637 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13e49da9
TV
2638 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2639 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
63d2a353
MM
2640 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2641 else
2642 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
28ed065e
MM
2643 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2644 SET_EXPR_LOCATION (exp, gimple_location (stmt));
d5e254e1 2645 CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
28ed065e 2646
ddb555ed
JJ
2647 /* Ensure RTL is created for debug args. */
2648 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2649 {
9771b263 2650 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
ddb555ed
JJ
2651 unsigned int ix;
2652 tree dtemp;
2653
2654 if (debug_args)
9771b263 2655 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
ddb555ed
JJ
2656 {
2657 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2658 expand_debug_expr (dtemp);
2659 }
2660 }
2661
25583c4f 2662 lhs = gimple_call_lhs (stmt);
28ed065e
MM
2663 if (lhs)
2664 expand_assignment (lhs, exp, false);
2665 else
4c437f02 2666 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
0a35513e
AH
2667
2668 mark_transaction_restart_calls (stmt);
28ed065e
MM
2669}
2670
862d0b35
DN
2671
2672/* Generate RTL for an asm statement (explicit assembler code).
2673 STRING is a STRING_CST node containing the assembler code text,
2674 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2675 insn is volatile; don't optimize it. */
2676
2677static void
2678expand_asm_loc (tree string, int vol, location_t locus)
2679{
2680 rtx body;
2681
862d0b35
DN
2682 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2683 ggc_strdup (TREE_STRING_POINTER (string)),
2684 locus);
2685
2686 MEM_VOLATILE_P (body) = vol;
2687
93671519
BE
2688 /* Non-empty basic ASM implicitly clobbers memory. */
2689 if (TREE_STRING_LENGTH (string) != 0)
2690 {
2691 rtx asm_op, clob;
2692 unsigned i, nclobbers;
2693 auto_vec<rtx> input_rvec, output_rvec;
2694 auto_vec<const char *> constraints;
2695 auto_vec<rtx> clobber_rvec;
2696 HARD_REG_SET clobbered_regs;
2697 CLEAR_HARD_REG_SET (clobbered_regs);
2698
2699 clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2700 clobber_rvec.safe_push (clob);
2701
2702 if (targetm.md_asm_adjust)
2703 targetm.md_asm_adjust (output_rvec, input_rvec,
2704 constraints, clobber_rvec,
2705 clobbered_regs);
2706
2707 asm_op = body;
2708 nclobbers = clobber_rvec.length ();
2709 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2710
2711 XVECEXP (body, 0, 0) = asm_op;
2712 for (i = 0; i < nclobbers; i++)
2713 XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2714 }
2715
862d0b35
DN
2716 emit_insn (body);
2717}
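/* Editor's note (illustrative, not part of the original source): this is
   the path taken by basic asm, e.g.

     asm ("mfence");   // non-empty string: implicit memory clobber added
     asm ("");         // empty string: no clobber

   so a non-empty basic asm also acts as a compiler-level memory barrier.  */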
2718
2719/* Return the number of times character C occurs in string S. */
2720static int
2721n_occurrences (int c, const char *s)
2722{
2723 int n = 0;
2724 while (*s)
2725 n += (*s++ == c);
2726 return n;
2727}
2728
2729/* A subroutine of expand_asm_operands. Check that all operands have
2730 the same number of alternatives. Return true if so. */
2731
2732static bool
7ca35180 2733check_operand_nalternatives (const vec<const char *> &constraints)
862d0b35 2734{
7ca35180
RH
2735 unsigned len = constraints.length();
2736 if (len > 0)
862d0b35 2737 {
7ca35180 2738 int nalternatives = n_occurrences (',', constraints[0]);
862d0b35
DN
2739
2740 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2741 {
2742 error ("too many alternatives in %<asm%>");
2743 return false;
2744 }
2745
7ca35180
RH
2746 for (unsigned i = 1; i < len; ++i)
2747 if (n_occurrences (',', constraints[i]) != nalternatives)
2748 {
2749 error ("operand constraints for %<asm%> differ "
2750 "in number of alternatives");
2751 return false;
2752 }
862d0b35 2753 }
862d0b35
DN
2754 return true;
2755}
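/* Editor's illustrative sketch (not part of the original source):

     asm ("..." : "=r,m" (out) : "r,r" (in));   // OK: two alternatives each
     asm ("..." : "=r,m" (out) : "r" (in));     // error: two vs. one

   every constraint string must contain the same number of commas, i.e.
   describe the same number of alternatives.  */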
2756
2757/* Check for overlap between registers marked in CLOBBERED_REGS and
2758 anything inappropriate in T. Emit error and return the register
2759 variable definition for error, NULL_TREE for ok. */
2760
2761static bool
2762tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2763{
2764 /* Conflicts between asm-declared register variables and the clobber
2765 list are not allowed. */
2766 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2767
2768 if (overlap)
2769 {
2770 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2771 DECL_NAME (overlap));
2772
2773 /* Reset registerness to stop multiple errors emitted for a single
2774 variable. */
2775 DECL_REGISTER (overlap) = 0;
2776 return true;
2777 }
2778
2779 return false;
2780}
2781
2782/* Generate RTL for an asm statement with arguments.
2783 STRING is the instruction template.
2784 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2785 Each output or input has an expression in the TREE_VALUE and
2786 a tree list in TREE_PURPOSE which in turn contains a constraint
2787 name in TREE_VALUE (or NULL_TREE) and a constraint string
2788 in TREE_PURPOSE.
2789 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2790 that is clobbered by this insn.
2791
2792 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2793 should be the fallthru basic block of the asm goto.
2794
2795 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2796 Some elements of OUTPUTS may be replaced with trees representing temporary
2797 values. The caller should copy those temporary values to the originally
2798 specified lvalues.
2799
2800 VOL nonzero means the insn is volatile; don't optimize it. */
2801
2802static void
6476a8fd 2803expand_asm_stmt (gasm *stmt)
862d0b35 2804{
7ca35180
RH
2805 class save_input_location
2806 {
2807 location_t old;
6476a8fd 2808
7ca35180
RH
2809 public:
2810 explicit save_input_location(location_t where)
6476a8fd 2811 {
7ca35180
RH
2812 old = input_location;
2813 input_location = where;
6476a8fd
RH
2814 }
2815
7ca35180 2816 ~save_input_location()
6476a8fd 2817 {
7ca35180 2818 input_location = old;
6476a8fd 2819 }
7ca35180 2820 };
6476a8fd 2821
7ca35180 2822 location_t locus = gimple_location (stmt);
6476a8fd 2823
7ca35180 2824 if (gimple_asm_input_p (stmt))
6476a8fd 2825 {
7ca35180
RH
2826 const char *s = gimple_asm_string (stmt);
2827 tree string = build_string (strlen (s), s);
2828 expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2829 return;
6476a8fd
RH
2830 }
2831
7ca35180
RH
2832 /* There are some legacy diagnostics in here, and this also avoids a
2833 sixth parameter to targetm.md_asm_adjust. */
2834 save_input_location s_i_l(locus);
6476a8fd 2835
7ca35180
RH
2836 unsigned noutputs = gimple_asm_noutputs (stmt);
2837 unsigned ninputs = gimple_asm_ninputs (stmt);
2838 unsigned nlabels = gimple_asm_nlabels (stmt);
2839 unsigned i;
2840
2841 /* ??? Diagnose during gimplification? */
2842 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
6476a8fd 2843 {
7ca35180 2844 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
6476a8fd
RH
2845 return;
2846 }
2847
7ca35180
RH
2848 auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2849 auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2850 auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
6476a8fd 2851
7ca35180 2852 /* Copy the gimple vectors into new vectors that we can manipulate. */
862d0b35 2853
7ca35180
RH
2854 output_tvec.safe_grow (noutputs);
2855 input_tvec.safe_grow (ninputs);
2856 constraints.safe_grow (noutputs + ninputs);
862d0b35 2857
7ca35180
RH
2858 for (i = 0; i < noutputs; ++i)
2859 {
2860 tree t = gimple_asm_output_op (stmt, i);
2861 output_tvec[i] = TREE_VALUE (t);
2862 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2863 }
2864 for (i = 0; i < ninputs; i++)
2865 {
2866 tree t = gimple_asm_input_op (stmt, i);
2867 input_tvec[i] = TREE_VALUE (t);
2868 constraints[i + noutputs]
2869 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2870 }
862d0b35 2871
7ca35180
RH
2872 /* ??? Diagnose during gimplification? */
2873 if (! check_operand_nalternatives (constraints))
2874 return;
862d0b35
DN
2875
2876 /* Count the number of meaningful clobbered registers, ignoring what
2877 we would ignore later. */
7ca35180
RH
2878 auto_vec<rtx> clobber_rvec;
2879 HARD_REG_SET clobbered_regs;
862d0b35 2880 CLEAR_HARD_REG_SET (clobbered_regs);
862d0b35 2881
7ca35180
RH
2882 if (unsigned n = gimple_asm_nclobbers (stmt))
2883 {
2884 clobber_rvec.reserve (n);
2885 for (i = 0; i < n; i++)
2886 {
2887 tree t = gimple_asm_clobber_op (stmt, i);
2888 const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2889 int nregs, j;
862d0b35 2890
7ca35180
RH
2891 j = decode_reg_name_and_count (regname, &nregs);
2892 if (j < 0)
862d0b35 2893 {
7ca35180 2894 if (j == -2)
862d0b35 2895 {
7ca35180
RH
2896 /* ??? Diagnose during gimplification? */
2897 error ("unknown register name %qs in %<asm%>", regname);
2898 }
2899 else if (j == -4)
2900 {
2901 rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2902 clobber_rvec.safe_push (x);
2903 }
2904 else
2905 {
2906 /* Otherwise we should have -1 == empty string
2907 or -3 == cc, which is not a register. */
2908 gcc_assert (j == -1 || j == -3);
862d0b35 2909 }
862d0b35 2910 }
7ca35180
RH
2911 else
2912 for (int reg = j; reg < j + nregs; reg++)
2913 {
2914 /* Clobbering the PIC register is an error. */
2915 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2916 {
2917 /* ??? Diagnose during gimplification? */
2918 error ("PIC register clobbered by %qs in %<asm%>",
2919 regname);
2920 return;
2921 }
2922
2923 SET_HARD_REG_BIT (clobbered_regs, reg);
2924 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
2925 clobber_rvec.safe_push (x);
2926 }
862d0b35
DN
2927 }
2928 }
7ca35180 2929 unsigned nclobbers = clobber_rvec.length();
862d0b35
DN
2930
2931 /* First pass over inputs and outputs checks validity and sets
2932 mark_addressable if needed. */
7ca35180 2933 /* ??? Diagnose during gimplification? */
862d0b35 2934
7ca35180 2935 for (i = 0; i < noutputs; ++i)
862d0b35 2936 {
7ca35180 2937 tree val = output_tvec[i];
862d0b35
DN
2938 tree type = TREE_TYPE (val);
2939 const char *constraint;
2940 bool is_inout;
2941 bool allows_reg;
2942 bool allows_mem;
2943
862d0b35
DN
2944 /* Try to parse the output constraint. If that fails, there's
2945 no point in going further. */
2946 constraint = constraints[i];
2947 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2948 &allows_mem, &allows_reg, &is_inout))
2949 return;
2950
2951 if (! allows_reg
2952 && (allows_mem
2953 || is_inout
2954 || (DECL_P (val)
2955 && REG_P (DECL_RTL (val))
2956 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2957 mark_addressable (val);
862d0b35
DN
2958 }
2959
7ca35180 2960 for (i = 0; i < ninputs; ++i)
862d0b35
DN
2961 {
2962 bool allows_reg, allows_mem;
2963 const char *constraint;
2964
862d0b35 2965 constraint = constraints[i + noutputs];
7ca35180
RH
2966 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
2967 constraints.address (),
2968 &allows_mem, &allows_reg))
862d0b35
DN
2969 return;
2970
2971 if (! allows_reg && allows_mem)
7ca35180 2972 mark_addressable (input_tvec[i]);
862d0b35
DN
2973 }
2974
2975 /* Second pass evaluates arguments. */
2976
2977 /* Make sure stack is consistent for asm goto. */
2978 if (nlabels > 0)
2979 do_pending_stack_adjust ();
7ca35180
RH
2980 int old_generating_concat_p = generating_concat_p;
2981
2982 /* Vector of RTX's of evaluated output operands. */
2983 auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
2984 auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
2985 rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
862d0b35 2986
7ca35180
RH
2987 output_rvec.safe_grow (noutputs);
2988
2989 for (i = 0; i < noutputs; ++i)
862d0b35 2990 {
7ca35180 2991 tree val = output_tvec[i];
862d0b35 2992 tree type = TREE_TYPE (val);
7ca35180 2993 bool is_inout, allows_reg, allows_mem, ok;
862d0b35 2994 rtx op;
862d0b35
DN
2995
2996 ok = parse_output_constraint (&constraints[i], i, ninputs,
2997 noutputs, &allows_mem, &allows_reg,
2998 &is_inout);
2999 gcc_assert (ok);
3000
3001 /* If an output operand is not a decl or indirect ref and our constraint
3002 allows a register, make a temporary to act as an intermediate.
7ca35180 3003 Make the asm insn write into that, then we will copy it to
862d0b35
DN
3004 the real output operand. Likewise for promoted variables. */
3005
3006 generating_concat_p = 0;
3007
862d0b35
DN
3008 if ((TREE_CODE (val) == INDIRECT_REF
3009 && allows_mem)
3010 || (DECL_P (val)
3011 && (allows_mem || REG_P (DECL_RTL (val)))
3012 && ! (REG_P (DECL_RTL (val))
3013 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3014 || ! allows_reg
3015 || is_inout)
3016 {
3017 op = expand_expr (val, NULL_RTX, VOIDmode,
3018 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3019 if (MEM_P (op))
3020 op = validize_mem (op);
3021
3022 if (! allows_reg && !MEM_P (op))
3023 error ("output number %d not directly addressable", i);
3024 if ((! allows_mem && MEM_P (op))
3025 || GET_CODE (op) == CONCAT)
3026 {
7ca35180 3027 rtx old_op = op;
862d0b35 3028 op = gen_reg_rtx (GET_MODE (op));
7ca35180
RH
3029
3030 generating_concat_p = old_generating_concat_p;
3031
862d0b35 3032 if (is_inout)
7ca35180
RH
3033 emit_move_insn (op, old_op);
3034
3035 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3036 emit_move_insn (old_op, op);
3037 after_rtl_seq = get_insns ();
3038 after_rtl_end = get_last_insn ();
3039 end_sequence ();
862d0b35
DN
3040 }
3041 }
3042 else
3043 {
3044 op = assign_temp (type, 0, 1);
3045 op = validize_mem (op);
7ca35180
RH
3046 if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3047 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
862d0b35 3048
7ca35180 3049 generating_concat_p = old_generating_concat_p;
862d0b35 3050
7ca35180
RH
3051 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3052 expand_assignment (val, make_tree (type, op), false);
3053 after_rtl_seq = get_insns ();
3054 after_rtl_end = get_last_insn ();
3055 end_sequence ();
862d0b35 3056 }
7ca35180 3057 output_rvec[i] = op;
862d0b35 3058
7ca35180
RH
3059 if (is_inout)
3060 inout_opnum.safe_push (i);
862d0b35
DN
3061 }
3062
7ca35180
RH
3063 auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3064 auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
862d0b35 3065
7ca35180
RH
3066 input_rvec.safe_grow (ninputs);
3067 input_mode.safe_grow (ninputs);
862d0b35 3068
7ca35180 3069 generating_concat_p = 0;
862d0b35 3070
7ca35180 3071 for (i = 0; i < ninputs; ++i)
862d0b35 3072 {
7ca35180
RH
3073 tree val = input_tvec[i];
3074 tree type = TREE_TYPE (val);
3075 bool allows_reg, allows_mem, ok;
862d0b35 3076 const char *constraint;
862d0b35 3077 rtx op;
862d0b35
DN
3078
3079 constraint = constraints[i + noutputs];
7ca35180
RH
3080 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3081 constraints.address (),
3082 &allows_mem, &allows_reg);
862d0b35
DN
3083 gcc_assert (ok);
3084
862d0b35
DN
3085 /* EXPAND_INITIALIZER will not generate code for valid initializer
3086 constants, but will still generate code for other types of operand.
3087 This is the behavior we want for constant constraints. */
3088 op = expand_expr (val, NULL_RTX, VOIDmode,
3089 allows_reg ? EXPAND_NORMAL
3090 : allows_mem ? EXPAND_MEMORY
3091 : EXPAND_INITIALIZER);
3092
3093 /* Never pass a CONCAT to an ASM. */
3094 if (GET_CODE (op) == CONCAT)
3095 op = force_reg (GET_MODE (op), op);
3096 else if (MEM_P (op))
3097 op = validize_mem (op);
3098
3099 if (asm_operand_ok (op, constraint, NULL) <= 0)
3100 {
3101 if (allows_reg && TYPE_MODE (type) != BLKmode)
3102 op = force_reg (TYPE_MODE (type), op);
3103 else if (!allows_mem)
3104 warning (0, "asm operand %d probably doesn%'t match constraints",
3105 i + noutputs);
3106 else if (MEM_P (op))
3107 {
3108 /* We won't recognize either volatile memory or memory
3109 with a queued address as an available memory_operand
3110 at this point. Ignore it: clearly this *is* a memory. */
3111 }
3112 else
3113 gcc_unreachable ();
3114 }
7ca35180
RH
3115 input_rvec[i] = op;
3116 input_mode[i] = TYPE_MODE (type);
862d0b35
DN
3117 }
3118
862d0b35 3119 /* For in-out operands, copy output rtx to input rtx. */
7ca35180 3120 unsigned ninout = inout_opnum.length();
862d0b35
DN
3121 for (i = 0; i < ninout; i++)
3122 {
3123 int j = inout_opnum[i];
7ca35180 3124 rtx o = output_rvec[j];
862d0b35 3125
7ca35180
RH
3126 input_rvec.safe_push (o);
3127 input_mode.safe_push (GET_MODE (o));
862d0b35 3128
7ca35180 3129 char buffer[16];
862d0b35 3130 sprintf (buffer, "%d", j);
7ca35180
RH
3131 constraints.safe_push (ggc_strdup (buffer));
3132 }
3133 ninputs += ninout;
3134
3135 /* Sometimes we wish to automatically clobber registers across an asm.
3136 Case in point is when the i386 backend moved from cc0 to a hard reg --
3137 maintaining source-level compatibility means automatically clobbering
3138 the flags register. */
3139 rtx_insn *after_md_seq = NULL;
3140 if (targetm.md_asm_adjust)
3141 after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3142 constraints, clobber_rvec,
3143 clobbered_regs);
3144
3145 /* Do not allow the hook to change the output and input count,
3146 lest it mess up the operand numbering. */
3147 gcc_assert (output_rvec.length() == noutputs);
3148 gcc_assert (input_rvec.length() == ninputs);
3149 gcc_assert (constraints.length() == noutputs + ninputs);
3150
3151 /* But it certainly can adjust the clobbers. */
3152 nclobbers = clobber_rvec.length();
3153
3154 /* Third pass checks for easy conflicts. */
3155 /* ??? Why are we doing this on trees instead of rtx. */
3156
3157 bool clobber_conflict_found = 0;
3158 for (i = 0; i < noutputs; ++i)
3159 if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3160 clobber_conflict_found = 1;
3161 for (i = 0; i < ninputs - ninout; ++i)
3162 if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3163 clobber_conflict_found = 1;
3164
3165 /* Make vectors for the expression-rtx, constraint strings,
3166 and named operands. */
3167
3168 rtvec argvec = rtvec_alloc (ninputs);
3169 rtvec constraintvec = rtvec_alloc (ninputs);
3170 rtvec labelvec = rtvec_alloc (nlabels);
3171
3172 rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3173 : GET_MODE (output_rvec[0])),
3174 ggc_strdup (gimple_asm_string (stmt)),
3175 empty_string, 0, argvec, constraintvec,
3176 labelvec, locus);
3177 MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3178
3179 for (i = 0; i < ninputs; ++i)
3180 {
3181 ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3182 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3183 = gen_rtx_ASM_INPUT_loc (input_mode[i],
3184 constraints[i + noutputs],
3185 locus);
862d0b35
DN
3186 }
3187
3188 /* Copy labels to the vector. */
7ca35180
RH
3189 rtx_code_label *fallthru_label = NULL;
3190 if (nlabels > 0)
3191 {
3192 basic_block fallthru_bb = NULL;
3193 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3194 if (fallthru)
3195 fallthru_bb = fallthru->dest;
3196
3197 for (i = 0; i < nlabels; ++i)
862d0b35 3198 {
7ca35180 3199 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
e67d1102 3200 rtx_insn *r;
7ca35180
RH
3201 /* If asm goto has any labels in the fallthru basic block, use
3202 a label that we emit immediately after the asm goto. Expansion
3203 may insert further instructions into the same basic block after
3204 asm goto and if we don't do this, insertion of instructions on
3205 the fallthru edge might misbehave. See PR58670. */
3206 if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
3207 {
3208 if (fallthru_label == NULL_RTX)
3209 fallthru_label = gen_label_rtx ();
3210 r = fallthru_label;
3211 }
3212 else
3213 r = label_rtx (label);
3214 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
862d0b35 3215 }
862d0b35
DN
3216 }
3217
862d0b35
DN
3218 /* Now, for each output, construct an rtx
3219 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3220 ARGVEC CONSTRAINTS OPNAMES))
3221 If there is more than one, put them inside a PARALLEL. */
3222
3223 if (nlabels > 0 && nclobbers == 0)
3224 {
3225 gcc_assert (noutputs == 0);
3226 emit_jump_insn (body);
3227 }
3228 else if (noutputs == 0 && nclobbers == 0)
3229 {
3230 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3231 emit_insn (body);
3232 }
3233 else if (noutputs == 1 && nclobbers == 0)
3234 {
7ca35180
RH
3235 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3236 emit_insn (gen_rtx_SET (output_rvec[0], body));
862d0b35
DN
3237 }
3238 else
3239 {
3240 rtx obody = body;
3241 int num = noutputs;
3242
3243 if (num == 0)
3244 num = 1;
3245
3246 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3247
3248 /* For each output operand, store a SET. */
7ca35180 3249 for (i = 0; i < noutputs; ++i)
862d0b35 3250 {
7ca35180
RH
3251 rtx src, o = output_rvec[i];
3252 if (i == 0)
3253 {
3254 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3255 src = obody;
3256 }
3257 else
3258 {
3259 src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3260 ASM_OPERANDS_TEMPLATE (obody),
3261 constraints[i], i, argvec,
3262 constraintvec, labelvec, locus);
3263 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3264 }
3265 XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
862d0b35
DN
3266 }
3267
3268 /* If there are no outputs (but there are some clobbers)
3269 store the bare ASM_OPERANDS into the PARALLEL. */
862d0b35
DN
3270 if (i == 0)
3271 XVECEXP (body, 0, i++) = obody;
3272
3273 /* Store (clobber REG) for each clobbered register specified. */
7ca35180 3274 for (unsigned j = 0; j < nclobbers; ++j)
862d0b35 3275 {
7ca35180 3276 rtx clobbered_reg = clobber_rvec[j];
862d0b35 3277
7ca35180
RH
3278 /* Do a sanity check for overlap between clobbers and, respectively,
3279 inputs and outputs that has not been handled. Such overlap
3280 should have been detected and reported above. */
3281 if (!clobber_conflict_found && REG_P (clobbered_reg))
862d0b35 3282 {
7ca35180
RH
3283 /* We test the old body (obody) contents to avoid
3284 tripping over the under-construction body. */
3285 for (unsigned k = 0; k < noutputs; ++k)
3286 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3287 internal_error ("asm clobber conflict with output operand");
3288
3289 for (unsigned k = 0; k < ninputs - ninout; ++k)
3290 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3291 internal_error ("asm clobber conflict with input operand");
862d0b35
DN
3292 }
3293
7ca35180 3294 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
862d0b35
DN
3295 }
3296
3297 if (nlabels > 0)
3298 emit_jump_insn (body);
3299 else
3300 emit_insn (body);
3301 }
3302
7ca35180
RH
3303 generating_concat_p = old_generating_concat_p;
3304
862d0b35
DN
3305 if (fallthru_label)
3306 emit_label (fallthru_label);
3307
7ca35180
RH
3308 if (after_md_seq)
3309 emit_insn (after_md_seq);
3310 if (after_rtl_seq)
3311 emit_insn (after_rtl_seq);
862d0b35 3312
6476a8fd 3313 free_temp_slots ();
7ca35180 3314 crtl->has_asm_statement = 1;
862d0b35
DN
3315}
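
/* Illustrative only (not part of GCC): the source-level shape that
   exercises the fallthru_label logic above.  In this hypothetical
   snippet the asm goto targets a label that starts the fallthrough
   basic block, so expansion must redirect the jump to a fresh label
   emitted right after the asm (see PR58670).  */
#if 0
int
f (int x)
{
  asm goto ("testl %0, %0; jz %l1"	/* hypothetical x86 template */
	    : /* asm goto allows no outputs; see the assert above */
	    : "r" (x)
	    : "cc"
	    : out);
  x++;			/* fallthrough basic block */
 out:
  return x;
}
#endif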
3316
3317/* Emit code to jump to the address
3318 specified by the pointer expression EXP. */
3319
3320static void
3321expand_computed_goto (tree exp)
3322{
3323 rtx x = expand_normal (exp);
3324
862d0b35
DN
3325 do_pending_stack_adjust ();
3326 emit_indirect_jump (x);
3327}
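
/* Illustrative only (not part of GCC): the GNU C "computed goto"
   extension this helper lowers.  Label addresses are taken with &&,
   and the indirect jump below becomes emit_indirect_jump.  */
#if 0
int
dispatch (int op)
{
  static void *table[] = { &&add, &&sub };
  int r = 0;
  goto *table[op & 1];	/* expand_computed_goto handles this */
 add:
  r = 1;
  goto done;
 sub:
  r = -1;
 done:
  return r;
}
#endif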

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

static void
expand_goto (tree label)
{
  if (flag_checking)
    {
      /* Check for a nonlocal goto to a containing function.  Should have
	 gotten translated to __builtin_nonlocal_goto.  */
      tree context = decl_function_context (label);
      gcc_assert (!context || context == current_function_decl);
    }

  emit_jump (jump_target_rtx (label));
}

/* Output a return with no value.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}

/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}

/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (rtx val)
{
  /* Copy the value to the return location unless it's already there.  */

  tree decl = DECL_RESULT (current_function_decl);
  rtx return_reg = DECL_RTL (decl);
  if (return_reg != val)
    {
      tree funtype = TREE_TYPE (current_function_decl);
      tree type = TREE_TYPE (decl);
      int unsignedp = TYPE_UNSIGNED (type);
      machine_mode old_mode = DECL_MODE (decl);
      machine_mode mode;
      if (DECL_BY_REFERENCE (decl))
	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
      else
	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);

      if (mode != old_mode)
	val = convert_modes (mode, old_mode, val, unsignedp);

      if (GET_CODE (return_reg) == PARALLEL)
	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
	emit_move_insn (return_reg, val);
    }

  expand_null_return_1 ();
}
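
/* Illustrative only (not part of GCC, and target-dependent): why
   promote_function_mode matters here.  On a hypothetical target whose
   ABI returns integers in a word-sized register, a function declared
   to return "short" (HImode) really hands back a sign-extended word
   (SImode), so VAL must be converted from HImode to SImode before the
   move into the hard return register.  */
#if 0
short
narrow_return (int x)
{
  return (short) x;	/* HImode value, possibly returned promoted.  */
}
#endif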

/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

static void
expand_return (tree retval, tree bounds)
{
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;
  rtx bounds_rtl;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_normal (retval);
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
	 returns a value.  */
      expand_null_return ();
      return;
    }
  else if ((TREE_CODE (retval) == MODIFY_EXPR
	    || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* Put the returned bounds in the right place.  */
  bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
  if (bounds_rtl)
    {
      rtx addr = NULL;
      rtx bnd = NULL;

      if (bounds && bounds != error_mark_node)
	{
	  bnd = expand_normal (bounds);
	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
	}
      else if (REG_P (bounds_rtl))
	{
	  if (bounds)
	    bnd = chkp_expand_zero_bounds ();
	  else
	    {
	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
	      addr = gen_rtx_MEM (Pmode, addr);
	      bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
	    }

	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
	}
      else
	{
	  int n;

	  gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);

	  if (bounds)
	    bnd = chkp_expand_zero_bounds ();
	  else
	    {
	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
	      addr = gen_rtx_MEM (Pmode, addr);
	    }

	  for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
	    {
	      rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
	      if (!bounds)
		{
		  rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
		  rtx from = adjust_address (addr, Pmode, INTVAL (offs));
		  bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
		}
	      targetm.calls.store_returned_bounds (slot, bnd);
	    }
	}
    }
  else if (chkp_function_instrumented_p (current_function_decl)
	   && !BOUNDED_P (retval_rhs)
	   && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
	   && TREE_CODE (retval_rhs) != RESULT_DECL)
    {
      rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
      addr = gen_rtx_MEM (Pmode, addr);

      gcc_assert (MEM_P (result_rtl));

      chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
    }

  /* If we are returning the RESULT_DECL, then the value has already
     been stored into it, so we don't have to do anything special.  */
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
    expand_value_return (result_rtl);

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  */

  else if (retval_rhs != 0
	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
	   && REG_P (result_rtl))
    {
      val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
      if (val)
	{
	  /* Use the mode of the result value on the return register.  */
	  PUT_MODE (result_rtl, GET_MODE (val));
	  expand_value_return (val);
	}
      else
	expand_null_return ();
    }
  else if (retval_rhs != 0
	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
	   && (REG_P (result_rtl)
	       || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Compute the return value into a temporary (usually a pseudo reg).  */
      val
	= assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
      val = force_not_mem (val);
      expand_value_return (val);
    }
  else
    {
      /* No hard reg used; calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
      expand_value_return (result_rtl);
    }
}
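
/* Illustrative only (not part of GCC): a small aggregate whose type is
   BLKmode (no integer mode matches its 3-byte size) but which many
   ABIs still return in a register; returning it takes the
   copy_blkmode_to_reg path above.  */
#if 0
struct triple { char a, b, c; };

struct triple
make_triple (void)
{
  struct triple t = { 1, 2, 3 };
  return t;		/* BLKmode value returned in a register.  */
}
#endif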

/* A subroutine of expand_gimple_stmt, expanding one gimple statement
   STMT that doesn't require special handling for outgoing edges.  That
   is, no tail calls and no GIMPLE_COND.  */

static void
expand_gimple_stmt_1 (gimple *stmt)
{
  tree op0;

  set_curr_insn_location (gimple_location (stmt));

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      op0 = gimple_goto_dest (stmt);
      if (TREE_CODE (op0) == LABEL_DECL)
	expand_goto (op0);
      else
	expand_computed_goto (op0);
      break;
    case GIMPLE_LABEL:
      expand_label (gimple_label_label (as_a <glabel *> (stmt)));
      break;
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;
    case GIMPLE_SWITCH:
      expand_case (as_a <gswitch *> (stmt));
      break;
    case GIMPLE_ASM:
      expand_asm_stmt (as_a <gasm *> (stmt));
      break;
    case GIMPLE_CALL:
      expand_call_stmt (as_a <gcall *> (stmt));
      break;

    case GIMPLE_RETURN:
      {
	tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
	op0 = gimple_return_retval (as_a <greturn *> (stmt));

	if (op0 && op0 != error_mark_node)
	  {
	    tree result = DECL_RESULT (current_function_decl);

	    /* Mark that we have a return statement with missing
	       bounds.  */
	    if (!bnd
		&& chkp_function_instrumented_p (cfun->decl)
		&& !DECL_P (op0))
	      bnd = error_mark_node;

	    /* If we are not returning the current function's RESULT_DECL,
	       build an assignment to it.  */
	    if (op0 != result)
	      {
		/* I believe that a function's RESULT_DECL is unique.  */
		gcc_assert (TREE_CODE (op0) != RESULT_DECL);

		/* ??? We'd like to use simply expand_assignment here,
		   but this fails if the value is of BLKmode but the return
		   decl is a register.  expand_return has special handling
		   for this combination, which eventually should move
		   to common code.  See comments there.  Until then, let's
		   build a modify expression :-/  */
		op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
			      result, op0);
	      }
	  }

	if (!op0)
	  expand_null_return ();
	else
	  expand_return (op0, bnd);
      }
      break;

    case GIMPLE_ASSIGN:
      {
	gassign *assign_stmt = as_a <gassign *> (stmt);
	tree lhs = gimple_assign_lhs (assign_stmt);

	/* Tree expand used to fiddle with |= and &= of two bitfield
	   COMPONENT_REFs here.  This can't happen with gimple, the LHS
	   of binary assigns must be a gimple reg.  */

	if (TREE_CODE (lhs) != SSA_NAME
	    || get_gimple_rhs_class (gimple_expr_code (stmt))
	       == GIMPLE_SINGLE_RHS)
	  {
	    tree rhs = gimple_assign_rhs1 (assign_stmt);
	    gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
			== GIMPLE_SINGLE_RHS);
	    if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
		/* Do not put locations on possibly shared trees.  */
		&& !is_gimple_min_invariant (rhs))
	      SET_EXPR_LOCATION (rhs, gimple_location (stmt));
	    if (TREE_CLOBBER_P (rhs))
	      /* This is a clobber to mark the going out of scope for
		 this LHS.  */
	      ;
	    else
	      expand_assignment (lhs, rhs,
				 gimple_assign_nontemporal_move_p (
				   assign_stmt));
	  }
	else
	  {
	    rtx target, temp;
	    bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
	    struct separate_ops ops;
	    bool promoted = false;

	    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
	    if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
	      promoted = true;

	    ops.code = gimple_assign_rhs_code (assign_stmt);
	    ops.type = TREE_TYPE (lhs);
	    switch (get_gimple_rhs_class (ops.code))
	      {
		case GIMPLE_TERNARY_RHS:
		  ops.op2 = gimple_assign_rhs3 (assign_stmt);
		  /* Fallthru */
		case GIMPLE_BINARY_RHS:
		  ops.op1 = gimple_assign_rhs2 (assign_stmt);
		  /* Fallthru */
		case GIMPLE_UNARY_RHS:
		  ops.op0 = gimple_assign_rhs1 (assign_stmt);
		  break;
		default:
		  gcc_unreachable ();
	      }
	    ops.location = gimple_location (stmt);

	    /* If we want to use a nontemporal store, force the value
	       into a register first.  If we store into a promoted
	       register, don't directly expand to target.  */
	    temp = nontemporal || promoted ? NULL_RTX : target;
	    temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
				       EXPAND_NORMAL);

	    if (temp == target)
	      ;
	    else if (promoted)
	      {
		int unsignedp = SUBREG_PROMOTED_SIGN (target);
		/* If TEMP is a VOIDmode constant, use convert_modes to make
		   sure that we properly convert it.  */
		if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
		  {
		    temp = convert_modes (GET_MODE (target),
					  TYPE_MODE (ops.type),
					  temp, unsignedp);
		    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
					  GET_MODE (target), temp, unsignedp);
		  }

		convert_move (SUBREG_REG (target), temp, unsignedp);
	      }
	    else if (nontemporal && emit_storent_insn (target, temp))
	      ;
	    else
	      {
		temp = force_operand (temp, target);
		if (temp != target)
		  emit_move_insn (target, temp);
	      }
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }
}
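
/* Illustrative only (not part of GCC): the two GIMPLE_ASSIGN shapes
   handled above.  A single-rhs assignment such as

       x_1 = a[i_2];		(GIMPLE_SINGLE_RHS)

   goes through expand_assignment, while a binary assignment such as

       x_3 = y_4 + z_5;	(GIMPLE_BINARY_RHS)

   is packed into a struct separate_ops (code = PLUS_EXPR, op0 = y_4,
   op1 = z_5) and handed to expand_expr_real_2.  */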

/* Expand one gimple statement STMT and return the last RTL instruction
   before any of the newly generated ones.

   In addition to generating the necessary RTL instructions this also
   sets REG_EH_REGION notes if necessary and sets the current source
   location for diagnostics.  */

static rtx_insn *
expand_gimple_stmt (gimple *stmt)
{
  location_t saved_location = input_location;
  rtx_insn *last = get_last_insn ();
  int lp_nr;

  gcc_assert (cfun);

  /* We need to save and restore the current source location so that errors
     discovered during expansion are emitted with the right location.  But
     it would be better if the diagnostic routines used the source location
     embedded in the tree nodes rather than globals.  */
  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  expand_gimple_stmt_1 (stmt);

  /* Free any temporaries used to evaluate this statement.  */
  free_temp_slots ();

  input_location = saved_location;

  /* Mark all insns that may trap.  */
  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr)
    {
      rtx_insn *insn;
      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* If we want exceptions for non-call insns, any
		 may_trap_p instruction may throw.  */
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && GET_CODE (PATTERN (insn)) != USE
	      && insn_could_throw_p (insn))
	    make_reg_eh_region_note (insn, 0, lp_nr);
	}
    }

  return last;
}

/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

static basic_block
expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
{
  rtx_insn *last2, *last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;

  last2 = last = expand_gimple_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
	{
	  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    {
	      e->dest->count -= e->count;
	      e->dest->frequency -= EDGE_FREQUENCY (e);
	      if (e->dest->count < 0)
		e->dest->count = 0;
	      if (e->dest->frequency < 0)
		e->dest->frequency = 0;
	    }
	  count += e->count;
	  probability += e->probability;
	  remove_edge (e);
	}
      else
	ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance, an sqrt builtin expander expands an if with a
	 sibcall in the then arm and a label for the else arm.  */
      if (LABEL_P (NEXT_INSN (last)))
	{
	  *can_fallthru = true;
	  break;
	}
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
		 | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
	BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  return bb;
}
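
/* Illustrative only (not part of GCC): a call shape that can become a
   conditional tail call.  With errno-setting math, sqrt may expand to
   a hardware square-root instruction guarded by a NaN check, with the
   slow path sibcalling the library routine, so the block keeps a
   fallthrough and *can_fallthru is set.  */
#if 0
#include <math.h>

double
root (double x)
{
  return sqrt (x);	/* fast path inline; slow path sibcalls sqrt.  */
}
#endif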

/* Return the difference between the floor and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_LT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      constm1_rtx, const0_rtx),
     const0_rtx);
}
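
/* Worked example (illustrative): for -7 / 2, truncating division gives
   -3 with remainder mod = -1.  Since mod != 0 and op1 / mod = 2 / -1
   = -2 < 0 (remainder and divisor have opposite signs), the
   adjustment is -1, and floor (-7 / 2) = -3 + -1 = -4 as expected.
   For 7 / 2 the remainder 1 has the same sign as the divisor, so the
   adjustment is 0 and floor equals the truncated quotient 3.  */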

/* Return the difference between the ceil and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, const0_rtx),
     const0_rtx);
}

/* Return the difference between the ceil and the truncated result of
   an unsigned division by OP1 with remainder MOD.  */

static rtx
ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
{
  /* (mod != 0 ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     const1_rtx, const0_rtx);
}

/* Return the difference between the rounded and the truncated result
   of a signed division by OP1 with remainder MOD.  Halfway cases are
   rounded away from zero, rather than to the nearest even number.  */

static rtx
round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (abs (mod) >= abs (op1) - abs (mod)
	? (op1 / mod > 0 ? 1 : -1)
	: 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
		       gen_rtx_MINUS (mode,
				      gen_rtx_ABS (mode, op1),
				      gen_rtx_ABS (mode, mod))),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, constm1_rtx),
     const0_rtx);
}

/* Return the difference between the rounded and the truncated result
   of an unsigned division by OP1 with remainder MOD.  Halfway cases
   are rounded away from zero, rather than to the nearest even
   number.  */

static rtx
round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod >= op1 - mod ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, mod,
		       gen_rtx_MINUS (mode, op1, mod)),
     const1_rtx, const0_rtx);
}
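
/* A minimal standalone sketch (not part of GCC) of the signed
   adjustments above on host integers, handy for convincing oneself
   the formulas are right.  q and r are the truncating quotient and
   remainder as produced by C's / and %.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

static int
floor_div (int a, int b)
{
  int q = a / b, r = a % b;
  return q + ((r != 0 && (b / r) < 0) ? -1 : 0);  /* floor_sdiv_adjust */
}

static int
ceil_div (int a, int b)
{
  int q = a / b, r = a % b;
  return q + ((r != 0 && (b / r) > 0) ? 1 : 0);	  /* ceil_sdiv_adjust */
}

static int
round_div (int a, int b)
{
  int q = a / b, r = a % b;
  if (abs (r) >= abs (b) - abs (r))		  /* round_sdiv_adjust */
    return q + ((b / r) > 0 ? 1 : -1);
  return q;
}

int
main (void)
{
  printf ("%d %d %d\n", floor_div (-7, 2), ceil_div (-7, 2),
	  round_div (-7, 2));			  /* prints -4 -3 -4 */
  return 0;
}
#endif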

/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
   any rtl.  */

static rtx
convert_debug_memory_address (machine_mode mode, rtx x,
			      addr_space_t as)
{
  machine_mode xmode = GET_MODE (x);

#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (mode == Pmode
	      || mode == targetm.addr_space.address_mode (as));
  gcc_assert (xmode == mode || xmode == VOIDmode);
#else
  rtx temp;

  gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));

  if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
    return x;

  if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
    x = lowpart_subreg (mode, x, xmode);
  else if (POINTERS_EXTEND_UNSIGNED > 0)
    x = gen_rtx_ZERO_EXTEND (mode, x);
  else if (!POINTERS_EXTEND_UNSIGNED)
    x = gen_rtx_SIGN_EXTEND (mode, x);
  else
    {
      switch (GET_CODE (x))
	{
	case SUBREG:
	  if ((SUBREG_PROMOTED_VAR_P (x)
	       || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
	       || (GET_CODE (SUBREG_REG (x)) == PLUS
		   && REG_P (XEXP (SUBREG_REG (x), 0))
		   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
		   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
	      && GET_MODE (SUBREG_REG (x)) == mode)
	    return SUBREG_REG (x);
	  break;
	case LABEL_REF:
	  temp = gen_rtx_LABEL_REF (mode, LABEL_REF_LABEL (x));
	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
	  return temp;
	case SYMBOL_REF:
	  temp = shallow_copy_rtx (x);
	  PUT_MODE (temp, mode);
	  return temp;
	case CONST:
	  temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
	  if (temp)
	    temp = gen_rtx_CONST (mode, temp);
	  return temp;
	case PLUS:
	case MINUS:
	  if (CONST_INT_P (XEXP (x, 1)))
	    {
	      temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
	      if (temp)
		return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
	    }
	  break;
	default:
	  break;
	}
      /* Don't know how to express ptr_extend as an operation in debug
	 info.  */
      return NULL;
    }
#endif /* POINTERS_EXTEND_UNSIGNED */

  return x;
}

/* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
   by avoid_deep_ter_for_debug.  */

static hash_map<tree, tree> *deep_ter_debug_map;

/* Split overly deep TER chains for debug stmts using debug
   temporaries.  */

static void
avoid_deep_ter_for_debug (gimple *stmt, int depth)
{
  use_operand_p use_p;
  ssa_op_iter iter;
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree use = USE_FROM_PTR (use_p);
      if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
	continue;
      gimple *g = get_gimple_for_ssa_name (use);
      if (g == NULL)
	continue;
      if (depth > 6 && !stmt_ends_bb_p (g))
	{
	  if (deep_ter_debug_map == NULL)
	    deep_ter_debug_map = new hash_map<tree, tree>;

	  tree &vexpr = deep_ter_debug_map->get_or_insert (use);
	  if (vexpr != NULL)
	    continue;
	  vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (use);
	  DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (use));
	  gimple_stmt_iterator gsi = gsi_for_stmt (g);
	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
	  avoid_deep_ter_for_debug (def_temp, 0);
	}
      else
	avoid_deep_ter_for_debug (g, depth + 1);
    }
}
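
/* Illustrative only (not part of GCC): the kind of single-use chain
   TER would otherwise fold into one huge debug expression.  With

       t1_1 = a_2 + b_3;
       t2_4 = t1_1 * c_5;
       t3_6 = t2_4 - d_7;
       ...			(each temporary used exactly once)

   nested more than six levels deep, the walk above inserts
   "# DEBUG D#n => t_i" binds and redirects deeper uses through the
   D#n debug temporaries, keeping expand_debug_expr's recursion
   shallow.  */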

/* Return an RTX equivalent to the value of the parameter DECL.  */

static rtx
expand_debug_parm_decl (tree decl)
{
  rtx incoming = DECL_INCOMING_RTL (decl);

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
	  || (MEM_P (incoming)
	      && REG_P (XEXP (incoming, 0))
	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
    {
      rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));

#ifdef HAVE_window_save
      /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
	 If the target machine has an explicit window save instruction, the
	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
      if (REG_P (incoming)
	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
	incoming
	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
				OUTGOING_REGNO (REGNO (incoming)), 0);
      else if (MEM_P (incoming))
	{
	  rtx reg = XEXP (incoming, 0);
	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
	    {
	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
	      incoming = replace_equiv_address_nv (incoming, reg);
	    }
	  else
	    incoming = copy_rtx (incoming);
	}
#endif

      ENTRY_VALUE_EXP (rtl) = incoming;
      return rtl;
    }

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && !TREE_ADDRESSABLE (decl)
      && MEM_P (incoming)
      && (XEXP (incoming, 0) == virtual_incoming_args_rtx
	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
    return copy_rtx (incoming);

  return NULL_RTX;
}
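
/* Illustrative only (not part of GCC, details hypothetical): an
   ENTRY_VALUE use case.  For a parameter that arrives in a hard
   register whose content is clobbered before any debug use, the
   debugger can still be told "the value this register had on function
   entry", e.g. something of the shape

       (entry_value (reg:SI di))

   which debug-info consumers evaluate via the DWARF entry-value
   mechanism (DW_OP_entry_value and relatives).  */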

/* Return an RTX equivalent to the value of the tree expression EXP.  */

static rtx
expand_debug_expr (tree exp)
{
  rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  machine_mode inner_mode = VOIDmode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
  addr_space_t as;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_expression:
      switch (TREE_CODE (exp))
	{
	case COND_EXPR:
	case DOT_PROD_EXPR:
	case SAD_EXPR:
	case WIDEN_MULT_PLUS_EXPR:
	case WIDEN_MULT_MINUS_EXPR:
	case FMA_EXPR:
	  goto ternary;

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  goto binary;

	case TRUTH_NOT_EXPR:
	  goto unary;

	default:
	  break;
	}
      break;

    ternary:
      op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
      if (!op2)
	return NULL_RTX;
      /* Fall through.  */

    binary:
    case tcc_binary:
      op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
      if (!op1)
	return NULL_RTX;
      switch (TREE_CODE (exp))
	{
	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	case LROTATE_EXPR:
	case RROTATE_EXPR:
	case WIDEN_LSHIFT_EXPR:
	  /* Ensure second operand isn't wider than the first one.  */
	  inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  if (SCALAR_INT_MODE_P (inner_mode))
	    {
	      machine_mode opmode = mode;
	      if (VECTOR_MODE_P (mode))
		opmode = GET_MODE_INNER (mode);
	      if (SCALAR_INT_MODE_P (opmode)
		  && (GET_MODE_PRECISION (opmode)
		      < GET_MODE_PRECISION (inner_mode)))
		op1 = lowpart_subreg (opmode, op1, inner_mode);
	    }
	  break;
	default:
	  break;
	}
      /* Fall through.  */

    unary:
    case tcc_unary:
      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0)
	return NULL_RTX;
      break;

    case tcc_comparison:
      unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
      goto binary;

    case tcc_type:
    case tcc_statement:
      gcc_unreachable ();

    case tcc_constant:
    case tcc_exceptional:
    case tcc_declaration:
    case tcc_reference:
    case tcc_vl_exp:
      break;
    }

  switch (TREE_CODE (exp))
    {
    case STRING_CST:
      if (!lookup_constant_def (exp))
	{
	  if (strlen (TREE_STRING_POINTER (exp)) + 1
	      != (size_t) TREE_STRING_LENGTH (exp))
	    return NULL_RTX;
	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
	  op0 = gen_rtx_MEM (BLKmode, op0);
	  set_mem_attributes (op0, exp, 0);
	  return op0;
	}
      /* Fall through.  */

    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
      op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
      return op0;

    case COMPLEX_CST:
      gcc_assert (COMPLEX_MODE_P (mode));
      op0 = expand_debug_expr (TREE_REALPART (exp));
      op1 = expand_debug_expr (TREE_IMAGPART (exp));
      return gen_rtx_CONCAT (mode, op0, op1);

    case DEBUG_EXPR_DECL:
      op0 = DECL_RTL_IF_SET (exp);

      if (op0)
	return op0;

      op0 = gen_rtx_DEBUG_EXPR (mode);
      DEBUG_EXPR_TREE_DECL (op0) = exp;
      SET_DECL_RTL (exp, op0);

      return op0;

    case VAR_DECL:
    case PARM_DECL:
    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case RESULT_DECL:
      op0 = DECL_RTL_IF_SET (exp);

      /* This decl was probably optimized away.  */
      if (!op0)
	{
	  if (!VAR_P (exp)
	      || DECL_EXTERNAL (exp)
	      || !TREE_STATIC (exp)
	      || !DECL_NAME (exp)
	      || DECL_HARD_REGISTER (exp)
	      || DECL_IN_CONSTANT_POOL (exp)
	      || mode == VOIDmode)
	    return NULL;

	  op0 = make_decl_rtl_for_debug (exp);
	  if (!MEM_P (op0)
	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
	    return NULL;
	}
      else
	op0 = copy_rtx (op0);

      if (GET_MODE (op0) == BLKmode
	  /* If op0 is not BLKmode, but mode is, adjust_mode
	     below would ICE.  While it is likely a FE bug,
	     try to be robust here.  See PR43166.  */
	  || mode == BLKmode
	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
	{
	  gcc_assert (MEM_P (op0));
	  op0 = adjust_address_nv (op0, mode, 0);
	  return op0;
	}

      /* Fall through.  */

    adjust_mode:
    case PAREN_EXPR:
    CASE_CONVERT:
      {
	inner_mode = GET_MODE (op0);

	if (mode == inner_mode)
	  return op0;

	if (inner_mode == VOIDmode)
	  {
	    if (TREE_CODE (exp) == SSA_NAME)
	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
	    else
	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
	    if (mode == inner_mode)
	      return op0;
	  }

	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
	  {
	    if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
	    else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
	    else
	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
	  }
	else if (FLOAT_MODE_P (mode))
	  {
	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
	    else
	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
	  }
	else if (FLOAT_MODE_P (inner_mode))
	  {
	    if (unsignedp)
	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
	    else
	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
	  }
	else if (CONSTANT_P (op0)
		 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
	  op0 = lowpart_subreg (mode, op0, inner_mode);
	else if (UNARY_CLASS_P (exp)
		 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
		 : unsignedp)
	  op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
	else
	  op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);

	return op0;
      }

    case MEM_REF:
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
	{
	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
				     TREE_OPERAND (exp, 0),
				     TREE_OPERAND (exp, 1));
	  if (newexp)
	    return expand_debug_expr (newexp);
	}
      /* FALLTHROUGH */
    case INDIRECT_REF:
      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0)
	return NULL;

      if (TREE_CODE (exp) == MEM_REF)
	{
	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
	      || (GET_CODE (op0) == PLUS
		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
	       Instead just use get_inner_reference.  */
	    goto component_ref;

	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
	  if (!op1 || !CONST_INT_P (op1))
	    return NULL;

	  op0 = plus_constant (inner_mode, op0, INTVAL (op1));
	}

      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));

      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
					  op0, as);
      if (op0 == NULL_RTX)
	return NULL;

      op0 = gen_rtx_MEM (mode, op0);
      set_mem_attributes (op0, exp, 0);
      if (TREE_CODE (exp) == MEM_REF
	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
	set_mem_expr (op0, NULL_TREE);
      set_mem_addr_space (op0, as);

      return op0;

    case TARGET_MEM_REF:
      if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
	  && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
	return NULL;

      op0 = expand_debug_expr
	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
      if (!op0)
	return NULL;

      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
					  op0, as);
      if (op0 == NULL_RTX)
	return NULL;

      op0 = gen_rtx_MEM (mode, op0);

      set_mem_attributes (op0, exp, 0);
      set_mem_addr_space (op0, as);

      return op0;

    component_ref:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int reversep, volatilep = 0;
	tree tem
	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &reversep, &volatilep);
	rtx orig_op0;

	if (bitsize == 0)
	  return NULL;

	orig_op0 = op0 = expand_debug_expr (tem);

	if (!op0)
	  return NULL;

	if (offset)
	  {
	    machine_mode addrmode, offmode;

	    if (!MEM_P (op0))
	      return NULL;

	    op0 = XEXP (op0, 0);
	    addrmode = GET_MODE (op0);
	    if (addrmode == VOIDmode)
	      addrmode = Pmode;

	    op1 = expand_debug_expr (offset);
	    if (!op1)
	      return NULL;

	    offmode = GET_MODE (op1);
	    if (offmode == VOIDmode)
	      offmode = TYPE_MODE (TREE_TYPE (offset));

	    if (addrmode != offmode)
	      op1 = lowpart_subreg (addrmode, op1, offmode);

	    /* Don't use offset_address here, we don't need a
	       recognizable address, and we don't want to generate
	       code.  */
	    op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
							  op0, op1));
	  }

	if (MEM_P (op0))
	  {
	    if (mode1 == VOIDmode)
	      /* Bitfield.  */
	      mode1 = smallest_mode_for_size (bitsize, MODE_INT);
	    if (bitpos >= BITS_PER_UNIT)
	      {
		op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos %= BITS_PER_UNIT;
	      }
	    else if (bitpos < 0)
	      {
		HOST_WIDE_INT units
		  = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
		op0 = adjust_address_nv (op0, mode1, -units);
		bitpos += units * BITS_PER_UNIT;
	      }
	    else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
	      op0 = adjust_address_nv (op0, mode, 0);
	    else if (GET_MODE (op0) != mode1)
	      op0 = adjust_address_nv (op0, mode1, 0);
	    else
	      op0 = copy_rtx (op0);
	    if (op0 == orig_op0)
	      op0 = shallow_copy_rtx (op0);
	    set_mem_attributes (op0, exp, 0);
	  }

	if (bitpos == 0 && mode == GET_MODE (op0))
	  return op0;

	if (bitpos < 0)
	  return NULL;

	if (GET_MODE (op0) == BLKmode)
	  return NULL;

	if ((bitpos % BITS_PER_UNIT) == 0
	    && bitsize == GET_MODE_BITSIZE (mode1))
	  {
	    machine_mode opmode = GET_MODE (op0);

	    if (opmode == VOIDmode)
	      opmode = TYPE_MODE (TREE_TYPE (tem));

	    /* This condition may hold if we're expanding the address
	       right past the end of an array that turned out not to
	       be addressable (i.e., the address was only computed in
	       debug stmts).  The gen_subreg below would rightfully
	       crash, and the address doesn't really exist, so just
	       drop it.  */
	    if (bitpos >= GET_MODE_BITSIZE (opmode))
	      return NULL;

	    if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
	      return simplify_gen_subreg (mode, op0, opmode,
					  bitpos / BITS_PER_UNIT);
	  }

	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
				     && TYPE_UNSIGNED (TREE_TYPE (exp))
				     ? SIGN_EXTRACT
				     : ZERO_EXTRACT, mode,
				     GET_MODE (op0) != VOIDmode
				     ? GET_MODE (op0)
				     : TYPE_MODE (TREE_TYPE (tem)),
				     op0, GEN_INT (bitsize), GEN_INT (bitpos));
      }

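    /* Illustrative only (not part of GCC): a reference the code above
       reduces to a bit-field extraction.  For

	   struct s { unsigned f : 3; } x;

       a debug use of x.f has bitsize 3 and bitpos 0 within x, and is
       represented as a SIGN_EXTRACT/ZERO_EXTRACT of the containing
       memory, without emitting any instructions.  */
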
    case ABS_EXPR:
      return simplify_gen_unary (ABS, mode, op0, mode);

    case NEGATE_EXPR:
      return simplify_gen_unary (NEG, mode, op0, mode);

    case BIT_NOT_EXPR:
      return simplify_gen_unary (NOT, mode, op0, mode);

    case FLOAT_EXPR:
      return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									  0)))
				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
				 inner_mode);

    case FIX_TRUNC_EXPR:
      return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
				 inner_mode);

    case POINTER_PLUS_EXPR:
      /* For the rare target where pointers are not the same size as
	 size_t, we need to check for mis-matched modes and correct
	 the addend.  */
      if (op0 && op1
	  && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
	  && GET_MODE (op0) != GET_MODE (op1))
	{
	  if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
	      /* If OP0 is a partial mode, then we must truncate, even
		 if it has the same bitsize as OP1, as GCC's
		 representation of partial modes is opaque.  */
	      || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
		  && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
	    op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
				      GET_MODE (op1));
	  else
	    /* We always sign-extend, regardless of the signedness of
	       the operand, because the operand is always unsigned
	       here even if the original C expression is signed.  */
	    op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
				      GET_MODE (op1));
	}
      /* Fall through.  */
    case PLUS_EXPR:
      return simplify_gen_binary (PLUS, mode, op0, op1);

    case MINUS_EXPR:
      return simplify_gen_binary (MINUS, mode, op0, op1);

    case MULT_EXPR:
      return simplify_gen_binary (MULT, mode, op0, op1);

    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (unsignedp)
	return simplify_gen_binary (UDIV, mode, op0, op1);
      else
	return simplify_gen_binary (DIV, mode, op0, op1);

    case TRUNC_MOD_EXPR:
      return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);

    case FLOOR_DIV_EXPR:
      if (unsignedp)
	return simplify_gen_binary (UDIV, mode, op0, op1);
      else
	{
	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}

    case FLOOR_MOD_EXPR:
      if (unsignedp)
	return simplify_gen_binary (UMOD, mode, op0, op1);
      else
	{
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}

    case CEIL_DIV_EXPR:
      if (unsignedp)
	{
	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}
      else
	{
	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}

    case CEIL_MOD_EXPR:
      if (unsignedp)
	{
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}
      else
	{
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}

    case ROUND_DIV_EXPR:
      if (unsignedp)
	{
	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = round_udiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}
      else
	{
	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = round_sdiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}

    case ROUND_MOD_EXPR:
      if (unsignedp)
	{
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = round_udiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}
      else
	{
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = round_sdiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}

    case LSHIFT_EXPR:
      return simplify_gen_binary (ASHIFT, mode, op0, op1);

    case RSHIFT_EXPR:
      if (unsignedp)
	return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
      else
	return simplify_gen_binary (ASHIFTRT, mode, op0, op1);

    case LROTATE_EXPR:
      return simplify_gen_binary (ROTATE, mode, op0, op1);

    case RROTATE_EXPR:
      return simplify_gen_binary (ROTATERT, mode, op0, op1);

    case MIN_EXPR:
      return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);

    case MAX_EXPR:
      return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);

    case BIT_AND_EXPR:
    case TRUTH_AND_EXPR:
      return simplify_gen_binary (AND, mode, op0, op1);

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      return simplify_gen_binary (IOR, mode, op0, op1);

    case BIT_XOR_EXPR:
    case TRUTH_XOR_EXPR:
      return simplify_gen_binary (XOR, mode, op0, op1);

    case TRUTH_ANDIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);

    case TRUTH_ORIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);

    case TRUTH_NOT_EXPR:
      return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);

    case LT_EXPR:
      return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
				      op0, op1);

    case LE_EXPR:
      return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
				      op0, op1);

    case GT_EXPR:
      return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
				      op0, op1);

    case GE_EXPR:
      return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
				      op0, op1);

    case EQ_EXPR:
      return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);

    case NE_EXPR:
      return simplify_gen_relational (NE, mode, inner_mode, op0, op1);

    case UNORDERED_EXPR:
      return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);

    case ORDERED_EXPR:
      return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);

    case UNLT_EXPR:
      return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);

    case UNLE_EXPR:
      return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);

    case UNGT_EXPR:
      return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);

    case UNGE_EXPR:
      return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);

    case UNEQ_EXPR:
      return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);

    case LTGT_EXPR:
      return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);

    case COND_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);

    case COMPLEX_EXPR:
      gcc_assert (COMPLEX_MODE_P (mode));
      if (GET_MODE (op0) == VOIDmode)
	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
      if (GET_MODE (op1) == VOIDmode)
	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
      return gen_rtx_CONCAT (mode, op0, op1);

    case CONJ_EXPR:
      if (GET_CODE (op0) == CONCAT)
	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
			       simplify_gen_unary (NEG, GET_MODE_INNER (mode),
						   XEXP (op0, 1),
						   GET_MODE_INNER (mode)));
      else
	{
	  machine_mode imode = GET_MODE_INNER (mode);
	  rtx re, im;

	  if (MEM_P (op0))
	    {
	      re = adjust_address_nv (op0, imode, 0);
	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
	    }
	  else
	    {
	      machine_mode ifmode = int_mode_for_mode (mode);
	      machine_mode ihmode = int_mode_for_mode (imode);
	      rtx halfsize;
	      if (ifmode == BLKmode || ihmode == BLKmode)
		return NULL;
	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
	      re = op0;
	      if (mode != ifmode)
		re = gen_rtx_SUBREG (ifmode, re, 0);
	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
	      if (imode != ihmode)
		re = gen_rtx_SUBREG (imode, re, 0);
	      im = copy_rtx (op0);
	      if (mode != ifmode)
		im = gen_rtx_SUBREG (ifmode, im, 0);
	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
	      if (imode != ihmode)
		im = gen_rtx_SUBREG (imode, im, 0);
	    }
	  im = gen_rtx_NEG (imode, im);
	  return gen_rtx_CONCAT (mode, re, im);
	}

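    /* Worked example (illustrative): for a complex value z = a + b*i
       held as (concat:SC a b), the CONJ_EXPR handling above yields
       (concat:SC a (neg:SF b)), i.e. conj (a + b*i) = a - b*i, again
       without emitting any insns.  */
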
    case ADDR_EXPR:
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0 || !MEM_P (op0))
	{
	  if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
	       || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
	       || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
	      && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
		  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
	    return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));

	  if (handled_component_p (TREE_OPERAND (exp, 0)))
	    {
	      HOST_WIDE_INT bitoffset, bitsize, maxsize;
	      bool reverse;
	      tree decl
		= get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
					   &bitsize, &maxsize, &reverse);
	      if ((VAR_P (decl)
		   || TREE_CODE (decl) == PARM_DECL
		   || TREE_CODE (decl) == RESULT_DECL)
		  && (!TREE_ADDRESSABLE (decl)
		      || target_for_debug_bind (decl))
		  && (bitoffset % BITS_PER_UNIT) == 0
		  && bitsize > 0
		  && bitsize == maxsize)
		{
		  rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
		  return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
		}
	    }

	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		 == ADDR_EXPR)
	    {
	      op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0));
	      if (op0 != NULL
		  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
		      || (GET_CODE (op0) == PLUS
			  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
			  && CONST_INT_P (XEXP (op0, 1)))))
		{
		  op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
							 1));
		  if (!op1 || !CONST_INT_P (op1))
		    return NULL;

		  return plus_constant (mode, op0, INTVAL (op1));
		}
	    }

	  return NULL;
	}

      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);

      return op0;

    case VECTOR_CST:
      {
	unsigned i;

	op0 = gen_rtx_CONCATN
	  (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));

	for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
	  {
	    op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
	    if (!op1)
	      return NULL;
	    XVECEXP (op0, 0, i) = op1;
	  }

	return op0;
      }

    case CONSTRUCTOR:
      if (TREE_CLOBBER_P (exp))
	return NULL;
      else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
	{
	  unsigned i;
	  tree val;

	  op0 = gen_rtx_CONCATN
	    (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));

	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
	    {
	      op1 = expand_debug_expr (val);
	      if (!op1)
		return NULL;
	      XVECEXP (op0, 0, i) = op1;
	    }

	  if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
	    {
	      op1 = expand_debug_expr
		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));

	      if (!op1)
		return NULL;

	      for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
		XVECEXP (op0, 0, i) = op1;
	    }

	  return op0;
	}
      else
	goto flag_unsupported;

    case CALL_EXPR:
      /* ??? Maybe handle some builtins?  */
      return NULL;

    case SSA_NAME:
      {
	gimple *g = get_gimple_for_ssa_name (exp);
	if (g)
	  {
	    tree t = NULL_TREE;
	    if (deep_ter_debug_map)
	      {
		tree *slot = deep_ter_debug_map->get (exp);
		if (slot)
		  t = *slot;
	      }
	    if (t == NULL_TREE)
	      t = gimple_assign_rhs_to_tree (g);
	    op0 = expand_debug_expr (t);
	    if (!op0)
	      return NULL;
	  }
	else
	  {
	    /* If this is a reference to an incoming value of a
	       parameter that is never used in the code, or where the
	       incoming value is never used in the code, use the
	       PARM_DECL's DECL_RTL if set.  */
	    if (SSA_NAME_IS_DEFAULT_DEF (exp)
		&& SSA_NAME_VAR (exp)
		&& TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
		&& has_zero_uses (exp))
	      {
		op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
		if (op0)
		  goto adjust_mode;
		op0 = expand_debug_expr (SSA_NAME_VAR (exp));
		if (op0)
		  goto adjust_mode;
	      }

	    int part = var_to_partition (SA.map, exp);

	    if (part == NO_PARTITION)
	      return NULL;

	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);

	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
	  }
	goto adjust_mode;
      }

    case ERROR_MARK:
      return NULL;

    /* Vector stuff.  For most of the codes we don't have rtl codes.  */
    case REALIGN_LOAD_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case VEC_COND_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
    case VEC_PERM_EXPR:
      return NULL;

    /* Misc codes.  */
    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case OBJ_TYPE_REF:
    case WITH_SIZE_EXPR:
    case BIT_INSERT_EXPR:
      return NULL;

    case DOT_PROD_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  op0
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									  0)))
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
				  inner_mode);
	  op1
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									  1)))
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
				  inner_mode);
	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
	  return simplify_gen_binary (PLUS, mode, op0, op2);
	}
      return NULL;

    case WIDEN_MULT_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  inner_mode = GET_MODE (op0);
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
	  else
	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
	  else
	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
	    return op0;
	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
	    return simplify_gen_binary (PLUS, mode, op0, op2);
	  else
	    return simplify_gen_binary (MINUS, mode, op2, op0);
	}
      return NULL;

    case MULT_HIGHPART_EXPR:
      /* ??? Similar to the above.  */
      return NULL;

    case WIDEN_SUM_EXPR:
    case WIDEN_LSHIFT_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  op0
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									  0)))
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
				  inner_mode);
	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
				      ? ASHIFT : PLUS, mode, op0, op1);
	}
      return NULL;

    case FMA_EXPR:
      return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);

    default:
    flag_unsupported:
      if (flag_checking)
	{
	  debug_tree (exp);
	  gcc_unreachable ();
	}
      return NULL;
    }
}
5138
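/* An illustrative example (hypothetical, for exposition only): for a
   V4SI vector constant { 0, 1, 2, 3 }, the VECTOR_CST case above would
   produce the debug-only rtx

     (concatn:V4SI [(const_int 0) (const_int 1)
                    (const_int 2) (const_int 3)])

   with one entry per element, each expanded recursively.  The result
   feeds variable-location notes only; it is never used for code
   generation.  */
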
/* Return an RTX equivalent to the source bind value of the tree expression
   EXP.  */

static rtx
expand_debug_source_expr (tree exp)
{
  rtx op0 = NULL_RTX;
  machine_mode mode = VOIDmode, inner_mode;

  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
      {
        mode = DECL_MODE (exp);
        op0 = expand_debug_parm_decl (exp);
        if (op0)
          break;
        /* See if this isn't an argument that has been completely
           optimized out.  */
        if (!DECL_RTL_SET_P (exp)
            && !DECL_INCOMING_RTL (exp)
            && DECL_ABSTRACT_ORIGIN (current_function_decl))
          {
            tree aexp = DECL_ORIGIN (exp);
            if (DECL_CONTEXT (aexp)
                == DECL_ABSTRACT_ORIGIN (current_function_decl))
              {
                vec<tree, va_gc> **debug_args;
                unsigned int ix;
                tree ddecl;
                debug_args = decl_debug_args_lookup (current_function_decl);
                if (debug_args != NULL)
                  {
                    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
                         ix += 2)
                      if (ddecl == aexp)
                        return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
                  }
              }
          }
        break;
      }
    default:
      break;
    }

  if (op0 == NULL_RTX)
    return NULL_RTX;

  inner_mode = GET_MODE (op0);
  if (mode == inner_mode)
    return op0;

  if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
    {
      if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
        op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
      else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
        op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
      else
        op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
    }
  else if (FLOAT_MODE_P (mode))
    gcc_unreachable ();
  else if (FLOAT_MODE_P (inner_mode))
    {
      if (TYPE_UNSIGNED (TREE_TYPE (exp)))
        op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
      else
        op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
    }
  else if (CONSTANT_P (op0)
           || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
    op0 = lowpart_subreg (mode, op0, inner_mode);
  else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
  else
    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);

  return op0;
}

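/* An illustrative scenario (hypothetical): if a PARM_DECL was entirely
   optimized away -- no DECL_RTL and no incoming RTL -- but the current
   function is an inlined copy, the lookup above can still match the
   parameter against the abstract origin's debug arguments and return

     (debug_parameter_ref:SI <parm>)

   letting the debug info refer to the original parameter rather than
   dropping it completely.  */
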
/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
   Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
   deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */

static void
avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
{
  rtx exp = *exp_p;

  if (exp == NULL_RTX)
    return;

  if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
    return;

  if (depth == 4)
    {
      /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
      rtx dval = make_debug_expr_from_rtl (exp);

      /* Emit a debug bind insn before INSN.  */
      rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
                                       DEBUG_EXPR_TREE_DECL (dval), exp,
                                       VAR_INIT_STATUS_INITIALIZED);

      emit_debug_insn_before (bind, insn);
      *exp_p = dval;
      return;
    }

  const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
  int i, j;
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
        break;

      case 'E':
      case 'V':
        for (j = 0; j < XVECLEN (exp, i); j++)
          avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
        break;

      default:
        break;
      }
}

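/* An illustrative example (hypothetical): for a debug insn whose
   location nests five operations, such as

     (var_location D.1234
       (plus (mult (plus (mult (plus a b) c) d) e) f))

   the walk above reaches depth 4 at the innermost PLUS, replaces that
   subexpression with a fresh (debug_expr D#N), and emits a debug bind
   of D#N to the replaced rtx just before INSN, keeping every location
   expression shallow.  */
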
/* Expand the _LOCs in debug insns.  We run this after expanding all
   regular insns, so that any variables referenced in the function
   will have their DECL_RTLs set.  */

static void
expand_debug_locations (void)
{
  rtx_insn *insn;
  rtx_insn *last = get_last_insn ();
  int save_strict_alias = flag_strict_aliasing;

  /* New alias sets while setting up memory attributes cause
     -fcompare-debug failures, even though they don't bring about any
     codegen changes.  */
  flag_strict_aliasing = 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (DEBUG_INSN_P (insn))
      {
        tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
        rtx val;
        rtx_insn *prev_insn, *insn2;
        machine_mode mode;

        if (value == NULL_TREE)
          val = NULL_RTX;
        else
          {
            if (INSN_VAR_LOCATION_STATUS (insn)
                == VAR_INIT_STATUS_UNINITIALIZED)
              val = expand_debug_source_expr (value);
            /* The avoid_deep_ter_for_debug function inserts
               debug bind stmts after SSA_NAME definition, with the
               SSA_NAME as the whole bind location.  Disable temporarily
               expansion of that SSA_NAME into the DEBUG_EXPR_DECL
               being defined in this DEBUG_INSN.  */
            else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
              {
                tree *slot = deep_ter_debug_map->get (value);
                if (slot)
                  {
                    if (*slot == INSN_VAR_LOCATION_DECL (insn))
                      *slot = NULL_TREE;
                    else
                      slot = NULL;
                  }
                val = expand_debug_expr (value);
                if (slot)
                  *slot = INSN_VAR_LOCATION_DECL (insn);
              }
            else
              val = expand_debug_expr (value);
            gcc_assert (last == get_last_insn ());
          }

        if (!val)
          val = gen_rtx_UNKNOWN_VAR_LOC ();
        else
          {
            mode = GET_MODE (INSN_VAR_LOCATION (insn));

            gcc_assert (mode == GET_MODE (val)
                        || (GET_MODE (val) == VOIDmode
                            && (CONST_SCALAR_INT_P (val)
                                || GET_CODE (val) == CONST_FIXED
                                || GET_CODE (val) == LABEL_REF)));
          }

        INSN_VAR_LOCATION_LOC (insn) = val;
        prev_insn = PREV_INSN (insn);
        for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
          avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
      }

  flag_strict_aliasing = save_strict_alias;
}

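/* An illustrative note: a bind value that expand_debug_expr cannot
   handle (a call, say, since the CALL_EXPR case just returns NULL)
   ends up as gen_rtx_UNKNOWN_VAR_LOC (), i.e. the variable is shown as
   "optimized out" over that range instead of keeping a stale previous
   location.  */
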
/* Swap the operands of commutative operations so that the more
   expensive one is expanded first.  */

static void
reorder_operands (basic_block bb)
{
  unsigned int *lattice;  /* Hold cost of each statement.  */
  unsigned int i = 0, n = 0;
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple *stmt;
  bool swap;
  tree op0, op1;
  ssa_op_iter iter;
  use_operand_p use_p;
  gimple *def0, *def1;

  /* Compute cost of each statement using estimate_num_insns.  */
  stmts = bb_seq (bb);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (!is_gimple_debug (stmt))
        gimple_set_uid (stmt, n++);
    }
  lattice = XNEWVEC (unsigned int, n);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      unsigned cost;
      stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
        continue;
      cost = estimate_num_insns (stmt, &eni_size_weights);
      lattice[i] = cost;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
        {
          tree use = USE_FROM_PTR (use_p);
          gimple *def_stmt;
          if (TREE_CODE (use) != SSA_NAME)
            continue;
          def_stmt = get_gimple_for_ssa_name (use);
          if (!def_stmt)
            continue;
          lattice[i] += lattice[gimple_uid (def_stmt)];
        }
      i++;
      if (!is_gimple_assign (stmt)
          || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
        continue;
      op0 = gimple_op (stmt, 1);
      op1 = gimple_op (stmt, 2);
      if (TREE_CODE (op0) != SSA_NAME
          || TREE_CODE (op1) != SSA_NAME)
        continue;
      /* Swap operands if the second one is more expensive.  */
      def0 = get_gimple_for_ssa_name (op0);
      def1 = get_gimple_for_ssa_name (op1);
      if (!def1)
        continue;
      swap = false;
      if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
        swap = true;
      if (swap)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Swap operands in stmt:\n");
              print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
              fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
                       def0 ? lattice[gimple_uid (def0)] : 0,
                       lattice[gimple_uid (def1)]);
            }
          swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
                             gimple_assign_rhs2_ptr (stmt));
        }
    }
  XDELETE (lattice);
}

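/* An illustrative example (hypothetical GIMPLE): given

     t1 = a * b;     // lattice[t1] = cost of the multiply
     t2 = t1 * c;    // lattice[t2] = its own cost + lattice[t1]
     t3 = d + t2;    // commutative PLUS: op0 = d, op1 = t2

   op0 has no defining statement in the block while op1 carries the
   accumulated multiply cost, so the operands of t3 are swapped and the
   expensive chain is expanded first.  */
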
/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
{
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple *stmt = NULL;
  rtx_note *note;
  rtx_insn *last;
  edge e;
  edge_iterator ei;

  if (dump_file)
    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
             bb->index);

  /* Note that since we are now transitioning from GIMPLE to RTL, we
     cannot use the gsi_*_bb() routines because they expect the basic
     block to be in GIMPLE, instead of RTL.  Therefore, we need to
     access the BB sequence directly.  */
  if (optimize)
    reorder_operands (bb);
  stmts = bb_seq (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
  rtl_profile_for_bb (bb);
  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
  gsi = gsi_last (stmts);
  if (!gsi_end_p (gsi)
      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    {
      greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));

      if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
          && !gimple_return_retval (ret_stmt))
        {
          gsi_remove (&gsi, false);
          single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
        }
    }

  gsi = gsi_start (stmts);
  if (!gsi_end_p (gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        stmt = NULL;
    }

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);

  if (stmt || elt)
    {
      last = get_last_insn ();

      if (stmt)
        {
          expand_gimple_stmt (stmt);
          gsi_next (&gsi);
        }

      if (elt)
        emit_label (*elt);

      /* Java emits line number notes at the top of labels.
         ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
        BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_gimple_stmt (stmt, last);
    }
  else
    BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    {
      basic_block new_bb;

      stmt = gsi_stmt (gsi);

      /* If this statement is a non-debug one, and we generate debug
         insns, then this one might be the last real use of a TERed
         SSA_NAME, but where there are still some debug uses further
         down.  Expanding the current SSA name in such further debug
         uses by their RHS might lead to wrong debug info, as coalescing
         might make the operands of such RHS be placed into the same
         pseudo as something else.  Like so:
           a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
           use(a_1);
           a_2 = ...
           #DEBUG ... => a_1
         As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
         If we now would expand a_1 by its RHS (a_0 + 1) in the debug use,
         the write to a_2 would actually have clobbered the place which
         formerly held a_0.

         So, instead of that, we recognize the situation, and generate
         debug temporaries at the last real use of TERed SSA names:
           a_1 = a_0 + 1;
           #DEBUG #D1 => a_1
           use(a_1);
           a_2 = ...
           #DEBUG ... => #D1  */
      if (MAY_HAVE_DEBUG_INSNS
          && SA.values
          && !is_gimple_debug (stmt))
        {
          ssa_op_iter iter;
          tree op;
          gimple *def;

          location_t sloc = curr_insn_location ();

          /* Look for SSA names that have their last use here (TERed
             names always have only one real use).  */
          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
            if ((def = get_gimple_for_ssa_name (op)))
              {
                imm_use_iterator imm_iter;
                use_operand_p use_p;
                bool have_debug_uses = false;

                FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
                  {
                    if (gimple_debug_bind_p (USE_STMT (use_p)))
                      {
                        have_debug_uses = true;
                        break;
                      }
                  }

                if (have_debug_uses)
                  {
                    /* OP is a TERed SSA name, with DEF its defining
                       statement, and where OP is used in further debug
                       instructions.  Generate a debug temporary, and
                       replace all uses of OP in debug insns with that
                       temporary.  */
                    gimple *debugstmt;
                    tree value = gimple_assign_rhs_to_tree (def);
                    tree vexpr = make_node (DEBUG_EXPR_DECL);
                    rtx val;
                    machine_mode mode;

                    set_curr_insn_location (gimple_location (def));

                    DECL_ARTIFICIAL (vexpr) = 1;
                    TREE_TYPE (vexpr) = TREE_TYPE (value);
                    if (DECL_P (value))
                      mode = DECL_MODE (value);
                    else
                      mode = TYPE_MODE (TREE_TYPE (value));
                    DECL_MODE (vexpr) = mode;

                    val = gen_rtx_VAR_LOCATION
                      (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);

                    emit_debug_insn (val);

                    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
                      {
                        if (!gimple_debug_bind_p (debugstmt))
                          continue;

                        FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
                          SET_USE (use_p, vexpr);

                        update_stmt (debugstmt);
                      }
                  }
              }
          set_curr_insn_location (sloc);
        }

      currently_expanding_gimple_stmt = stmt;

      /* Expand this statement, then evaluate the resulting RTL and
         fixup the CFG accordingly.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
          if (new_bb)
            return new_bb;
        }
      else if (gimple_debug_bind_p (stmt))
        {
          location_t sloc = curr_insn_location ();
          gimple_stmt_iterator nsi = gsi;

          for (;;)
            {
              tree var = gimple_debug_bind_get_var (stmt);
              tree value;
              rtx val;
              machine_mode mode;

              if (TREE_CODE (var) != DEBUG_EXPR_DECL
                  && TREE_CODE (var) != LABEL_DECL
                  && !target_for_debug_bind (var))
                goto delink_debug_stmt;

              if (gimple_debug_bind_has_value_p (stmt))
                value = gimple_debug_bind_get_value (stmt);
              else
                value = NULL_TREE;

              last = get_last_insn ();

              set_curr_insn_location (gimple_location (stmt));

              if (DECL_P (var))
                mode = DECL_MODE (var);
              else
                mode = TYPE_MODE (TREE_TYPE (var));

              val = gen_rtx_VAR_LOCATION
                (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);

              emit_debug_insn (val);

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  /* We can't dump the insn with a TREE where an RTX
                     is expected.  */
                  PAT_VAR_LOCATION_LOC (val) = const0_rtx;
                  maybe_dump_rtl_for_gimple_stmt (stmt, last);
                  PAT_VAR_LOCATION_LOC (val) = (rtx)value;
                }

            delink_debug_stmt:
              /* In order not to generate too many debug temporaries,
                 we delink all uses of debug statements we already expanded.
                 Therefore debug statements between definition and real
                 use of TERed SSA names will continue to use the SSA name,
                 and not be replaced with debug temps.  */
              delink_stmt_imm_use (stmt);

              gsi = nsi;
              gsi_next (&nsi);
              if (gsi_end_p (nsi))
                break;
              stmt = gsi_stmt (nsi);
              if (!gimple_debug_bind_p (stmt))
                break;
            }

          set_curr_insn_location (sloc);
        }
      else if (gimple_debug_source_bind_p (stmt))
        {
          location_t sloc = curr_insn_location ();
          tree var = gimple_debug_source_bind_get_var (stmt);
          tree value = gimple_debug_source_bind_get_value (stmt);
          rtx val;
          machine_mode mode;

          last = get_last_insn ();

          set_curr_insn_location (gimple_location (stmt));

          mode = DECL_MODE (var);

          val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
                                      VAR_INIT_STATUS_UNINITIALIZED);

          emit_debug_insn (val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              /* We can't dump the insn with a TREE where an RTX
                 is expected.  */
              PAT_VAR_LOCATION_LOC (val) = const0_rtx;
              maybe_dump_rtl_for_gimple_stmt (stmt, last);
              PAT_VAR_LOCATION_LOC (val) = (rtx)value;
            }

          set_curr_insn_location (sloc);
        }
      else
        {
          gcall *call_stmt = dyn_cast <gcall *> (stmt);
          if (call_stmt
              && gimple_call_tail_p (call_stmt)
              && disable_tail_calls)
            gimple_call_set_tail (call_stmt, false);

          if (call_stmt && gimple_call_tail_p (call_stmt))
            {
              bool can_fallthru;
              new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
              if (new_bb)
                {
                  if (can_fallthru)
                    bb = new_bb;
                  else
                    return new_bb;
                }
            }
          else
            {
              def_operand_p def_p;
              def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);

              if (def_p != NULL)
                {
                  /* Ignore this stmt if it is in the list of
                     replaceable expressions.  */
                  if (SA.values
                      && bitmap_bit_p (SA.values,
                                       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
                    continue;
                }
              last = expand_gimple_stmt (stmt);
              maybe_dump_rtl_for_gimple_stmt (stmt, last);
            }
        }
    }

  currently_expanding_gimple_stmt = NULL;

  /* Expand implicit goto and convert goto_locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->goto_locus != UNKNOWN_LOCATION)
        set_curr_insn_location (e->goto_locus);
      if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
        {
          emit_jump (label_rtx_for_bb (e->dest));
          e->flags &= ~EDGE_FALLTHRU;
        }
    }

  /* Expanded RTL can create a jump in the last instruction of a block.
     This jump might later be assumed to be a jump to the successor and
     break edge insertion.  We need to insert a dummy move to prevent
     this.  PR41440.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
      && (last = get_last_insn ())
      && JUMP_P (last))
    {
      rtx dummy = gen_reg_rtx (SImode);
      emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}

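/* An illustrative note on the PR41440 workaround above (hypothetical
   situation): if a fallthru block happens to end in a tablejump emitted
   mid-statement, later edge insertion would mistake that jump for the
   block's control transfer.  The no-op move

     (set (reg:SI <dummy>) (reg:SI <dummy>))

   emitted after it guarantees the jump is not the block's last insn, so
   code inserted on the outgoing edge lands after the jump correctly.  */
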
/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);

  /* When the entry edge points to the first basic block, we don't need a
     jump, otherwise we have to jump into the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
    {
      tree label = gimple_block_label (e->dest);

      emit_jump (jump_target_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
                                   get_last_insn (),
                                   ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
  init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;

  update_bb_for_insn (init_block);
  return init_block;
}

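/* An illustrative sketch of the resulting CFG (hypothetical):

     ENTRY --> init_block --> first real block --> ...

   init_block collects everything emitted before the first basic block
   (parameter setup, the __main call, the stack protector prologue), and
   the original entry edge is redirected through it.  */
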
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}

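/* An illustrative example (hypothetical block tree):

     DECL_INITIAL (fndecl)        BLOCK_NUMBER 0
       outer scope { int i; }     BLOCK_NUMBER 1
         inner scope { int j; }   BLOCK_NUMBER 2
       sibling scope { int k; }   BLOCK_NUMBER 1

   siblings share a level via the BLOCK_CHAIN walk, while each
   BLOCK_SUBBLOCKS recursion adds one.  */
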
/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx_insn *head = get_last_insn ();
  rtx_insn *end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;
  basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
  rtx_insn *orig_end = BB_END (prev_bb);

  rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  /* While emitting the function end we could have moved the end of the
     last basic block.  */
  BB_END (prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  /* But make sure exit_block starts with RETURN_LABEL, otherwise the
     bb frequency counting will be confused.  Any instructions before that
     label are emitted for the case where PREV_BB falls through into the
     exit block, so append those instructions to prev_bb in that case.  */
  if (NEXT_INSN (head) != return_label)
    {
      while (NEXT_INSN (head) != return_label)
        {
          if (!NOTE_P (NEXT_INSN (head)))
            BB_END (prev_bb) = NEXT_INSN (head);
          head = NEXT_INSN (head);
        }
    }
  exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
  exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
      if (!(e->flags & EDGE_ABNORMAL))
        redirect_edge_succ (e, exit_block);
      else
        ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e2 != e)
      {
        e->count -= e2->count;
        exit_block->count -= e2->count;
        exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (e->count < 0)
    e->count = 0;
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}

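/* An illustrative note: after construct_exit_block, every non-abnormal
   predecessor of EXIT goes through the new exit block instead.  Its
   count and frequency start from EXIT's totals, and the loop above
   subtracts each edge left attached to EXIT directly, clamping the
   negative values that inconsistent profiles can produce.  */
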
/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
                                   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
              && is_gimple_min_invariant (TREE_OPERAND (t, 1))
              && (!TREE_OPERAND (t, 2)
                  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || (TREE_CODE (t) == COMPONENT_REF
                 && (!TREE_OPERAND (t, 2)
                     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || TREE_CODE (t) == BIT_FIELD_REF
             || TREE_CODE (t) == REALPART_EXPR
             || TREE_CODE (t) == IMAGPART_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || CONVERT_EXPR_P (t))
        t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          t = get_base_address (t);
          if (t && DECL_P (t)
              && DECL_MODE (t) != BLKmode)
            TREE_ADDRESSABLE (t) = 1;
        }

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple *stmt = gsi_stmt (gsi);
        if (!is_gimple_debug (stmt))
          walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}

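/* An illustrative example (hypothetical source):

     int f (int i) { int a[2] = { 1, 2 }; return a[i]; }

   If "a" were promoted to a single register, "a[i]" with a variable
   index would have no RTL representation; marking "a" TREE_ADDRESSABLE
   forces it into memory, where the indexed access is expressible.  */
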
/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  The local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update the incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to the incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     the stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (cfun->can_throw_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
              <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
         needed.  */
      fixup_tail_calls ();
    }
}

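/* An illustrative note: when stack realignment requires a DRAP
   (dynamic realign argument pointer -- e.g. on i386 when alloca meets a
   realigned frame), the target hook returns a virtual register, and
   incoming argument references are rewritten relative to it while
   locals use the newly aligned stack pointer.  */
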

static void
expand_main_function (void)
{
#if (defined(INVOKE__main) \
     || (!defined(HAS_INIT_SECTION) \
         && !defined(INIT_SECTION_ASM_OP) \
         && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}


/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

static void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (targetm.have_stack_protect_set ())
    if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
      {
        emit_insn (insn);
        return;
      }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}

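/* An illustrative sketch (hypothetical, i386-flavoured): a target's
   stack_protect_set pattern typically expands to something like

     (set (mem:SI <canary slot>) (mem:SI <__stack_chk_guard>))
     (set (reg:SI <scratch>) (const_int 0))

   clearing the scratch register so the guard value does not survive in
   a register; targets without the pattern fall back to the plain
   emit_move_insn above.  */
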
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate the CFG during
   the expansion.  */

namespace {

const pass_data pass_data_expand =
{
  RTL_PASS, /* type */
  "expand", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_EXPAND, /* tv_id */
  ( PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx
    | PROP_gimple_lvec
    | PROP_gimple_lva), /* properties_required */
  PROP_rtl, /* properties_provided */
  ( PROP_ssa | PROP_trees ), /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_expand : public rtl_opt_pass
{
public:
  pass_expand (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_expand, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_expand

unsigned int
pass_expand::execute (function *fun)
{
  basic_block bb, init_block;
  edge_iterator ei;
  edge e;
  rtx_insn *var_seq, *var_ret_seq;
  unsigned i;

  timevar_push (TV_OUT_OF_SSA);
  rewrite_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);
  SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);

  if (MAY_HAVE_DEBUG_STMTS && flag_tree_ter)
    {
      gimple_stmt_iterator gsi;
      FOR_EACH_BB_FN (bb, cfun)
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          if (gimple_debug_bind_p (gsi_stmt (gsi)))
            avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
    }

  /* Make sure all values used by the optimization passes have sane
     defaults.  */
  reg_renumber = 0;

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;
  /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
  free_dominance_info (CDI_DOMINATORS);

  rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));

  if (chkp_function_instrumented_p (current_function_decl))
    chkp_reset_rtl_bounds ();

  insn_locations_init ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
        set_curr_insn_location
          (DECL_SOURCE_LOCATION (current_function_decl));
      else
        set_curr_insn_location (fun->function_start_locus);
    }
  else
    set_curr_insn_location (UNKNOWN_LOCATION);
  prologue_location = curr_insn_location ();

#ifdef INSN_SCHEDULING
  init_sched_attrs ();
#endif

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = 0;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  fun->cfg->max_jumptable_ents = 0;

  /* Resolve the function section.  Some targets, like ARM EABI, rely on
     knowledge of the function section at expansion time to predict
     distance of calls.  */
  resolve_unique_section (current_function_decl, 0, flag_function_sections);

  /* Expand the variables recorded during gimple lowering.  */
  timevar_push (TV_VAR_EXPAND);
  start_sequence ();

  var_ret_seq = expand_used_vars ();

  var_seq = get_insns ();
  end_sequence ();
  timevar_pop (TV_VAR_EXPAND);

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (fun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting local variables: "
                 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting function: "
                 "all local arrays are less than %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
  if (var_seq)
    {
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
         before parm_birth_insn.  We've just inserted an alloca call.
         Adjust the pointer to match.  */
      parm_birth_insn = var_seq;
    }

  /* Now propagate the RTL assignment of each partition to the
     underlying var of each SSA_NAME.  */
  tree name;

  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      /* We might have generated new SSA names in
         update_alias_info_with_stack_vars.  They will have NULL
         defining statements, and won't be part of the partitioning,
         so ignore those.  */
      if (!SSA_NAME_DEF_STMT (name))
        continue;

      adjust_one_expanded_partition_var (name);
    }

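  /* An illustrative example (hypothetical): if out-of-SSA coalesced x_1
     and x_3 into partition 7 with SA.partition_to_pseudo[7] = (reg:SI 93),
     the loop above records (reg:SI 93) as the expansion of each of those
     SSA names; the loop below additionally checks, for default defs of
     PARM_DECLs, that assign_parms chose that same rtx.  */
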
  /* Clean up RTL of variables that straddle across multiple
     partitions, and check that the rtl of any PARM_DECLs that are not
     cleaned up is that of their default defs.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      int part;

      /* We might have generated new SSA names in
         update_alias_info_with_stack_vars.  They will have NULL
         defining statements, and won't be part of the partitioning,
         so ignore those.  */
      if (!SSA_NAME_DEF_STMT (name))
        continue;
      part = var_to_partition (SA.map, name);
      if (part == NO_PARTITION)
        continue;

      /* If this decl was marked as living in multiple places, reset
         this now to NULL.  */
      tree var = SSA_NAME_VAR (name);
      if (var && DECL_RTL_IF_SET (var) == pc_rtx)
        SET_DECL_RTL (var, NULL);
      /* Check that the pseudos chosen by assign_parms are those of
         the corresponding default defs.  */
      else if (SSA_NAME_IS_DEFAULT_DEF (name)
               && (TREE_CODE (var) == PARM_DECL
                   || TREE_CODE (var) == RESULT_DECL))
        {
          rtx in = DECL_RTL_IF_SET (var);
          gcc_assert (in);
          rtx out = SA.partition_to_pseudo[part];
          gcc_assert (in == out);

          /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
             those expected by debug backends for each parm and for
             the result.  This is particularly important for stabs,
             whose register elimination from parm's DECL_RTL may cause
             -fcompare-debug differences as SET_DECL_RTL changes reg's
             attrs.  So, make sure the RTL already has the parm as the
             EXPR, so that it won't change.  */
          SET_DECL_RTL (var, NULL_RTX);
          if (MEM_P (in))
            set_mem_attributes (in, var, true);
          SET_DECL_RTL (var, in);
        }
    }

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();

  expand_phi_nodes (&SA);

  /* Release any stale SSA redirection data.  */
  redirect_edge_var_map_empty ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
                  next_bb)
    bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  if (deep_ter_debug_map)
    {
      delete deep_ter_debug_map;
      deep_ter_debug_map = NULL;
    }

  /* Free stuff we no longer need after GIMPLE optimizations.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  delete_tree_cfg_annotations (fun);

  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  fun->gimple_df->in_ssa_p = false;
  loops_state_clear (LOOP_CLOSED_SSA);

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  delete lab_rtx_for_bb;
  free_histograms (fun);

  construct_exit_block ();
  insn_locations_finalize ();

  if (var_ret_seq)
    {
      rtx_insn *after = return_label;
      rtx_insn *next = NEXT_INSN (after);
      if (next && NOTE_INSN_BASIC_BLOCK_P (next))
        after = next;
      emit_insn_after (var_ret_seq, after);
    }

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (fun, NULL);

  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
     split edges which edge insertions might do.  */
  rebuild_jump_labels (get_insns ());

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
                  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->insns.r)
            {
              rebuild_jump_labels_chain (e->insns.r);
              /* Put insns after parm birth, but before
                 NOTE_INSNS_FUNCTION_BEG.  */
              if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
                  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
                {
                  rtx_insn *insns = e->insns.r;
                  e->insns.r = NULL;
                  if (NOTE_P (parm_birth_insn)
                      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
                    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
                  else
                    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
                }
              else
                commit_one_edge_insertion (e);
            }
          else
            ei_next (&ei);
        }
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  flush_mark_addressable_queue ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
                  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          /* Clear EDGE_EXECUTABLE.  This flag is never used in the
             backend.  */
          e->flags &= ~EDGE_EXECUTABLE;

          /* At the moment not all abnormal edges match the RTL
             representation.  It is safe to remove them here as
             find_many_sub_basic_blocks will rediscover them.
             In the future we should get this fixed properly.  */
          if ((e->flags & EDGE_ABNORMAL)
              && !(e->flags & EDGE_SIBCALL))
            remove_edge (e);
          else
            ei_next (&ei);
        }
    }

  auto_sbitmap blocks (last_basic_block_for_fn (fun));
  bitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();

  expand_stack_alignment ();

  /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
     function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();

  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (fun->eh->region_tree != NULL)
    finish_eh_generation ();

  /* Remove unreachable blocks, otherwise we cannot compute dominators
     which are needed for loop state verification.  As a side-effect
     this also compacts blocks.
     ??? We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);

  checking_verify_flow_info ();

  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();

  /* And finally unshare all RTL.  */
  unshare_all_rtl ();

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (fun->gimple_df->tm_restart)
    fun->gimple_df->tm_restart = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (fun->decl), 0);
  default_rtl_profile ();

  timevar_pop (TV_POST_EXPAND);

  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_expand (gcc::context *ctxt)
{
  return new pass_expand (ctxt);
}