242229bb 1/* A pass for lowering trees to RTL.
818ab71a 2 Copyright (C) 2004-2016 Free Software Foundation, Inc.
242229bb
JH
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
9dcd6f09 8the Free Software Foundation; either version 3, or (at your option)
242229bb
JH
9any later version.
10
11GCC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
242229bb
JH
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5
AM
24#include "target.h"
25#include "rtl.h"
c7131fb2
AM
26#include "tree.h"
27#include "gimple.h"
957060b5
AM
28#include "cfghooks.h"
29#include "tree-pass.h"
30#include "tm_p.h"
c7131fb2 31#include "ssa.h"
957060b5
AM
32#include "optabs.h"
33#include "regs.h" /* For reg_renumber. */
34#include "emit-rtl.h"
35#include "recog.h"
36#include "cgraph.h"
37#include "diagnostic.h"
40e23961 38#include "fold-const.h"
d8a2d370
DN
39#include "varasm.h"
40#include "stor-layout.h"
41#include "stmt.h"
42#include "print-tree.h"
60393bbc
AM
43#include "cfgrtl.h"
44#include "cfganal.h"
45#include "cfgbuild.h"
46#include "cfgcleanup.h"
36566b39
PK
47#include "dojump.h"
48#include "explow.h"
49#include "calls.h"
242229bb 50#include "expr.h"
2fb9a547
AM
51#include "internal-fn.h"
52#include "tree-eh.h"
5be5c238 53#include "gimple-iterator.h"
1b223a9f 54#include "gimple-expr.h"
5be5c238 55#include "gimple-walk.h"
442b4905 56#include "tree-cfg.h"
442b4905 57#include "tree-dfa.h"
7a300452 58#include "tree-ssa.h"
242229bb 59#include "except.h"
cf835838 60#include "gimple-pretty-print.h"
1f6d3a08 61#include "toplev.h"
ef330312 62#include "debug.h"
7d69de61 63#include "params.h"
ff28a94d 64#include "tree-inline.h"
6946b3f7 65#include "value-prof.h"
8e9055ae 66#include "tree-ssa-live.h"
78bca40d 67#include "tree-outof-ssa.h"
7d776ee2 68#include "cfgloop.h"
2b21299c 69#include "insn-attr.h" /* For INSN_SCHEDULING. */
f3ddd692 70#include "asan.h"
4484a35a 71#include "tree-ssa-address.h"
862d0b35 72#include "output.h"
9b2b7279 73#include "builtins.h"
d5e254e1
IE
74#include "tree-chkp.h"
75#include "rtl-chkp.h"
726a989a 76
8a6ce562
JBG
77/* Some systems use __main in a way incompatible with its use in gcc; in these
78 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
79 give the same symbol without quotes for an alternative entry point. You
80 must define both, or neither. */
81#ifndef NAME__MAIN
82#define NAME__MAIN "__main"
83#endif
84
4e3825db
MM
85/* This variable holds information helping the rewriting of SSA trees
86 into RTL. */
87struct ssaexpand SA;
88
a5883ba0
MM
89/* This variable holds the currently expanded gimple statement for purposes
90 of communicating the profile info to the builtin expanders. */
355fe088 91gimple *currently_expanding_gimple_stmt;
a5883ba0 92
ddb555ed
JJ
93static rtx expand_debug_expr (tree);
94
1f9ceff1
AO
95static bool defer_stack_allocation (tree, bool);
96
f11a7b6d
AO
97static void record_alignment_for_reg_var (unsigned int);
98
726a989a
RB
99/* Return an expression tree corresponding to the RHS of GIMPLE
100 statement STMT. */
101
102tree
355fe088 103gimple_assign_rhs_to_tree (gimple *stmt)
726a989a
RB
104{
105 tree t;
82d6e6fc 106 enum gimple_rhs_class grhs_class;
b8698a0f 107
82d6e6fc 108 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
726a989a 109
0354c0c7
BS
110 if (grhs_class == GIMPLE_TERNARY_RHS)
111 t = build3 (gimple_assign_rhs_code (stmt),
112 TREE_TYPE (gimple_assign_lhs (stmt)),
113 gimple_assign_rhs1 (stmt),
114 gimple_assign_rhs2 (stmt),
115 gimple_assign_rhs3 (stmt));
116 else if (grhs_class == GIMPLE_BINARY_RHS)
726a989a
RB
117 t = build2 (gimple_assign_rhs_code (stmt),
118 TREE_TYPE (gimple_assign_lhs (stmt)),
119 gimple_assign_rhs1 (stmt),
120 gimple_assign_rhs2 (stmt));
82d6e6fc 121 else if (grhs_class == GIMPLE_UNARY_RHS)
726a989a
RB
122 t = build1 (gimple_assign_rhs_code (stmt),
123 TREE_TYPE (gimple_assign_lhs (stmt)),
124 gimple_assign_rhs1 (stmt));
82d6e6fc 125 else if (grhs_class == GIMPLE_SINGLE_RHS)
b5b8b0ac
AO
126 {
127 t = gimple_assign_rhs1 (stmt);
128 /* Avoid modifying this tree in place below. */
d0ed412a
JJ
129 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
130 && gimple_location (stmt) != EXPR_LOCATION (t))
131 || (gimple_block (stmt)
132 && currently_expanding_to_rtl
5368224f 133 && EXPR_P (t)))
b5b8b0ac
AO
134 t = copy_node (t);
135 }
726a989a
RB
136 else
137 gcc_unreachable ();
138
f5045c96
AM
139 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
140 SET_EXPR_LOCATION (t, gimple_location (stmt));
141
726a989a
RB
142 return t;
143}
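/* Illustrative note (added commentary, not in the original source): for a
   binary GIMPLE assignment such as `a = b + c', the code above rebuilds a
   GENERIC PLUS_EXPR with the type of the LHS and operands `b' and `c', and
   copies the statement's location onto it; single-rhs statements may have
   their operand tree copied first so the original is not modified in place.  */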
144
726a989a 145
1f6d3a08
RH
146#ifndef STACK_ALIGNMENT_NEEDED
147#define STACK_ALIGNMENT_NEEDED 1
148#endif
149
4e3825db
MM
150#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
151
1f9ceff1
AO
152/* Choose either CUR or NEXT as the leader DECL for a partition.
153 Prefer ignored decls, to simplify debug dumps and reduce the ambiguity
154 arising from the same user variable being in multiple partitions (this is
155 less likely for compiler-introduced temps). */
156
157static tree
158leader_merge (tree cur, tree next)
159{
160 if (cur == NULL || cur == next)
161 return next;
162
163 if (DECL_P (cur) && DECL_IGNORED_P (cur))
164 return cur;
165
166 if (DECL_P (next) && DECL_IGNORED_P (next))
167 return next;
168
169 return cur;
170}
171
4e3825db
MM
172/* Associate declaration T with storage space X. If T is not an
173 SSA name this is exactly SET_DECL_RTL, otherwise make the
174 partition of T associated with X. */
175static inline void
176set_rtl (tree t, rtx x)
177{
f11a7b6d
AO
178 gcc_checking_assert (!x
179 || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
180 || (use_register_for_decl (t)
181 ? (REG_P (x)
182 || (GET_CODE (x) == CONCAT
183 && (REG_P (XEXP (x, 0))
184 || SUBREG_P (XEXP (x, 0)))
185 && (REG_P (XEXP (x, 1))
186 || SUBREG_P (XEXP (x, 1))))
058c6384
EB
187 /* We need to accept PARALLELs for RESULT_DECLs
188 because of vector types with BLKmode returned
189 in multiple registers, but they are supposed
190 to be uncoalesced. */
f11a7b6d
AO
191 || (GET_CODE (x) == PARALLEL
192 && SSAVAR (t)
193 && TREE_CODE (SSAVAR (t)) == RESULT_DECL
058c6384
EB
194 && (GET_MODE (x) == BLKmode
195 || !flag_tree_coalesce_vars)))
f11a7b6d
AO
196 : (MEM_P (x) || x == pc_rtx
197 || (GET_CODE (x) == CONCAT
198 && MEM_P (XEXP (x, 0))
199 && MEM_P (XEXP (x, 1))))));
200 /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
201 RESULT_DECLs has the expected mode. For memory, we accept
202 unpromoted modes, since that's what we're likely to get. For
203 PARM_DECLs and RESULT_DECLs, we'll have been called by
204 set_parm_rtl, which will give us the default def, so we don't
205 have to compute it ourselves. For RESULT_DECLs, we accept mode
21fc3950
EB
206 mismatches too, as long as we have BLKmode or are not coalescing
207 across variables, so that we don't reject BLKmode PARALLELs or
208 unpromoted REGs. */
f11a7b6d 209 gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
21fc3950
EB
210 || (SSAVAR (t)
211 && TREE_CODE (SSAVAR (t)) == RESULT_DECL
212 && (promote_ssa_mode (t, NULL) == BLKmode
213 || !flag_tree_coalesce_vars))
f11a7b6d
AO
214 || !use_register_for_decl (t)
215 || GET_MODE (x) == promote_ssa_mode (t, NULL));
216
217 if (x)
1f9ceff1
AO
218 {
219 bool skip = false;
220 tree cur = NULL_TREE;
f11a7b6d
AO
221 rtx xm = x;
222
223 retry:
224 if (MEM_P (xm))
225 cur = MEM_EXPR (xm);
226 else if (REG_P (xm))
227 cur = REG_EXPR (xm);
228 else if (SUBREG_P (xm))
229 {
230 gcc_assert (subreg_lowpart_p (xm));
231 xm = SUBREG_REG (xm);
232 goto retry;
233 }
234 else if (GET_CODE (xm) == CONCAT)
235 {
236 xm = XEXP (xm, 0);
237 goto retry;
238 }
239 else if (GET_CODE (xm) == PARALLEL)
240 {
241 xm = XVECEXP (xm, 0, 0);
242 gcc_assert (GET_CODE (xm) == EXPR_LIST);
243 xm = XEXP (xm, 0);
244 goto retry;
245 }
246 else if (xm == pc_rtx)
1f9ceff1
AO
247 skip = true;
248 else
249 gcc_unreachable ();
250
f11a7b6d 251 tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);
1f9ceff1
AO
252
253 if (cur != next)
254 {
255 if (MEM_P (x))
f11a7b6d
AO
256 set_mem_attributes (x,
257 next && TREE_CODE (next) == SSA_NAME
258 ? TREE_TYPE (next)
259 : next, true);
1f9ceff1
AO
260 else
261 set_reg_attrs_for_decl_rtl (next, x);
262 }
263 }
264
4e3825db
MM
265 if (TREE_CODE (t) == SSA_NAME)
266 {
1f9ceff1
AO
267 int part = var_to_partition (SA.map, t);
268 if (part != NO_PARTITION)
269 {
270 if (SA.partition_to_pseudo[part])
271 gcc_assert (SA.partition_to_pseudo[part] == x);
272 else if (x != pc_rtx)
273 SA.partition_to_pseudo[part] = x;
274 }
275 /* For the benefit of debug information at -O0 (where
276 vartracking doesn't run) record the place also in the base
f11a7b6d
AO
277 DECL. For PARMs and RESULTs, do so only when setting the
278 default def. */
279 if (x && x != pc_rtx && SSA_NAME_VAR (t)
280 && (VAR_P (SSA_NAME_VAR (t))
281 || SSA_NAME_IS_DEFAULT_DEF (t)))
eb7adebc
MM
282 {
283 tree var = SSA_NAME_VAR (t);
284 /* If we don't yet have something recorded, just record it now. */
285 if (!DECL_RTL_SET_P (var))
286 SET_DECL_RTL (var, x);
47598145 287 /* If we have it set already to "multiple places" don't
eb7adebc
MM
288 change this. */
289 else if (DECL_RTL (var) == pc_rtx)
290 ;
291 /* If we have something recorded and it's not the same place
292 as we want to record now, we have multiple partitions for the
293 same base variable, with different places. We can't just
294 randomly choose one, hence we have to say that we don't know.
295 This only happens with optimization, and there var-tracking
296 will figure out the right thing. */
297 else if (DECL_RTL (var) != x)
298 SET_DECL_RTL (var, pc_rtx);
299 }
4e3825db
MM
300 }
301 else
302 SET_DECL_RTL (t, x);
303}
1f6d3a08
RH
304
305/* This structure holds data relevant to one variable that will be
306 placed in a stack slot. */
307struct stack_var
308{
309 /* The Variable. */
310 tree decl;
311
1f6d3a08
RH
312 /* Initially, the size of the variable. Later, the size of the partition,
313 if this variable becomes its partition's representative. */
314 HOST_WIDE_INT size;
315
316 /* The *byte* alignment required for this variable. Or, as with the
317 size, the alignment for this partition. */
318 unsigned int alignb;
319
320 /* The partition representative. */
321 size_t representative;
322
323 /* The next stack variable in the partition, or EOC. */
324 size_t next;
2bdbbe94
MM
325
326 /* The numbers of conflicting stack variables. */
327 bitmap conflicts;
1f6d3a08
RH
328};
329
330#define EOC ((size_t)-1)
331
332/* We have an array of such objects while deciding allocation. */
333static struct stack_var *stack_vars;
334static size_t stack_vars_alloc;
335static size_t stack_vars_num;
39c8aaa4 336static hash_map<tree, size_t> *decl_to_stack_part;
1f6d3a08 337
3f9b14ff
SB
338/* Conflict bitmaps go on this obstack. This allows us to destroy
339 all of them in one big sweep. */
340static bitmap_obstack stack_var_bitmap_obstack;
341
fa10beec 342/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
1f6d3a08
RH
343 is non-increasing. */
344static size_t *stack_vars_sorted;
345
1f6d3a08
RH
346/* The phase of the stack frame. This is the known misalignment of
347 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
348 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
349static int frame_phase;
350
7d69de61
RH
351/* Used during expand_used_vars to remember if we saw any decls for
352 which we'd like to enable stack smashing protection. */
353static bool has_protected_decls;
354
355/* Used during expand_used_vars. Remember if we saw a character buffer
356 smaller than our cutoff threshold. Used for -Wstack-protector. */
357static bool has_short_buffer;
1f6d3a08 358
6f197850 359/* Compute the byte alignment to use for DECL. Ignore alignment
765c3e8f
L
360 we cannot honor given the expected alignment of the stack boundary. */
361
362static unsigned int
6f197850 363align_local_variable (tree decl)
765c3e8f 364{
1f9ceff1
AO
365 unsigned int align;
366
367 if (TREE_CODE (decl) == SSA_NAME)
368 align = TYPE_ALIGN (TREE_TYPE (decl));
369 else
370 {
371 align = LOCAL_DECL_ALIGNMENT (decl);
fe37c7af 372 SET_DECL_ALIGN (decl, align);
1f9ceff1 373 }
1f6d3a08
RH
374 return align / BITS_PER_UNIT;
375}
376
435be747
MO
377/* Align given offset BASE with ALIGN. Round BASE up if ALIGN_UP is true,
378 down otherwise. Return the aligned BASE value. */
379
380static inline unsigned HOST_WIDE_INT
381align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
382{
383 return align_up ? (base + align - 1) & -align : base & -align;
384}
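/* Illustrative note (added commentary, not in the original source): assuming
   ALIGN is a power of two (so that -align masks off the low-order bits),
   align_base (23, 8, true) yields 24 and align_base (23, 8, false) yields 16. */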
385
1f6d3a08
RH
386/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
387 Return the frame offset. */
388
389static HOST_WIDE_INT
3a42502d 390alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
1f6d3a08
RH
391{
392 HOST_WIDE_INT offset, new_frame_offset;
393
1f6d3a08
RH
394 if (FRAME_GROWS_DOWNWARD)
395 {
435be747
MO
396 new_frame_offset
397 = align_base (frame_offset - frame_phase - size,
398 align, false) + frame_phase;
1f6d3a08
RH
399 offset = new_frame_offset;
400 }
401 else
402 {
435be747
MO
403 new_frame_offset
404 = align_base (frame_offset - frame_phase, align, true) + frame_phase;
1f6d3a08
RH
405 offset = new_frame_offset;
406 new_frame_offset += size;
407 }
408 frame_offset = new_frame_offset;
409
9fb798d7
EB
410 if (frame_offset_overflow (frame_offset, cfun->decl))
411 frame_offset = offset = 0;
412
1f6d3a08
RH
413 return offset;
414}
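/* Illustrative note (added commentary, not in the original source): on a
   downward-growing frame with frame_phase == 0 and frame_offset == -100,
   requesting 16 bytes at 8-byte alignment computes
   align_base (-100 - 16, 8, false) == -120, so the slot is placed at offset
   -120 and frame_offset becomes -120.  On upward-growing frames the current
   offset is first rounded up, the slot is placed there, and frame_offset is
   then advanced by SIZE.  */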
415
416/* Accumulate DECL into STACK_VARS. */
417
418static void
419add_stack_var (tree decl)
420{
533f611a
RH
421 struct stack_var *v;
422
1f6d3a08
RH
423 if (stack_vars_num >= stack_vars_alloc)
424 {
425 if (stack_vars_alloc)
426 stack_vars_alloc = stack_vars_alloc * 3 / 2;
427 else
428 stack_vars_alloc = 32;
429 stack_vars
430 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
431 }
47598145 432 if (!decl_to_stack_part)
39c8aaa4 433 decl_to_stack_part = new hash_map<tree, size_t>;
47598145 434
533f611a 435 v = &stack_vars[stack_vars_num];
39c8aaa4 436 decl_to_stack_part->put (decl, stack_vars_num);
533f611a
RH
437
438 v->decl = decl;
1f9ceff1
AO
439 tree size = TREE_CODE (decl) == SSA_NAME
440 ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
441 : DECL_SIZE_UNIT (decl);
442 v->size = tree_to_uhwi (size);
533f611a
RH
443 /* Ensure that all variables have size, so that &a != &b for any two
444 variables that are simultaneously live. */
445 if (v->size == 0)
446 v->size = 1;
1f9ceff1 447 v->alignb = align_local_variable (decl);
13868f40
EB
448 /* An alignment of zero can mightily confuse us later. */
449 gcc_assert (v->alignb != 0);
1f6d3a08
RH
450
451 /* All variables are initially in their own partition. */
533f611a
RH
452 v->representative = stack_vars_num;
453 v->next = EOC;
1f6d3a08 454
2bdbbe94 455 /* All variables initially conflict with no other. */
533f611a 456 v->conflicts = NULL;
2bdbbe94 457
1f6d3a08 458 /* Ensure that this decl doesn't get put onto the list twice. */
4e3825db 459 set_rtl (decl, pc_rtx);
1f6d3a08
RH
460
461 stack_vars_num++;
462}
463
1f6d3a08
RH
464/* Make the decls associated with luid's X and Y conflict. */
465
466static void
467add_stack_var_conflict (size_t x, size_t y)
468{
2bdbbe94
MM
469 struct stack_var *a = &stack_vars[x];
470 struct stack_var *b = &stack_vars[y];
471 if (!a->conflicts)
3f9b14ff 472 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
2bdbbe94 473 if (!b->conflicts)
3f9b14ff 474 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
2bdbbe94
MM
475 bitmap_set_bit (a->conflicts, y);
476 bitmap_set_bit (b->conflicts, x);
1f6d3a08
RH
477}
478
479/* Check whether the decls associated with luid's X and Y conflict. */
480
481static bool
482stack_var_conflict_p (size_t x, size_t y)
483{
2bdbbe94
MM
484 struct stack_var *a = &stack_vars[x];
485 struct stack_var *b = &stack_vars[y];
47598145
MM
486 if (x == y)
487 return false;
488 /* Partitions containing an SSA name result from gimple registers
489 with things like unsupported modes. They are top-level and
490 hence conflict with everything else. */
491 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
492 return true;
493
2bdbbe94
MM
494 if (!a->conflicts || !b->conflicts)
495 return false;
496 return bitmap_bit_p (a->conflicts, y);
1f6d3a08 497}
b8698a0f 498
47598145
MM
499/* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
500 enter its partition number into bitmap DATA. */
501
502static bool
355fe088 503visit_op (gimple *, tree op, tree, void *data)
47598145
MM
504{
505 bitmap active = (bitmap)data;
506 op = get_base_address (op);
507 if (op
508 && DECL_P (op)
509 && DECL_RTL_IF_SET (op) == pc_rtx)
510 {
39c8aaa4 511 size_t *v = decl_to_stack_part->get (op);
47598145
MM
512 if (v)
513 bitmap_set_bit (active, *v);
514 }
515 return false;
516}
517
518/* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
519 record conflicts between it and all currently active other partitions
520 from bitmap DATA. */
521
522static bool
355fe088 523visit_conflict (gimple *, tree op, tree, void *data)
47598145
MM
524{
525 bitmap active = (bitmap)data;
526 op = get_base_address (op);
527 if (op
528 && DECL_P (op)
529 && DECL_RTL_IF_SET (op) == pc_rtx)
530 {
39c8aaa4 531 size_t *v = decl_to_stack_part->get (op);
47598145
MM
532 if (v && bitmap_set_bit (active, *v))
533 {
534 size_t num = *v;
535 bitmap_iterator bi;
536 unsigned i;
537 gcc_assert (num < stack_vars_num);
538 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
539 add_stack_var_conflict (num, i);
540 }
541 }
542 return false;
543}
544
545/* Helper routine for add_scope_conflicts, calculating the active partitions
546 at the end of BB, leaving the result in WORK. We're called to generate
81bfd197
MM
547 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
548 liveness. */
47598145
MM
549
550static void
81bfd197 551add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
47598145
MM
552{
553 edge e;
554 edge_iterator ei;
555 gimple_stmt_iterator gsi;
9f1363cd 556 walk_stmt_load_store_addr_fn visit;
47598145
MM
557
558 bitmap_clear (work);
559 FOR_EACH_EDGE (e, ei, bb->preds)
560 bitmap_ior_into (work, (bitmap)e->src->aux);
561
ea85edfe 562 visit = visit_op;
47598145
MM
563
564 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
565 {
355fe088 566 gimple *stmt = gsi_stmt (gsi);
ea85edfe 567 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
47598145 568 }
ea85edfe 569 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
47598145 570 {
355fe088 571 gimple *stmt = gsi_stmt (gsi);
47598145
MM
572
573 if (gimple_clobber_p (stmt))
574 {
575 tree lhs = gimple_assign_lhs (stmt);
576 size_t *v;
577 /* Nested function lowering might introduce LHSs
578 that are COMPONENT_REFs. */
579 if (TREE_CODE (lhs) != VAR_DECL)
580 continue;
581 if (DECL_RTL_IF_SET (lhs) == pc_rtx
39c8aaa4 582 && (v = decl_to_stack_part->get (lhs)))
47598145
MM
583 bitmap_clear_bit (work, *v);
584 }
585 else if (!is_gimple_debug (stmt))
ea85edfe 586 {
81bfd197 587 if (for_conflict
ea85edfe
JJ
588 && visit == visit_op)
589 {
590 /* If this is the first real instruction in this BB we need
88d599dc
MM
591 to add conflicts for everything live at this point now.
592 Unlike classical liveness for named objects we can't
ea85edfe
JJ
593 rely on seeing a def/use of the names we're interested in.
594 There might merely be indirect loads/stores. We'd not add any
81bfd197 595 conflicts for such partitions. */
ea85edfe
JJ
596 bitmap_iterator bi;
597 unsigned i;
81bfd197 598 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
ea85edfe 599 {
9b44f5d9
MM
600 struct stack_var *a = &stack_vars[i];
601 if (!a->conflicts)
3f9b14ff 602 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
9b44f5d9 603 bitmap_ior_into (a->conflicts, work);
ea85edfe
JJ
604 }
605 visit = visit_conflict;
606 }
607 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
608 }
47598145
MM
609 }
610}
611
612/* Generate stack partition conflicts between all partitions that are
613 simultaneously live. */
614
615static void
616add_scope_conflicts (void)
617{
618 basic_block bb;
619 bool changed;
620 bitmap work = BITMAP_ALLOC (NULL);
9b44f5d9
MM
621 int *rpo;
622 int n_bbs;
47598145 623
88d599dc 624 /* We approximate the live range of a stack variable by taking the first
47598145
MM
625 mention of its name as starting point(s), and by the end-of-scope
626 death clobber added by gimplify as ending point(s) of the range.
627 This over-approximates in the case where we for instance moved an address-taken
628 operation upward, without also moving a dereference to it upwards.
629 But it is conservatively correct, as a variable can never hold values
630 before its name is mentioned at least once.
631
88d599dc 632 We then do a mostly classical bitmap liveness algorithm. */
47598145 633
04a90bec 634 FOR_ALL_BB_FN (bb, cfun)
3f9b14ff 635 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
47598145 636
8b1c6fd7 637 rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
9b44f5d9
MM
638 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
639
47598145
MM
640 changed = true;
641 while (changed)
642 {
9b44f5d9 643 int i;
47598145 644 changed = false;
9b44f5d9 645 for (i = 0; i < n_bbs; i++)
47598145 646 {
9b44f5d9 647 bitmap active;
06e28de2 648 bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
9b44f5d9 649 active = (bitmap)bb->aux;
81bfd197 650 add_scope_conflicts_1 (bb, work, false);
47598145
MM
651 if (bitmap_ior_into (active, work))
652 changed = true;
653 }
654 }
655
11cd3bed 656 FOR_EACH_BB_FN (bb, cfun)
81bfd197 657 add_scope_conflicts_1 (bb, work, true);
47598145 658
9b44f5d9 659 free (rpo);
47598145 660 BITMAP_FREE (work);
04a90bec 661 FOR_ALL_BB_FN (bb, cfun)
47598145
MM
662 BITMAP_FREE (bb->aux);
663}
664
1f6d3a08 665/* A subroutine of partition_stack_vars. A comparison function for qsort,
3a42502d 666 sorting an array of indices by the properties of the object. */
1f6d3a08
RH
667
668static int
3a42502d 669stack_var_cmp (const void *a, const void *b)
1f6d3a08 670{
3a42502d
RH
671 size_t ia = *(const size_t *)a;
672 size_t ib = *(const size_t *)b;
673 unsigned int aligna = stack_vars[ia].alignb;
674 unsigned int alignb = stack_vars[ib].alignb;
675 HOST_WIDE_INT sizea = stack_vars[ia].size;
676 HOST_WIDE_INT sizeb = stack_vars[ib].size;
677 tree decla = stack_vars[ia].decl;
678 tree declb = stack_vars[ib].decl;
679 bool largea, largeb;
4e3825db 680 unsigned int uida, uidb;
1f6d3a08 681
3a42502d
RH
682 /* Primary compare on "large" alignment. Large comes first. */
683 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
684 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
685 if (largea != largeb)
686 return (int)largeb - (int)largea;
687
688 /* Secondary compare on size, decreasing */
3a42502d 689 if (sizea > sizeb)
6ddfda8a
ER
690 return -1;
691 if (sizea < sizeb)
1f6d3a08 692 return 1;
3a42502d
RH
693
694 /* Tertiary compare on true alignment, decreasing. */
695 if (aligna < alignb)
696 return -1;
697 if (aligna > alignb)
698 return 1;
699
700 /* Final compare on ID for sort stability, increasing.
701 Two SSA names are compared by their version, SSA names come before
702 non-SSA names, and two normal decls are compared by their DECL_UID. */
4e3825db
MM
703 if (TREE_CODE (decla) == SSA_NAME)
704 {
705 if (TREE_CODE (declb) == SSA_NAME)
706 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
707 else
708 return -1;
709 }
710 else if (TREE_CODE (declb) == SSA_NAME)
711 return 1;
712 else
713 uida = DECL_UID (decla), uidb = DECL_UID (declb);
79f802f5 714 if (uida < uidb)
79f802f5 715 return 1;
3a42502d
RH
716 if (uida > uidb)
717 return -1;
1f6d3a08
RH
718 return 0;
719}
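/* Illustrative note (added commentary, not in the original source): with this
   comparator, qsort orders an all-small-alignment array of sizes
   {16, 128, 32} as {128, 32, 16}; any variable whose alignment exceeds
   MAX_SUPPORTED_STACK_ALIGNMENT sorts before all of them, and remaining ties
   fall back to the SSA version or DECL_UID for stability.  */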
720
0ef08bc5 721struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
39c8aaa4 722typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
55b34b5f
RG
723
724/* If the points-to solution *PT points to variables that are in a partition
725 together with other variables add all partition members to the pointed-to
726 variables bitmap. */
727
728static void
729add_partitioned_vars_to_ptset (struct pt_solution *pt,
39c8aaa4 730 part_hashmap *decls_to_partitions,
6e2830c3 731 hash_set<bitmap> *visited, bitmap temp)
55b34b5f
RG
732{
733 bitmap_iterator bi;
734 unsigned i;
735 bitmap *part;
736
737 if (pt->anything
738 || pt->vars == NULL
739 /* The pointed-to vars bitmap is shared, it is enough to
740 visit it once. */
6e2830c3 741 || visited->add (pt->vars))
55b34b5f
RG
742 return;
743
744 bitmap_clear (temp);
745
746 /* By using a temporary bitmap to store all members of the partitions
747 we have to add we make sure to visit each of the partitions only
748 once. */
749 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
750 if ((!temp
751 || !bitmap_bit_p (temp, i))
39c8aaa4 752 && (part = decls_to_partitions->get (i)))
55b34b5f
RG
753 bitmap_ior_into (temp, *part);
754 if (!bitmap_empty_p (temp))
755 bitmap_ior_into (pt->vars, temp);
756}
757
758/* Update points-to sets based on partition info, so we can use them on RTL.
759 The bitmaps representing stack partitions will be saved until expand,
760 where partitioned decls used as bases in memory expressions will be
761 rewritten. */
762
763static void
764update_alias_info_with_stack_vars (void)
765{
39c8aaa4 766 part_hashmap *decls_to_partitions = NULL;
55b34b5f
RG
767 size_t i, j;
768 tree var = NULL_TREE;
769
770 for (i = 0; i < stack_vars_num; i++)
771 {
772 bitmap part = NULL;
773 tree name;
774 struct ptr_info_def *pi;
775
776 /* Not interested in partitions with a single variable. */
777 if (stack_vars[i].representative != i
778 || stack_vars[i].next == EOC)
779 continue;
780
781 if (!decls_to_partitions)
782 {
39c8aaa4
TS
783 decls_to_partitions = new part_hashmap;
784 cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
55b34b5f
RG
785 }
786
787 /* Create an SSA_NAME that points to the partition for use
788 as base during alias-oracle queries on RTL for bases that
789 have been partitioned. */
790 if (var == NULL_TREE)
b731b390
JJ
791 var = create_tmp_var (ptr_type_node);
792 name = make_ssa_name (var);
55b34b5f
RG
793
794 /* Create bitmaps representing partitions. They will be used for
795 points-to sets later, so use GGC alloc. */
796 part = BITMAP_GGC_ALLOC ();
797 for (j = i; j != EOC; j = stack_vars[j].next)
798 {
799 tree decl = stack_vars[j].decl;
25a6a873 800 unsigned int uid = DECL_PT_UID (decl);
55b34b5f 801 bitmap_set_bit (part, uid);
39c8aaa4
TS
802 decls_to_partitions->put (uid, part);
803 cfun->gimple_df->decls_to_pointers->put (decl, name);
88d8330d
EB
804 if (TREE_ADDRESSABLE (decl))
805 TREE_ADDRESSABLE (name) = 1;
55b34b5f
RG
806 }
807
808 /* Make the SSA name point to all partition members. */
809 pi = get_ptr_info (name);
d3553615 810 pt_solution_set (&pi->pt, part, false);
55b34b5f
RG
811 }
812
813 /* Make all points-to sets that contain one member of a partition
814 contain all members of the partition. */
815 if (decls_to_partitions)
816 {
817 unsigned i;
6e2830c3 818 hash_set<bitmap> visited;
3f9b14ff 819 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
55b34b5f
RG
820
821 for (i = 1; i < num_ssa_names; i++)
822 {
823 tree name = ssa_name (i);
824 struct ptr_info_def *pi;
825
826 if (name
827 && POINTER_TYPE_P (TREE_TYPE (name))
828 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
829 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
6e2830c3 830 &visited, temp);
55b34b5f
RG
831 }
832
833 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
6e2830c3 834 decls_to_partitions, &visited, temp);
55b34b5f 835
39c8aaa4 836 delete decls_to_partitions;
55b34b5f
RG
837 BITMAP_FREE (temp);
838 }
839}
840
1f6d3a08
RH
841/* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
842 partitioning algorithm. Partitions A and B are known to be non-conflicting.
6ddfda8a 843 Merge them into a single partition A. */
1f6d3a08
RH
844
845static void
6ddfda8a 846union_stack_vars (size_t a, size_t b)
1f6d3a08 847{
2bdbbe94
MM
848 struct stack_var *vb = &stack_vars[b];
849 bitmap_iterator bi;
850 unsigned u;
1f6d3a08 851
6ddfda8a
ER
852 gcc_assert (stack_vars[b].next == EOC);
853 /* Add B to A's partition. */
854 stack_vars[b].next = stack_vars[a].next;
855 stack_vars[b].representative = a;
1f6d3a08
RH
856 stack_vars[a].next = b;
857
858 /* Update the required alignment of partition A to account for B. */
859 if (stack_vars[a].alignb < stack_vars[b].alignb)
860 stack_vars[a].alignb = stack_vars[b].alignb;
861
862 /* Update the interference graph and merge the conflicts. */
2bdbbe94
MM
863 if (vb->conflicts)
864 {
865 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
866 add_stack_var_conflict (a, stack_vars[u].representative);
867 BITMAP_FREE (vb->conflicts);
868 }
1f6d3a08
RH
869}
870
c461d263
JJ
871/* Return true if the current function should have its stack frame
872 protected by address sanitizer. */
873
874static inline bool
875asan_sanitize_stack_p (void)
876{
877 return ((flag_sanitize & SANITIZE_ADDRESS)
878 && ASAN_STACK
879 && !lookup_attribute ("no_sanitize_address",
880 DECL_ATTRIBUTES (current_function_decl)));
881}
882
1f6d3a08
RH
883/* A subroutine of expand_used_vars. Binpack the variables into
884 partitions constrained by the interference graph. The overall
885 algorithm used is as follows:
886
6ddfda8a 887 Sort the objects by size in descending order.
1f6d3a08
RH
888 For each object A {
889 S = size(A)
890 O = 0
891 loop {
892 Look for the largest non-conflicting object B with size <= S.
893 UNION (A, B)
1f6d3a08
RH
894 }
895 }
896*/
897
898static void
899partition_stack_vars (void)
900{
901 size_t si, sj, n = stack_vars_num;
902
903 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
904 for (si = 0; si < n; ++si)
905 stack_vars_sorted[si] = si;
906
907 if (n == 1)
908 return;
909
3a42502d 910 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
1f6d3a08 911
1f6d3a08
RH
912 for (si = 0; si < n; ++si)
913 {
914 size_t i = stack_vars_sorted[si];
3a42502d 915 unsigned int ialign = stack_vars[i].alignb;
f3ddd692 916 HOST_WIDE_INT isize = stack_vars[i].size;
1f6d3a08 917
6ddfda8a
ER
918 /* Ignore objects that aren't partition representatives. If we
919 see a var that is not a partition representative, it must
920 have been merged earlier. */
921 if (stack_vars[i].representative != i)
922 continue;
923
924 for (sj = si + 1; sj < n; ++sj)
1f6d3a08
RH
925 {
926 size_t j = stack_vars_sorted[sj];
1f6d3a08 927 unsigned int jalign = stack_vars[j].alignb;
f3ddd692 928 HOST_WIDE_INT jsize = stack_vars[j].size;
1f6d3a08
RH
929
930 /* Ignore objects that aren't partition representatives. */
931 if (stack_vars[j].representative != j)
932 continue;
933
3a42502d
RH
934 /* Do not mix objects of "small" (supported) alignment
935 and "large" (unsupported) alignment. */
936 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
937 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
f3ddd692
JJ
938 break;
939
940 /* For Address Sanitizer do not mix objects with different
941 sizes, as the shorter vars wouldn't be adequately protected.
942 Don't do that for "large" (unsupported) alignment objects,
943 those aren't protected anyway. */
c461d263 944 if (asan_sanitize_stack_p () && isize != jsize
f3ddd692
JJ
945 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
946 break;
947
948 /* Ignore conflicting objects. */
949 if (stack_var_conflict_p (i, j))
3a42502d
RH
950 continue;
951
1f6d3a08 952 /* UNION the objects, placing J at OFFSET. */
6ddfda8a 953 union_stack_vars (i, j);
1f6d3a08
RH
954 }
955 }
55b34b5f 956
9b999dc5 957 update_alias_info_with_stack_vars ();
1f6d3a08
RH
958}
959
960/* A debugging aid for expand_used_vars. Dump the generated partitions. */
961
962static void
963dump_stack_var_partition (void)
964{
965 size_t si, i, j, n = stack_vars_num;
966
967 for (si = 0; si < n; ++si)
968 {
969 i = stack_vars_sorted[si];
970
971 /* Skip variables that aren't partition representatives, for now. */
972 if (stack_vars[i].representative != i)
973 continue;
974
975 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
976 " align %u\n", (unsigned long) i, stack_vars[i].size,
977 stack_vars[i].alignb);
978
979 for (j = i; j != EOC; j = stack_vars[j].next)
980 {
981 fputc ('\t', dump_file);
982 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
1f6d3a08 983 }
6ddfda8a 984 fputc ('\n', dump_file);
1f6d3a08
RH
985 }
986}
987
3a42502d 988/* Assign rtl to DECL at BASE + OFFSET. */
1f6d3a08
RH
989
990static void
3a42502d
RH
991expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
992 HOST_WIDE_INT offset)
1f6d3a08 993{
3a42502d 994 unsigned align;
1f6d3a08 995 rtx x;
c22cacf3 996
1f6d3a08
RH
997 /* If this fails, we've overflowed the stack frame. Error nicely? */
998 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
999
0a81f074 1000 x = plus_constant (Pmode, base, offset);
1f9ceff1
AO
1001 x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
1002 ? TYPE_MODE (TREE_TYPE (decl))
1003 : DECL_MODE (SSAVAR (decl)), x);
1f6d3a08 1004
4e3825db
MM
1005 if (TREE_CODE (decl) != SSA_NAME)
1006 {
1007 /* Set the alignment we actually gave this decl if it isn't an SSA name.
1008 If it is, we generate stack slots only accidentally, so it isn't as
1009 important; we'll simply use the alignment that is already set.
3a42502d
RH
1010 if (base == virtual_stack_vars_rtx)
1011 offset -= frame_phase;
4e3825db
MM
1012 align = offset & -offset;
1013 align *= BITS_PER_UNIT;
3a42502d
RH
1014 if (align == 0 || align > base_align)
1015 align = base_align;
1016
1017 /* One would think that we could assert that we're not decreasing
1018 alignment here, but (at least) the i386 port does exactly this
1019 via the MINIMUM_ALIGNMENT hook. */
4e3825db 1020
fe37c7af 1021 SET_DECL_ALIGN (decl, align);
4e3825db
MM
1022 DECL_USER_ALIGN (decl) = 0;
1023 }
1024
4e3825db 1025 set_rtl (decl, x);
1f6d3a08
RH
1026}
1027
f3ddd692
JJ
1028struct stack_vars_data
1029{
1030 /* Vector of offset pairs, always end of some padding followed
1031 by start of the padding that needs Address Sanitizer protection.
1032 The vector is in reversed order; highest offset pairs come first. */
9771b263 1033 vec<HOST_WIDE_INT> asan_vec;
f3ddd692
JJ
1034
1035 /* Vector of partition representative decls in between the paddings. */
9771b263 1036 vec<tree> asan_decl_vec;
e361382f
JJ
1037
1038 /* Base pseudo register for Address Sanitizer protected automatic vars. */
1039 rtx asan_base;
1040
1041 /* Alignment needed for the Address Sanitizer protected automatic vars. */
1042 unsigned int asan_alignb;
f3ddd692
JJ
1043};
1044
1f6d3a08
RH
1045/* A subroutine of expand_used_vars. Give each partition representative
1046 a unique location within the stack frame. Update each partition member
1047 with that location. */
1048
1049static void
f3ddd692 1050expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
1f6d3a08
RH
1051{
1052 size_t si, i, j, n = stack_vars_num;
3a42502d
RH
1053 HOST_WIDE_INT large_size = 0, large_alloc = 0;
1054 rtx large_base = NULL;
1055 unsigned large_align = 0;
1056 tree decl;
1057
1058 /* Determine if there are any variables requiring "large" alignment.
1059 Since these are dynamically allocated, we only process these if
1060 no predicate is involved. */
1061 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1062 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1063 {
1064 /* Find the total size of these variables. */
1065 for (si = 0; si < n; ++si)
1066 {
1067 unsigned alignb;
1068
1069 i = stack_vars_sorted[si];
1070 alignb = stack_vars[i].alignb;
1071
a8eeec27
SE
1072 /* All "large" alignment decls come before all "small" alignment
1073 decls, but "large" alignment decls are not sorted based on
1074 their alignment. Increase large_align to track the largest
1075 required alignment. */
1076 if ((alignb * BITS_PER_UNIT) > large_align)
1077 large_align = alignb * BITS_PER_UNIT;
1078
3a42502d
RH
1079 /* Stop when we get to the first decl with "small" alignment. */
1080 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1081 break;
1082
1083 /* Skip variables that aren't partition representatives. */
1084 if (stack_vars[i].representative != i)
1085 continue;
1086
1087 /* Skip variables that have already had rtl assigned. See also
1088 add_stack_var where we perpetrate this pc_rtx hack. */
1089 decl = stack_vars[i].decl;
1f9ceff1
AO
1090 if (TREE_CODE (decl) == SSA_NAME
1091 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1092 : DECL_RTL (decl) != pc_rtx)
3a42502d
RH
1093 continue;
1094
1095 large_size += alignb - 1;
1096 large_size &= -(HOST_WIDE_INT)alignb;
1097 large_size += stack_vars[i].size;
1098 }
1099
1100 /* If there were any, allocate space. */
1101 if (large_size > 0)
1102 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
1103 large_align, true);
1104 }
1f6d3a08
RH
1105
1106 for (si = 0; si < n; ++si)
1107 {
3a42502d
RH
1108 rtx base;
1109 unsigned base_align, alignb;
1f6d3a08
RH
1110 HOST_WIDE_INT offset;
1111
1112 i = stack_vars_sorted[si];
1113
1114 /* Skip variables that aren't partition representatives, for now. */
1115 if (stack_vars[i].representative != i)
1116 continue;
1117
7d69de61
RH
1118 /* Skip variables that have already had rtl assigned. See also
1119 add_stack_var where we perpetrate this pc_rtx hack. */
3a42502d 1120 decl = stack_vars[i].decl;
1f9ceff1
AO
1121 if (TREE_CODE (decl) == SSA_NAME
1122 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1123 : DECL_RTL (decl) != pc_rtx)
7d69de61
RH
1124 continue;
1125
c22cacf3 1126 /* Check the predicate to see whether this variable should be
7d69de61 1127 allocated in this pass. */
f3ddd692 1128 if (pred && !pred (i))
7d69de61
RH
1129 continue;
1130
3a42502d
RH
1131 alignb = stack_vars[i].alignb;
1132 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1133 {
e361382f 1134 base = virtual_stack_vars_rtx;
c461d263 1135 if (asan_sanitize_stack_p () && pred)
f3ddd692 1136 {
435be747
MO
1137 HOST_WIDE_INT prev_offset
1138 = align_base (frame_offset,
1139 MAX (alignb, ASAN_RED_ZONE_SIZE),
d6c1a7a7 1140 !FRAME_GROWS_DOWNWARD);
f3ddd692 1141 tree repr_decl = NULL_TREE;
f3ddd692
JJ
1142 offset
1143 = alloc_stack_frame_space (stack_vars[i].size
1144 + ASAN_RED_ZONE_SIZE,
1145 MAX (alignb, ASAN_RED_ZONE_SIZE));
435be747 1146
9771b263
DN
1147 data->asan_vec.safe_push (prev_offset);
1148 data->asan_vec.safe_push (offset + stack_vars[i].size);
f3ddd692
JJ
1149 /* Find the best representative of the partition.
1150 Prefer those with DECL_NAME, and better yet those
1151 satisfying the asan_protect_stack_decl predicate. */
1152 for (j = i; j != EOC; j = stack_vars[j].next)
1153 if (asan_protect_stack_decl (stack_vars[j].decl)
1154 && DECL_NAME (stack_vars[j].decl))
1155 {
1156 repr_decl = stack_vars[j].decl;
1157 break;
1158 }
1159 else if (repr_decl == NULL_TREE
1160 && DECL_P (stack_vars[j].decl)
1161 && DECL_NAME (stack_vars[j].decl))
1162 repr_decl = stack_vars[j].decl;
1163 if (repr_decl == NULL_TREE)
1164 repr_decl = stack_vars[i].decl;
9771b263 1165 data->asan_decl_vec.safe_push (repr_decl);
e361382f
JJ
1166 data->asan_alignb = MAX (data->asan_alignb, alignb);
1167 if (data->asan_base == NULL)
1168 data->asan_base = gen_reg_rtx (Pmode);
1169 base = data->asan_base;
e5dcd695
LZ
1170
1171 if (!STRICT_ALIGNMENT)
1172 base_align = crtl->max_used_stack_slot_alignment;
1173 else
1174 base_align = MAX (crtl->max_used_stack_slot_alignment,
1175 GET_MODE_ALIGNMENT (SImode)
1176 << ASAN_SHADOW_SHIFT);
f3ddd692
JJ
1177 }
1178 else
e5dcd695
LZ
1179 {
1180 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1181 base_align = crtl->max_used_stack_slot_alignment;
1182 }
3a42502d
RH
1183 }
1184 else
1185 {
1186 /* Large alignment is only processed in the last pass. */
1187 if (pred)
1188 continue;
533f611a 1189 gcc_assert (large_base != NULL);
3a42502d
RH
1190
1191 large_alloc += alignb - 1;
1192 large_alloc &= -(HOST_WIDE_INT)alignb;
1193 offset = large_alloc;
1194 large_alloc += stack_vars[i].size;
1195
1196 base = large_base;
1197 base_align = large_align;
1198 }
1f6d3a08
RH
1199
1200 /* Create rtl for each variable based on their location within the
1201 partition. */
1202 for (j = i; j != EOC; j = stack_vars[j].next)
f8da8190 1203 {
f8da8190 1204 expand_one_stack_var_at (stack_vars[j].decl,
3a42502d 1205 base, base_align,
6ddfda8a 1206 offset);
f8da8190 1207 }
1f6d3a08 1208 }
3a42502d
RH
1209
1210 gcc_assert (large_alloc == large_size);
1f6d3a08
RH
1211}
1212
ff28a94d
JH
1213/* Take into account all sizes of partitions and reset DECL_RTLs. */
1214static HOST_WIDE_INT
1215account_stack_vars (void)
1216{
1217 size_t si, j, i, n = stack_vars_num;
1218 HOST_WIDE_INT size = 0;
1219
1220 for (si = 0; si < n; ++si)
1221 {
1222 i = stack_vars_sorted[si];
1223
1224 /* Skip variables that aren't partition representatives, for now. */
1225 if (stack_vars[i].representative != i)
1226 continue;
1227
1228 size += stack_vars[i].size;
1229 for (j = i; j != EOC; j = stack_vars[j].next)
4e3825db 1230 set_rtl (stack_vars[j].decl, NULL);
ff28a94d
JH
1231 }
1232 return size;
1233}
1234
f11a7b6d
AO
1235/* Record the RTL assignment X for the default def of PARM. */
1236
1237extern void
1238set_parm_rtl (tree parm, rtx x)
1239{
1240 gcc_assert (TREE_CODE (parm) == PARM_DECL
1241 || TREE_CODE (parm) == RESULT_DECL);
1242
1243 if (x && !MEM_P (x))
1244 {
1245 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
1246 TYPE_MODE (TREE_TYPE (parm)),
1247 TYPE_ALIGN (TREE_TYPE (parm)));
1248
1249 /* If the variable alignment is very large we'll dynamically
1250 allocate it, which means that the in-frame portion is just a
1251 pointer. ??? We've got a pseudo for sure here, do we
1252 actually dynamically allocate its spilling area if needed?
1253 ??? Isn't it a problem when POINTER_SIZE also exceeds
1254 MAX_SUPPORTED_STACK_ALIGNMENT, as on cris and lm32? */
1255 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1256 align = POINTER_SIZE;
1257
1258 record_alignment_for_reg_var (align);
1259 }
1260
f11a7b6d
AO
1261 tree ssa = ssa_default_def (cfun, parm);
1262 if (!ssa)
1263 return set_rtl (parm, x);
1264
1265 int part = var_to_partition (SA.map, ssa);
1266 gcc_assert (part != NO_PARTITION);
1267
1268 bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
1269 gcc_assert (changed);
1270
1271 set_rtl (ssa, x);
1272 gcc_assert (DECL_RTL (parm) == x);
1273}
1274
1f6d3a08
RH
1275/* A subroutine of expand_one_var. Called to immediately assign rtl
1276 to a variable to be allocated in the stack frame. */
1277
1278static void
1f9ceff1 1279expand_one_stack_var_1 (tree var)
1f6d3a08 1280{
3a42502d
RH
1281 HOST_WIDE_INT size, offset;
1282 unsigned byte_align;
1f6d3a08 1283
1f9ceff1
AO
1284 if (TREE_CODE (var) == SSA_NAME)
1285 {
1286 tree type = TREE_TYPE (var);
1287 size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1288 byte_align = TYPE_ALIGN_UNIT (type);
1289 }
1290 else
1291 {
1292 size = tree_to_uhwi (DECL_SIZE_UNIT (var));
1293 byte_align = align_local_variable (var);
1294 }
3a42502d
RH
1295
1296 /* We handle highly aligned variables in expand_stack_vars. */
1297 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1f6d3a08 1298
3a42502d
RH
1299 offset = alloc_stack_frame_space (size, byte_align);
1300
1301 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1302 crtl->max_used_stack_slot_alignment, offset);
1f6d3a08
RH
1303}
1304
1f9ceff1
AO
1305/* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1306 already assigned some MEM. */
1307
1308static void
1309expand_one_stack_var (tree var)
1310{
1311 if (TREE_CODE (var) == SSA_NAME)
1312 {
1313 int part = var_to_partition (SA.map, var);
1314 if (part != NO_PARTITION)
1315 {
1316 rtx x = SA.partition_to_pseudo[part];
1317 gcc_assert (x);
1318 gcc_assert (MEM_P (x));
1319 return;
1320 }
1321 }
1322
1323 return expand_one_stack_var_1 (var);
1324}
1325
1f6d3a08
RH
1326/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1327 that will reside in a hard register. */
1328
1329static void
1330expand_one_hard_reg_var (tree var)
1331{
1332 rest_of_decl_compilation (var, 0, 0);
1333}
1334
1f9ceff1
AO
1335/* Record the alignment requirements of some variable assigned to a
1336 pseudo. */
1337
1338static void
1339record_alignment_for_reg_var (unsigned int align)
1340{
1341 if (SUPPORTS_STACK_ALIGNMENT
1342 && crtl->stack_alignment_estimated < align)
1343 {
1344 /* stack_alignment_estimated shouldn't change after stack
1345 realign decision has been made. */
1346 gcc_assert (!crtl->stack_realign_processed);
1347 crtl->stack_alignment_estimated = align;
1348 }
1349
1350 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1351 So here we only make sure stack_alignment_needed >= align. */
1352 if (crtl->stack_alignment_needed < align)
1353 crtl->stack_alignment_needed = align;
1354 if (crtl->max_used_stack_slot_alignment < align)
1355 crtl->max_used_stack_slot_alignment = align;
1356}
1357
1358/* Create RTL for an SSA partition. */
1359
1360static void
1361expand_one_ssa_partition (tree var)
1362{
1363 int part = var_to_partition (SA.map, var);
1364 gcc_assert (part != NO_PARTITION);
1365
1366 if (SA.partition_to_pseudo[part])
1367 return;
1368
1369 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1370 TYPE_MODE (TREE_TYPE (var)),
1371 TYPE_ALIGN (TREE_TYPE (var)));
1372
1373 /* If the variable alignment is very large we'll dynamically allocate
1374 it, which means that the in-frame portion is just a pointer. */
1375 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1376 align = POINTER_SIZE;
1377
1378 record_alignment_for_reg_var (align);
1379
1380 if (!use_register_for_decl (var))
1381 {
f11a7b6d 1382 if (defer_stack_allocation (var, true))
1f9ceff1
AO
1383 add_stack_var (var);
1384 else
1385 expand_one_stack_var_1 (var);
1386 return;
1387 }
1388
1389 machine_mode reg_mode = promote_ssa_mode (var, NULL);
1390
1391 rtx x = gen_reg_rtx (reg_mode);
1392
1393 set_rtl (var, x);
1394}
1395
f11a7b6d
AO
1396/* Record the association between the RTL generated for partition PART
1397 and the underlying variable of the SSA_NAME VAR. */
1f9ceff1
AO
1398
1399static void
1400adjust_one_expanded_partition_var (tree var)
1401{
1402 if (!var)
1403 return;
1404
1405 tree decl = SSA_NAME_VAR (var);
1406
1407 int part = var_to_partition (SA.map, var);
1408 if (part == NO_PARTITION)
1409 return;
1410
1411 rtx x = SA.partition_to_pseudo[part];
1412
f11a7b6d 1413 gcc_assert (x);
1f9ceff1
AO
1414
1415 set_rtl (var, x);
1416
1417 if (!REG_P (x))
1418 return;
1419
1420 /* Note if the object is a user variable. */
1421 if (decl && !DECL_ARTIFICIAL (decl))
1422 mark_user_reg (x);
1423
1424 if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
1425 mark_reg_pointer (x, get_pointer_alignment (var));
1426}
1427
1f6d3a08
RH
1428/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1429 that will reside in a pseudo register. */
1430
1431static void
1432expand_one_register_var (tree var)
1433{
1f9ceff1
AO
1434 if (TREE_CODE (var) == SSA_NAME)
1435 {
1436 int part = var_to_partition (SA.map, var);
1437 if (part != NO_PARTITION)
1438 {
1439 rtx x = SA.partition_to_pseudo[part];
1440 gcc_assert (x);
1441 gcc_assert (REG_P (x));
1442 return;
1443 }
1444 gcc_unreachable ();
1445 }
1446
1447 tree decl = var;
4e3825db 1448 tree type = TREE_TYPE (decl);
ef4bddc2 1449 machine_mode reg_mode = promote_decl_mode (decl, NULL);
1f6d3a08
RH
1450 rtx x = gen_reg_rtx (reg_mode);
1451
4e3825db 1452 set_rtl (var, x);
1f6d3a08
RH
1453
1454 /* Note if the object is a user variable. */
4e3825db
MM
1455 if (!DECL_ARTIFICIAL (decl))
1456 mark_user_reg (x);
1f6d3a08 1457
61021c2c 1458 if (POINTER_TYPE_P (type))
d466b407 1459 mark_reg_pointer (x, get_pointer_alignment (var));
1f6d3a08
RH
1460}
1461
1462/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
128a79fb 1463 has some associated error, e.g. its type is error-mark. We just need
1f6d3a08
RH
1464 to pick something that won't crash the rest of the compiler. */
1465
1466static void
1467expand_one_error_var (tree var)
1468{
ef4bddc2 1469 machine_mode mode = DECL_MODE (var);
1f6d3a08
RH
1470 rtx x;
1471
1472 if (mode == BLKmode)
1473 x = gen_rtx_MEM (BLKmode, const0_rtx);
1474 else if (mode == VOIDmode)
1475 x = const0_rtx;
1476 else
1477 x = gen_reg_rtx (mode);
1478
1479 SET_DECL_RTL (var, x);
1480}
1481
c22cacf3 1482/* A subroutine of expand_one_var. VAR is a variable that will be
1f6d3a08
RH
1483 allocated to the local stack frame. Return true if we wish to
1484 add VAR to STACK_VARS so that it will be coalesced with other
1485 variables. Return false to allocate VAR immediately.
1486
1487 This function is used to reduce the number of variables considered
1488 for coalescing, which reduces the size of the quadratic problem. */
1489
1490static bool
1491defer_stack_allocation (tree var, bool toplevel)
1492{
1f9ceff1
AO
1493 tree size_unit = TREE_CODE (var) == SSA_NAME
1494 ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1495 : DECL_SIZE_UNIT (var);
1496
ee2e8462
EB
1497 /* Whether the variable is small enough for immediate allocation not to be
1498 a problem with regard to the frame size. */
1499 bool smallish
1f9ceff1 1500 = ((HOST_WIDE_INT) tree_to_uhwi (size_unit)
ee2e8462
EB
1501 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));
1502
7d69de61 1503 /* If stack protection is enabled, *all* stack variables must be deferred,
f3ddd692
JJ
1504 so that we can re-order the strings to the top of the frame.
1505 Similarly for Address Sanitizer. */
c461d263 1506 if (flag_stack_protect || asan_sanitize_stack_p ())
7d69de61
RH
1507 return true;
1508
1f9ceff1
AO
1509 unsigned int align = TREE_CODE (var) == SSA_NAME
1510 ? TYPE_ALIGN (TREE_TYPE (var))
1511 : DECL_ALIGN (var);
1512
3a42502d
RH
1513 /* We handle "large" alignment via dynamic allocation. We want to handle
1514 this extra complication in only one place, so defer them. */
1f9ceff1 1515 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
3a42502d
RH
1516 return true;
1517
1f9ceff1
AO
1518 bool ignored = TREE_CODE (var) == SSA_NAME
1519 ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1520 : DECL_IGNORED_P (var);
1521
ee2e8462
EB
1522 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1523 might be detached from their block and appear at toplevel when we reach
1524 here. We want to coalesce them with variables from other blocks when
1525 the immediate contribution to the frame size would be noticeable. */
1f9ceff1 1526 if (toplevel && optimize > 0 && ignored && !smallish)
ee2e8462
EB
1527 return true;
1528
1529 /* Variables declared in the outermost scope automatically conflict
1530 with every other variable. The only reason to want to defer them
1f6d3a08
RH
1531 at all is that, after sorting, we can more efficiently pack
1532 small variables in the stack frame. Continue to defer at -O2. */
1533 if (toplevel && optimize < 2)
1534 return false;
1535
1536 /* Without optimization, *most* variables are allocated from the
1537 stack, which makes the quadratic problem large exactly when we
c22cacf3 1538 want compilation to proceed as quickly as possible. On the
1f6d3a08
RH
1539 other hand, we don't want the function's stack frame size to
1540 get completely out of hand. So we avoid adding scalars and
1541 "small" aggregates to the list at all. */
ee2e8462 1542 if (optimize == 0 && smallish)
1f6d3a08
RH
1543 return false;
1544
1545 return true;
1546}
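/* Illustrative note (added commentary, not in the original source): assuming
   neither -fstack-protector nor AddressSanitizer stack instrumentation is
   active, at -O0 variables in the outermost scope and variables smaller than
   --param min-size-for-stack-sharing are allocated immediately (the function
   returns false), while everything else is deferred so it can be coalesced
   by partition_stack_vars.  */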
1547
1548/* A subroutine of expand_used_vars. Expand one variable according to
2a7e31df 1549 its flavor. Variables to be placed on the stack are not actually
b8698a0f 1550 expanded yet, merely recorded.
ff28a94d
JH
1551 When REALLY_EXPAND is false, only add stack values to be allocated.
1552 Return the stack usage this variable is supposed to take.
1553*/
1f6d3a08 1554
ff28a94d
JH
1555static HOST_WIDE_INT
1556expand_one_var (tree var, bool toplevel, bool really_expand)
1f6d3a08 1557{
3a42502d 1558 unsigned int align = BITS_PER_UNIT;
4e3825db 1559 tree origvar = var;
3a42502d 1560
4e3825db
MM
1561 var = SSAVAR (var);
1562
3a42502d 1563 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
2e3f842f 1564 {
9d7d6446
JB
1565 if (is_global_var (var))
1566 return 0;
1567
2e3f842f
L
1568 /* Because we don't know if VAR will be in register or on stack,
1569 we conservatively assume it will be on stack even if VAR is
1570 eventually put into register after RA pass. For non-automatic
1571 variables, which won't be on stack, we collect alignment of
3396aba5
JJ
1572 type and ignore user specified alignment. Similarly for
1573 SSA_NAMEs for which use_register_for_decl returns true. */
1574 if (TREE_STATIC (var)
1575 || DECL_EXTERNAL (var)
1576 || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
ae58e548
JJ
1577 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1578 TYPE_MODE (TREE_TYPE (var)),
1579 TYPE_ALIGN (TREE_TYPE (var)));
f3184b4c
JJ
1580 else if (DECL_HAS_VALUE_EXPR_P (var)
1581 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1582 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1583 or variables which were assigned a stack slot already by
1584 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1585 changed from the offset chosen to it. */
1586 align = crtl->stack_alignment_estimated;
2e3f842f 1587 else
ae58e548 1588 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
2e3f842f 1589
3a42502d
RH
1590 /* If the variable alignment is very large we'll dynamically allocate
1591 it, which means that the in-frame portion is just a pointer. */
1592 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1593 align = POINTER_SIZE;
1594 }
1595
1f9ceff1 1596 record_alignment_for_reg_var (align);
3a42502d 1597
4e3825db
MM
1598 if (TREE_CODE (origvar) == SSA_NAME)
1599 {
1600 gcc_assert (TREE_CODE (var) != VAR_DECL
1601 || (!DECL_EXTERNAL (var)
1602 && !DECL_HAS_VALUE_EXPR_P (var)
1603 && !TREE_STATIC (var)
4e3825db
MM
1604 && TREE_TYPE (var) != error_mark_node
1605 && !DECL_HARD_REGISTER (var)
1606 && really_expand));
1607 }
1608 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
4846b435 1609 ;
1f6d3a08
RH
1610 else if (DECL_EXTERNAL (var))
1611 ;
833b3afe 1612 else if (DECL_HAS_VALUE_EXPR_P (var))
1f6d3a08
RH
1613 ;
1614 else if (TREE_STATIC (var))
7e8b322a 1615 ;
eb7adebc 1616 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1f6d3a08
RH
1617 ;
1618 else if (TREE_TYPE (var) == error_mark_node)
ff28a94d
JH
1619 {
1620 if (really_expand)
1621 expand_one_error_var (var);
1622 }
4e3825db 1623 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
ff28a94d
JH
1624 {
1625 if (really_expand)
c218f6e8
JM
1626 {
1627 expand_one_hard_reg_var (var);
1628 if (!DECL_HARD_REGISTER (var))
1629 /* Invalid register specification. */
1630 expand_one_error_var (var);
1631 }
ff28a94d 1632 }
1f6d3a08 1633 else if (use_register_for_decl (var))
ff28a94d
JH
1634 {
1635 if (really_expand)
4e3825db 1636 expand_one_register_var (origvar);
ff28a94d 1637 }
56099f00 1638 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
7604eb4e 1639 {
56099f00 1640 /* Reject variables which cover more than half of the address-space. */
7604eb4e
JJ
1641 if (really_expand)
1642 {
1643 error ("size of variable %q+D is too large", var);
1644 expand_one_error_var (var);
1645 }
1646 }
1f6d3a08 1647 else if (defer_stack_allocation (var, toplevel))
4e3825db 1648 add_stack_var (origvar);
1f6d3a08 1649 else
ff28a94d 1650 {
bd9f1b4b 1651 if (really_expand)
de0fb905
AB
1652 {
1653 if (lookup_attribute ("naked",
1654 DECL_ATTRIBUTES (current_function_decl)))
1655 error ("cannot allocate stack for variable %q+D, naked function.",
1656 var);
1657
1658 expand_one_stack_var (origvar);
1659 }
1660
1661
ae7e9ddd 1662 return tree_to_uhwi (DECL_SIZE_UNIT (var));
ff28a94d
JH
1663 }
1664 return 0;
1f6d3a08
RH
1665}
1666
1667/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1668 expanding variables. Those variables that can be put into registers
1669 are allocated pseudos; those that can't are put on the stack.
1670
1671 TOPLEVEL is true if this is the outermost BLOCK. */
1672
1673static void
1674expand_used_vars_for_block (tree block, bool toplevel)
1675{
1f6d3a08
RH
1676 tree t;
1677
1f6d3a08 1678 /* Expand all variables at this level. */
910ad8de 1679 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1ace6185
JJ
1680 if (TREE_USED (t)
1681 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1682 || !DECL_NONSHAREABLE (t)))
ff28a94d 1683 expand_one_var (t, toplevel, true);
1f6d3a08 1684
1f6d3a08
RH
1685 /* Expand all variables at containing levels. */
1686 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1687 expand_used_vars_for_block (t, false);
1f6d3a08
RH
1688}
1689
1690/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1691 and clear TREE_USED on all local variables. */
1692
1693static void
1694clear_tree_used (tree block)
1695{
1696 tree t;
1697
910ad8de 1698 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1f6d3a08 1699 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1ace6185
JJ
1700 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1701 || !DECL_NONSHAREABLE (t))
1f6d3a08
RH
1702 TREE_USED (t) = 0;
1703
1704 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1705 clear_tree_used (t);
1706}
1707
f6bc1c4a
HS
1708enum {
1709 SPCT_FLAG_DEFAULT = 1,
1710 SPCT_FLAG_ALL = 2,
5434dc07
MD
1711 SPCT_FLAG_STRONG = 3,
1712 SPCT_FLAG_EXPLICIT = 4
f6bc1c4a
HS
1713};
1714
7d69de61
RH
1715/* Examine TYPE and determine a bit mask of the following features. */
1716
1717#define SPCT_HAS_LARGE_CHAR_ARRAY 1
1718#define SPCT_HAS_SMALL_CHAR_ARRAY 2
1719#define SPCT_HAS_ARRAY 4
1720#define SPCT_HAS_AGGREGATE 8
1721
1722static unsigned int
1723stack_protect_classify_type (tree type)
1724{
1725 unsigned int ret = 0;
1726 tree t;
1727
1728 switch (TREE_CODE (type))
1729 {
1730 case ARRAY_TYPE:
1731 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1732 if (t == char_type_node
1733 || t == signed_char_type_node
1734 || t == unsigned_char_type_node)
1735 {
15362b89
JJ
1736 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1737 unsigned HOST_WIDE_INT len;
7d69de61 1738
15362b89 1739 if (!TYPE_SIZE_UNIT (type)
cc269bb6 1740 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
15362b89 1741 len = max;
7d69de61 1742 else
ae7e9ddd 1743 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
7d69de61
RH
1744
1745 if (len < max)
1746 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1747 else
1748 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1749 }
1750 else
1751 ret = SPCT_HAS_ARRAY;
1752 break;
1753
1754 case UNION_TYPE:
1755 case QUAL_UNION_TYPE:
1756 case RECORD_TYPE:
1757 ret = SPCT_HAS_AGGREGATE;
1758 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1759 if (TREE_CODE (t) == FIELD_DECL)
1760 ret |= stack_protect_classify_type (TREE_TYPE (t));
1761 break;
1762
1763 default:
1764 break;
1765 }
1766
1767 return ret;
1768}
1769
a4d05547
KH
1770/* Return nonzero if DECL should be segregated into the "vulnerable" upper
1771 part of the local stack frame. Remember if we ever return nonzero for
7d69de61
RH
1772 any variable in this function. The return value is the phase number in
1773 which the variable should be allocated. */
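/* For example, under -fstack-protector-strong a local 'char buf[64]' is
   placed in phase 1, 'int arr[16]' in phase 2, and a plain scalar in
   phase 0 (not reordered); with plain -fstack-protector only character
   arrays at least as large as the ssp-buffer-size parameter get a nonzero
   phase.  Phase 1 and 2 variables are allocated before the remaining
   locals so that they end up adjacent to the guard. */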
1774
1775static int
1776stack_protect_decl_phase (tree decl)
1777{
1778 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1779 int ret = 0;
1780
1781 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1782 has_short_buffer = true;
1783
f6bc1c4a 1784 if (flag_stack_protect == SPCT_FLAG_ALL
5434dc07
MD
1785 || flag_stack_protect == SPCT_FLAG_STRONG
1786 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1787 && lookup_attribute ("stack_protect",
1788 DECL_ATTRIBUTES (current_function_decl))))
7d69de61
RH
1789 {
1790 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1791 && !(bits & SPCT_HAS_AGGREGATE))
1792 ret = 1;
1793 else if (bits & SPCT_HAS_ARRAY)
1794 ret = 2;
1795 }
1796 else
1797 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1798
1799 if (ret)
1800 has_protected_decls = true;
1801
1802 return ret;
1803}
1804
1805/* Two helper routines that check for phase 1 and phase 2. These are used
1806 as callbacks for expand_stack_vars. */
1807
1808static bool
f3ddd692
JJ
1809stack_protect_decl_phase_1 (size_t i)
1810{
1811 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1812}
1813
1814static bool
1815stack_protect_decl_phase_2 (size_t i)
7d69de61 1816{
f3ddd692 1817 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
7d69de61
RH
1818}
1819
f3ddd692
JJ
 1820 /* And a helper function that checks for the asan phase (with the stack
 1821 protector it is phase 3). This is used as a callback for expand_stack_vars.
1822 Returns true if any of the vars in the partition need to be protected. */
1823
7d69de61 1824static bool
f3ddd692 1825asan_decl_phase_3 (size_t i)
7d69de61 1826{
f3ddd692
JJ
1827 while (i != EOC)
1828 {
1829 if (asan_protect_stack_decl (stack_vars[i].decl))
1830 return true;
1831 i = stack_vars[i].next;
1832 }
1833 return false;
7d69de61
RH
1834}
1835
1836/* Ensure that variables in different stack protection phases conflict
1837 so that they are not merged and share the same stack slot. */
1838
1839static void
1840add_stack_protection_conflicts (void)
1841{
1842 size_t i, j, n = stack_vars_num;
1843 unsigned char *phase;
1844
1845 phase = XNEWVEC (unsigned char, n);
1846 for (i = 0; i < n; ++i)
1847 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1848
1849 for (i = 0; i < n; ++i)
1850 {
1851 unsigned char ph_i = phase[i];
9b44f5d9 1852 for (j = i + 1; j < n; ++j)
7d69de61
RH
1853 if (ph_i != phase[j])
1854 add_stack_var_conflict (i, j);
1855 }
1856
1857 XDELETEVEC (phase);
1858}
1859
1860/* Create a decl for the guard at the top of the stack frame. */
1861
1862static void
1863create_stack_guard (void)
1864{
c2255bc4
AH
1865 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1866 VAR_DECL, NULL, ptr_type_node);
7d69de61
RH
1867 TREE_THIS_VOLATILE (guard) = 1;
1868 TREE_USED (guard) = 1;
1869 expand_one_stack_var (guard);
cb91fab0 1870 crtl->stack_protect_guard = guard;
7d69de61
RH
1871}
1872
ff28a94d 1873/* Prepare for expanding variables. */
b8698a0f 1874static void
ff28a94d
JH
1875init_vars_expansion (void)
1876{
3f9b14ff
SB
1877 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1878 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
ff28a94d 1879
3f9b14ff 1880 /* A map from decl to stack partition. */
39c8aaa4 1881 decl_to_stack_part = new hash_map<tree, size_t>;
ff28a94d
JH
1882
1883 /* Initialize local stack smashing state. */
1884 has_protected_decls = false;
1885 has_short_buffer = false;
1886}
1887
1888/* Free up stack variable graph data. */
1889static void
1890fini_vars_expansion (void)
1891{
3f9b14ff
SB
1892 bitmap_obstack_release (&stack_var_bitmap_obstack);
1893 if (stack_vars)
1894 XDELETEVEC (stack_vars);
1895 if (stack_vars_sorted)
1896 XDELETEVEC (stack_vars_sorted);
ff28a94d 1897 stack_vars = NULL;
9b44f5d9 1898 stack_vars_sorted = NULL;
ff28a94d 1899 stack_vars_alloc = stack_vars_num = 0;
39c8aaa4 1900 delete decl_to_stack_part;
47598145 1901 decl_to_stack_part = NULL;
ff28a94d
JH
1902}
1903
30925d94
AO
1904/* Make a fair guess for the size of the stack frame of the function
 1905 in NODE. This doesn't have to be exact; the result is only used in
1906 the inline heuristics. So we don't want to run the full stack var
1907 packing algorithm (which is quadratic in the number of stack vars).
1908 Instead, we calculate the total size of all stack vars. This turns
1909 out to be a pretty fair estimate -- packing of stack vars doesn't
1910 happen very often. */
b5a430f3 1911
ff28a94d 1912HOST_WIDE_INT
30925d94 1913estimated_stack_frame_size (struct cgraph_node *node)
ff28a94d
JH
1914{
1915 HOST_WIDE_INT size = 0;
b5a430f3 1916 size_t i;
bb7e6d55 1917 tree var;
67348ccc 1918 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
30925d94 1919
bb7e6d55 1920 push_cfun (fn);
ff28a94d 1921
3f9b14ff
SB
1922 init_vars_expansion ();
1923
824f71b9
RG
1924 FOR_EACH_LOCAL_DECL (fn, i, var)
1925 if (auto_var_in_fn_p (var, fn->decl))
1926 size += expand_one_var (var, true, false);
b5a430f3 1927
ff28a94d
JH
1928 if (stack_vars_num > 0)
1929 {
b5a430f3
SB
1930 /* Fake sorting the stack vars for account_stack_vars (). */
1931 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1932 for (i = 0; i < stack_vars_num; ++i)
1933 stack_vars_sorted[i] = i;
ff28a94d 1934 size += account_stack_vars ();
ff28a94d 1935 }
3f9b14ff
SB
1936
1937 fini_vars_expansion ();
2e1ec94f 1938 pop_cfun ();
ff28a94d
JH
1939 return size;
1940}
1941
f6bc1c4a
HS
1942/* Helper routine to check if a record or union contains an array field. */
1943
1944static int
1945record_or_union_type_has_array_p (const_tree tree_type)
1946{
1947 tree fields = TYPE_FIELDS (tree_type);
1948 tree f;
1949
1950 for (f = fields; f; f = DECL_CHAIN (f))
1951 if (TREE_CODE (f) == FIELD_DECL)
1952 {
1953 tree field_type = TREE_TYPE (f);
1954 if (RECORD_OR_UNION_TYPE_P (field_type)
1955 && record_or_union_type_has_array_p (field_type))
1956 return 1;
1957 if (TREE_CODE (field_type) == ARRAY_TYPE)
1958 return 1;
1959 }
1960 return 0;
1961}
1962
6545746e
FW
1963/* Check if the current function has local referenced variables that
1964 have their addresses taken, contain an array, or are arrays. */
1965
1966static bool
1967stack_protect_decl_p ()
1968{
1969 unsigned i;
1970 tree var;
1971
1972 FOR_EACH_LOCAL_DECL (cfun, i, var)
1973 if (!is_global_var (var))
1974 {
1975 tree var_type = TREE_TYPE (var);
1976 if (TREE_CODE (var) == VAR_DECL
1977 && (TREE_CODE (var_type) == ARRAY_TYPE
1978 || TREE_ADDRESSABLE (var)
1979 || (RECORD_OR_UNION_TYPE_P (var_type)
1980 && record_or_union_type_has_array_p (var_type))))
1981 return true;
1982 }
1983 return false;
1984}
1985
1986/* Check if the current function has calls that use a return slot. */
1987
1988static bool
1989stack_protect_return_slot_p ()
1990{
1991 basic_block bb;
1992
1993 FOR_ALL_BB_FN (bb, cfun)
1994 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
1995 !gsi_end_p (gsi); gsi_next (&gsi))
1996 {
355fe088 1997 gimple *stmt = gsi_stmt (gsi);
6545746e
FW
1998 /* This assumes that calls to internal-only functions never
1999 use a return slot. */
2000 if (is_gimple_call (stmt)
2001 && !gimple_call_internal_p (stmt)
2002 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2003 gimple_call_fndecl (stmt)))
2004 return true;
2005 }
2006 return false;
2007}
2008
1f6d3a08 2009/* Expand all variables used in the function. */
727a31fa 2010
b47aae36 2011static rtx_insn *
727a31fa
RH
2012expand_used_vars (void)
2013{
c021f10b 2014 tree var, outer_block = DECL_INITIAL (current_function_decl);
6e1aa848 2015 vec<tree> maybe_local_decls = vNULL;
b47aae36 2016 rtx_insn *var_end_seq = NULL;
4e3825db 2017 unsigned i;
c021f10b 2018 unsigned len;
f6bc1c4a 2019 bool gen_stack_protect_signal = false;
727a31fa 2020
1f6d3a08
RH
2021 /* Compute the phase of the stack frame for this function. */
2022 {
2023 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2024 int off = STARTING_FRAME_OFFSET % align;
2025 frame_phase = off ? align - off : 0;
2026 }
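 /* For instance, with a 16-byte preferred stack boundary and a
    STARTING_FRAME_OFFSET of 8, off is 8 and frame_phase becomes 8;
    when the frame already starts on an alignment boundary the phase
    is simply 0. */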
727a31fa 2027
3f9b14ff
SB
2028 /* Set TREE_USED on all variables in the local_decls. */
2029 FOR_EACH_LOCAL_DECL (cfun, i, var)
2030 TREE_USED (var) = 1;
2031 /* Clear TREE_USED on all variables associated with a block scope. */
2032 clear_tree_used (DECL_INITIAL (current_function_decl));
2033
ff28a94d 2034 init_vars_expansion ();
7d69de61 2035
8f51aa6b
IZ
2036 if (targetm.use_pseudo_pic_reg ())
2037 pic_offset_table_rtx = gen_reg_rtx (Pmode);
2038
4e3825db
MM
2039 for (i = 0; i < SA.map->num_partitions; i++)
2040 {
f11a7b6d
AO
2041 if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2042 continue;
2043
4e3825db
MM
2044 tree var = partition_to_var (SA.map, i);
2045
ea057359 2046 gcc_assert (!virtual_operand_p (var));
70b5e7dc 2047
1f9ceff1 2048 expand_one_ssa_partition (var);
64d7fb90 2049 }
7eb9f42e 2050
f6bc1c4a 2051 if (flag_stack_protect == SPCT_FLAG_STRONG)
6545746e
FW
2052 gen_stack_protect_signal
2053 = stack_protect_decl_p () || stack_protect_return_slot_p ();
f6bc1c4a 2054
cb91fab0 2055 /* At this point all variables on the local_decls with TREE_USED
1f6d3a08 2056 set are not associated with any block scope. Lay them out. */
c021f10b 2057
9771b263 2058 len = vec_safe_length (cfun->local_decls);
c021f10b 2059 FOR_EACH_LOCAL_DECL (cfun, i, var)
1f6d3a08 2060 {
1f6d3a08
RH
2061 bool expand_now = false;
2062
4e3825db
MM
2063 /* Expanded above already. */
2064 if (is_gimple_reg (var))
eb7adebc
MM
2065 {
2066 TREE_USED (var) = 0;
3adcf52c 2067 goto next;
eb7adebc 2068 }
1f6d3a08
RH
2069 /* We didn't set a block for static or extern because it's hard
2070 to tell the difference between a global variable (re)declared
2071 in a local scope, and one that's really declared there to
2072 begin with. And it doesn't really matter much, since we're
2073 not giving them stack space. Expand them now. */
4e3825db 2074 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1f6d3a08
RH
2075 expand_now = true;
2076
ee2e8462
EB
2077 /* Expand variables not associated with any block now. Those created by
2078 the optimizers could be live anywhere in the function. Those that
2079 could possibly have been scoped originally and detached from their
2080 block will have their allocation deferred so we coalesce them with
2081 others when optimization is enabled. */
1f6d3a08
RH
2082 else if (TREE_USED (var))
2083 expand_now = true;
2084
2085 /* Finally, mark all variables on the list as used. We'll use
2086 this in a moment when we expand those associated with scopes. */
2087 TREE_USED (var) = 1;
2088
2089 if (expand_now)
3adcf52c
JM
2090 expand_one_var (var, true, true);
2091
2092 next:
2093 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
802e9f8e 2094 {
3adcf52c
JM
2095 rtx rtl = DECL_RTL_IF_SET (var);
2096
2097 /* Keep artificial non-ignored vars in cfun->local_decls
2098 chain until instantiate_decls. */
2099 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 2100 add_local_decl (cfun, var);
6c6366f6 2101 else if (rtl == NULL_RTX)
c021f10b
NF
2102 /* If rtl isn't set yet, which can happen e.g. with
2103 -fstack-protector, retry before returning from this
2104 function. */
9771b263 2105 maybe_local_decls.safe_push (var);
802e9f8e 2106 }
1f6d3a08 2107 }
1f6d3a08 2108
c021f10b
NF
2109 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2110
2111 +-----------------+-----------------+
2112 | ...processed... | ...duplicates...|
2113 +-----------------+-----------------+
2114 ^
2115 +-- LEN points here.
2116
2117 We just want the duplicates, as those are the artificial
2118 non-ignored vars that we want to keep until instantiate_decls.
2119 Move them down and truncate the array. */
9771b263
DN
2120 if (!vec_safe_is_empty (cfun->local_decls))
2121 cfun->local_decls->block_remove (0, len);
c021f10b 2122
1f6d3a08
RH
2123 /* At this point, all variables within the block tree with TREE_USED
2124 set are actually used by the optimized function. Lay them out. */
2125 expand_used_vars_for_block (outer_block, true);
2126
2127 if (stack_vars_num > 0)
2128 {
47598145 2129 add_scope_conflicts ();
1f6d3a08 2130
c22cacf3 2131 /* If stack protection is enabled, we don't share space between
7d69de61 2132 vulnerable data and non-vulnerable data. */
5434dc07
MD
2133 if (flag_stack_protect != 0
2134 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2135 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2136 && lookup_attribute ("stack_protect",
2137 DECL_ATTRIBUTES (current_function_decl)))))
7d69de61
RH
2138 add_stack_protection_conflicts ();
2139
c22cacf3 2140 /* Now that we have collected all stack variables, and have computed a
1f6d3a08
RH
2141 minimal interference graph, attempt to save some stack space. */
2142 partition_stack_vars ();
2143 if (dump_file)
2144 dump_stack_var_partition ();
7d69de61
RH
2145 }
2146
f6bc1c4a
HS
2147 switch (flag_stack_protect)
2148 {
2149 case SPCT_FLAG_ALL:
2150 create_stack_guard ();
2151 break;
2152
2153 case SPCT_FLAG_STRONG:
2154 if (gen_stack_protect_signal
5434dc07
MD
2155 || cfun->calls_alloca || has_protected_decls
2156 || lookup_attribute ("stack_protect",
2157 DECL_ATTRIBUTES (current_function_decl)))
f6bc1c4a
HS
2158 create_stack_guard ();
2159 break;
2160
2161 case SPCT_FLAG_DEFAULT:
5434dc07
MD
2162 if (cfun->calls_alloca || has_protected_decls
2163 || lookup_attribute ("stack_protect",
2164 DECL_ATTRIBUTES (current_function_decl)))
c3284718 2165 create_stack_guard ();
f6bc1c4a
HS
2166 break;
2167
5434dc07
MD
2168 case SPCT_FLAG_EXPLICIT:
2169 if (lookup_attribute ("stack_protect",
2170 DECL_ATTRIBUTES (current_function_decl)))
2171 create_stack_guard ();
2172 break;
f6bc1c4a
HS
2173 default:
2174 ;
2175 }
1f6d3a08 2176
7d69de61
RH
2177 /* Assign rtl to each variable based on these partitions. */
2178 if (stack_vars_num > 0)
2179 {
f3ddd692
JJ
2180 struct stack_vars_data data;
2181
6e1aa848
DN
2182 data.asan_vec = vNULL;
2183 data.asan_decl_vec = vNULL;
e361382f
JJ
2184 data.asan_base = NULL_RTX;
2185 data.asan_alignb = 0;
f3ddd692 2186
7d69de61
RH
2187 /* Reorder decls to be protected by iterating over the variables
2188 array multiple times, and allocating out of each phase in turn. */
c22cacf3 2189 /* ??? We could probably integrate this into the qsort we did
7d69de61
RH
2190 earlier, such that we naturally see these variables first,
2191 and thus naturally allocate things in the right order. */
2192 if (has_protected_decls)
2193 {
2194 /* Phase 1 contains only character arrays. */
f3ddd692 2195 expand_stack_vars (stack_protect_decl_phase_1, &data);
7d69de61
RH
2196
2197 /* Phase 2 contains other kinds of arrays. */
5434dc07
MD
2198 if (flag_stack_protect == SPCT_FLAG_ALL
2199 || flag_stack_protect == SPCT_FLAG_STRONG
2200 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2201 && lookup_attribute ("stack_protect",
2202 DECL_ATTRIBUTES (current_function_decl))))
f3ddd692 2203 expand_stack_vars (stack_protect_decl_phase_2, &data);
7d69de61
RH
2204 }
2205
c461d263 2206 if (asan_sanitize_stack_p ())
f3ddd692
JJ
2207 /* Phase 3, any partitions that need asan protection
2208 in addition to phase 1 and 2. */
2209 expand_stack_vars (asan_decl_phase_3, &data);
2210
9771b263 2211 if (!data.asan_vec.is_empty ())
f3ddd692
JJ
2212 {
2213 HOST_WIDE_INT prev_offset = frame_offset;
e361382f
JJ
2214 HOST_WIDE_INT offset, sz, redzonesz;
2215 redzonesz = ASAN_RED_ZONE_SIZE;
2216 sz = data.asan_vec[0] - prev_offset;
2217 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2218 && data.asan_alignb <= 4096
3dc87cc0 2219 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
e361382f
JJ
2220 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2221 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
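 /* For instance, assuming ASAN_RED_ZONE_SIZE is 32: with sz == 40
    and asan_alignb == 64 this gives
    redzonesz = ((40 + 32 + 63) & ~63) - 40 = 88, so the variables
    plus their red zone end on a 64-byte boundary. */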
2222 offset
2223 = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
9771b263
DN
2224 data.asan_vec.safe_push (prev_offset);
2225 data.asan_vec.safe_push (offset);
e5dcd695
LZ
2226 /* Leave space for alignment if STRICT_ALIGNMENT. */
2227 if (STRICT_ALIGNMENT)
2228 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2229 << ASAN_SHADOW_SHIFT)
2230 / BITS_PER_UNIT, 1);
f3ddd692
JJ
2231
2232 var_end_seq
2233 = asan_emit_stack_protection (virtual_stack_vars_rtx,
e361382f
JJ
2234 data.asan_base,
2235 data.asan_alignb,
9771b263 2236 data.asan_vec.address (),
e361382f 2237 data.asan_decl_vec.address (),
9771b263 2238 data.asan_vec.length ());
f3ddd692
JJ
2239 }
2240
2241 expand_stack_vars (NULL, &data);
2242
9771b263
DN
2243 data.asan_vec.release ();
2244 data.asan_decl_vec.release ();
1f6d3a08
RH
2245 }
2246
3f9b14ff
SB
2247 fini_vars_expansion ();
2248
6c6366f6
JJ
2249 /* If there were any artificial non-ignored vars without rtl
2250 found earlier, see if deferred stack allocation hasn't assigned
2251 rtl to them. */
9771b263 2252 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
6c6366f6 2253 {
6c6366f6
JJ
2254 rtx rtl = DECL_RTL_IF_SET (var);
2255
6c6366f6
JJ
2256 /* Keep artificial non-ignored vars in cfun->local_decls
2257 chain until instantiate_decls. */
2258 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 2259 add_local_decl (cfun, var);
6c6366f6 2260 }
9771b263 2261 maybe_local_decls.release ();
6c6366f6 2262
1f6d3a08
RH
2263 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2264 if (STACK_ALIGNMENT_NEEDED)
2265 {
2266 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2267 if (!FRAME_GROWS_DOWNWARD)
2268 frame_offset += align - 1;
2269 frame_offset &= -align;
2270 }
f3ddd692
JJ
2271
2272 return var_end_seq;
727a31fa
RH
2273}
2274
2275
b7211528
SB
2276/* If we need to produce a detailed dump, print the tree representation
2277 for STMT to the dump file. SINCE is the last RTX after which the RTL
2278 generated for STMT should have been appended. */
2279
2280static void
355fe088 2281maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
b7211528
SB
2282{
2283 if (dump_file && (dump_flags & TDF_DETAILS))
2284 {
2285 fprintf (dump_file, "\n;; ");
b5b8b0ac
AO
2286 print_gimple_stmt (dump_file, stmt, 0,
2287 TDF_SLIM | (dump_flags & TDF_LINENO));
b7211528
SB
2288 fprintf (dump_file, "\n");
2289
2290 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2291 }
2292}
2293
8b11009b
ZD
2294/* Maps the blocks that do not contain tree labels to rtx labels. */
2295
134aa83c 2296static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
8b11009b 2297
a9b77cd1
ZD
2298/* Returns the label_rtx expression for a label starting basic block BB. */
2299
1476d1bd 2300static rtx_code_label *
726a989a 2301label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
a9b77cd1 2302{
726a989a
RB
2303 gimple_stmt_iterator gsi;
2304 tree lab;
a9b77cd1
ZD
2305
2306 if (bb->flags & BB_RTL)
2307 return block_label (bb);
2308
134aa83c 2309 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
8b11009b 2310 if (elt)
39c8aaa4 2311 return *elt;
8b11009b
ZD
2312
2313 /* Find the tree label if it is present. */
b8698a0f 2314
726a989a 2315 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
a9b77cd1 2316 {
538dd0b7
DM
2317 glabel *lab_stmt;
2318
2319 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2320 if (!lab_stmt)
a9b77cd1
ZD
2321 break;
2322
726a989a 2323 lab = gimple_label_label (lab_stmt);
a9b77cd1
ZD
2324 if (DECL_NONLOCAL (lab))
2325 break;
2326
1476d1bd 2327 return jump_target_rtx (lab);
a9b77cd1
ZD
2328 }
2329
19f8b229 2330 rtx_code_label *l = gen_label_rtx ();
39c8aaa4
TS
2331 lab_rtx_for_bb->put (bb, l);
2332 return l;
a9b77cd1
ZD
2333}
2334
726a989a 2335
529ff441
MM
2336/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2337 of a basic block where we just expanded the conditional at the end,
315adeda
MM
2338 possibly clean up the CFG and instruction sequence. LAST is the
2339 last instruction before the just emitted jump sequence. */
529ff441
MM
2340
2341static void
b47aae36 2342maybe_cleanup_end_of_block (edge e, rtx_insn *last)
529ff441
MM
2343{
2344 /* Special case: when jumpif decides that the condition is
2345 trivial it emits an unconditional jump (and the necessary
2346 barrier). But we still have two edges, the fallthru one is
2347 wrong. purge_dead_edges would clean this up later. Unfortunately
2348 we have to insert insns (and split edges) before
2349 find_many_sub_basic_blocks and hence before purge_dead_edges.
2350 But splitting edges might create new blocks which depend on the
2351 fact that if there are two edges there's no barrier. So the
2352 barrier would get lost and verify_flow_info would ICE. Instead
2353 of auditing all edge splitters to care for the barrier (which
2354 normally isn't there in a cleaned CFG), fix it here. */
2355 if (BARRIER_P (get_last_insn ()))
2356 {
b47aae36 2357 rtx_insn *insn;
529ff441
MM
2358 remove_edge (e);
2359 /* Now, we have a single successor block, if we have insns to
2360 insert on the remaining edge we potentially will insert
2361 it at the end of this block (if the dest block isn't feasible)
2362 in order to avoid splitting the edge. This insertion will take
2363 place in front of the last jump. But we might have emitted
2364 multiple jumps (conditional and one unconditional) to the
2365 same destination. Inserting in front of the last one then
2366 is a problem. See PR 40021. We fix this by deleting all
2367 jumps except the last unconditional one. */
2368 insn = PREV_INSN (get_last_insn ());
2369 /* Make sure we have an unconditional jump. Otherwise we're
2370 confused. */
2371 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
315adeda 2372 for (insn = PREV_INSN (insn); insn != last;)
529ff441
MM
2373 {
2374 insn = PREV_INSN (insn);
2375 if (JUMP_P (NEXT_INSN (insn)))
90eb3e33 2376 {
8a269cb7 2377 if (!any_condjump_p (NEXT_INSN (insn)))
90eb3e33
JJ
2378 {
2379 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2380 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2381 }
2382 delete_insn (NEXT_INSN (insn));
2383 }
529ff441
MM
2384 }
2385 }
2386}
2387
726a989a 2388/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
80c7a9eb
RH
2389 Returns a new basic block if we've terminated the current basic
2390 block and created a new one. */
2391
2392static basic_block
538dd0b7 2393expand_gimple_cond (basic_block bb, gcond *stmt)
80c7a9eb
RH
2394{
2395 basic_block new_bb, dest;
2396 edge new_edge;
2397 edge true_edge;
2398 edge false_edge;
b47aae36 2399 rtx_insn *last2, *last;
28ed065e
MM
2400 enum tree_code code;
2401 tree op0, op1;
2402
2403 code = gimple_cond_code (stmt);
2404 op0 = gimple_cond_lhs (stmt);
2405 op1 = gimple_cond_rhs (stmt);
2406 /* We're sometimes presented with such code:
2407 D.123_1 = x < y;
2408 if (D.123_1 != 0)
2409 ...
2410 This would expand to two comparisons which then later might
2411 be cleaned up by combine. But some pattern matchers like if-conversion
2412 work better when there's only one compare, so make up for this
 2413 here as a special exception if TER would have made the same change. */
31348d52 2414 if (SA.values
28ed065e 2415 && TREE_CODE (op0) == SSA_NAME
31348d52
RB
2416 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2417 && TREE_CODE (op1) == INTEGER_CST
2418 && ((gimple_cond_code (stmt) == NE_EXPR
2419 && integer_zerop (op1))
2420 || (gimple_cond_code (stmt) == EQ_EXPR
2421 && integer_onep (op1)))
28ed065e
MM
2422 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2423 {
355fe088 2424 gimple *second = SSA_NAME_DEF_STMT (op0);
e83f4b68 2425 if (gimple_code (second) == GIMPLE_ASSIGN)
28ed065e 2426 {
e83f4b68
MM
2427 enum tree_code code2 = gimple_assign_rhs_code (second);
2428 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2429 {
2430 code = code2;
2431 op0 = gimple_assign_rhs1 (second);
2432 op1 = gimple_assign_rhs2 (second);
2433 }
2d52a3a1
ZC
2434 /* If jumps are cheap and the target does not support conditional
2435 compare, turn some more codes into jumpy sequences. */
2436 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2437 && targetm.gen_ccmp_first == NULL)
e83f4b68
MM
2438 {
2439 if ((code2 == BIT_AND_EXPR
2440 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2441 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2442 || code2 == TRUTH_AND_EXPR)
2443 {
2444 code = TRUTH_ANDIF_EXPR;
2445 op0 = gimple_assign_rhs1 (second);
2446 op1 = gimple_assign_rhs2 (second);
2447 }
2448 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2449 {
2450 code = TRUTH_ORIF_EXPR;
2451 op0 = gimple_assign_rhs1 (second);
2452 op1 = gimple_assign_rhs2 (second);
2453 }
2454 }
28ed065e
MM
2455 }
2456 }
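 /* E.g. a single-bit 'if (a & b)' or an 'if (a || b)' is turned back into
    TRUTH_ANDIF_EXPR / TRUTH_ORIF_EXPR here, so it expands as a
    short-circuit jump sequence when branches are cheap and the target
    has no conditional-compare pattern. */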
b7211528
SB
2457
2458 last2 = last = get_last_insn ();
80c7a9eb
RH
2459
2460 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5368224f 2461 set_curr_insn_location (gimple_location (stmt));
80c7a9eb
RH
2462
2463 /* These flags have no purpose in RTL land. */
2464 true_edge->flags &= ~EDGE_TRUE_VALUE;
2465 false_edge->flags &= ~EDGE_FALSE_VALUE;
2466
2467 /* We can either have a pure conditional jump with one fallthru edge or
2468 two-way jump that needs to be decomposed into two basic blocks. */
a9b77cd1 2469 if (false_edge->dest == bb->next_bb)
80c7a9eb 2470 {
40e90eac
JJ
2471 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2472 true_edge->probability);
726a989a 2473 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2f13f2de 2474 if (true_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2475 set_curr_insn_location (true_edge->goto_locus);
a9b77cd1 2476 false_edge->flags |= EDGE_FALLTHRU;
315adeda 2477 maybe_cleanup_end_of_block (false_edge, last);
80c7a9eb
RH
2478 return NULL;
2479 }
a9b77cd1 2480 if (true_edge->dest == bb->next_bb)
80c7a9eb 2481 {
40e90eac
JJ
2482 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2483 false_edge->probability);
726a989a 2484 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2f13f2de 2485 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2486 set_curr_insn_location (false_edge->goto_locus);
a9b77cd1 2487 true_edge->flags |= EDGE_FALLTHRU;
315adeda 2488 maybe_cleanup_end_of_block (true_edge, last);
80c7a9eb
RH
2489 return NULL;
2490 }
80c7a9eb 2491
40e90eac
JJ
2492 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2493 true_edge->probability);
80c7a9eb 2494 last = get_last_insn ();
2f13f2de 2495 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2496 set_curr_insn_location (false_edge->goto_locus);
a9b77cd1 2497 emit_jump (label_rtx_for_bb (false_edge->dest));
80c7a9eb 2498
1130d5e3 2499 BB_END (bb) = last;
80c7a9eb 2500 if (BARRIER_P (BB_END (bb)))
1130d5e3 2501 BB_END (bb) = PREV_INSN (BB_END (bb));
80c7a9eb
RH
2502 update_bb_for_insn (bb);
2503
2504 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2505 dest = false_edge->dest;
2506 redirect_edge_succ (false_edge, new_bb);
2507 false_edge->flags |= EDGE_FALLTHRU;
2508 new_bb->count = false_edge->count;
2509 new_bb->frequency = EDGE_FREQUENCY (false_edge);
726338f4 2510 add_bb_to_loop (new_bb, bb->loop_father);
80c7a9eb
RH
2511 new_edge = make_edge (new_bb, dest, 0);
2512 new_edge->probability = REG_BR_PROB_BASE;
2513 new_edge->count = new_bb->count;
2514 if (BARRIER_P (BB_END (new_bb)))
1130d5e3 2515 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
80c7a9eb
RH
2516 update_bb_for_insn (new_bb);
2517
726a989a 2518 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
c22cacf3 2519
2f13f2de 2520 if (true_edge->goto_locus != UNKNOWN_LOCATION)
7787b4aa 2521 {
5368224f
DC
2522 set_curr_insn_location (true_edge->goto_locus);
2523 true_edge->goto_locus = curr_insn_location ();
7787b4aa 2524 }
7787b4aa 2525
80c7a9eb
RH
2526 return new_bb;
2527}
2528
0a35513e
AH
2529/* Mark all calls that can have a transaction restart. */
2530
2531static void
355fe088 2532mark_transaction_restart_calls (gimple *stmt)
0a35513e
AH
2533{
2534 struct tm_restart_node dummy;
50979347 2535 tm_restart_node **slot;
0a35513e
AH
2536
2537 if (!cfun->gimple_df->tm_restart)
2538 return;
2539
2540 dummy.stmt = stmt;
50979347 2541 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
0a35513e
AH
2542 if (slot)
2543 {
50979347 2544 struct tm_restart_node *n = *slot;
0a35513e 2545 tree list = n->label_or_list;
b47aae36 2546 rtx_insn *insn;
0a35513e
AH
2547
2548 for (insn = next_real_insn (get_last_insn ());
2549 !CALL_P (insn);
2550 insn = next_real_insn (insn))
2551 continue;
2552
2553 if (TREE_CODE (list) == LABEL_DECL)
2554 add_reg_note (insn, REG_TM, label_rtx (list));
2555 else
2556 for (; list ; list = TREE_CHAIN (list))
2557 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2558 }
2559}
2560
28ed065e
MM
2561/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2562 statement STMT. */
2563
2564static void
538dd0b7 2565expand_call_stmt (gcall *stmt)
28ed065e 2566{
25583c4f 2567 tree exp, decl, lhs;
e23817b3 2568 bool builtin_p;
e7925582 2569 size_t i;
28ed065e 2570
25583c4f
RS
2571 if (gimple_call_internal_p (stmt))
2572 {
2573 expand_internal_call (stmt);
2574 return;
2575 }
2576
4cfe7a6c
RS
2577 /* If this is a call to a built-in function and it has no effect other
2578 than setting the lhs, try to implement it using an internal function
2579 instead. */
2580 decl = gimple_call_fndecl (stmt);
2581 if (gimple_call_lhs (stmt)
2582 && !gimple_has_side_effects (stmt)
2583 && (optimize || (decl && called_as_built_in (decl))))
2584 {
2585 internal_fn ifn = replacement_internal_fn (stmt);
2586 if (ifn != IFN_LAST)
2587 {
2588 expand_internal_call (ifn, stmt);
2589 return;
2590 }
2591 }
2592
01156003 2593 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
089d1227 2594
01156003 2595 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
089d1227 2596 builtin_p = decl && DECL_BUILT_IN (decl);
01156003 2597
e7925582
EB
2598 /* If this is not a builtin function, the function type through which the
2599 call is made may be different from the type of the function. */
2600 if (!builtin_p)
2601 CALL_EXPR_FN (exp)
b25aa0e8
EB
2602 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2603 CALL_EXPR_FN (exp));
e7925582 2604
28ed065e
MM
2605 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2606 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2607
2608 for (i = 0; i < gimple_call_num_args (stmt); i++)
e23817b3
RG
2609 {
2610 tree arg = gimple_call_arg (stmt, i);
355fe088 2611 gimple *def;
e23817b3
RG
2612 /* TER addresses into arguments of builtin functions so we have a
2613 chance to infer more correct alignment information. See PR39954. */
2614 if (builtin_p
2615 && TREE_CODE (arg) == SSA_NAME
2616 && (def = get_gimple_for_ssa_name (arg))
2617 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2618 arg = gimple_assign_rhs1 (def);
2619 CALL_EXPR_ARG (exp, i) = arg;
2620 }
28ed065e 2621
93f28ca7 2622 if (gimple_has_side_effects (stmt))
28ed065e
MM
2623 TREE_SIDE_EFFECTS (exp) = 1;
2624
93f28ca7 2625 if (gimple_call_nothrow_p (stmt))
28ed065e
MM
2626 TREE_NOTHROW (exp) = 1;
2627
2628 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2629 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
63d2a353
MM
2630 if (decl
2631 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13e49da9
TV
2632 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2633 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
63d2a353
MM
2634 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2635 else
2636 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
28ed065e
MM
2637 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2638 SET_EXPR_LOCATION (exp, gimple_location (stmt));
d5e254e1 2639 CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
28ed065e 2640
ddb555ed
JJ
2641 /* Ensure RTL is created for debug args. */
2642 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2643 {
9771b263 2644 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
ddb555ed
JJ
2645 unsigned int ix;
2646 tree dtemp;
2647
2648 if (debug_args)
9771b263 2649 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
ddb555ed
JJ
2650 {
2651 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2652 expand_debug_expr (dtemp);
2653 }
2654 }
2655
25583c4f 2656 lhs = gimple_call_lhs (stmt);
28ed065e
MM
2657 if (lhs)
2658 expand_assignment (lhs, exp, false);
2659 else
4c437f02 2660 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
0a35513e
AH
2661
2662 mark_transaction_restart_calls (stmt);
28ed065e
MM
2663}
2664
862d0b35
DN
2665
2666/* Generate RTL for an asm statement (explicit assembler code).
2667 STRING is a STRING_CST node containing the assembler code text,
2668 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2669 insn is volatile; don't optimize it. */
2670
2671static void
2672expand_asm_loc (tree string, int vol, location_t locus)
2673{
2674 rtx body;
2675
2676 if (TREE_CODE (string) == ADDR_EXPR)
2677 string = TREE_OPERAND (string, 0);
2678
2679 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2680 ggc_strdup (TREE_STRING_POINTER (string)),
2681 locus);
2682
2683 MEM_VOLATILE_P (body) = vol;
2684
2685 emit_insn (body);
2686}
2687
2688/* Return the number of times character C occurs in string S. */
2689static int
2690n_occurrences (int c, const char *s)
2691{
2692 int n = 0;
2693 while (*s)
2694 n += (*s++ == c);
2695 return n;
2696}
2697
2698/* A subroutine of expand_asm_operands. Check that all operands have
2699 the same number of alternatives. Return true if so. */
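/* For example, constraints "=r,m" and "r,r" are consistent (two
   alternatives each), while mixing "=r,m" with a plain "r" constraint
   would be rejected. */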
2700
2701static bool
7ca35180 2702check_operand_nalternatives (const vec<const char *> &constraints)
862d0b35 2703{
7ca35180
RH
2704 unsigned len = constraints.length();
2705 if (len > 0)
862d0b35 2706 {
7ca35180 2707 int nalternatives = n_occurrences (',', constraints[0]);
862d0b35
DN
2708
2709 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2710 {
2711 error ("too many alternatives in %<asm%>");
2712 return false;
2713 }
2714
7ca35180
RH
2715 for (unsigned i = 1; i < len; ++i)
2716 if (n_occurrences (',', constraints[i]) != nalternatives)
2717 {
2718 error ("operand constraints for %<asm%> differ "
2719 "in number of alternatives");
2720 return false;
2721 }
862d0b35 2722 }
862d0b35
DN
2723 return true;
2724}
2725
2726/* Check for overlap between registers marked in CLOBBERED_REGS and
2727 anything inappropriate in T. Emit error and return the register
2728 variable definition for error, NULL_TREE for ok. */
2729
2730static bool
2731tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2732{
2733 /* Conflicts between asm-declared register variables and the clobber
2734 list are not allowed. */
2735 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2736
2737 if (overlap)
2738 {
2739 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2740 DECL_NAME (overlap));
2741
2742 /* Reset registerness to stop multiple errors emitted for a single
2743 variable. */
2744 DECL_REGISTER (overlap) = 0;
2745 return true;
2746 }
2747
2748 return false;
2749}
2750
2751/* Generate RTL for an asm statement with arguments.
2752 STRING is the instruction template.
2753 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2754 Each output or input has an expression in the TREE_VALUE and
2755 a tree list in TREE_PURPOSE which in turn contains a constraint
2756 name in TREE_VALUE (or NULL_TREE) and a constraint string
2757 in TREE_PURPOSE.
2758 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2759 that is clobbered by this insn.
2760
2761 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2762 should be the fallthru basic block of the asm goto.
2763
2764 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2765 Some elements of OUTPUTS may be replaced with trees representing temporary
2766 values. The caller should copy those temporary values to the originally
2767 specified lvalues.
2768
2769 VOL nonzero means the insn is volatile; don't optimize it. */
2770
2771static void
6476a8fd 2772expand_asm_stmt (gasm *stmt)
862d0b35 2773{
7ca35180
RH
2774 class save_input_location
2775 {
2776 location_t old;
6476a8fd 2777
7ca35180
RH
2778 public:
2779 explicit save_input_location(location_t where)
6476a8fd 2780 {
7ca35180
RH
2781 old = input_location;
2782 input_location = where;
6476a8fd
RH
2783 }
2784
7ca35180 2785 ~save_input_location()
6476a8fd 2786 {
7ca35180 2787 input_location = old;
6476a8fd 2788 }
7ca35180 2789 };
6476a8fd 2790
7ca35180 2791 location_t locus = gimple_location (stmt);
6476a8fd 2792
7ca35180 2793 if (gimple_asm_input_p (stmt))
6476a8fd 2794 {
7ca35180
RH
2795 const char *s = gimple_asm_string (stmt);
2796 tree string = build_string (strlen (s), s);
2797 expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2798 return;
6476a8fd
RH
2799 }
2800
7ca35180
RH
 2801 /* There are some legacy diagnostics in here; this also avoids a
 2802 sixth parameter to targetm.md_asm_adjust. */
2803 save_input_location s_i_l(locus);
6476a8fd 2804
7ca35180
RH
2805 unsigned noutputs = gimple_asm_noutputs (stmt);
2806 unsigned ninputs = gimple_asm_ninputs (stmt);
2807 unsigned nlabels = gimple_asm_nlabels (stmt);
2808 unsigned i;
2809
2810 /* ??? Diagnose during gimplification? */
2811 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
6476a8fd 2812 {
7ca35180 2813 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
6476a8fd
RH
2814 return;
2815 }
2816
7ca35180
RH
2817 auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2818 auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2819 auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
6476a8fd 2820
7ca35180 2821 /* Copy the gimple vectors into new vectors that we can manipulate. */
862d0b35 2822
7ca35180
RH
2823 output_tvec.safe_grow (noutputs);
2824 input_tvec.safe_grow (ninputs);
2825 constraints.safe_grow (noutputs + ninputs);
862d0b35 2826
7ca35180
RH
2827 for (i = 0; i < noutputs; ++i)
2828 {
2829 tree t = gimple_asm_output_op (stmt, i);
2830 output_tvec[i] = TREE_VALUE (t);
2831 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2832 }
2833 for (i = 0; i < ninputs; i++)
2834 {
2835 tree t = gimple_asm_input_op (stmt, i);
2836 input_tvec[i] = TREE_VALUE (t);
2837 constraints[i + noutputs]
2838 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2839 }
862d0b35 2840
7ca35180
RH
2841 /* ??? Diagnose during gimplification? */
2842 if (! check_operand_nalternatives (constraints))
2843 return;
862d0b35
DN
2844
2845 /* Count the number of meaningful clobbered registers, ignoring what
2846 we would ignore later. */
7ca35180
RH
2847 auto_vec<rtx> clobber_rvec;
2848 HARD_REG_SET clobbered_regs;
862d0b35 2849 CLEAR_HARD_REG_SET (clobbered_regs);
862d0b35 2850
7ca35180
RH
2851 if (unsigned n = gimple_asm_nclobbers (stmt))
2852 {
2853 clobber_rvec.reserve (n);
2854 for (i = 0; i < n; i++)
2855 {
2856 tree t = gimple_asm_clobber_op (stmt, i);
2857 const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2858 int nregs, j;
862d0b35 2859
7ca35180
RH
2860 j = decode_reg_name_and_count (regname, &nregs);
2861 if (j < 0)
862d0b35 2862 {
7ca35180 2863 if (j == -2)
862d0b35 2864 {
7ca35180
RH
2865 /* ??? Diagnose during gimplification? */
2866 error ("unknown register name %qs in %<asm%>", regname);
2867 }
2868 else if (j == -4)
2869 {
2870 rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2871 clobber_rvec.safe_push (x);
2872 }
2873 else
2874 {
2875 /* Otherwise we should have -1 == empty string
2876 or -3 == cc, which is not a register. */
2877 gcc_assert (j == -1 || j == -3);
862d0b35 2878 }
862d0b35 2879 }
7ca35180
RH
2880 else
2881 for (int reg = j; reg < j + nregs; reg++)
2882 {
2883 /* Clobbering the PIC register is an error. */
2884 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2885 {
2886 /* ??? Diagnose during gimplification? */
2887 error ("PIC register clobbered by %qs in %<asm%>",
2888 regname);
2889 return;
2890 }
2891
2892 SET_HARD_REG_BIT (clobbered_regs, reg);
2893 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
2894 clobber_rvec.safe_push (x);
2895 }
862d0b35
DN
2896 }
2897 }
7ca35180 2898 unsigned nclobbers = clobber_rvec.length();
862d0b35
DN
2899
2900 /* First pass over inputs and outputs checks validity and sets
2901 mark_addressable if needed. */
7ca35180 2902 /* ??? Diagnose during gimplification? */
862d0b35 2903
7ca35180 2904 for (i = 0; i < noutputs; ++i)
862d0b35 2905 {
7ca35180 2906 tree val = output_tvec[i];
862d0b35
DN
2907 tree type = TREE_TYPE (val);
2908 const char *constraint;
2909 bool is_inout;
2910 bool allows_reg;
2911 bool allows_mem;
2912
862d0b35
DN
2913 /* Try to parse the output constraint. If that fails, there's
2914 no point in going further. */
2915 constraint = constraints[i];
2916 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2917 &allows_mem, &allows_reg, &is_inout))
2918 return;
2919
2920 if (! allows_reg
2921 && (allows_mem
2922 || is_inout
2923 || (DECL_P (val)
2924 && REG_P (DECL_RTL (val))
2925 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2926 mark_addressable (val);
862d0b35
DN
2927 }
2928
7ca35180 2929 for (i = 0; i < ninputs; ++i)
862d0b35
DN
2930 {
2931 bool allows_reg, allows_mem;
2932 const char *constraint;
2933
862d0b35 2934 constraint = constraints[i + noutputs];
7ca35180
RH
2935 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
2936 constraints.address (),
2937 &allows_mem, &allows_reg))
862d0b35
DN
2938 return;
2939
2940 if (! allows_reg && allows_mem)
7ca35180 2941 mark_addressable (input_tvec[i]);
862d0b35
DN
2942 }
2943
2944 /* Second pass evaluates arguments. */
2945
2946 /* Make sure stack is consistent for asm goto. */
2947 if (nlabels > 0)
2948 do_pending_stack_adjust ();
7ca35180
RH
2949 int old_generating_concat_p = generating_concat_p;
2950
2951 /* Vector of RTX's of evaluated output operands. */
2952 auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
2953 auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
2954 rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
862d0b35 2955
7ca35180
RH
2956 output_rvec.safe_grow (noutputs);
2957
2958 for (i = 0; i < noutputs; ++i)
862d0b35 2959 {
7ca35180 2960 tree val = output_tvec[i];
862d0b35 2961 tree type = TREE_TYPE (val);
7ca35180 2962 bool is_inout, allows_reg, allows_mem, ok;
862d0b35 2963 rtx op;
862d0b35
DN
2964
2965 ok = parse_output_constraint (&constraints[i], i, ninputs,
2966 noutputs, &allows_mem, &allows_reg,
2967 &is_inout);
2968 gcc_assert (ok);
2969
2970 /* If an output operand is not a decl or indirect ref and our constraint
2971 allows a register, make a temporary to act as an intermediate.
7ca35180 2972 Make the asm insn write into that, then we will copy it to
862d0b35
DN
2973 the real output operand. Likewise for promoted variables. */
2974
2975 generating_concat_p = 0;
2976
862d0b35
DN
2977 if ((TREE_CODE (val) == INDIRECT_REF
2978 && allows_mem)
2979 || (DECL_P (val)
2980 && (allows_mem || REG_P (DECL_RTL (val)))
2981 && ! (REG_P (DECL_RTL (val))
2982 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2983 || ! allows_reg
2984 || is_inout)
2985 {
2986 op = expand_expr (val, NULL_RTX, VOIDmode,
2987 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2988 if (MEM_P (op))
2989 op = validize_mem (op);
2990
2991 if (! allows_reg && !MEM_P (op))
2992 error ("output number %d not directly addressable", i);
2993 if ((! allows_mem && MEM_P (op))
2994 || GET_CODE (op) == CONCAT)
2995 {
7ca35180 2996 rtx old_op = op;
862d0b35 2997 op = gen_reg_rtx (GET_MODE (op));
7ca35180
RH
2998
2999 generating_concat_p = old_generating_concat_p;
3000
862d0b35 3001 if (is_inout)
7ca35180
RH
3002 emit_move_insn (op, old_op);
3003
3004 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3005 emit_move_insn (old_op, op);
3006 after_rtl_seq = get_insns ();
3007 after_rtl_end = get_last_insn ();
3008 end_sequence ();
862d0b35
DN
3009 }
3010 }
3011 else
3012 {
3013 op = assign_temp (type, 0, 1);
3014 op = validize_mem (op);
7ca35180
RH
3015 if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3016 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
862d0b35 3017
7ca35180 3018 generating_concat_p = old_generating_concat_p;
862d0b35 3019
7ca35180
RH
3020 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3021 expand_assignment (val, make_tree (type, op), false);
3022 after_rtl_seq = get_insns ();
3023 after_rtl_end = get_last_insn ();
3024 end_sequence ();
862d0b35 3025 }
7ca35180 3026 output_rvec[i] = op;
862d0b35 3027
7ca35180
RH
3028 if (is_inout)
3029 inout_opnum.safe_push (i);
862d0b35
DN
3030 }
3031
7ca35180
RH
3032 auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3033 auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
862d0b35 3034
7ca35180
RH
3035 input_rvec.safe_grow (ninputs);
3036 input_mode.safe_grow (ninputs);
862d0b35 3037
7ca35180 3038 generating_concat_p = 0;
862d0b35 3039
7ca35180 3040 for (i = 0; i < ninputs; ++i)
862d0b35 3041 {
7ca35180
RH
3042 tree val = input_tvec[i];
3043 tree type = TREE_TYPE (val);
3044 bool allows_reg, allows_mem, ok;
862d0b35 3045 const char *constraint;
862d0b35 3046 rtx op;
862d0b35
DN
3047
3048 constraint = constraints[i + noutputs];
7ca35180
RH
3049 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3050 constraints.address (),
3051 &allows_mem, &allows_reg);
862d0b35
DN
3052 gcc_assert (ok);
3053
862d0b35
DN
3054 /* EXPAND_INITIALIZER will not generate code for valid initializer
3055 constants, but will still generate code for other types of operand.
3056 This is the behavior we want for constant constraints. */
3057 op = expand_expr (val, NULL_RTX, VOIDmode,
3058 allows_reg ? EXPAND_NORMAL
3059 : allows_mem ? EXPAND_MEMORY
3060 : EXPAND_INITIALIZER);
3061
3062 /* Never pass a CONCAT to an ASM. */
3063 if (GET_CODE (op) == CONCAT)
3064 op = force_reg (GET_MODE (op), op);
3065 else if (MEM_P (op))
3066 op = validize_mem (op);
3067
3068 if (asm_operand_ok (op, constraint, NULL) <= 0)
3069 {
3070 if (allows_reg && TYPE_MODE (type) != BLKmode)
3071 op = force_reg (TYPE_MODE (type), op);
3072 else if (!allows_mem)
3073 warning (0, "asm operand %d probably doesn%'t match constraints",
3074 i + noutputs);
3075 else if (MEM_P (op))
3076 {
3077 /* We won't recognize either volatile memory or memory
 3078 with a queued address as an available memory_operand
3079 at this point. Ignore it: clearly this *is* a memory. */
3080 }
3081 else
3082 gcc_unreachable ();
3083 }
7ca35180
RH
3084 input_rvec[i] = op;
3085 input_mode[i] = TYPE_MODE (type);
862d0b35
DN
3086 }
3087
862d0b35 3088 /* For in-out operands, copy output rtx to input rtx. */
7ca35180 3089 unsigned ninout = inout_opnum.length();
862d0b35
DN
3090 for (i = 0; i < ninout; i++)
3091 {
3092 int j = inout_opnum[i];
7ca35180 3093 rtx o = output_rvec[j];
862d0b35 3094
7ca35180
RH
3095 input_rvec.safe_push (o);
3096 input_mode.safe_push (GET_MODE (o));
862d0b35 3097
7ca35180 3098 char buffer[16];
862d0b35 3099 sprintf (buffer, "%d", j);
7ca35180
RH
3100 constraints.safe_push (ggc_strdup (buffer));
3101 }
3102 ninputs += ninout;
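 /* As a result, an in-out operand such as "+r" (x) at output position 0
    is also passed as an extra input whose constraint is the matching
    operand number, here the string "0". */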
3103
3104 /* Sometimes we wish to automatically clobber registers across an asm.
3105 Case in point is when the i386 backend moved from cc0 to a hard reg --
3106 maintaining source-level compatibility means automatically clobbering
3107 the flags register. */
3108 rtx_insn *after_md_seq = NULL;
3109 if (targetm.md_asm_adjust)
3110 after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3111 constraints, clobber_rvec,
3112 clobbered_regs);
3113
3114 /* Do not allow the hook to change the output and input count,
3115 lest it mess up the operand numbering. */
3116 gcc_assert (output_rvec.length() == noutputs);
3117 gcc_assert (input_rvec.length() == ninputs);
3118 gcc_assert (constraints.length() == noutputs + ninputs);
3119
3120 /* But it certainly can adjust the clobbers. */
3121 nclobbers = clobber_rvec.length();
3122
3123 /* Third pass checks for easy conflicts. */
 3124 /* ??? Why are we doing this on trees instead of rtx? */
3125
3126 bool clobber_conflict_found = 0;
3127 for (i = 0; i < noutputs; ++i)
3128 if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3129 clobber_conflict_found = 1;
3130 for (i = 0; i < ninputs - ninout; ++i)
3131 if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3132 clobber_conflict_found = 1;
3133
3134 /* Make vectors for the expression-rtx, constraint strings,
3135 and named operands. */
3136
3137 rtvec argvec = rtvec_alloc (ninputs);
3138 rtvec constraintvec = rtvec_alloc (ninputs);
3139 rtvec labelvec = rtvec_alloc (nlabels);
3140
3141 rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3142 : GET_MODE (output_rvec[0])),
3143 ggc_strdup (gimple_asm_string (stmt)),
3144 empty_string, 0, argvec, constraintvec,
3145 labelvec, locus);
3146 MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3147
3148 for (i = 0; i < ninputs; ++i)
3149 {
3150 ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3151 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3152 = gen_rtx_ASM_INPUT_loc (input_mode[i],
3153 constraints[i + noutputs],
3154 locus);
862d0b35
DN
3155 }
3156
3157 /* Copy labels to the vector. */
7ca35180
RH
3158 rtx_code_label *fallthru_label = NULL;
3159 if (nlabels > 0)
3160 {
3161 basic_block fallthru_bb = NULL;
3162 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3163 if (fallthru)
3164 fallthru_bb = fallthru->dest;
3165
3166 for (i = 0; i < nlabels; ++i)
862d0b35 3167 {
7ca35180 3168 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
e67d1102 3169 rtx_insn *r;
7ca35180
RH
3170 /* If asm goto has any labels in the fallthru basic block, use
3171 a label that we emit immediately after the asm goto. Expansion
3172 may insert further instructions into the same basic block after
3173 asm goto and if we don't do this, insertion of instructions on
3174 the fallthru edge might misbehave. See PR58670. */
3175 if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
3176 {
3177 if (fallthru_label == NULL_RTX)
3178 fallthru_label = gen_label_rtx ();
3179 r = fallthru_label;
3180 }
3181 else
3182 r = label_rtx (label);
3183 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
862d0b35 3184 }
862d0b35
DN
3185 }
3186
862d0b35
DN
3187 /* Now, for each output, construct an rtx
3188 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3189 ARGVEC CONSTRAINTS OPNAMES))
3190 If there is more than one, put them inside a PARALLEL. */
3191
3192 if (nlabels > 0 && nclobbers == 0)
3193 {
3194 gcc_assert (noutputs == 0);
3195 emit_jump_insn (body);
3196 }
3197 else if (noutputs == 0 && nclobbers == 0)
3198 {
3199 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3200 emit_insn (body);
3201 }
3202 else if (noutputs == 1 && nclobbers == 0)
3203 {
7ca35180
RH
3204 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3205 emit_insn (gen_rtx_SET (output_rvec[0], body));
862d0b35
DN
3206 }
3207 else
3208 {
3209 rtx obody = body;
3210 int num = noutputs;
3211
3212 if (num == 0)
3213 num = 1;
3214
3215 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3216
3217 /* For each output operand, store a SET. */
7ca35180 3218 for (i = 0; i < noutputs; ++i)
862d0b35 3219 {
7ca35180
RH
3220 rtx src, o = output_rvec[i];
3221 if (i == 0)
3222 {
3223 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3224 src = obody;
3225 }
3226 else
3227 {
3228 src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3229 ASM_OPERANDS_TEMPLATE (obody),
3230 constraints[i], i, argvec,
3231 constraintvec, labelvec, locus);
3232 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3233 }
3234 XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
862d0b35
DN
3235 }
3236
3237 /* If there are no outputs (but there are some clobbers)
3238 store the bare ASM_OPERANDS into the PARALLEL. */
862d0b35
DN
3239 if (i == 0)
3240 XVECEXP (body, 0, i++) = obody;
3241
3242 /* Store (clobber REG) for each clobbered register specified. */
7ca35180 3243 for (unsigned j = 0; j < nclobbers; ++j)
862d0b35 3244 {
7ca35180 3245 rtx clobbered_reg = clobber_rvec[j];
862d0b35 3246
7ca35180
RH
 3247 /* Do a sanity check for overlap between clobbers and, respectively,
 3248 inputs and outputs that hasn't been handled. Such overlap
3249 should have been detected and reported above. */
3250 if (!clobber_conflict_found && REG_P (clobbered_reg))
862d0b35 3251 {
7ca35180
RH
3252 /* We test the old body (obody) contents to avoid
3253 tripping over the under-construction body. */
3254 for (unsigned k = 0; k < noutputs; ++k)
3255 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3256 internal_error ("asm clobber conflict with output operand");
3257
3258 for (unsigned k = 0; k < ninputs - ninout; ++k)
3259 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3260 internal_error ("asm clobber conflict with input operand");
862d0b35
DN
3261 }
3262
7ca35180 3263 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
862d0b35
DN
3264 }
3265
3266 if (nlabels > 0)
3267 emit_jump_insn (body);
3268 else
3269 emit_insn (body);
3270 }
3271
7ca35180
RH
3272 generating_concat_p = old_generating_concat_p;
3273
862d0b35
DN
3274 if (fallthru_label)
3275 emit_label (fallthru_label);
3276
7ca35180
RH
3277 if (after_md_seq)
3278 emit_insn (after_md_seq);
3279 if (after_rtl_seq)
3280 emit_insn (after_rtl_seq);
862d0b35 3281
6476a8fd 3282 free_temp_slots ();
7ca35180 3283 crtl->has_asm_statement = 1;
862d0b35
DN
3284}
3285
3286/* Emit code to jump to the address
3287 specified by the pointer expression EXP. */
3288
3289static void
3290expand_computed_goto (tree exp)
3291{
3292 rtx x = expand_normal (exp);
3293
862d0b35
DN
3294 do_pending_stack_adjust ();
3295 emit_indirect_jump (x);
3296}
3297
3298/* Generate RTL code for a `goto' statement with target label LABEL.
3299 LABEL should be a LABEL_DECL tree node that was or will later be
3300 defined with `expand_label'. */
3301
3302static void
3303expand_goto (tree label)
3304{
b2b29377
MM
3305 if (flag_checking)
3306 {
3307 /* Check for a nonlocal goto to a containing function. Should have
3308 gotten translated to __builtin_nonlocal_goto. */
3309 tree context = decl_function_context (label);
3310 gcc_assert (!context || context == current_function_decl);
3311 }
862d0b35 3312
1476d1bd 3313 emit_jump (jump_target_rtx (label));
862d0b35
DN
3314}
3315
3316/* Output a return with no value. */
3317
3318static void
3319expand_null_return_1 (void)
3320{
3321 clear_pending_stack_adjust ();
3322 do_pending_stack_adjust ();
3323 emit_jump (return_label);
3324}
3325
3326/* Generate RTL to return from the current function, with no value.
3327 (That is, we do not do anything about returning any value.) */
3328
3329void
3330expand_null_return (void)
3331{
3332 /* If this function was declared to return a value, but we
3333 didn't, clobber the return registers so that they are not
3334 propagated live to the rest of the function. */
3335 clobber_return_register ();
3336
3337 expand_null_return_1 ();
3338}
3339
3340/* Generate RTL to return from the current function, with value VAL. */
3341
3342static void
3343expand_value_return (rtx val)
3344{
3345 /* Copy the value to the return location unless it's already there. */
3346
3347 tree decl = DECL_RESULT (current_function_decl);
3348 rtx return_reg = DECL_RTL (decl);
3349 if (return_reg != val)
3350 {
3351 tree funtype = TREE_TYPE (current_function_decl);
3352 tree type = TREE_TYPE (decl);
3353 int unsignedp = TYPE_UNSIGNED (type);
ef4bddc2
RS
3354 machine_mode old_mode = DECL_MODE (decl);
3355 machine_mode mode;
862d0b35
DN
3356 if (DECL_BY_REFERENCE (decl))
3357 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3358 else
3359 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3360
3361 if (mode != old_mode)
3362 val = convert_modes (mode, old_mode, val, unsignedp);
3363
3364 if (GET_CODE (return_reg) == PARALLEL)
3365 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3366 else
3367 emit_move_insn (return_reg, val);
3368 }
3369
3370 expand_null_return_1 ();
3371}
3372
3373/* Generate RTL to evaluate the expression RETVAL and return it
3374 from the current function. */
3375
3376static void
d5e254e1 3377expand_return (tree retval, tree bounds)
862d0b35
DN
3378{
3379 rtx result_rtl;
3380 rtx val = 0;
3381 tree retval_rhs;
d5e254e1 3382 rtx bounds_rtl;
862d0b35
DN
3383
3384 /* If function wants no value, give it none. */
3385 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3386 {
3387 expand_normal (retval);
3388 expand_null_return ();
3389 return;
3390 }
3391
3392 if (retval == error_mark_node)
3393 {
3394 /* Treat this like a return of no value from a function that
3395 returns a value. */
3396 expand_null_return ();
3397 return;
3398 }
3399 else if ((TREE_CODE (retval) == MODIFY_EXPR
3400 || TREE_CODE (retval) == INIT_EXPR)
3401 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3402 retval_rhs = TREE_OPERAND (retval, 1);
3403 else
3404 retval_rhs = retval;
3405
3406 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3407
d5e254e1
IE
 3408  /* Put the returned bounds in the right place.  */
3409 bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3410 if (bounds_rtl)
3411 {
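      /* Three cases follow: an explicitly supplied bounds expression is
	 expanded and handed to the target's store_returned_bounds hook; if
	 the bounds are returned in a single register, either zero bounds are
	 substituted (when the bounds expression was erroneous) or the bounds
	 are loaded for the address of the returned value; if they are
	 returned in a PARALLEL, the same is done for each bounds slot.  */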
855f036d
IE
3412 rtx addr = NULL;
3413 rtx bnd = NULL;
d5e254e1 3414
855f036d 3415 if (bounds && bounds != error_mark_node)
d5e254e1
IE
3416 {
3417 bnd = expand_normal (bounds);
3418 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3419 }
3420 else if (REG_P (bounds_rtl))
3421 {
855f036d
IE
3422 if (bounds)
3423 bnd = chkp_expand_zero_bounds ();
3424 else
3425 {
3426 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3427 addr = gen_rtx_MEM (Pmode, addr);
3428 bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3429 }
3430
d5e254e1
IE
3431 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3432 }
3433 else
3434 {
3435 int n;
3436
3437 gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3438
855f036d
IE
3439 if (bounds)
3440 bnd = chkp_expand_zero_bounds ();
3441 else
3442 {
3443 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3444 addr = gen_rtx_MEM (Pmode, addr);
3445 }
d5e254e1
IE
3446
3447 for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3448 {
d5e254e1 3449 rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
855f036d
IE
3450 if (!bounds)
3451 {
3452 rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3453 rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3454 bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3455 }
d5e254e1
IE
3456 targetm.calls.store_returned_bounds (slot, bnd);
3457 }
3458 }
3459 }
3460 else if (chkp_function_instrumented_p (current_function_decl)
3461 && !BOUNDED_P (retval_rhs)
3462 && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3463 && TREE_CODE (retval_rhs) != RESULT_DECL)
3464 {
3465 rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3466 addr = gen_rtx_MEM (Pmode, addr);
3467
3468 gcc_assert (MEM_P (result_rtl));
3469
3470 chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3471 }
3472
862d0b35
DN
3473 /* If we are returning the RESULT_DECL, then the value has already
3474 been stored into it, so we don't have to do anything special. */
3475 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3476 expand_value_return (result_rtl);
3477
3478 /* If the result is an aggregate that is being returned in one (or more)
3479 registers, load the registers here. */
3480
3481 else if (retval_rhs != 0
3482 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3483 && REG_P (result_rtl))
3484 {
3485 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3486 if (val)
3487 {
3488 /* Use the mode of the result value on the return register. */
3489 PUT_MODE (result_rtl, GET_MODE (val));
3490 expand_value_return (val);
3491 }
3492 else
3493 expand_null_return ();
3494 }
3495 else if (retval_rhs != 0
3496 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3497 && (REG_P (result_rtl)
3498 || (GET_CODE (result_rtl) == PARALLEL)))
3499 {
9ee5337d
EB
3500 /* Compute the return value into a temporary (usually a pseudo reg). */
3501 val
3502 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
862d0b35
DN
3503 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3504 val = force_not_mem (val);
862d0b35
DN
3505 expand_value_return (val);
3506 }
3507 else
3508 {
3509 /* No hard reg used; calculate value into hard return reg. */
3510 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3511 expand_value_return (result_rtl);
3512 }
3513}
3514
28ed065e
MM
3515/* A subroutine of expand_gimple_stmt, expanding one gimple statement
3516 STMT that doesn't require special handling for outgoing edges. That
 3517   is, no tailcalls and no GIMPLE_COND.  */
3518
3519static void
355fe088 3520expand_gimple_stmt_1 (gimple *stmt)
28ed065e
MM
3521{
3522 tree op0;
c82fee88 3523
5368224f 3524 set_curr_insn_location (gimple_location (stmt));
c82fee88 3525
28ed065e
MM
3526 switch (gimple_code (stmt))
3527 {
3528 case GIMPLE_GOTO:
3529 op0 = gimple_goto_dest (stmt);
3530 if (TREE_CODE (op0) == LABEL_DECL)
3531 expand_goto (op0);
3532 else
3533 expand_computed_goto (op0);
3534 break;
3535 case GIMPLE_LABEL:
538dd0b7 3536 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
28ed065e
MM
3537 break;
3538 case GIMPLE_NOP:
3539 case GIMPLE_PREDICT:
3540 break;
28ed065e 3541 case GIMPLE_SWITCH:
538dd0b7 3542 expand_case (as_a <gswitch *> (stmt));
28ed065e
MM
3543 break;
3544 case GIMPLE_ASM:
538dd0b7 3545 expand_asm_stmt (as_a <gasm *> (stmt));
28ed065e
MM
3546 break;
3547 case GIMPLE_CALL:
538dd0b7 3548 expand_call_stmt (as_a <gcall *> (stmt));
28ed065e
MM
3549 break;
3550
3551 case GIMPLE_RETURN:
855f036d
IE
3552 {
3553 tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3554 op0 = gimple_return_retval (as_a <greturn *> (stmt));
28ed065e 3555
855f036d
IE
3556 if (op0 && op0 != error_mark_node)
3557 {
3558 tree result = DECL_RESULT (current_function_decl);
28ed065e 3559
b5be36b1
IE
 3560	    /* Mark that we have a return statement with missing bounds.  */
3561 if (!bnd
3562 && chkp_function_instrumented_p (cfun->decl)
3563 && !DECL_P (op0))
3564 bnd = error_mark_node;
3565
855f036d
IE
3566 /* If we are not returning the current function's RESULT_DECL,
3567 build an assignment to it. */
3568 if (op0 != result)
3569 {
3570 /* I believe that a function's RESULT_DECL is unique. */
3571 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3572
 3573		/* ??? We'd like to simply use expand_assignment here,
3574 but this fails if the value is of BLKmode but the return
3575 decl is a register. expand_return has special handling
3576 for this combination, which eventually should move
3577 to common code. See comments there. Until then, let's
3578 build a modify expression :-/ */
3579 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3580 result, op0);
3581 }
855f036d
IE
3582 }
3583
3584 if (!op0)
3585 expand_null_return ();
3586 else
3587 expand_return (op0, bnd);
3588 }
28ed065e
MM
3589 break;
3590
3591 case GIMPLE_ASSIGN:
3592 {
538dd0b7
DM
3593 gassign *assign_stmt = as_a <gassign *> (stmt);
3594 tree lhs = gimple_assign_lhs (assign_stmt);
28ed065e
MM
3595
3596 /* Tree expand used to fiddle with |= and &= of two bitfield
 3597	   COMPONENT_REFs here.  This can't happen with gimple; the LHS
3598 of binary assigns must be a gimple reg. */
3599
3600 if (TREE_CODE (lhs) != SSA_NAME
3601 || get_gimple_rhs_class (gimple_expr_code (stmt))
3602 == GIMPLE_SINGLE_RHS)
3603 {
538dd0b7 3604 tree rhs = gimple_assign_rhs1 (assign_stmt);
28ed065e
MM
3605 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3606 == GIMPLE_SINGLE_RHS);
ae2ffe2a
RB
3607 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3608 /* Do not put locations on possibly shared trees. */
3609 && !is_gimple_min_invariant (rhs))
28ed065e 3610 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
47598145
MM
3611 if (TREE_CLOBBER_P (rhs))
3612 /* This is a clobber to mark the going out of scope for
3613 this LHS. */
3614 ;
3615 else
3616 expand_assignment (lhs, rhs,
538dd0b7
DM
3617 gimple_assign_nontemporal_move_p (
3618 assign_stmt));
28ed065e
MM
3619 }
3620 else
3621 {
3622 rtx target, temp;
538dd0b7 3623 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
28ed065e
MM
3624 struct separate_ops ops;
3625 bool promoted = false;
3626
3627 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3628 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3629 promoted = true;
3630
538dd0b7 3631 ops.code = gimple_assign_rhs_code (assign_stmt);
28ed065e 3632 ops.type = TREE_TYPE (lhs);
b0dd8c90 3633 switch (get_gimple_rhs_class (ops.code))
28ed065e 3634 {
0354c0c7 3635 case GIMPLE_TERNARY_RHS:
538dd0b7 3636 ops.op2 = gimple_assign_rhs3 (assign_stmt);
0354c0c7 3637 /* Fallthru */
28ed065e 3638 case GIMPLE_BINARY_RHS:
538dd0b7 3639 ops.op1 = gimple_assign_rhs2 (assign_stmt);
28ed065e
MM
3640 /* Fallthru */
3641 case GIMPLE_UNARY_RHS:
538dd0b7 3642 ops.op0 = gimple_assign_rhs1 (assign_stmt);
28ed065e
MM
3643 break;
3644 default:
3645 gcc_unreachable ();
3646 }
3647 ops.location = gimple_location (stmt);
3648
 3649	    /* If we want to use a nontemporal store, force the value into a
 3650	       register first.  If we store into a promoted register,
 3651	       don't expand directly to the target.  */
3652 temp = nontemporal || promoted ? NULL_RTX : target;
3653 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3654 EXPAND_NORMAL);
3655
3656 if (temp == target)
3657 ;
3658 else if (promoted)
3659 {
362d42dc 3660 int unsignedp = SUBREG_PROMOTED_SIGN (target);
28ed065e
MM
3661 /* If TEMP is a VOIDmode constant, use convert_modes to make
3662 sure that we properly convert it. */
3663 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3664 {
3665 temp = convert_modes (GET_MODE (target),
3666 TYPE_MODE (ops.type),
4e18a7d4 3667 temp, unsignedp);
28ed065e 3668 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4e18a7d4 3669 GET_MODE (target), temp, unsignedp);
28ed065e
MM
3670 }
3671
27be0c32 3672 convert_move (SUBREG_REG (target), temp, unsignedp);
28ed065e
MM
3673 }
3674 else if (nontemporal && emit_storent_insn (target, temp))
3675 ;
3676 else
3677 {
3678 temp = force_operand (temp, target);
3679 if (temp != target)
3680 emit_move_insn (target, temp);
3681 }
3682 }
3683 }
3684 break;
3685
3686 default:
3687 gcc_unreachable ();
3688 }
3689}
3690
3691/* Expand one gimple statement STMT and return the last RTL instruction
3692 before any of the newly generated ones.
3693
3694 In addition to generating the necessary RTL instructions this also
3695 sets REG_EH_REGION notes if necessary and sets the current source
3696 location for diagnostics. */
3697
b47aae36 3698static rtx_insn *
355fe088 3699expand_gimple_stmt (gimple *stmt)
28ed065e 3700{
28ed065e 3701 location_t saved_location = input_location;
b47aae36 3702 rtx_insn *last = get_last_insn ();
c82fee88 3703 int lp_nr;
28ed065e 3704
28ed065e
MM
3705 gcc_assert (cfun);
3706
c82fee88
EB
3707 /* We need to save and restore the current source location so that errors
3708 discovered during expansion are emitted with the right location. But
3709 it would be better if the diagnostic routines used the source location
3710 embedded in the tree nodes rather than globals. */
28ed065e 3711 if (gimple_has_location (stmt))
c82fee88 3712 input_location = gimple_location (stmt);
28ed065e
MM
3713
3714 expand_gimple_stmt_1 (stmt);
c82fee88 3715
28ed065e
MM
3716 /* Free any temporaries used to evaluate this statement. */
3717 free_temp_slots ();
3718
3719 input_location = saved_location;
3720
3721 /* Mark all insns that may trap. */
1d65f45c
RH
3722 lp_nr = lookup_stmt_eh_lp (stmt);
3723 if (lp_nr)
28ed065e 3724 {
b47aae36 3725 rtx_insn *insn;
28ed065e
MM
3726 for (insn = next_real_insn (last); insn;
3727 insn = next_real_insn (insn))
3728 {
3729 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3730 /* If we want exceptions for non-call insns, any
3731 may_trap_p instruction may throw. */
3732 && GET_CODE (PATTERN (insn)) != CLOBBER
3733 && GET_CODE (PATTERN (insn)) != USE
1d65f45c
RH
3734 && insn_could_throw_p (insn))
3735 make_reg_eh_region_note (insn, 0, lp_nr);
28ed065e
MM
3736 }
3737 }
3738
3739 return last;
3740}
3741
726a989a 3742/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
224e770b
RH
3743 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3744 generated a tail call (something that might be denied by the ABI
cea49550
RH
3745 rules governing the call; see calls.c).
3746
3747 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3748 can still reach the rest of BB. The case here is __builtin_sqrt,
3749 where the NaN result goes through the external function (with a
3750 tailcall) and the normal result happens via a sqrt instruction. */
80c7a9eb
RH
3751
3752static basic_block
538dd0b7 3753expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
80c7a9eb 3754{
b47aae36 3755 rtx_insn *last2, *last;
224e770b 3756 edge e;
628f6a4e 3757 edge_iterator ei;
224e770b
RH
3758 int probability;
3759 gcov_type count;
80c7a9eb 3760
28ed065e 3761 last2 = last = expand_gimple_stmt (stmt);
80c7a9eb
RH
3762
3763 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
3764 if (CALL_P (last) && SIBLING_CALL_P (last))
3765 goto found;
80c7a9eb 3766
726a989a 3767 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 3768
cea49550 3769 *can_fallthru = true;
224e770b 3770 return NULL;
80c7a9eb 3771
224e770b
RH
3772 found:
3773 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3774 Any instructions emitted here are about to be deleted. */
3775 do_pending_stack_adjust ();
3776
3777 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3778 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3779 EH or abnormal edges, we shouldn't have created a tail call in
3780 the first place. So it seems to me we should just be removing
3781 all edges here, or redirecting the existing fallthru edge to
3782 the exit block. */
3783
224e770b
RH
3784 probability = 0;
3785 count = 0;
224e770b 3786
628f6a4e
BE
3787 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3788 {
224e770b
RH
3789 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3790 {
fefa31b5 3791 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
80c7a9eb 3792 {
224e770b
RH
3793 e->dest->count -= e->count;
3794 e->dest->frequency -= EDGE_FREQUENCY (e);
3795 if (e->dest->count < 0)
c22cacf3 3796 e->dest->count = 0;
224e770b 3797 if (e->dest->frequency < 0)
c22cacf3 3798 e->dest->frequency = 0;
80c7a9eb 3799 }
224e770b
RH
3800 count += e->count;
3801 probability += e->probability;
3802 remove_edge (e);
80c7a9eb 3803 }
628f6a4e
BE
3804 else
3805 ei_next (&ei);
80c7a9eb
RH
3806 }
3807
224e770b
RH
3808 /* This is somewhat ugly: the call_expr expander often emits instructions
3809 after the sibcall (to perform the function return). These confuse the
12eff7b7 3810 find_many_sub_basic_blocks code, so we need to get rid of these. */
224e770b 3811 last = NEXT_INSN (last);
341c100f 3812 gcc_assert (BARRIER_P (last));
cea49550
RH
3813
3814 *can_fallthru = false;
224e770b
RH
3815 while (NEXT_INSN (last))
3816 {
 3817       /* For instance an sqrt builtin expander expands an if with a
 3818	  sibcall in the then arm and a label for the `else` arm.  */
3819 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
3820 {
3821 *can_fallthru = true;
3822 break;
3823 }
224e770b
RH
3824 delete_insn (NEXT_INSN (last));
3825 }
3826
fefa31b5
DM
3827 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3828 | EDGE_SIBCALL);
224e770b
RH
3829 e->probability += probability;
3830 e->count += count;
1130d5e3 3831 BB_END (bb) = last;
224e770b
RH
3832 update_bb_for_insn (bb);
3833
3834 if (NEXT_INSN (last))
3835 {
3836 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3837
3838 last = BB_END (bb);
3839 if (BARRIER_P (last))
1130d5e3 3840 BB_END (bb) = PREV_INSN (last);
224e770b
RH
3841 }
3842
726a989a 3843 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 3844
224e770b 3845 return bb;
80c7a9eb
RH
3846}
3847
b5b8b0ac
AO
3848/* Return the difference between the floor and the truncated result of
3849 a signed division by OP1 with remainder MOD. */
3850static rtx
ef4bddc2 3851floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
b5b8b0ac
AO
3852{
3853 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3854 return gen_rtx_IF_THEN_ELSE
3855 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3856 gen_rtx_IF_THEN_ELSE
3857 (mode, gen_rtx_LT (BImode,
3858 gen_rtx_DIV (mode, op1, mod),
3859 const0_rtx),
3860 constm1_rtx, const0_rtx),
3861 const0_rtx);
3862}
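
/* A worked example of the adjustment above: dividing -7 by 2 truncates
   to -3 with remainder -1, while the floor result is -4.  The remainder
   is nonzero and op1 / mod = 2 / -1 is negative (the operands have
   opposite signs), so the expression yields -1, the amount that must be
   added to the truncated quotient to obtain the floor.  */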
3863
3864/* Return the difference between the ceil and the truncated result of
3865 a signed division by OP1 with remainder MOD. */
3866static rtx
ef4bddc2 3867ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
b5b8b0ac
AO
3868{
3869 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3870 return gen_rtx_IF_THEN_ELSE
3871 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3872 gen_rtx_IF_THEN_ELSE
3873 (mode, gen_rtx_GT (BImode,
3874 gen_rtx_DIV (mode, op1, mod),
3875 const0_rtx),
3876 const1_rtx, const0_rtx),
3877 const0_rtx);
3878}
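
/* A worked example of the adjustment above: dividing 7 by 2 truncates to
   3 with remainder 1, while the ceiling result is 4; op1 / mod is
   positive, so the adjustment is 1.  For -7 / 2 the remainder is -1 and
   op1 / mod is negative, so the adjustment is 0: the truncated quotient
   -3 is already the ceiling of -3.5.  */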
3879
3880/* Return the difference between the ceil and the truncated result of
3881 an unsigned division by OP1 with remainder MOD. */
3882static rtx
ef4bddc2 3883ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
b5b8b0ac
AO
3884{
3885 /* (mod != 0 ? 1 : 0) */
3886 return gen_rtx_IF_THEN_ELSE
3887 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3888 const1_rtx, const0_rtx);
3889}
3890
3891/* Return the difference between the rounded and the truncated result
3892 of a signed division by OP1 with remainder MOD. Halfway cases are
3893 rounded away from zero, rather than to the nearest even number. */
3894static rtx
ef4bddc2 3895round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
b5b8b0ac
AO
3896{
3897 /* (abs (mod) >= abs (op1) - abs (mod)
3898 ? (op1 / mod > 0 ? 1 : -1)
3899 : 0) */
3900 return gen_rtx_IF_THEN_ELSE
3901 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3902 gen_rtx_MINUS (mode,
3903 gen_rtx_ABS (mode, op1),
3904 gen_rtx_ABS (mode, mod))),
3905 gen_rtx_IF_THEN_ELSE
3906 (mode, gen_rtx_GT (BImode,
3907 gen_rtx_DIV (mode, op1, mod),
3908 const0_rtx),
3909 const1_rtx, constm1_rtx),
3910 const0_rtx);
3911}
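
/* A worked example of the adjustment above: dividing -6 by 4 truncates
   to -1 with remainder -2.  Here abs (mod) = 2 equals
   abs (op1) - abs (mod), so this is a halfway case; op1 / mod is
   negative, so the adjustment is -1 and the rounded quotient becomes -2,
   i.e. -1.5 rounded away from zero.  */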
3912
3913/* Return the difference between the rounded and the truncated result
 3914   of an unsigned division by OP1 with remainder MOD.  Halfway cases
3915 are rounded away from zero, rather than to the nearest even
3916 number. */
3917static rtx
ef4bddc2 3918round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
b5b8b0ac
AO
3919{
3920 /* (mod >= op1 - mod ? 1 : 0) */
3921 return gen_rtx_IF_THEN_ELSE
3922 (mode, gen_rtx_GE (BImode, mod,
3923 gen_rtx_MINUS (mode, op1, mod)),
3924 const1_rtx, const0_rtx);
3925}
3926
dda2da58
AO
 3927/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3928 any rtl. */
3929
3930static rtx
ef4bddc2 3931convert_debug_memory_address (machine_mode mode, rtx x,
f61c6f34 3932 addr_space_t as)
dda2da58 3933{
ef4bddc2 3934 machine_mode xmode = GET_MODE (x);
dda2da58
AO
3935
3936#ifndef POINTERS_EXTEND_UNSIGNED
f61c6f34
JJ
3937 gcc_assert (mode == Pmode
3938 || mode == targetm.addr_space.address_mode (as));
dda2da58
AO
3939 gcc_assert (xmode == mode || xmode == VOIDmode);
3940#else
f61c6f34 3941 rtx temp;
f61c6f34 3942
639d4bb8 3943 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
dda2da58
AO
3944
3945 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3946 return x;
3947
69660a70 3948 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3403a1a9 3949 x = lowpart_subreg (mode, x, xmode);
dda2da58
AO
3950 else if (POINTERS_EXTEND_UNSIGNED > 0)
3951 x = gen_rtx_ZERO_EXTEND (mode, x);
3952 else if (!POINTERS_EXTEND_UNSIGNED)
3953 x = gen_rtx_SIGN_EXTEND (mode, x);
3954 else
f61c6f34
JJ
3955 {
3956 switch (GET_CODE (x))
3957 {
3958 case SUBREG:
3959 if ((SUBREG_PROMOTED_VAR_P (x)
3960 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3961 || (GET_CODE (SUBREG_REG (x)) == PLUS
3962 && REG_P (XEXP (SUBREG_REG (x), 0))
3963 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3964 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3965 && GET_MODE (SUBREG_REG (x)) == mode)
3966 return SUBREG_REG (x);
3967 break;
3968 case LABEL_REF:
a827d9b1 3969 temp = gen_rtx_LABEL_REF (mode, LABEL_REF_LABEL (x));
f61c6f34
JJ
3970 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3971 return temp;
3972 case SYMBOL_REF:
3973 temp = shallow_copy_rtx (x);
3974 PUT_MODE (temp, mode);
3975 return temp;
3976 case CONST:
3977 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3978 if (temp)
3979 temp = gen_rtx_CONST (mode, temp);
3980 return temp;
3981 case PLUS:
3982 case MINUS:
3983 if (CONST_INT_P (XEXP (x, 1)))
3984 {
3985 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3986 if (temp)
3987 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3988 }
3989 break;
3990 default:
3991 break;
3992 }
 3993      /* Don't know how to express ptr_extend as an operation in debug info.  */
3994 return NULL;
3995 }
dda2da58
AO
3996#endif /* POINTERS_EXTEND_UNSIGNED */
3997
3998 return x;
3999}
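
/* The extension branches above mirror the documented values of
   POINTERS_EXTEND_UNSIGNED on targets where pointers are narrower than
   the requested mode: a positive value means pointers are zero-extended,
   zero means they are sign-extended, and a negative value means the
   target uses a special ptr_extend operation, which can only be
   represented for the few rtx shapes handled in the switch above and
   otherwise makes us return NULL.  */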
4000
dfde35b3
JJ
4001/* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4002 by avoid_deep_ter_for_debug. */
4003
4004static hash_map<tree, tree> *deep_ter_debug_map;
4005
 4006/* Split too-deep TER chains for debug stmts using debug temporaries.  */
4007
4008static void
355fe088 4009avoid_deep_ter_for_debug (gimple *stmt, int depth)
dfde35b3
JJ
4010{
4011 use_operand_p use_p;
4012 ssa_op_iter iter;
4013 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4014 {
4015 tree use = USE_FROM_PTR (use_p);
4016 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4017 continue;
355fe088 4018 gimple *g = get_gimple_for_ssa_name (use);
dfde35b3
JJ
4019 if (g == NULL)
4020 continue;
4021 if (depth > 6 && !stmt_ends_bb_p (g))
4022 {
4023 if (deep_ter_debug_map == NULL)
4024 deep_ter_debug_map = new hash_map<tree, tree>;
4025
4026 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4027 if (vexpr != NULL)
4028 continue;
4029 vexpr = make_node (DEBUG_EXPR_DECL);
355fe088 4030 gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
dfde35b3
JJ
4031 DECL_ARTIFICIAL (vexpr) = 1;
4032 TREE_TYPE (vexpr) = TREE_TYPE (use);
4033 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (use));
4034 gimple_stmt_iterator gsi = gsi_for_stmt (g);
4035 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4036 avoid_deep_ter_for_debug (def_temp, 0);
4037 }
4038 else
4039 avoid_deep_ter_for_debug (g, depth + 1);
4040 }
4041}
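
/* An illustration of the case handled above (hypothetical, for exposition
   only): when a debug bind uses an SSA name whose TER-replaceable
   definition itself uses further TER-replaceable names, substituting the
   whole chain could yield an arbitrarily large debug expression.  Once
   the chain is deeper than six levels, a DEBUG_EXPR_DECL bound to the
   intermediate value is introduced, and expand_debug_expr later refers to
   that temporary via deep_ter_debug_map instead of expanding the whole
   chain.  */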
4042
12c5ffe5
EB
4043/* Return an RTX equivalent to the value of the parameter DECL. */
4044
4045static rtx
4046expand_debug_parm_decl (tree decl)
4047{
4048 rtx incoming = DECL_INCOMING_RTL (decl);
4049
4050 if (incoming
4051 && GET_MODE (incoming) != BLKmode
4052 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4053 || (MEM_P (incoming)
4054 && REG_P (XEXP (incoming, 0))
4055 && HARD_REGISTER_P (XEXP (incoming, 0)))))
4056 {
4057 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4058
4059#ifdef HAVE_window_save
4060 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4061 If the target machine has an explicit window save instruction, the
4062 actual entry value is the corresponding OUTGOING_REGNO instead. */
4063 if (REG_P (incoming)
4064 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4065 incoming
4066 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4067 OUTGOING_REGNO (REGNO (incoming)), 0);
4068 else if (MEM_P (incoming))
4069 {
4070 rtx reg = XEXP (incoming, 0);
4071 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4072 {
4073 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4074 incoming = replace_equiv_address_nv (incoming, reg);
4075 }
6cfa417f
JJ
4076 else
4077 incoming = copy_rtx (incoming);
12c5ffe5
EB
4078 }
4079#endif
4080
4081 ENTRY_VALUE_EXP (rtl) = incoming;
4082 return rtl;
4083 }
4084
4085 if (incoming
4086 && GET_MODE (incoming) != BLKmode
4087 && !TREE_ADDRESSABLE (decl)
4088 && MEM_P (incoming)
4089 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4090 || (GET_CODE (XEXP (incoming, 0)) == PLUS
4091 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4092 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
6cfa417f 4093 return copy_rtx (incoming);
12c5ffe5
EB
4094
4095 return NULL_RTX;
4096}
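
/* Note on the ENTRY_VALUE case above: for a parameter that arrives in a
   hard register (or in memory addressed through one), the ENTRY_VALUE
   rtx records the value that location held on entry to the function;
   later passes can express it in debug info as a DWARF entry-value
   operation rather than as a location that may since have been
   clobbered.  */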
4097
4098/* Return an RTX equivalent to the value of the tree expression EXP. */
b5b8b0ac
AO
4099
4100static rtx
4101expand_debug_expr (tree exp)
4102{
4103 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
ef4bddc2
RS
4104 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4105 machine_mode inner_mode = VOIDmode;
b5b8b0ac 4106 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
09e881c9 4107 addr_space_t as;
b5b8b0ac
AO
4108
4109 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4110 {
4111 case tcc_expression:
4112 switch (TREE_CODE (exp))
4113 {
4114 case COND_EXPR:
7ece48b1 4115 case DOT_PROD_EXPR:
79d652a5 4116 case SAD_EXPR:
0354c0c7
BS
4117 case WIDEN_MULT_PLUS_EXPR:
4118 case WIDEN_MULT_MINUS_EXPR:
0f59b812 4119 case FMA_EXPR:
b5b8b0ac
AO
4120 goto ternary;
4121
4122 case TRUTH_ANDIF_EXPR:
4123 case TRUTH_ORIF_EXPR:
4124 case TRUTH_AND_EXPR:
4125 case TRUTH_OR_EXPR:
4126 case TRUTH_XOR_EXPR:
4127 goto binary;
4128
4129 case TRUTH_NOT_EXPR:
4130 goto unary;
4131
4132 default:
4133 break;
4134 }
4135 break;
4136
4137 ternary:
4138 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4139 if (!op2)
4140 return NULL_RTX;
4141 /* Fall through. */
4142
4143 binary:
4144 case tcc_binary:
b5b8b0ac
AO
4145 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4146 if (!op1)
4147 return NULL_RTX;
26d83bcc
JJ
4148 switch (TREE_CODE (exp))
4149 {
4150 case LSHIFT_EXPR:
4151 case RSHIFT_EXPR:
4152 case LROTATE_EXPR:
4153 case RROTATE_EXPR:
4154 case WIDEN_LSHIFT_EXPR:
4155 /* Ensure second operand isn't wider than the first one. */
4156 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4157 if (SCALAR_INT_MODE_P (inner_mode))
4158 {
4159 machine_mode opmode = mode;
4160 if (VECTOR_MODE_P (mode))
4161 opmode = GET_MODE_INNER (mode);
4162 if (SCALAR_INT_MODE_P (opmode)
4163 && (GET_MODE_PRECISION (opmode)
4164 < GET_MODE_PRECISION (inner_mode)))
3403a1a9 4165 op1 = lowpart_subreg (opmode, op1, inner_mode);
26d83bcc
JJ
4166 }
4167 break;
4168 default:
4169 break;
4170 }
b5b8b0ac
AO
4171 /* Fall through. */
4172
4173 unary:
4174 case tcc_unary:
2ba172e0 4175 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
4176 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4177 if (!op0)
4178 return NULL_RTX;
4179 break;
4180
871dae34
AO
4181 case tcc_comparison:
4182 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4183 goto binary;
4184
b5b8b0ac
AO
4185 case tcc_type:
4186 case tcc_statement:
4187 gcc_unreachable ();
4188
4189 case tcc_constant:
4190 case tcc_exceptional:
4191 case tcc_declaration:
4192 case tcc_reference:
4193 case tcc_vl_exp:
4194 break;
4195 }
4196
4197 switch (TREE_CODE (exp))
4198 {
4199 case STRING_CST:
4200 if (!lookup_constant_def (exp))
4201 {
e1b243a8
JJ
4202 if (strlen (TREE_STRING_POINTER (exp)) + 1
4203 != (size_t) TREE_STRING_LENGTH (exp))
4204 return NULL_RTX;
b5b8b0ac
AO
4205 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4206 op0 = gen_rtx_MEM (BLKmode, op0);
4207 set_mem_attributes (op0, exp, 0);
4208 return op0;
4209 }
4210 /* Fall through... */
4211
4212 case INTEGER_CST:
4213 case REAL_CST:
4214 case FIXED_CST:
4215 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4216 return op0;
4217
4218 case COMPLEX_CST:
4219 gcc_assert (COMPLEX_MODE_P (mode));
4220 op0 = expand_debug_expr (TREE_REALPART (exp));
b5b8b0ac 4221 op1 = expand_debug_expr (TREE_IMAGPART (exp));
b5b8b0ac
AO
4222 return gen_rtx_CONCAT (mode, op0, op1);
4223
0ca5af51
AO
4224 case DEBUG_EXPR_DECL:
4225 op0 = DECL_RTL_IF_SET (exp);
4226
4227 if (op0)
4228 return op0;
4229
4230 op0 = gen_rtx_DEBUG_EXPR (mode);
e4fb38bd 4231 DEBUG_EXPR_TREE_DECL (op0) = exp;
0ca5af51
AO
4232 SET_DECL_RTL (exp, op0);
4233
4234 return op0;
4235
b5b8b0ac
AO
4236 case VAR_DECL:
4237 case PARM_DECL:
4238 case FUNCTION_DECL:
4239 case LABEL_DECL:
4240 case CONST_DECL:
4241 case RESULT_DECL:
4242 op0 = DECL_RTL_IF_SET (exp);
4243
4244 /* This decl was probably optimized away. */
4245 if (!op0)
e1b243a8
JJ
4246 {
4247 if (TREE_CODE (exp) != VAR_DECL
4248 || DECL_EXTERNAL (exp)
4249 || !TREE_STATIC (exp)
4250 || !DECL_NAME (exp)
0fba566c 4251 || DECL_HARD_REGISTER (exp)
7d5fc814 4252 || DECL_IN_CONSTANT_POOL (exp)
0fba566c 4253 || mode == VOIDmode)
e1b243a8
JJ
4254 return NULL;
4255
b1aa0655 4256 op0 = make_decl_rtl_for_debug (exp);
e1b243a8
JJ
4257 if (!MEM_P (op0)
4258 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4259 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4260 return NULL;
4261 }
4262 else
4263 op0 = copy_rtx (op0);
b5b8b0ac 4264
06796564 4265 if (GET_MODE (op0) == BLKmode
871dae34 4266 /* If op0 is not BLKmode, but mode is, adjust_mode
06796564
JJ
4267 below would ICE. While it is likely a FE bug,
4268 try to be robust here. See PR43166. */
132b4e82
JJ
4269 || mode == BLKmode
4270 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
b5b8b0ac
AO
4271 {
4272 gcc_assert (MEM_P (op0));
4273 op0 = adjust_address_nv (op0, mode, 0);
4274 return op0;
4275 }
4276
4277 /* Fall through. */
4278
4279 adjust_mode:
4280 case PAREN_EXPR:
625a9766 4281 CASE_CONVERT:
b5b8b0ac 4282 {
2ba172e0 4283 inner_mode = GET_MODE (op0);
b5b8b0ac
AO
4284
4285 if (mode == inner_mode)
4286 return op0;
4287
4288 if (inner_mode == VOIDmode)
4289 {
2a8e30fb
MM
4290 if (TREE_CODE (exp) == SSA_NAME)
4291 inner_mode = TYPE_MODE (TREE_TYPE (exp));
4292 else
4293 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
4294 if (mode == inner_mode)
4295 return op0;
4296 }
4297
4298 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4299 {
4300 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4301 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4302 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4303 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4304 else
4305 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4306 }
4307 else if (FLOAT_MODE_P (mode))
4308 {
2a8e30fb 4309 gcc_assert (TREE_CODE (exp) != SSA_NAME);
b5b8b0ac
AO
4310 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4311 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4312 else
4313 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4314 }
4315 else if (FLOAT_MODE_P (inner_mode))
4316 {
4317 if (unsignedp)
4318 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4319 else
4320 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4321 }
4322 else if (CONSTANT_P (op0)
69660a70 4323 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
3403a1a9 4324 op0 = lowpart_subreg (mode, op0, inner_mode);
cf4ef6f7 4325 else if (UNARY_CLASS_P (exp)
1b47fe3f
JJ
4326 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4327 : unsignedp)
2ba172e0 4328 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
b5b8b0ac 4329 else
2ba172e0 4330 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
b5b8b0ac
AO
4331
4332 return op0;
4333 }
4334
70f34814 4335 case MEM_REF:
71f3a3f5
JJ
4336 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4337 {
4338 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4339 TREE_OPERAND (exp, 0),
4340 TREE_OPERAND (exp, 1));
4341 if (newexp)
4342 return expand_debug_expr (newexp);
4343 }
4344 /* FALLTHROUGH */
b5b8b0ac 4345 case INDIRECT_REF:
0a81f074 4346 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
4347 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4348 if (!op0)
4349 return NULL;
4350
cb115041
JJ
4351 if (TREE_CODE (exp) == MEM_REF)
4352 {
583ac69c
JJ
4353 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4354 || (GET_CODE (op0) == PLUS
4355 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4356 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4357 Instead just use get_inner_reference. */
4358 goto component_ref;
4359
cb115041
JJ
4360 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4361 if (!op1 || !CONST_INT_P (op1))
4362 return NULL;
4363
0a81f074 4364 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
cb115041
JJ
4365 }
4366
a148c4b2 4367 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
b5b8b0ac 4368
f61c6f34
JJ
4369 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4370 op0, as);
4371 if (op0 == NULL_RTX)
4372 return NULL;
b5b8b0ac 4373
f61c6f34 4374 op0 = gen_rtx_MEM (mode, op0);
b5b8b0ac 4375 set_mem_attributes (op0, exp, 0);
71f3a3f5
JJ
4376 if (TREE_CODE (exp) == MEM_REF
4377 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4378 set_mem_expr (op0, NULL_TREE);
09e881c9 4379 set_mem_addr_space (op0, as);
b5b8b0ac
AO
4380
4381 return op0;
4382
4383 case TARGET_MEM_REF:
4d948885
RG
4384 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4385 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
b5b8b0ac
AO
4386 return NULL;
4387
4388 op0 = expand_debug_expr
4e25ca6b 4389 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
b5b8b0ac
AO
4390 if (!op0)
4391 return NULL;
4392
c168f180 4393 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
f61c6f34
JJ
4394 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4395 op0, as);
4396 if (op0 == NULL_RTX)
4397 return NULL;
b5b8b0ac
AO
4398
4399 op0 = gen_rtx_MEM (mode, op0);
4400
4401 set_mem_attributes (op0, exp, 0);
09e881c9 4402 set_mem_addr_space (op0, as);
b5b8b0ac
AO
4403
4404 return op0;
4405
583ac69c 4406 component_ref:
b5b8b0ac
AO
4407 case ARRAY_REF:
4408 case ARRAY_RANGE_REF:
4409 case COMPONENT_REF:
4410 case BIT_FIELD_REF:
4411 case REALPART_EXPR:
4412 case IMAGPART_EXPR:
4413 case VIEW_CONVERT_EXPR:
4414 {
ef4bddc2 4415 machine_mode mode1;
b5b8b0ac
AO
4416 HOST_WIDE_INT bitsize, bitpos;
4417 tree offset;
ee45a32d
EB
4418 int reversep, volatilep = 0;
4419 tree tem
4420 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4421 &unsignedp, &reversep, &volatilep, false);
b5b8b0ac
AO
4422 rtx orig_op0;
4423
4f2a9af8
JJ
4424 if (bitsize == 0)
4425 return NULL;
4426
b5b8b0ac
AO
4427 orig_op0 = op0 = expand_debug_expr (tem);
4428
4429 if (!op0)
4430 return NULL;
4431
4432 if (offset)
4433 {
ef4bddc2 4434 machine_mode addrmode, offmode;
dda2da58 4435
aa847cc8
JJ
4436 if (!MEM_P (op0))
4437 return NULL;
b5b8b0ac 4438
dda2da58
AO
4439 op0 = XEXP (op0, 0);
4440 addrmode = GET_MODE (op0);
4441 if (addrmode == VOIDmode)
4442 addrmode = Pmode;
4443
b5b8b0ac
AO
4444 op1 = expand_debug_expr (offset);
4445 if (!op1)
4446 return NULL;
4447
dda2da58
AO
4448 offmode = GET_MODE (op1);
4449 if (offmode == VOIDmode)
4450 offmode = TYPE_MODE (TREE_TYPE (offset));
4451
4452 if (addrmode != offmode)
3403a1a9 4453 op1 = lowpart_subreg (addrmode, op1, offmode);
dda2da58
AO
4454
 4455	    /* Don't use offset_address here; we don't need a
4456 recognizable address, and we don't want to generate
4457 code. */
2ba172e0
JJ
4458 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4459 op0, op1));
b5b8b0ac
AO
4460 }
4461
4462 if (MEM_P (op0))
4463 {
4f2a9af8
JJ
4464 if (mode1 == VOIDmode)
4465 /* Bitfield. */
4466 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
b5b8b0ac
AO
4467 if (bitpos >= BITS_PER_UNIT)
4468 {
4469 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4470 bitpos %= BITS_PER_UNIT;
4471 }
4472 else if (bitpos < 0)
4473 {
4f2a9af8
JJ
4474 HOST_WIDE_INT units
4475 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
e3abc83e 4476 op0 = adjust_address_nv (op0, mode1, -units);
b5b8b0ac
AO
4477 bitpos += units * BITS_PER_UNIT;
4478 }
4479 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4480 op0 = adjust_address_nv (op0, mode, 0);
4481 else if (GET_MODE (op0) != mode1)
4482 op0 = adjust_address_nv (op0, mode1, 0);
4483 else
4484 op0 = copy_rtx (op0);
4485 if (op0 == orig_op0)
4486 op0 = shallow_copy_rtx (op0);
4487 set_mem_attributes (op0, exp, 0);
4488 }
4489
4490 if (bitpos == 0 && mode == GET_MODE (op0))
4491 return op0;
4492
2d3fc6aa
JJ
4493 if (bitpos < 0)
4494 return NULL;
4495
88c04a5d
JJ
4496 if (GET_MODE (op0) == BLKmode)
4497 return NULL;
4498
b5b8b0ac
AO
4499 if ((bitpos % BITS_PER_UNIT) == 0
4500 && bitsize == GET_MODE_BITSIZE (mode1))
4501 {
ef4bddc2 4502 machine_mode opmode = GET_MODE (op0);
b5b8b0ac 4503
b5b8b0ac 4504 if (opmode == VOIDmode)
9712cba0 4505 opmode = TYPE_MODE (TREE_TYPE (tem));
b5b8b0ac
AO
4506
4507 /* This condition may hold if we're expanding the address
4508 right past the end of an array that turned out not to
4509 be addressable (i.e., the address was only computed in
4510 debug stmts). The gen_subreg below would rightfully
4511 crash, and the address doesn't really exist, so just
4512 drop it. */
4513 if (bitpos >= GET_MODE_BITSIZE (opmode))
4514 return NULL;
4515
7d5d39bb
JJ
4516 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4517 return simplify_gen_subreg (mode, op0, opmode,
4518 bitpos / BITS_PER_UNIT);
b5b8b0ac
AO
4519 }
4520
4521 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4522 && TYPE_UNSIGNED (TREE_TYPE (exp))
4523 ? SIGN_EXTRACT
4524 : ZERO_EXTRACT, mode,
4525 GET_MODE (op0) != VOIDmode
9712cba0
JJ
4526 ? GET_MODE (op0)
4527 : TYPE_MODE (TREE_TYPE (tem)),
b5b8b0ac
AO
4528 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4529 }
4530
b5b8b0ac 4531 case ABS_EXPR:
2ba172e0 4532 return simplify_gen_unary (ABS, mode, op0, mode);
b5b8b0ac
AO
4533
4534 case NEGATE_EXPR:
2ba172e0 4535 return simplify_gen_unary (NEG, mode, op0, mode);
b5b8b0ac
AO
4536
4537 case BIT_NOT_EXPR:
2ba172e0 4538 return simplify_gen_unary (NOT, mode, op0, mode);
b5b8b0ac
AO
4539
4540 case FLOAT_EXPR:
2ba172e0
JJ
4541 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4542 0)))
4543 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4544 inner_mode);
b5b8b0ac
AO
4545
4546 case FIX_TRUNC_EXPR:
2ba172e0
JJ
4547 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4548 inner_mode);
b5b8b0ac
AO
4549
4550 case POINTER_PLUS_EXPR:
576319a7
DD
4551 /* For the rare target where pointers are not the same size as
4552 size_t, we need to check for mis-matched modes and correct
4553 the addend. */
4554 if (op0 && op1
4555 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4556 && GET_MODE (op0) != GET_MODE (op1))
4557 {
8369f38a
DD
4558 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
 4559	      /* If OP0 has a partial mode, then we must truncate, even if it has
 4560		 the same bitsize as OP1, as GCC's representation of partial modes
 4561		 is opaque.  */
4562 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4563 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
2ba172e0
JJ
4564 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4565 GET_MODE (op1));
576319a7
DD
4566 else
4567 /* We always sign-extend, regardless of the signedness of
4568 the operand, because the operand is always unsigned
4569 here even if the original C expression is signed. */
2ba172e0
JJ
4570 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4571 GET_MODE (op1));
576319a7
DD
4572 }
4573 /* Fall through. */
b5b8b0ac 4574 case PLUS_EXPR:
2ba172e0 4575 return simplify_gen_binary (PLUS, mode, op0, op1);
b5b8b0ac
AO
4576
4577 case MINUS_EXPR:
2ba172e0 4578 return simplify_gen_binary (MINUS, mode, op0, op1);
b5b8b0ac
AO
4579
4580 case MULT_EXPR:
2ba172e0 4581 return simplify_gen_binary (MULT, mode, op0, op1);
b5b8b0ac
AO
4582
4583 case RDIV_EXPR:
4584 case TRUNC_DIV_EXPR:
4585 case EXACT_DIV_EXPR:
4586 if (unsignedp)
2ba172e0 4587 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac 4588 else
2ba172e0 4589 return simplify_gen_binary (DIV, mode, op0, op1);
b5b8b0ac
AO
4590
4591 case TRUNC_MOD_EXPR:
2ba172e0 4592 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
b5b8b0ac
AO
4593
4594 case FLOOR_DIV_EXPR:
4595 if (unsignedp)
2ba172e0 4596 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac
AO
4597 else
4598 {
2ba172e0
JJ
4599 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4600 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4601 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0 4602 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4603 }
4604
4605 case FLOOR_MOD_EXPR:
4606 if (unsignedp)
2ba172e0 4607 return simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac
AO
4608 else
4609 {
2ba172e0 4610 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4611 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4612 adj = simplify_gen_unary (NEG, mode,
4613 simplify_gen_binary (MULT, mode, adj, op1),
4614 mode);
4615 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4616 }
4617
4618 case CEIL_DIV_EXPR:
4619 if (unsignedp)
4620 {
2ba172e0
JJ
4621 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4622 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4623 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0 4624 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4625 }
4626 else
4627 {
2ba172e0
JJ
4628 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4629 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4630 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0 4631 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4632 }
4633
4634 case CEIL_MOD_EXPR:
4635 if (unsignedp)
4636 {
2ba172e0 4637 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4638 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
4639 adj = simplify_gen_unary (NEG, mode,
4640 simplify_gen_binary (MULT, mode, adj, op1),
4641 mode);
4642 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4643 }
4644 else
4645 {
2ba172e0 4646 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4647 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4648 adj = simplify_gen_unary (NEG, mode,
4649 simplify_gen_binary (MULT, mode, adj, op1),
4650 mode);
4651 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4652 }
4653
4654 case ROUND_DIV_EXPR:
4655 if (unsignedp)
4656 {
2ba172e0
JJ
4657 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4658 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4659 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0 4660 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4661 }
4662 else
4663 {
2ba172e0
JJ
4664 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4665 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4666 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0 4667 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4668 }
4669
4670 case ROUND_MOD_EXPR:
4671 if (unsignedp)
4672 {
2ba172e0 4673 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4674 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
4675 adj = simplify_gen_unary (NEG, mode,
4676 simplify_gen_binary (MULT, mode, adj, op1),
4677 mode);
4678 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4679 }
4680 else
4681 {
2ba172e0 4682 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4683 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4684 adj = simplify_gen_unary (NEG, mode,
4685 simplify_gen_binary (MULT, mode, adj, op1),
4686 mode);
4687 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4688 }
4689
4690 case LSHIFT_EXPR:
2ba172e0 4691 return simplify_gen_binary (ASHIFT, mode, op0, op1);
b5b8b0ac
AO
4692
4693 case RSHIFT_EXPR:
4694 if (unsignedp)
2ba172e0 4695 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
b5b8b0ac 4696 else
2ba172e0 4697 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
b5b8b0ac
AO
4698
4699 case LROTATE_EXPR:
2ba172e0 4700 return simplify_gen_binary (ROTATE, mode, op0, op1);
b5b8b0ac
AO
4701
4702 case RROTATE_EXPR:
2ba172e0 4703 return simplify_gen_binary (ROTATERT, mode, op0, op1);
b5b8b0ac
AO
4704
4705 case MIN_EXPR:
2ba172e0 4706 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
b5b8b0ac
AO
4707
4708 case MAX_EXPR:
2ba172e0 4709 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
b5b8b0ac
AO
4710
4711 case BIT_AND_EXPR:
4712 case TRUTH_AND_EXPR:
2ba172e0 4713 return simplify_gen_binary (AND, mode, op0, op1);
b5b8b0ac
AO
4714
4715 case BIT_IOR_EXPR:
4716 case TRUTH_OR_EXPR:
2ba172e0 4717 return simplify_gen_binary (IOR, mode, op0, op1);
b5b8b0ac
AO
4718
4719 case BIT_XOR_EXPR:
4720 case TRUTH_XOR_EXPR:
2ba172e0 4721 return simplify_gen_binary (XOR, mode, op0, op1);
b5b8b0ac
AO
4722
4723 case TRUTH_ANDIF_EXPR:
4724 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4725
4726 case TRUTH_ORIF_EXPR:
4727 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4728
4729 case TRUTH_NOT_EXPR:
2ba172e0 4730 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
b5b8b0ac
AO
4731
4732 case LT_EXPR:
2ba172e0
JJ
4733 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4734 op0, op1);
b5b8b0ac
AO
4735
4736 case LE_EXPR:
2ba172e0
JJ
4737 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4738 op0, op1);
b5b8b0ac
AO
4739
4740 case GT_EXPR:
2ba172e0
JJ
4741 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4742 op0, op1);
b5b8b0ac
AO
4743
4744 case GE_EXPR:
2ba172e0
JJ
4745 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4746 op0, op1);
b5b8b0ac
AO
4747
4748 case EQ_EXPR:
2ba172e0 4749 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4750
4751 case NE_EXPR:
2ba172e0 4752 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4753
4754 case UNORDERED_EXPR:
2ba172e0 4755 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4756
4757 case ORDERED_EXPR:
2ba172e0 4758 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4759
4760 case UNLT_EXPR:
2ba172e0 4761 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4762
4763 case UNLE_EXPR:
2ba172e0 4764 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4765
4766 case UNGT_EXPR:
2ba172e0 4767 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4768
4769 case UNGE_EXPR:
2ba172e0 4770 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4771
4772 case UNEQ_EXPR:
2ba172e0 4773 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4774
4775 case LTGT_EXPR:
2ba172e0 4776 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4777
4778 case COND_EXPR:
4779 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4780
4781 case COMPLEX_EXPR:
4782 gcc_assert (COMPLEX_MODE_P (mode));
4783 if (GET_MODE (op0) == VOIDmode)
4784 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4785 if (GET_MODE (op1) == VOIDmode)
4786 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4787 return gen_rtx_CONCAT (mode, op0, op1);
4788
d02a5a4b
JJ
4789 case CONJ_EXPR:
4790 if (GET_CODE (op0) == CONCAT)
4791 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2ba172e0
JJ
4792 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4793 XEXP (op0, 1),
4794 GET_MODE_INNER (mode)));
d02a5a4b
JJ
4795 else
4796 {
ef4bddc2 4797 machine_mode imode = GET_MODE_INNER (mode);
d02a5a4b
JJ
4798 rtx re, im;
4799
4800 if (MEM_P (op0))
4801 {
4802 re = adjust_address_nv (op0, imode, 0);
4803 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4804 }
4805 else
4806 {
ef4bddc2
RS
4807 machine_mode ifmode = int_mode_for_mode (mode);
4808 machine_mode ihmode = int_mode_for_mode (imode);
d02a5a4b
JJ
4809 rtx halfsize;
4810 if (ifmode == BLKmode || ihmode == BLKmode)
4811 return NULL;
4812 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4813 re = op0;
4814 if (mode != ifmode)
4815 re = gen_rtx_SUBREG (ifmode, re, 0);
4816 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4817 if (imode != ihmode)
4818 re = gen_rtx_SUBREG (imode, re, 0);
4819 im = copy_rtx (op0);
4820 if (mode != ifmode)
4821 im = gen_rtx_SUBREG (ifmode, im, 0);
4822 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4823 if (imode != ihmode)
4824 im = gen_rtx_SUBREG (imode, im, 0);
4825 }
4826 im = gen_rtx_NEG (imode, im);
4827 return gen_rtx_CONCAT (mode, re, im);
4828 }
4829
b5b8b0ac
AO
4830 case ADDR_EXPR:
4831 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4832 if (!op0 || !MEM_P (op0))
c8a27c40
JJ
4833 {
4834 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4835 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4836 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
f8cca67b
JJ
4837 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4838 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
c8a27c40
JJ
4839 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4840
4841 if (handled_component_p (TREE_OPERAND (exp, 0)))
4842 {
4843 HOST_WIDE_INT bitoffset, bitsize, maxsize;
ee45a32d 4844 bool reverse;
c8a27c40 4845 tree decl
ee45a32d
EB
4846 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4847 &bitsize, &maxsize, &reverse);
c8a27c40
JJ
4848 if ((TREE_CODE (decl) == VAR_DECL
4849 || TREE_CODE (decl) == PARM_DECL
4850 || TREE_CODE (decl) == RESULT_DECL)
f8cca67b
JJ
4851 && (!TREE_ADDRESSABLE (decl)
4852 || target_for_debug_bind (decl))
c8a27c40
JJ
4853 && (bitoffset % BITS_PER_UNIT) == 0
4854 && bitsize > 0
4855 && bitsize == maxsize)
0a81f074
RS
4856 {
4857 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4858 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4859 }
c8a27c40
JJ
4860 }
4861
9430b7ba
JJ
4862 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4863 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4864 == ADDR_EXPR)
4865 {
4866 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4867 0));
4868 if (op0 != NULL
4869 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4870 || (GET_CODE (op0) == PLUS
4871 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4872 && CONST_INT_P (XEXP (op0, 1)))))
4873 {
4874 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4875 1));
4876 if (!op1 || !CONST_INT_P (op1))
4877 return NULL;
4878
4879 return plus_constant (mode, op0, INTVAL (op1));
4880 }
4881 }
4882
c8a27c40
JJ
4883 return NULL;
4884 }
b5b8b0ac 4885
a148c4b2 4886 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
f61c6f34 4887 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
dda2da58
AO
4888
4889 return op0;
b5b8b0ac
AO
4890
4891 case VECTOR_CST:
d2a12ae7
RG
4892 {
4893 unsigned i;
4894
4895 op0 = gen_rtx_CONCATN
4896 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4897
4898 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4899 {
4900 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4901 if (!op1)
4902 return NULL;
4903 XVECEXP (op0, 0, i) = op1;
4904 }
4905
4906 return op0;
4907 }
b5b8b0ac
AO
4908
4909 case CONSTRUCTOR:
47598145
MM
4910 if (TREE_CLOBBER_P (exp))
4911 return NULL;
4912 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
b5b8b0ac
AO
4913 {
4914 unsigned i;
4915 tree val;
4916
4917 op0 = gen_rtx_CONCATN
4918 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4919
4920 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4921 {
4922 op1 = expand_debug_expr (val);
4923 if (!op1)
4924 return NULL;
4925 XVECEXP (op0, 0, i) = op1;
4926 }
4927
4928 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4929 {
4930 op1 = expand_debug_expr
e8160c9a 4931 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
b5b8b0ac
AO
4932
4933 if (!op1)
4934 return NULL;
4935
4936 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4937 XVECEXP (op0, 0, i) = op1;
4938 }
4939
4940 return op0;
4941 }
4942 else
4943 goto flag_unsupported;
4944
4945 case CALL_EXPR:
4946 /* ??? Maybe handle some builtins? */
4947 return NULL;
4948
4949 case SSA_NAME:
4950 {
355fe088 4951 gimple *g = get_gimple_for_ssa_name (exp);
2a8e30fb
MM
4952 if (g)
4953 {
dfde35b3
JJ
4954 tree t = NULL_TREE;
4955 if (deep_ter_debug_map)
4956 {
4957 tree *slot = deep_ter_debug_map->get (exp);
4958 if (slot)
4959 t = *slot;
4960 }
4961 if (t == NULL_TREE)
4962 t = gimple_assign_rhs_to_tree (g);
4963 op0 = expand_debug_expr (t);
2a8e30fb
MM
4964 if (!op0)
4965 return NULL;
4966 }
4967 else
4968 {
f11a7b6d
AO
 4969	  /* If this is a reference to an incoming value of a
 4970	     parameter that is never used in the code, or where the
 4971	     incoming value is never used in the code, use the
 4972	     PARM_DECL's DECL_RTL if set.  */
4973 if (SSA_NAME_IS_DEFAULT_DEF (exp)
4974 && SSA_NAME_VAR (exp)
4975 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
4976 && has_zero_uses (exp))
4977 {
4978 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4979 if (op0)
4980 goto adjust_mode;
4981 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
4982 if (op0)
4983 goto adjust_mode;
4984 }
4985
2a8e30fb 4986 int part = var_to_partition (SA.map, exp);
b5b8b0ac 4987
2a8e30fb 4988 if (part == NO_PARTITION)
f11a7b6d 4989 return NULL;
b5b8b0ac 4990
2a8e30fb 4991 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
b5b8b0ac 4992
abfea58d 4993 op0 = copy_rtx (SA.partition_to_pseudo[part]);
2a8e30fb 4994 }
b5b8b0ac
AO
4995 goto adjust_mode;
4996 }
4997
4998 case ERROR_MARK:
4999 return NULL;
5000
7ece48b1
JJ
 5001    /* Vector stuff.  For most of these tree codes we don't have rtl codes.  */
5002 case REALIGN_LOAD_EXPR:
5003 case REDUC_MAX_EXPR:
5004 case REDUC_MIN_EXPR:
5005 case REDUC_PLUS_EXPR:
5006 case VEC_COND_EXPR:
7ece48b1
JJ
5007 case VEC_PACK_FIX_TRUNC_EXPR:
5008 case VEC_PACK_SAT_EXPR:
5009 case VEC_PACK_TRUNC_EXPR:
7ece48b1
JJ
5010 case VEC_UNPACK_FLOAT_HI_EXPR:
5011 case VEC_UNPACK_FLOAT_LO_EXPR:
5012 case VEC_UNPACK_HI_EXPR:
5013 case VEC_UNPACK_LO_EXPR:
5014 case VEC_WIDEN_MULT_HI_EXPR:
5015 case VEC_WIDEN_MULT_LO_EXPR:
3f30a9a6
RH
5016 case VEC_WIDEN_MULT_EVEN_EXPR:
5017 case VEC_WIDEN_MULT_ODD_EXPR:
36ba4aae
IR
5018 case VEC_WIDEN_LSHIFT_HI_EXPR:
5019 case VEC_WIDEN_LSHIFT_LO_EXPR:
3f3af9df 5020 case VEC_PERM_EXPR:
7ece48b1
JJ
5021 return NULL;
5022
98449720 5023 /* Misc codes. */
7ece48b1
JJ
5024 case ADDR_SPACE_CONVERT_EXPR:
5025 case FIXED_CONVERT_EXPR:
5026 case OBJ_TYPE_REF:
5027 case WITH_SIZE_EXPR:
5028 return NULL;
5029
5030 case DOT_PROD_EXPR:
5031 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5032 && SCALAR_INT_MODE_P (mode))
5033 {
2ba172e0
JJ
5034 op0
5035 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5036 0)))
5037 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5038 inner_mode);
5039 op1
5040 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5041 1)))
5042 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5043 inner_mode);
5044 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5045 return simplify_gen_binary (PLUS, mode, op0, op2);
7ece48b1
JJ
5046 }
5047 return NULL;
5048
5049 case WIDEN_MULT_EXPR:
0354c0c7
BS
5050 case WIDEN_MULT_PLUS_EXPR:
5051 case WIDEN_MULT_MINUS_EXPR:
7ece48b1
JJ
5052 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5053 && SCALAR_INT_MODE_P (mode))
5054 {
2ba172e0 5055 inner_mode = GET_MODE (op0);
7ece48b1 5056 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5b58b39b 5057 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
7ece48b1 5058 else
5b58b39b 5059 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
7ece48b1 5060 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5b58b39b 5061 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
7ece48b1 5062 else
5b58b39b 5063 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
2ba172e0 5064 op0 = simplify_gen_binary (MULT, mode, op0, op1);
0354c0c7
BS
5065 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5066 return op0;
5067 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
2ba172e0 5068 return simplify_gen_binary (PLUS, mode, op0, op2);
0354c0c7 5069 else
2ba172e0 5070 return simplify_gen_binary (MINUS, mode, op2, op0);
7ece48b1
JJ
5071 }
5072 return NULL;
5073
98449720
RH
5074 case MULT_HIGHPART_EXPR:
5075 /* ??? Similar to the above. */
5076 return NULL;
5077
7ece48b1 5078 case WIDEN_SUM_EXPR:
3f3af9df 5079 case WIDEN_LSHIFT_EXPR:
7ece48b1
JJ
5080 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5081 && SCALAR_INT_MODE_P (mode))
5082 {
2ba172e0
JJ
5083 op0
5084 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5085 0)))
5086 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5087 inner_mode);
3f3af9df
JJ
5088 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5089 ? ASHIFT : PLUS, mode, op0, op1);
7ece48b1
JJ
5090 }
5091 return NULL;
5092
0f59b812 5093 case FMA_EXPR:
2ba172e0 5094 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
0f59b812 5095
b5b8b0ac
AO
5096 default:
5097 flag_unsupported:
b2b29377
MM
5098 if (flag_checking)
5099 {
5100 debug_tree (exp);
5101 gcc_unreachable ();
5102 }
b5b8b0ac 5103 return NULL;
b5b8b0ac
AO
5104 }
5105}
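/* For illustration, consider a statement like

     long c = (long) a * (long) b;   // WIDEN_MULT_EXPR on 32-bit ints

   The WIDEN_MULT_EXPR case above would describe the value with rtl
   roughly of the form

     (mult:DI (sign_extend:DI (reg:SI a)) (sign_extend:DI (reg:SI b)))

   i.e. debug expansion only builds an rtl expression describing the
   value; it never emits executable instructions.  */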
5106
ddb555ed
JJ
5107/* Return an RTX equivalent to the source bind value of the tree expression
5108 EXP. */
5109
5110static rtx
5111expand_debug_source_expr (tree exp)
5112{
5113 rtx op0 = NULL_RTX;
ef4bddc2 5114 machine_mode mode = VOIDmode, inner_mode;
ddb555ed
JJ
5115
5116 switch (TREE_CODE (exp))
5117 {
5118 case PARM_DECL:
5119 {
ddb555ed 5120 mode = DECL_MODE (exp);
12c5ffe5
EB
5121 op0 = expand_debug_parm_decl (exp);
5122 if (op0)
5123 break;
ddb555ed
JJ
5124 /* See if this isn't an argument that has been completely
5125 optimized out. */
5126 if (!DECL_RTL_SET_P (exp)
12c5ffe5 5127 && !DECL_INCOMING_RTL (exp)
ddb555ed
JJ
5128 && DECL_ABSTRACT_ORIGIN (current_function_decl))
5129 {
7b575cfa 5130 tree aexp = DECL_ORIGIN (exp);
ddb555ed
JJ
5131 if (DECL_CONTEXT (aexp)
5132 == DECL_ABSTRACT_ORIGIN (current_function_decl))
5133 {
9771b263 5134 vec<tree, va_gc> **debug_args;
ddb555ed
JJ
5135 unsigned int ix;
5136 tree ddecl;
ddb555ed
JJ
5137 debug_args = decl_debug_args_lookup (current_function_decl);
5138 if (debug_args != NULL)
5139 {
9771b263 5140 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
ddb555ed
JJ
5141 ix += 2)
5142 if (ddecl == aexp)
5143 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5144 }
5145 }
5146 }
5147 break;
5148 }
5149 default:
5150 break;
5151 }
5152
5153 if (op0 == NULL_RTX)
5154 return NULL_RTX;
5155
5156 inner_mode = GET_MODE (op0);
5157 if (mode == inner_mode)
5158 return op0;
5159
5160 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5161 {
5162 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
5163 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5164 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
5165 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5166 else
5167 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5168 }
5169 else if (FLOAT_MODE_P (mode))
5170 gcc_unreachable ();
5171 else if (FLOAT_MODE_P (inner_mode))
5172 {
5173 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5174 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5175 else
5176 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5177 }
5178 else if (CONSTANT_P (op0)
5179 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3403a1a9 5180 op0 = lowpart_subreg (mode, op0, inner_mode);
ddb555ed
JJ
5181 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5182 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5183 else
5184 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5185
5186 return op0;
5187}
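/* For illustration: when a parameter has been optimized away entirely,
   the PARM_DECL case above searches the debug_args vector of the
   current function for the parameter's abstract origin and, if found,
   describes it as

     (debug_parameter_ref:SI <origin decl>)

   i.e. the source bind value refers to the parameter by its abstract
   origin rather than to any location in the optimized code.  */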
5188
6cfa417f
JJ
 5189/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5190 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5191 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
5192
5193static void
b47aae36 5194avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
6cfa417f
JJ
5195{
5196 rtx exp = *exp_p;
5197
5198 if (exp == NULL_RTX)
5199 return;
5200
5201 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5202 return;
5203
5204 if (depth == 4)
5205 {
5206 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
5207 rtx dval = make_debug_expr_from_rtl (exp);
5208
5209 /* Emit a debug bind insn before INSN. */
5210 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5211 DEBUG_EXPR_TREE_DECL (dval), exp,
5212 VAR_INIT_STATUS_INITIALIZED);
5213
5214 emit_debug_insn_before (bind, insn);
5215 *exp_p = dval;
5216 return;
5217 }
5218
5219 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5220 int i, j;
5221 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5222 switch (*format_ptr++)
5223 {
5224 case 'e':
5225 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5226 break;
5227
5228 case 'E':
5229 case 'V':
5230 for (j = 0; j < XVECLEN (exp, i); j++)
5231 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5232 break;
5233
5234 default:
5235 break;
5236 }
5237}
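/* For illustration: if a debug insn ends up with a location whose rtl
   nests more than four levels deep (say, a sum of products whose
   factors are themselves sums of products), the walk above splits the
   offending subexpression out into a new debug expression D#1, emits
   an extra debug bind of D#1 to that subexpression just before the
   insn, and replaces the subexpression in the original location with
   D#1.  */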
5238
b5b8b0ac
AO
5239/* Expand the _LOCs in debug insns. We run this after expanding all
5240 regular insns, so that any variables referenced in the function
5241 will have their DECL_RTLs set. */
5242
5243static void
5244expand_debug_locations (void)
5245{
b47aae36
DM
5246 rtx_insn *insn;
5247 rtx_insn *last = get_last_insn ();
b5b8b0ac
AO
5248 int save_strict_alias = flag_strict_aliasing;
5249
 5250	  /* New alias sets created while setting up memory attributes cause
 5251	     -fcompare-debug failures, even though they don't bring about any
 5252	     codegen changes.  */
5253 flag_strict_aliasing = 0;
5254
5255 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5256 if (DEBUG_INSN_P (insn))
5257 {
5258 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
b47aae36
DM
5259 rtx val;
5260 rtx_insn *prev_insn, *insn2;
ef4bddc2 5261 machine_mode mode;
b5b8b0ac
AO
5262
5263 if (value == NULL_TREE)
5264 val = NULL_RTX;
5265 else
5266 {
ddb555ed
JJ
5267 if (INSN_VAR_LOCATION_STATUS (insn)
5268 == VAR_INIT_STATUS_UNINITIALIZED)
5269 val = expand_debug_source_expr (value);
dfde35b3
JJ
5270 /* The avoid_deep_ter_for_debug function inserts
5271 debug bind stmts after SSA_NAME definition, with the
 5272	       SSA_NAME as the whole bind location.  Temporarily disable
 5273	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5274 being defined in this DEBUG_INSN. */
5275 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5276 {
5277 tree *slot = deep_ter_debug_map->get (value);
5278 if (slot)
5279 {
5280 if (*slot == INSN_VAR_LOCATION_DECL (insn))
5281 *slot = NULL_TREE;
5282 else
5283 slot = NULL;
5284 }
5285 val = expand_debug_expr (value);
5286 if (slot)
5287 *slot = INSN_VAR_LOCATION_DECL (insn);
5288 }
ddb555ed
JJ
5289 else
5290 val = expand_debug_expr (value);
b5b8b0ac
AO
5291 gcc_assert (last == get_last_insn ());
5292 }
5293
5294 if (!val)
5295 val = gen_rtx_UNKNOWN_VAR_LOC ();
5296 else
5297 {
5298 mode = GET_MODE (INSN_VAR_LOCATION (insn));
5299
5300 gcc_assert (mode == GET_MODE (val)
5301 || (GET_MODE (val) == VOIDmode
33ffb5c5 5302 && (CONST_SCALAR_INT_P (val)
b5b8b0ac 5303 || GET_CODE (val) == CONST_FIXED
b5b8b0ac
AO
5304 || GET_CODE (val) == LABEL_REF)));
5305 }
5306
5307 INSN_VAR_LOCATION_LOC (insn) = val;
6cfa417f
JJ
5308 prev_insn = PREV_INSN (insn);
5309 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5310 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
b5b8b0ac
AO
5311 }
5312
5313 flag_strict_aliasing = save_strict_alias;
5314}
5315
d2626c0b
YR
 5316/* Perform swapping of the operands of commutative operations so that
 5317   the more expensive operand is expanded first.  */
5318
5319static void
5320reorder_operands (basic_block bb)
5321{
5322 unsigned int *lattice; /* Hold cost of each statement. */
5323 unsigned int i = 0, n = 0;
5324 gimple_stmt_iterator gsi;
5325 gimple_seq stmts;
355fe088 5326 gimple *stmt;
d2626c0b
YR
5327 bool swap;
5328 tree op0, op1;
5329 ssa_op_iter iter;
5330 use_operand_p use_p;
355fe088 5331 gimple *def0, *def1;
d2626c0b
YR
5332
5333 /* Compute cost of each statement using estimate_num_insns. */
5334 stmts = bb_seq (bb);
5335 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5336 {
5337 stmt = gsi_stmt (gsi);
090238ee
YR
5338 if (!is_gimple_debug (stmt))
5339 gimple_set_uid (stmt, n++);
d2626c0b
YR
5340 }
5341 lattice = XNEWVEC (unsigned int, n);
5342 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5343 {
5344 unsigned cost;
5345 stmt = gsi_stmt (gsi);
090238ee
YR
5346 if (is_gimple_debug (stmt))
5347 continue;
d2626c0b
YR
5348 cost = estimate_num_insns (stmt, &eni_size_weights);
5349 lattice[i] = cost;
5350 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5351 {
5352 tree use = USE_FROM_PTR (use_p);
355fe088 5353 gimple *def_stmt;
d2626c0b
YR
5354 if (TREE_CODE (use) != SSA_NAME)
5355 continue;
5356 def_stmt = get_gimple_for_ssa_name (use);
5357 if (!def_stmt)
5358 continue;
5359 lattice[i] += lattice[gimple_uid (def_stmt)];
5360 }
5361 i++;
5362 if (!is_gimple_assign (stmt)
5363 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5364 continue;
5365 op0 = gimple_op (stmt, 1);
5366 op1 = gimple_op (stmt, 2);
5367 if (TREE_CODE (op0) != SSA_NAME
5368 || TREE_CODE (op1) != SSA_NAME)
5369 continue;
5370 /* Swap operands if the second one is more expensive. */
5371 def0 = get_gimple_for_ssa_name (op0);
d2626c0b
YR
5372 def1 = get_gimple_for_ssa_name (op1);
5373 if (!def1)
5374 continue;
5375 swap = false;
68ca4ac9 5376 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
d2626c0b
YR
5377 swap = true;
5378 if (swap)
5379 {
5380 if (dump_file && (dump_flags & TDF_DETAILS))
5381 {
5382 fprintf (dump_file, "Swap operands in stmt:\n");
5383 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5384 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
68ca4ac9 5385 def0 ? lattice[gimple_uid (def0)] : 0,
d2626c0b
YR
5386 lattice[gimple_uid (def1)]);
5387 }
5388 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5389 gimple_assign_rhs2_ptr (stmt));
5390 }
5391 }
5392 XDELETE (lattice);
5393}
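/* For illustration (the costs below are only indicative): in a block like

     t1 = x + y;        // lattice[t1] = 1
     t2 = t1 * z;       // lattice[t2] = 1 + lattice[t1] = 2
     r  = a + t2;       // 'a' has no TERed definition here

   the second operand t2 of the final addition has a higher accumulated
   cost than 'a', so the operands are swapped and the more expensive
   operand is expanded first.  */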
5394
242229bb
JH
5395/* Expand basic block BB from GIMPLE trees to RTL. */
5396
5397static basic_block
f3ddd692 5398expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
242229bb 5399{
726a989a
RB
5400 gimple_stmt_iterator gsi;
5401 gimple_seq stmts;
355fe088 5402 gimple *stmt = NULL;
66e8df53 5403 rtx_note *note;
b47aae36 5404 rtx_insn *last;
242229bb 5405 edge e;
628f6a4e 5406 edge_iterator ei;
242229bb
JH
5407
5408 if (dump_file)
726a989a
RB
5409 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5410 bb->index);
5411
5412 /* Note that since we are now transitioning from GIMPLE to RTL, we
5413 cannot use the gsi_*_bb() routines because they expect the basic
5414 block to be in GIMPLE, instead of RTL. Therefore, we need to
5415 access the BB sequence directly. */
d2626c0b
YR
5416 if (optimize)
5417 reorder_operands (bb);
726a989a 5418 stmts = bb_seq (bb);
3e8b732e
MM
5419 bb->il.gimple.seq = NULL;
5420 bb->il.gimple.phi_nodes = NULL;
bf08ebeb 5421 rtl_profile_for_bb (bb);
5e2d947c
JH
5422 init_rtl_bb_info (bb);
5423 bb->flags |= BB_RTL;
5424
a9b77cd1
ZD
5425 /* Remove the RETURN_EXPR if we may fall though to the exit
5426 instead. */
726a989a
RB
5427 gsi = gsi_last (stmts);
5428 if (!gsi_end_p (gsi)
5429 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
a9b77cd1 5430 {
538dd0b7 5431 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
a9b77cd1
ZD
5432
5433 gcc_assert (single_succ_p (bb));
fefa31b5 5434 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
a9b77cd1 5435
fefa31b5 5436 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
726a989a 5437 && !gimple_return_retval (ret_stmt))
a9b77cd1 5438 {
726a989a 5439 gsi_remove (&gsi, false);
a9b77cd1
ZD
5440 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5441 }
5442 }
5443
726a989a
RB
5444 gsi = gsi_start (stmts);
5445 if (!gsi_end_p (gsi))
8b11009b 5446 {
726a989a
RB
5447 stmt = gsi_stmt (gsi);
5448 if (gimple_code (stmt) != GIMPLE_LABEL)
5449 stmt = NULL;
8b11009b 5450 }
242229bb 5451
134aa83c 5452 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
8b11009b
ZD
5453
5454 if (stmt || elt)
242229bb
JH
5455 {
5456 last = get_last_insn ();
5457
8b11009b
ZD
5458 if (stmt)
5459 {
28ed065e 5460 expand_gimple_stmt (stmt);
726a989a 5461 gsi_next (&gsi);
8b11009b
ZD
5462 }
5463
5464 if (elt)
39c8aaa4 5465 emit_label (*elt);
242229bb 5466
caf93cb0 5467	      /* Java emits line number notes at the top of labels.
c22cacf3 5468 ??? Make this go away once line number notes are obsoleted. */
1130d5e3 5469 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 5470 if (NOTE_P (BB_HEAD (bb)))
1130d5e3 5471 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
242229bb 5472 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528 5473
726a989a 5474 maybe_dump_rtl_for_gimple_stmt (stmt, last);
242229bb
JH
5475 }
5476 else
1130d5e3 5477 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
242229bb
JH
5478
5479 NOTE_BASIC_BLOCK (note) = bb;
5480
726a989a 5481 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 5482 {
cea49550 5483 basic_block new_bb;
242229bb 5484
b5b8b0ac 5485 stmt = gsi_stmt (gsi);
2a8e30fb
MM
5486
5487 /* If this statement is a non-debug one, and we generate debug
5488 insns, then this one might be the last real use of a TERed
5489 SSA_NAME, but where there are still some debug uses further
5490 down. Expanding the current SSA name in such further debug
5491 uses by their RHS might lead to wrong debug info, as coalescing
5492 might make the operands of such RHS be placed into the same
5493 pseudo as something else. Like so:
5494 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5495 use(a_1);
5496 a_2 = ...
5497 #DEBUG ... => a_1
5498 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
 5499	     If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5500 the write to a_2 would actually have clobbered the place which
5501 formerly held a_0.
5502
5503 So, instead of that, we recognize the situation, and generate
5504 debug temporaries at the last real use of TERed SSA names:
5505 a_1 = a_0 + 1;
5506 #DEBUG #D1 => a_1
5507 use(a_1);
5508 a_2 = ...
5509 #DEBUG ... => #D1
5510 */
5511 if (MAY_HAVE_DEBUG_INSNS
5512 && SA.values
5513 && !is_gimple_debug (stmt))
5514 {
5515 ssa_op_iter iter;
5516 tree op;
355fe088 5517 gimple *def;
2a8e30fb 5518
5368224f 5519 location_t sloc = curr_insn_location ();
2a8e30fb
MM
5520
5521 /* Look for SSA names that have their last use here (TERed
5522 names always have only one real use). */
5523 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5524 if ((def = get_gimple_for_ssa_name (op)))
5525 {
5526 imm_use_iterator imm_iter;
5527 use_operand_p use_p;
5528 bool have_debug_uses = false;
5529
5530 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5531 {
5532 if (gimple_debug_bind_p (USE_STMT (use_p)))
5533 {
5534 have_debug_uses = true;
5535 break;
5536 }
5537 }
5538
5539 if (have_debug_uses)
5540 {
871dae34 5541 /* OP is a TERed SSA name, with DEF its defining
2a8e30fb
MM
5542 statement, and where OP is used in further debug
5543 instructions. Generate a debug temporary, and
5544 replace all uses of OP in debug insns with that
5545 temporary. */
355fe088 5546 gimple *debugstmt;
2a8e30fb
MM
5547 tree value = gimple_assign_rhs_to_tree (def);
5548 tree vexpr = make_node (DEBUG_EXPR_DECL);
5549 rtx val;
ef4bddc2 5550 machine_mode mode;
2a8e30fb 5551
5368224f 5552 set_curr_insn_location (gimple_location (def));
2a8e30fb
MM
5553
5554 DECL_ARTIFICIAL (vexpr) = 1;
5555 TREE_TYPE (vexpr) = TREE_TYPE (value);
5556 if (DECL_P (value))
5557 mode = DECL_MODE (value);
5558 else
5559 mode = TYPE_MODE (TREE_TYPE (value));
5560 DECL_MODE (vexpr) = mode;
5561
5562 val = gen_rtx_VAR_LOCATION
5563 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5564
e8c6bb74 5565 emit_debug_insn (val);
2a8e30fb
MM
5566
5567 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5568 {
5569 if (!gimple_debug_bind_p (debugstmt))
5570 continue;
5571
5572 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5573 SET_USE (use_p, vexpr);
5574
5575 update_stmt (debugstmt);
5576 }
5577 }
5578 }
5368224f 5579 set_curr_insn_location (sloc);
2a8e30fb
MM
5580 }
5581
a5883ba0 5582 currently_expanding_gimple_stmt = stmt;
b5b8b0ac 5583
242229bb
JH
5584 /* Expand this statement, then evaluate the resulting RTL and
5585 fixup the CFG accordingly. */
726a989a 5586 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 5587 {
538dd0b7 5588 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
cea49550
RH
5589 if (new_bb)
5590 return new_bb;
5591 }
b5b8b0ac
AO
5592 else if (gimple_debug_bind_p (stmt))
5593 {
5368224f 5594 location_t sloc = curr_insn_location ();
b5b8b0ac
AO
5595 gimple_stmt_iterator nsi = gsi;
5596
5597 for (;;)
5598 {
5599 tree var = gimple_debug_bind_get_var (stmt);
5600 tree value;
5601 rtx val;
ef4bddc2 5602 machine_mode mode;
b5b8b0ac 5603
ec8c1492
JJ
5604 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5605 && TREE_CODE (var) != LABEL_DECL
5606 && !target_for_debug_bind (var))
5607 goto delink_debug_stmt;
5608
b5b8b0ac
AO
5609 if (gimple_debug_bind_has_value_p (stmt))
5610 value = gimple_debug_bind_get_value (stmt);
5611 else
5612 value = NULL_TREE;
5613
5614 last = get_last_insn ();
5615
5368224f 5616 set_curr_insn_location (gimple_location (stmt));
b5b8b0ac
AO
5617
5618 if (DECL_P (var))
5619 mode = DECL_MODE (var);
5620 else
5621 mode = TYPE_MODE (TREE_TYPE (var));
5622
5623 val = gen_rtx_VAR_LOCATION
5624 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5625
e16b6fd0 5626 emit_debug_insn (val);
b5b8b0ac
AO
5627
5628 if (dump_file && (dump_flags & TDF_DETAILS))
5629 {
5630 /* We can't dump the insn with a TREE where an RTX
5631 is expected. */
e8c6bb74 5632 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
b5b8b0ac 5633 maybe_dump_rtl_for_gimple_stmt (stmt, last);
e8c6bb74 5634 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
b5b8b0ac
AO
5635 }
5636
ec8c1492 5637 delink_debug_stmt:
2a8e30fb
MM
5638 /* In order not to generate too many debug temporaries,
5639 we delink all uses of debug statements we already expanded.
5640 Therefore debug statements between definition and real
5641 use of TERed SSA names will continue to use the SSA name,
5642 and not be replaced with debug temps. */
5643 delink_stmt_imm_use (stmt);
5644
b5b8b0ac
AO
5645 gsi = nsi;
5646 gsi_next (&nsi);
5647 if (gsi_end_p (nsi))
5648 break;
5649 stmt = gsi_stmt (nsi);
5650 if (!gimple_debug_bind_p (stmt))
5651 break;
5652 }
5653
5368224f 5654 set_curr_insn_location (sloc);
ddb555ed
JJ
5655 }
5656 else if (gimple_debug_source_bind_p (stmt))
5657 {
5368224f 5658 location_t sloc = curr_insn_location ();
ddb555ed
JJ
5659 tree var = gimple_debug_source_bind_get_var (stmt);
5660 tree value = gimple_debug_source_bind_get_value (stmt);
5661 rtx val;
ef4bddc2 5662 machine_mode mode;
ddb555ed
JJ
5663
5664 last = get_last_insn ();
5665
5368224f 5666 set_curr_insn_location (gimple_location (stmt));
ddb555ed
JJ
5667
5668 mode = DECL_MODE (var);
5669
5670 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5671 VAR_INIT_STATUS_UNINITIALIZED);
5672
5673 emit_debug_insn (val);
5674
5675 if (dump_file && (dump_flags & TDF_DETAILS))
5676 {
5677 /* We can't dump the insn with a TREE where an RTX
5678 is expected. */
5679 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5680 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5681 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5682 }
5683
5368224f 5684 set_curr_insn_location (sloc);
b5b8b0ac 5685 }
80c7a9eb 5686 else
242229bb 5687 {
538dd0b7
DM
5688 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5689 if (call_stmt
5690 && gimple_call_tail_p (call_stmt)
f3ddd692 5691 && disable_tail_calls)
538dd0b7 5692 gimple_call_set_tail (call_stmt, false);
f3ddd692 5693
538dd0b7 5694 if (call_stmt && gimple_call_tail_p (call_stmt))
cea49550
RH
5695 {
5696 bool can_fallthru;
538dd0b7 5697 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
cea49550
RH
5698 if (new_bb)
5699 {
5700 if (can_fallthru)
5701 bb = new_bb;
5702 else
5703 return new_bb;
5704 }
5705 }
4d7a65ea 5706 else
b7211528 5707 {
4e3825db 5708 def_operand_p def_p;
4e3825db
MM
5709 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5710
5711 if (def_p != NULL)
5712 {
5713 /* Ignore this stmt if it is in the list of
5714 replaceable expressions. */
5715 if (SA.values
b8698a0f 5716 && bitmap_bit_p (SA.values,
e97809c6 5717 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4e3825db
MM
5718 continue;
5719 }
28ed065e 5720 last = expand_gimple_stmt (stmt);
726a989a 5721 maybe_dump_rtl_for_gimple_stmt (stmt, last);
b7211528 5722 }
242229bb
JH
5723 }
5724 }
5725
a5883ba0
MM
5726 currently_expanding_gimple_stmt = NULL;
5727
7241571e 5728 /* Expand implicit goto and convert goto_locus. */
a9b77cd1
ZD
5729 FOR_EACH_EDGE (e, ei, bb->succs)
5730 {
2f13f2de 5731 if (e->goto_locus != UNKNOWN_LOCATION)
5368224f 5732 set_curr_insn_location (e->goto_locus);
7241571e
JJ
5733 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5734 {
5735 emit_jump (label_rtx_for_bb (e->dest));
5736 e->flags &= ~EDGE_FALLTHRU;
5737 }
a9b77cd1
ZD
5738 }
5739
ae761c45
AH
 5740	  /* Expanded RTL can create a jump as the last instruction of a block.
 5741	     This might later be assumed to be a jump to the successor and break edge insertion.
 5742	     We need to insert a dummy move to prevent this.  PR41440.  */
5743 if (single_succ_p (bb)
5744 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5745 && (last = get_last_insn ())
5746 && JUMP_P (last))
5747 {
5748 rtx dummy = gen_reg_rtx (SImode);
5749 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5750 }
5751
242229bb
JH
5752 do_pending_stack_adjust ();
5753
3f117656 5754 /* Find the block tail. The last insn in the block is the insn
242229bb
JH
5755 before a barrier and/or table jump insn. */
5756 last = get_last_insn ();
4b4bf941 5757 if (BARRIER_P (last))
242229bb
JH
5758 last = PREV_INSN (last);
5759 if (JUMP_TABLE_DATA_P (last))
5760 last = PREV_INSN (PREV_INSN (last));
1130d5e3 5761 BB_END (bb) = last;
caf93cb0 5762
242229bb 5763 update_bb_for_insn (bb);
80c7a9eb 5764
242229bb
JH
5765 return bb;
5766}
5767
5768
5769/* Create a basic block for initialization code. */
5770
5771static basic_block
5772construct_init_block (void)
5773{
5774 basic_block init_block, first_block;
fd44f634
JH
5775 edge e = NULL;
5776 int flags;
275a4187 5777
fd44f634 5778 /* Multiple entry points not supported yet. */
fefa31b5
DM
5779 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5780 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5781 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5782 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5783 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
242229bb 5784
fefa31b5 5785 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
275a4187 5786
fd44f634
JH
5787 /* When entry edge points to first basic block, we don't need jump,
5788 otherwise we have to jump into proper target. */
fefa31b5 5789 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
fd44f634 5790 {
726a989a 5791 tree label = gimple_block_label (e->dest);
fd44f634 5792
1476d1bd 5793 emit_jump (jump_target_rtx (label));
fd44f634 5794 flags = 0;
275a4187 5795 }
fd44f634
JH
5796 else
5797 flags = EDGE_FALLTHRU;
242229bb
JH
5798
5799 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5800 get_last_insn (),
fefa31b5
DM
5801 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5802 init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5803 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
726338f4 5804 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
242229bb
JH
5805 if (e)
5806 {
5807 first_block = e->dest;
5808 redirect_edge_succ (e, init_block);
fd44f634 5809 e = make_edge (init_block, first_block, flags);
242229bb
JH
5810 }
5811 else
fefa31b5 5812 e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
242229bb 5813 e->probability = REG_BR_PROB_BASE;
fefa31b5 5814 e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
242229bb
JH
5815
5816 update_bb_for_insn (init_block);
5817 return init_block;
5818}
5819
55e092c4
JH
5820/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5821 found in the block tree. */
5822
5823static void
5824set_block_levels (tree block, int level)
5825{
5826 while (block)
5827 {
5828 BLOCK_NUMBER (block) = level;
5829 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5830 block = BLOCK_CHAIN (block);
5831 }
5832}
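/* For illustration: given a block tree

     BLOCK A
       BLOCK B
         BLOCK C
       BLOCK D

   the walk above assigns BLOCK_NUMBER 0 to A, 1 to B and D (siblings
   reached through BLOCK_CHAIN), and 2 to C; change_scope can then use
   these depths to find the common parent of two scopes.  */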
242229bb
JH
5833
5834/* Create a block containing landing pads and similar stuff. */
5835
5836static void
5837construct_exit_block (void)
5838{
b47aae36
DM
5839 rtx_insn *head = get_last_insn ();
5840 rtx_insn *end;
242229bb 5841 basic_block exit_block;
628f6a4e
BE
5842 edge e, e2;
5843 unsigned ix;
5844 edge_iterator ei;
79c7fda6 5845 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
b47aae36 5846 rtx_insn *orig_end = BB_END (prev_bb);
242229bb 5847
fefa31b5 5848 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
bf08ebeb 5849
caf93cb0 5850 /* Make sure the locus is set to the end of the function, so that
242229bb 5851 epilogue line numbers and warnings are set properly. */
2f13f2de 5852 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
242229bb
JH
5853 input_location = cfun->function_end_locus;
5854
242229bb
JH
5855 /* Generate rtl for function exit. */
5856 expand_function_end ();
5857
5858 end = get_last_insn ();
5859 if (head == end)
5860 return;
79c7fda6
JJ
 5861	  /* While emitting the function end we could move the end of the last basic
5862 block. */
1130d5e3 5863 BB_END (prev_bb) = orig_end;
4b4bf941 5864 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 5865 head = NEXT_INSN (head);
79c7fda6
JJ
5866 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5867 bb frequency counting will be confused. Any instructions before that
5868 label are emitted for the case where PREV_BB falls through into the
5869 exit block, so append those instructions to prev_bb in that case. */
5870 if (NEXT_INSN (head) != return_label)
5871 {
5872 while (NEXT_INSN (head) != return_label)
5873 {
5874 if (!NOTE_P (NEXT_INSN (head)))
1130d5e3 5875 BB_END (prev_bb) = NEXT_INSN (head);
79c7fda6
JJ
5876 head = NEXT_INSN (head);
5877 }
5878 }
5879 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
fefa31b5
DM
5880 exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5881 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
726338f4 5882 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
628f6a4e
BE
5883
5884 ix = 0;
fefa31b5 5885 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
242229bb 5886 {
fefa31b5 5887 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
242229bb 5888 if (!(e->flags & EDGE_ABNORMAL))
628f6a4e
BE
5889 redirect_edge_succ (e, exit_block);
5890 else
5891 ix++;
242229bb 5892 }
628f6a4e 5893
fefa31b5 5894 e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
242229bb 5895 e->probability = REG_BR_PROB_BASE;
fefa31b5
DM
5896 e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5897 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
242229bb
JH
5898 if (e2 != e)
5899 {
c22cacf3 5900 e->count -= e2->count;
242229bb
JH
5901 exit_block->count -= e2->count;
5902 exit_block->frequency -= EDGE_FREQUENCY (e2);
5903 }
5904 if (e->count < 0)
5905 e->count = 0;
5906 if (exit_block->count < 0)
5907 exit_block->count = 0;
5908 if (exit_block->frequency < 0)
5909 exit_block->frequency = 0;
5910 update_bb_for_insn (exit_block);
5911}
5912
c22cacf3 5913/* Helper function for discover_nonconstant_array_refs.
a1b23b2f
UW
5914 Look for ARRAY_REF nodes with non-constant indexes and mark them
5915 addressable. */
5916
5917static tree
5918discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5919 void *data ATTRIBUTE_UNUSED)
5920{
5921 tree t = *tp;
5922
5923 if (IS_TYPE_OR_DECL_P (t))
5924 *walk_subtrees = 0;
5925 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5926 {
5927 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5928 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5929 && (!TREE_OPERAND (t, 2)
5930 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5931 || (TREE_CODE (t) == COMPONENT_REF
5932 && (!TREE_OPERAND (t,2)
5933 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5934 || TREE_CODE (t) == BIT_FIELD_REF
5935 || TREE_CODE (t) == REALPART_EXPR
5936 || TREE_CODE (t) == IMAGPART_EXPR
5937 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1043771b 5938 || CONVERT_EXPR_P (t))
a1b23b2f
UW
5939 t = TREE_OPERAND (t, 0);
5940
5941 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5942 {
5943 t = get_base_address (t);
6f11d690
RG
5944 if (t && DECL_P (t)
5945 && DECL_MODE (t) != BLKmode)
a1b23b2f
UW
5946 TREE_ADDRESSABLE (t) = 1;
5947 }
5948
5949 *walk_subtrees = 0;
5950 }
5951
5952 return NULL_TREE;
5953}
5954
5955/* RTL expansion is not able to compile array references with variable
 5956	   offsets for arrays stored in a single register.  Discover such
5957 expressions and mark variables as addressable to avoid this
5958 scenario. */
5959
5960static void
5961discover_nonconstant_array_refs (void)
5962{
5963 basic_block bb;
726a989a 5964 gimple_stmt_iterator gsi;
a1b23b2f 5965
11cd3bed 5966 FOR_EACH_BB_FN (bb, cfun)
726a989a
RB
5967 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5968 {
355fe088 5969 gimple *stmt = gsi_stmt (gsi);
aa847cc8
JJ
5970 if (!is_gimple_debug (stmt))
5971 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
726a989a 5972 }
a1b23b2f
UW
5973}
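/* For illustration: given

     struct { int a[2]; } v;    // small enough to get a non-BLK mode
     ... = v.a[i];              // non-constant index i

   the walk above marks 'v' as TREE_ADDRESSABLE, forcing it into
   memory, since expansion could not index with a variable offset into
   a value kept entirely in a single register.  */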
5974
2e3f842f
L
5975/* This function sets crtl->args.internal_arg_pointer to a virtual
5976 register if DRAP is needed. Local register allocator will replace
5977 virtual_incoming_args_rtx with the virtual register. */
5978
5979static void
5980expand_stack_alignment (void)
5981{
5982 rtx drap_rtx;
e939805b 5983 unsigned int preferred_stack_boundary;
2e3f842f
L
5984
5985 if (! SUPPORTS_STACK_ALIGNMENT)
5986 return;
b8698a0f 5987
2e3f842f
L
5988 if (cfun->calls_alloca
5989 || cfun->has_nonlocal_label
5990 || crtl->has_nonlocal_goto)
5991 crtl->need_drap = true;
5992
890b9b96
L
5993 /* Call update_stack_boundary here again to update incoming stack
5994 boundary. It may set incoming stack alignment to a different
5995 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
5996 use the minimum incoming stack alignment to check if it is OK
5997 to perform sibcall optimization since sibcall optimization will
5998 only align the outgoing stack to incoming stack boundary. */
5999 if (targetm.calls.update_stack_boundary)
6000 targetm.calls.update_stack_boundary ();
6001
6002 /* The incoming stack frame has to be aligned at least at
6003 parm_stack_boundary. */
6004 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
2e3f842f 6005
2e3f842f
L
6006 /* Update crtl->stack_alignment_estimated and use it later to align
6007 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
6008 exceptions since callgraph doesn't collect incoming stack alignment
6009 in this case. */
8f4f502f 6010 if (cfun->can_throw_non_call_exceptions
2e3f842f
L
6011 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6012 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6013 else
6014 preferred_stack_boundary = crtl->preferred_stack_boundary;
6015 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6016 crtl->stack_alignment_estimated = preferred_stack_boundary;
6017 if (preferred_stack_boundary > crtl->stack_alignment_needed)
6018 crtl->stack_alignment_needed = preferred_stack_boundary;
6019
890b9b96
L
6020 gcc_assert (crtl->stack_alignment_needed
6021 <= crtl->stack_alignment_estimated);
6022
2e3f842f 6023 crtl->stack_realign_needed
e939805b 6024 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
d2d93c32 6025 crtl->stack_realign_tried = crtl->stack_realign_needed;
2e3f842f
L
6026
6027 crtl->stack_realign_processed = true;
6028
6029 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6030 alignment. */
6031 gcc_assert (targetm.calls.get_drap_rtx != NULL);
b8698a0f 6032 drap_rtx = targetm.calls.get_drap_rtx ();
2e3f842f 6033
d015f7cc
L
6034 /* stack_realign_drap and drap_rtx must match. */
6035 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6036
2e3f842f
L
6037 /* Do nothing if NULL is returned, which means DRAP is not needed. */
6038 if (NULL != drap_rtx)
6039 {
6040 crtl->args.internal_arg_pointer = drap_rtx;
6041
6042 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6043 needed. */
6044 fixup_tail_calls ();
6045 }
6046}
862d0b35
DN
6047\f
6048
6049static void
6050expand_main_function (void)
6051{
6052#if (defined(INVOKE__main) \
6053 || (!defined(HAS_INIT_SECTION) \
6054 && !defined(INIT_SECTION_ASM_OP) \
6055 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6056 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
6057#endif
6058}
6059\f
6060
6061/* Expand code to initialize the stack_protect_guard. This is invoked at
6062 the beginning of a function to be protected. */
6063
862d0b35
DN
6064static void
6065stack_protect_prologue (void)
6066{
6067 tree guard_decl = targetm.stack_protect_guard ();
6068 rtx x, y;
6069
6070 x = expand_normal (crtl->stack_protect_guard);
6071 y = expand_normal (guard_decl);
6072
6073 /* Allow the target to copy from Y to X without leaking Y into a
6074 register. */
c65aa042
RS
6075 if (targetm.have_stack_protect_set ())
6076 if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6077 {
6078 emit_insn (insn);
6079 return;
6080 }
862d0b35
DN
6081
6082 /* Otherwise do a straight move. */
6083 emit_move_insn (x, y);
6084}
2e3f842f 6085
242229bb
JH
6086/* Translate the intermediate representation contained in the CFG
6087 from GIMPLE trees to RTL.
6088
6089 We do conversion per basic block and preserve/update the tree CFG.
6090 This implies we have to do some magic as the CFG can simultaneously
6091 consist of basic blocks containing RTL and GIMPLE trees. This can
61ada8ae 6092 confuse the CFG hooks, so be careful to not manipulate CFG during
242229bb
JH
6093 the expansion. */
6094
be55bfe6
TS
6095namespace {
6096
6097const pass_data pass_data_expand =
6098{
6099 RTL_PASS, /* type */
6100 "expand", /* name */
6101 OPTGROUP_NONE, /* optinfo_flags */
be55bfe6
TS
6102 TV_EXPAND, /* tv_id */
6103 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6104 | PROP_gimple_lcx
f8e89441
TV
6105 | PROP_gimple_lvec
6106 | PROP_gimple_lva), /* properties_required */
be55bfe6
TS
6107 PROP_rtl, /* properties_provided */
6108 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
3bea341f 6109 0, /* todo_flags_start */
be55bfe6
TS
6110 0, /* todo_flags_finish */
6111};
6112
6113class pass_expand : public rtl_opt_pass
6114{
6115public:
6116 pass_expand (gcc::context *ctxt)
6117 : rtl_opt_pass (pass_data_expand, ctxt)
6118 {}
6119
6120 /* opt_pass methods: */
6121 virtual unsigned int execute (function *);
6122
6123}; // class pass_expand
6124
6125unsigned int
6126pass_expand::execute (function *fun)
242229bb
JH
6127{
6128 basic_block bb, init_block;
6129 sbitmap blocks;
0ef90296
ZD
6130 edge_iterator ei;
6131 edge e;
b47aae36 6132 rtx_insn *var_seq, *var_ret_seq;
4e3825db
MM
6133 unsigned i;
6134
f029db69 6135 timevar_push (TV_OUT_OF_SSA);
4e3825db 6136 rewrite_out_of_ssa (&SA);
f029db69 6137 timevar_pop (TV_OUT_OF_SSA);
c302207e 6138 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
242229bb 6139
dfde35b3
JJ
6140 if (MAY_HAVE_DEBUG_STMTS && flag_tree_ter)
6141 {
6142 gimple_stmt_iterator gsi;
6143 FOR_EACH_BB_FN (bb, cfun)
6144 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6145 if (gimple_debug_bind_p (gsi_stmt (gsi)))
6146 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6147 }
6148
be147e84
RG
6149 /* Make sure all values used by the optimization passes have sane
6150 defaults. */
6151 reg_renumber = 0;
6152
4586b4ca
SB
6153 /* Some backends want to know that we are expanding to RTL. */
6154 currently_expanding_to_rtl = 1;
cd7d9fd7
RG
6155 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6156 free_dominance_info (CDI_DOMINATORS);
4586b4ca 6157
be55bfe6 6158 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
bf08ebeb 6159
d5e254e1
IE
6160 if (chkp_function_instrumented_p (current_function_decl))
6161 chkp_reset_rtl_bounds ();
6162
5368224f 6163 insn_locations_init ();
fe8a7779 6164 if (!DECL_IS_BUILTIN (current_function_decl))
1751ecd6
AH
6165 {
6166 /* Eventually, all FEs should explicitly set function_start_locus. */
be55bfe6
TS
6167 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6168 set_curr_insn_location
6169 (DECL_SOURCE_LOCATION (current_function_decl));
1751ecd6 6170 else
be55bfe6 6171 set_curr_insn_location (fun->function_start_locus);
1751ecd6 6172 }
9ff70652 6173 else
5368224f
DC
6174 set_curr_insn_location (UNKNOWN_LOCATION);
6175 prologue_location = curr_insn_location ();
55e092c4 6176
2b21299c
JJ
6177#ifdef INSN_SCHEDULING
6178 init_sched_attrs ();
6179#endif
6180
55e092c4
JH
6181 /* Make sure first insn is a note even if we don't want linenums.
6182 This makes sure the first insn will never be deleted.
6183 Also, final expects a note to appear there. */
6184 emit_note (NOTE_INSN_DELETED);
6429e3be 6185
a1b23b2f
UW
6186 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6187 discover_nonconstant_array_refs ();
6188
e41b2a33 6189 targetm.expand_to_rtl_hook ();
cb91fab0 6190 crtl->stack_alignment_needed = STACK_BOUNDARY;
2e3f842f 6191 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
890b9b96 6192 crtl->stack_alignment_estimated = 0;
cb91fab0 6193 crtl->preferred_stack_boundary = STACK_BOUNDARY;
be55bfe6 6194 fun->cfg->max_jumptable_ents = 0;
cb91fab0 6195
ae9fd6b7
JH
 6196  /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
 6197     of the function section at expansion time to predict the distance of calls.  */
6198 resolve_unique_section (current_function_decl, 0, flag_function_sections);
6199
727a31fa 6200 /* Expand the variables recorded during gimple lowering. */
f029db69 6201 timevar_push (TV_VAR_EXPAND);
3a42502d
RH
6202 start_sequence ();
6203
f3ddd692 6204 var_ret_seq = expand_used_vars ();
3a42502d
RH
6205
6206 var_seq = get_insns ();
6207 end_sequence ();
f029db69 6208 timevar_pop (TV_VAR_EXPAND);
242229bb 6209
7d69de61
RH
6210 /* Honor stack protection warnings. */
6211 if (warn_stack_protect)
6212 {
be55bfe6 6213 if (fun->calls_alloca)
b8698a0f 6214 warning (OPT_Wstack_protector,
3b123595 6215 "stack protector not protecting local variables: "
be55bfe6 6216 "variable length buffer");
cb91fab0 6217 if (has_short_buffer && !crtl->stack_protect_guard)
b8698a0f 6218 warning (OPT_Wstack_protector,
3b123595 6219 "stack protector not protecting function: "
be55bfe6 6220 "all local arrays are less than %d bytes long",
7d69de61
RH
6221 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6222 }
6223
242229bb 6224 /* Set up parameters and prepare for return, for the function. */
b79c5284 6225 expand_function_start (current_function_decl);
242229bb 6226
3a42502d
RH
6227 /* If we emitted any instructions for setting up the variables,
6228 emit them before the FUNCTION_START note. */
6229 if (var_seq)
6230 {
6231 emit_insn_before (var_seq, parm_birth_insn);
6232
6233 /* In expand_function_end we'll insert the alloca save/restore
 6234	 before parm_birth_insn.  We've just inserted an alloca call.
6235 Adjust the pointer to match. */
6236 parm_birth_insn = var_seq;
6237 }
6238
f11a7b6d
AO
6239 /* Now propagate the RTL assignment of each partition to the
6240 underlying var of each SSA_NAME. */
6241 for (i = 1; i < num_ssa_names; i++)
6242 {
6243 tree name = ssa_name (i);
6244
6245 if (!name
6246 /* We might have generated new SSA names in
6247 update_alias_info_with_stack_vars. They will have a NULL
 6248	   defining statement, and won't be part of the partitioning,
6249 so ignore those. */
6250 || !SSA_NAME_DEF_STMT (name))
6251 continue;
6252
6253 adjust_one_expanded_partition_var (name);
6254 }
6255
6256 /* Clean up RTL of variables that straddle across multiple
6257 partitions, and check that the rtl of any PARM_DECLs that are not
6258 cleaned up is that of their default defs. */
d466b407
MM
6259 for (i = 1; i < num_ssa_names; i++)
6260 {
6261 tree name = ssa_name (i);
6262 int part;
d466b407
MM
6263
6264 if (!name
d466b407
MM
6265 /* We might have generated new SSA names in
6266 update_alias_info_with_stack_vars. They will have a NULL
 6267	   defining statement, and won't be part of the partitioning,
6268 so ignore those. */
6269 || !SSA_NAME_DEF_STMT (name))
6270 continue;
6271 part = var_to_partition (SA.map, name);
6272 if (part == NO_PARTITION)
6273 continue;
70b5e7dc 6274
1f9ceff1
AO
6275 /* If this decl was marked as living in multiple places, reset
6276 this now to NULL. */
6277 tree var = SSA_NAME_VAR (name);
6278 if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6279 SET_DECL_RTL (var, NULL);
6280 /* Check that the pseudos chosen by assign_parms are those of
6281 the corresponding default defs. */
6282 else if (SSA_NAME_IS_DEFAULT_DEF (name)
6283 && (TREE_CODE (var) == PARM_DECL
6284 || TREE_CODE (var) == RESULT_DECL))
70b5e7dc 6285 {
1f9ceff1
AO
6286 rtx in = DECL_RTL_IF_SET (var);
6287 gcc_assert (in);
6288 rtx out = SA.partition_to_pseudo[part];
f11a7b6d
AO
6289 gcc_assert (in == out);
6290
6291 /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6292 those expected by debug backends for each parm and for
6293 the result. This is particularly important for stabs,
6294 whose register elimination from parm's DECL_RTL may cause
6295 -fcompare-debug differences as SET_DECL_RTL changes reg's
6296 attrs. So, make sure the RTL already has the parm as the
6297 EXPR, so that it won't change. */
6298 SET_DECL_RTL (var, NULL_RTX);
6299 if (MEM_P (in))
6300 set_mem_attributes (in, var, true);
6301 SET_DECL_RTL (var, in);
70b5e7dc 6302 }
d466b407
MM
6303 }
6304
242229bb
JH
6305 /* If this function is `main', emit a call to `__main'
6306 to run global initializers, etc. */
6307 if (DECL_NAME (current_function_decl)
6308 && MAIN_NAME_P (DECL_NAME (current_function_decl))
6309 && DECL_FILE_SCOPE_P (current_function_decl))
6310 expand_main_function ();
6311
7d69de61
RH
6312 /* Initialize the stack_protect_guard field. This must happen after the
6313 call to __main (if any) so that the external decl is initialized. */
cb91fab0 6314 if (crtl->stack_protect_guard)
7d69de61
RH
6315 stack_protect_prologue ();
6316
4e3825db
MM
6317 expand_phi_nodes (&SA);
6318
0d334e37 6319 /* Release any stale SSA redirection data. */
b3e46655 6320 redirect_edge_var_map_empty ();
0d334e37 6321
3fbd86b1 6322 /* Register rtl specific functions for cfg. */
242229bb
JH
6323 rtl_register_cfg_hooks ();
6324
6325 init_block = construct_init_block ();
6326
0ef90296 6327 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4e3825db 6328 remaining edges later. */
be55bfe6 6329 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
0ef90296
ZD
6330 e->flags &= ~EDGE_EXECUTABLE;
6331
134aa83c 6332 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
be55bfe6 6333 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
fefa31b5 6334 next_bb)
f3ddd692 6335 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
bf08ebeb 6336
b5b8b0ac
AO
6337 if (MAY_HAVE_DEBUG_INSNS)
6338 expand_debug_locations ();
6339
dfde35b3
JJ
6340 if (deep_ter_debug_map)
6341 {
6342 delete deep_ter_debug_map;
6343 deep_ter_debug_map = NULL;
6344 }
6345
452aa9c5
RG
6346 /* Free stuff we no longer need after GIMPLE optimizations. */
6347 free_dominance_info (CDI_DOMINATORS);
6348 free_dominance_info (CDI_POST_DOMINATORS);
61183076 6349 delete_tree_cfg_annotations (fun);
452aa9c5 6350
f029db69 6351 timevar_push (TV_OUT_OF_SSA);
4e3825db 6352 finish_out_of_ssa (&SA);
f029db69 6353 timevar_pop (TV_OUT_OF_SSA);
4e3825db 6354
f029db69 6355 timevar_push (TV_POST_EXPAND);
91753e21 6356 /* We are no longer in SSA form. */
be55bfe6 6357 fun->gimple_df->in_ssa_p = false;
726338f4 6358 loops_state_clear (LOOP_CLOSED_SSA);
91753e21 6359
bf08ebeb
JH
 6360  /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6361 conservatively to true until they are all profile aware. */
39c8aaa4 6362 delete lab_rtx_for_bb;
61183076 6363 free_histograms (fun);
242229bb
JH
6364
6365 construct_exit_block ();
5368224f 6366 insn_locations_finalize ();
242229bb 6367
f3ddd692
JJ
6368 if (var_ret_seq)
6369 {
dc01c3d1 6370 rtx_insn *after = return_label;
b47aae36 6371 rtx_insn *next = NEXT_INSN (after);
f3ddd692
JJ
6372 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6373 after = next;
6374 emit_insn_after (var_ret_seq, after);
6375 }
6376
1d65f45c 6377 /* Zap the tree EH table. */
be55bfe6 6378 set_eh_throw_stmt_table (fun, NULL);
242229bb 6379
42821aff
MM
 6380  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6381 split edges which edge insertions might do. */
242229bb 6382 rebuild_jump_labels (get_insns ());
242229bb 6383
be55bfe6
TS
6384 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6385 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
4e3825db
MM
6386 {
6387 edge e;
6388 edge_iterator ei;
6389 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6390 {
6391 if (e->insns.r)
bc470c24 6392 {
3ffa95c2 6393 rebuild_jump_labels_chain (e->insns.r);
e40191f1
TV
6394 /* Put insns after parm birth, but before
6395 NOTE_INSNS_FUNCTION_BEG. */
be55bfe6
TS
6396 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6397 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
bc470c24 6398 {
3ffa95c2
DM
6399 rtx_insn *insns = e->insns.r;
6400 e->insns.r = NULL;
e40191f1
TV
6401 if (NOTE_P (parm_birth_insn)
6402 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6403 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6404 else
6405 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
bc470c24
JJ
6406 }
6407 else
6408 commit_one_edge_insertion (e);
6409 }
4e3825db
MM
6410 else
6411 ei_next (&ei);
6412 }
6413 }
6414
6415 /* We're done expanding trees to RTL. */
6416 currently_expanding_to_rtl = 0;
6417
1b223a9f
AO
6418 flush_mark_addressable_queue ();
6419
be55bfe6
TS
6420 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6421 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
4e3825db
MM
6422 {
6423 edge e;
6424 edge_iterator ei;
6425 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6426 {
6427 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6428 e->flags &= ~EDGE_EXECUTABLE;
6429
6430 /* At the moment not all abnormal edges match the RTL
6431 representation. It is safe to remove them here as
6432 find_many_sub_basic_blocks will rediscover them.
6433 In the future we should get this fixed properly. */
6434 if ((e->flags & EDGE_ABNORMAL)
6435 && !(e->flags & EDGE_SIBCALL))
6436 remove_edge (e);
6437 else
6438 ei_next (&ei);
6439 }
6440 }
6441
be55bfe6 6442 blocks = sbitmap_alloc (last_basic_block_for_fn (fun));
f61e445a 6443 bitmap_ones (blocks);
242229bb 6444 find_many_sub_basic_blocks (blocks);
242229bb 6445 sbitmap_free (blocks);
4e3825db 6446 purge_all_dead_edges ();
242229bb 6447
2e3f842f
L
6448 expand_stack_alignment ();
6449
be147e84
RG
6450 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6451 function. */
6452 if (crtl->tail_call_emit)
6453 fixup_tail_calls ();
6454
dac1fbf8
RG
6455 /* After initial rtl generation, call back to finish generating
6456 exception support code. We need to do this before cleaning up
6457 the CFG as the code does not expect dead landing pads. */
be55bfe6 6458 if (fun->eh->region_tree != NULL)
dac1fbf8
RG
6459 finish_eh_generation ();
6460
6461 /* Remove unreachable blocks, otherwise we cannot compute dominators
6462 which are needed for loop state verification. As a side-effect
6463 this also compacts blocks.
6464 ??? We cannot remove trivially dead insns here as for example
6465 the DRAP reg on i?86 is not magically live at this point.
6466 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6467 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6468
b2b29377 6469 checking_verify_flow_info ();
9f8628ba 6470
be147e84
RG
6471 /* Initialize pseudos allocated for hard registers. */
6472 emit_initial_value_sets ();
6473
6474 /* And finally unshare all RTL. */
6475 unshare_all_rtl ();
6476
9f8628ba
PB
6477 /* There's no need to defer outputting this function any more; we
6478 know we want to output it. */
6479 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6480
6481 /* Now that we're done expanding trees to RTL, we shouldn't have any
6482 more CONCATs anywhere. */
6483 generating_concat_p = 0;
6484
b7211528
SB
6485 if (dump_file)
6486 {
6487 fprintf (dump_file,
6488 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6489 /* And the pass manager will dump RTL for us. */
6490 }
ef330312
PB
6491
6492 /* If we're emitting a nested function, make sure its parent gets
6493 emitted as well. Doing otherwise confuses debug info. */
be55bfe6
TS
6494 {
6495 tree parent;
6496 for (parent = DECL_CONTEXT (current_function_decl);
6497 parent != NULL_TREE;
6498 parent = get_containing_scope (parent))
6499 if (TREE_CODE (parent) == FUNCTION_DECL)
6500 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6501 }
c22cacf3 6502
ef330312
PB
6503 /* We are now committed to emitting code for this function. Do any
6504 preparation, such as emitting abstract debug info for the inline
6505 before it gets mangled by optimization. */
6506 if (cgraph_function_possibly_inlined_p (current_function_decl))
6507 (*debug_hooks->outlining_inline_function) (current_function_decl);
6508
6509 TREE_ASM_WRITTEN (current_function_decl) = 1;
4bb1e037
AP
6510
6511 /* After expanding, the return labels are no longer needed. */
6512 return_label = NULL;
6513 naked_return_label = NULL;
0a35513e
AH
6514
6515 /* After expanding, the tm_restart map is no longer needed. */
be55bfe6 6516 if (fun->gimple_df->tm_restart)
50979347 6517 fun->gimple_df->tm_restart = NULL;
0a35513e 6518
55e092c4
JH
6519 /* Tag the blocks with a depth number so that change_scope can find
6520 the common parent easily. */
be55bfe6 6521 set_block_levels (DECL_INITIAL (fun->decl), 0);
bf08ebeb 6522 default_rtl_profile ();
be147e84 6523
f029db69 6524 timevar_pop (TV_POST_EXPAND);
be147e84 6525
c2924966 6526 return 0;
242229bb
JH
6527}
6528
27a4cd48
DM
6529} // anon namespace
6530
6531rtl_opt_pass *
6532make_pass_expand (gcc::context *ctxt)
6533{
6534 return new pass_expand (ctxt);
6535}