/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "tree.h"
#include "stringpool.h"
#include "varasm.h"
#include "stor-layout.h"
#include "stmt.h"
#include "print-tree.h"
#include "tm_p.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "insn-codes.h"
#include "optabs.h"
#include "expr.h"
#include "langhooks.h"
#include "bitmap.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "sbitmap.h"
#include "cfgloop.h"
#include "regs.h" /* For reg_renumber.  */
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "asan.h"
#include "tree-ssa-address.h"
#include "recog.h"
#include "output.h"
#include "builtins.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macro NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}

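
/* Informal example of the RHS classes handled above (a sketch, not an
   exhaustive list): for a statement a = b + c, get_gimple_rhs_class
   returns GIMPLE_BINARY_RHS and a PLUS_EXPR tree is rebuilt with build2;
   for a plain copy a = b it returns GIMPLE_SINGLE_RHS and the operand
   tree is reused directly, copied first only when setting a location or
   block on the shared tree would otherwise clobber it.  */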

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
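
/* A worked example of the pc_rtx convention above (informal sketch with
   invented pseudo numbers): if two SSA partitions of the same base
   variable V land in pseudos r100 and r101, the first set_rtl records
   r100 in DECL_RTL (V); the second sees a different location and
   overwrites it with pc_rtx, the "multiple places" marker.  Separately,
   add_stack_var below reuses pc_rtx as a "no rtl assigned yet" sentinel
   that expand_stack_vars tests for.  */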

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-increasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore any alignment
   we cannot satisfy given the expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
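
/* Informal worked example for the downward-growing case above (a sketch
   with made-up numbers): with frame_phase == 0, frame_offset == -20,
   size == 12 and align == 8, we compute -20 - 12 == -32, already a
   multiple of 8, so the new slot occupies bytes [-32, -20) and -32 is
   returned.  Had frame_offset been -18, -18 - 12 == -30 would round
   down (more negative) to -32 via the &= -align step as well.  */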

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with stack-var indices X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with stack-var indices X and Y
   conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}

/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = decl_to_stack_part->get (lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}

/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case where we, for instance, moved an
     address-taken operation upward without also moving a dereference
     to it upwards.  But it's conservatively correct, as a variable can
     never hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}
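
/* Informal sketch of the iteration above: this is a forward dataflow
   problem solved to a fixed point.  Each block's aux bitmap holds the
   partitions live at its end; add_scope_conflicts_1 recomputes that set
   from the union over predecessors, and the while loop repeats the
   reverse-post-order sweep until no aux bitmap grows.  Only the final
   sweep, with for_conflict true, actually records conflicts.  */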

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
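
/* Example of the resulting order (informal, with invented entries and a
   hypothetical target whose MAX_SUPPORTED_STACK_ALIGNMENT is 16 bytes):
   a 64-byte-aligned variable sorts before everything else ("large"
   first); among the rest, a 128-byte array precedes a 16-byte one, and
   ties fall back to alignment and then DECL_UID / SSA version so the
   qsort result is stable across runs.  */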

struct part_traits : default_hashmap_traits
{
  template<typename T>
    static bool
    is_deleted (T &e)
    { return e.m_value == reinterpret_cast<void *> (1); }

  template<typename T> static bool is_empty (T &e) { return e.m_value == NULL; }
  template<typename T>
    static void
    mark_deleted (T &e)
    { e.m_value = reinterpret_cast<T> (1); }

  template<typename T>
    static void
    mark_empty (T &e)
    { e.m_value = NULL; }
};

typedef hash_map<size_t, bitmap, part_traits> part_hashmap;

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               part_hashmap *decls_to_partitions,
                               hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = new part_hashmap;
          cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          decls_to_partitions->put (uid, part);
          cfun->gimple_df->decls_to_pointers->put (decl, name);
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           &visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
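
/* Informal example of the merge above: if partition A is the chain
   A -> x -> EOC and B is a singleton, the three assignments splice B in
   to give A -> B -> x -> EOC, with B's representative field pointing
   back at A.  B's conflict bitmap is folded into A (via the current
   representatives) and then freed, since only representatives are
   consulted afterwards.  */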

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          HOST_WIDE_INT jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && isize != jsize
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects, making J part of I's partition.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
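
/* A small binpacking example for the loop above (informal, invented
   sizes): after sorting, suppose the representatives are a[256], b[64]
   and c[64], where a conflicts with b but not with c.  Scanning from a
   we skip b (conflict) and union c into a, so a and c share one
   256-byte slot while b keeps its own; without the a/b conflict all
   three could share.  */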

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set the alignment we actually gave this decl if it isn't an SSA
         name.  If it is, we generate stack slots only accidentally, so
         it isn't as important; we'll simply use the alignment that is
         already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}

struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reversed order: highest offset pairs come first.  */
  vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          base = virtual_stack_vars_rtx;
          if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && pred)
            {
              HOST_WIDE_INT prev_offset = frame_offset;
              tree repr_decl = NULL_TREE;

              offset
                = alloc_stack_frame_space (stack_vars[i].size
                                           + ASAN_RED_ZONE_SIZE,
                                           MAX (alignb, ASAN_RED_ZONE_SIZE));
              data->asan_vec.safe_push (prev_offset);
              data->asan_vec.safe_push (offset + stack_vars[i].size);
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);
              data->asan_alignb = MAX (data->asan_alignb, alignb);
              if (data->asan_base == NULL)
                data->asan_base = gen_reg_rtx (Pmode);
              base = data->asan_base;

              if (!STRICT_ALIGNMENT)
                base_align = crtl->max_used_stack_slot_alignment;
              else
                base_align = MAX (crtl->max_used_stack_slot_alignment,
                                  GET_MODE_ALIGNMENT (SImode)
                                  << ASAN_SHADOW_SHIFT);
            }
          else
            {
              offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
              base_align = crtl->max_used_stack_slot_alignment;
            }
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}
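
/* Informal note on the ASAN path above (a sketch with assumed numbers):
   each protected partition is padded with ASAN_RED_ZONE_SIZE bytes, so
   a 40-byte variable with an assumed 32-byte red zone requests 72 bytes
   of frame space (plus alignment).  The pair (prev_offset, offset + size)
   pushed onto asan_vec delimits the padding around the slot, which the
   runtime later poisons so out-of-bounds accesses are caught.  */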

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (var)));
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = ((HOST_WIDE_INT) tree_to_uhwi (DECL_SIZE_UNIT (var))
       < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK))
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}
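
/* Summary of the decision above (informal): with -fstack-protector or
   ASan stack instrumentation everything is deferred, as are "large"
   alignment variables.  Otherwise, at -O0/-O1 toplevel variables and,
   at -O0, any variable below PARAM_MIN_SIZE_FOR_STACK_SHARING bytes are
   allocated immediately; everything else joins the coalescing
   candidate list.  */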

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  Similarly for
         SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
          || DECL_EXTERNAL (var)
          || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that the in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
         realign decision is made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        {
          expand_one_hard_reg_var (var);
          if (!DECL_HARD_REGISTER (var))
            /* Invalid register specification.  */
            expand_one_error_var (var);
        }
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_to_uhwi (DECL_SIZE_UNIT (var));
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3
};

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
            len = max;
          else
            len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
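
/* Classification examples (informal, assuming the default
   --param ssp-buffer-size=8): char buf[4] yields
   SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, char buf[64] yields
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, int v[8] yields just
   SPCT_HAS_ARRAY, and a struct containing any of these ORs
   SPCT_HAS_AGGREGATE into the recursive result of its fields.  */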

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}

/* And a helper function that checks for the asan phase (with stack
   protector it is phase 3).  This is used as a callback for
   expand_stack_vars.  Returns true if any of the vars in the partition
   need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
        return true;
      i = stack_vars[i].next;
    }
  return false;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = new hash_map<tree, size_t>;

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  delete decl_to_stack_part;
  decl_to_stack_part = NULL;
}

/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return size;
}

/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
	tree field_type = TREE_TYPE (f);
	if (RECORD_OR_UNION_TYPE_P (field_type)
	    && record_or_union_type_has_array_p (field_type))
	  return 1;
	if (TREE_CODE (field_type) == ARRAY_TYPE)
	  return 1;
      }
  return 0;
}

/* Check if the current function has local referenced variables that
   have their addresses taken, contain an array, or are arrays.  */

static bool
stack_protect_decl_p ()
{
  unsigned i;
  tree var;

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    if (!is_global_var (var))
      {
	tree var_type = TREE_TYPE (var);
	if (TREE_CODE (var) == VAR_DECL
	    && (TREE_CODE (var_type) == ARRAY_TYPE
		|| TREE_ADDRESSABLE (var)
		|| (RECORD_OR_UNION_TYPE_P (var_type)
		    && record_or_union_type_has_array_p (var_type))))
	  return true;
      }
  return false;
}

/* Check if the current function has calls that use a return slot.  */

static bool
stack_protect_return_slot_p ()
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	 !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple stmt = gsi_stmt (gsi);
	/* This assumes that calls to internal-only functions never
	   use a return slot.  */
	if (is_gimple_call (stmt)
	    && !gimple_call_internal_p (stmt)
	    && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
				  gimple_call_fndecl (stmt)))
	  return true;
      }
  return false;
}
1697
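/* E.g. (illustrative user code): a call whose aggregate value is
   returned in memory uses a return slot and so satisfies the check
   above:

     struct big { char c[64]; };
     struct big g (void);
     void h (void) { struct big b = g (); }

   aggregate_value_p is true for struct big on typical targets, so
   -fstack-protector-strong will request a stack guard here. */
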
60d03123 1698/* Expand all variables used in the function. */
280450fa 1699
74a0cbc4 1700static rtx_insn *
280450fa 1701expand_used_vars (void)
1702{
2ab2ce89 1703 tree var, outer_block = DECL_INITIAL (current_function_decl);
1e094109 1704 vec<tree> maybe_local_decls = vNULL;
74a0cbc4 1705 rtx_insn *var_end_seq = NULL;
a8dd994c 1706 unsigned i;
2ab2ce89 1707 unsigned len;
b156ec37 1708 bool gen_stack_protect_signal = false;
280450fa 1709
60d03123 1710 /* Compute the phase of the stack frame for this function. */
1711 {
1712 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1713 int off = STARTING_FRAME_OFFSET % align;
1714 frame_phase = off ? align - off : 0;
1715 }
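  /* A worked example with assumed numbers: if PREFERRED_STACK_BOUNDARY
     is 128 bits, align is 16; a STARTING_FRAME_OFFSET of 8 then gives
     off = 8 % 16 = 8 and frame_phase = 16 - 8 = 8, i.e. frame offsets
     congruent to 8 mod 16 land on maximally aligned addresses. With a
     STARTING_FRAME_OFFSET of 0, frame_phase is 0. */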
280450fa 1716
4fb07d00 1717 /* Set TREE_USED on all variables in the local_decls. */
1718 FOR_EACH_LOCAL_DECL (cfun, i, var)
1719 TREE_USED (var) = 1;
1720 /* Clear TREE_USED on all variables associated with a block scope. */
1721 clear_tree_used (DECL_INITIAL (current_function_decl));
1722
5a02d67b 1723 init_vars_expansion ();
f1a0edff 1724
ab0f939c 1725 if (targetm.use_pseudo_pic_reg ())
1726 pic_offset_table_rtx = gen_reg_rtx (Pmode);
1727
5f8841a5 1728 hash_map<tree, tree> ssa_name_decls;
a8dd994c 1729 for (i = 0; i < SA.map->num_partitions; i++)
1730 {
1731 tree var = partition_to_var (SA.map, i);
1732
7c782c9b 1733 gcc_assert (!virtual_operand_p (var));
ec11736b 1734
1735 /* Assign decls to each SSA name partition, share decls for partitions
1736 we could have coalesced (those with the same type). */
1737 if (SSA_NAME_VAR (var) == NULL_TREE)
1738 {
5f8841a5 1739 tree *slot = &ssa_name_decls.get_or_insert (TREE_TYPE (var));
ec11736b 1740 if (!*slot)
f9e245b2 1741 *slot = create_tmp_reg (TREE_TYPE (var));
5f8841a5 1742 replace_ssa_name_symbol (var, *slot);
ec11736b 1743 }
1744
5d1c1f55 1745 /* Always allocate space for partitions based on VAR_DECLs. But for
1746 those based on PARM_DECLs or RESULT_DECLs and which matter for the
1747 debug info, there is no need to do so if optimization is disabled
1748 because all the SSA_NAMEs based on these DECLs have been coalesced
1749 into a single partition, which is thus assigned the canonical RTL
45ddd4eb 1750 location of the DECLs. If in_lto_p, we can't rely on optimize:
1751 a function could be compiled with -O1 -flto first and only the
1752 link performed at -O0. */
a8dd994c 1753 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1754 expand_one_var (var, true, true);
45ddd4eb 1755 else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize || in_lto_p)
a8dd994c 1756 {
1757 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1758 contain the default def (representing the parm or result itself)
1759 we don't do anything here. But those which don't contain the
1760 default def (representing a temporary based on the parm/result)
1761 we need to allocate space just like for normal VAR_DECLs. */
1762 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1763 {
1764 expand_one_var (var, true, true);
1765 gcc_assert (SA.partition_to_pseudo[i]);
1766 }
1767 }
1768 }
1769
b156ec37 1770 if (flag_stack_protect == SPCT_FLAG_STRONG)
ec4af1be 1771 gen_stack_protect_signal
1772 = stack_protect_decl_p () || stack_protect_return_slot_p ();
b156ec37 1773
edb7afe8 1774 /* At this point all variables on the local_decls with TREE_USED
60d03123 1775 set are not associated with any block scope. Lay them out. */
2ab2ce89 1776
f1f41a6c 1777 len = vec_safe_length (cfun->local_decls);
2ab2ce89 1778 FOR_EACH_LOCAL_DECL (cfun, i, var)
60d03123 1779 {
60d03123 1780 bool expand_now = false;
1781
a8dd994c 1782 /* Expanded above already. */
1783 if (is_gimple_reg (var))
e32b531f 1784 {
1785 TREE_USED (var) = 0;
a45d3ce3 1786 goto next;
e32b531f 1787 }
60d03123 1788 /* We didn't set a block for static or extern because it's hard
1789 to tell the difference between a global variable (re)declared
1790 in a local scope, and one that's really declared there to
1791 begin with. And it doesn't really matter much, since we're
1792 not giving them stack space. Expand them now. */
a8dd994c 1793 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
60d03123 1794 expand_now = true;
1795
da4b9ed5 1796 /* Expand variables not associated with any block now. Those created by
1797 the optimizers could be live anywhere in the function. Those that
1798 could possibly have been scoped originally and detached from their
1799 block will have their allocation deferred so we coalesce them with
1800 others when optimization is enabled. */
60d03123 1801 else if (TREE_USED (var))
1802 expand_now = true;
1803
1804 /* Finally, mark all variables on the list as used. We'll use
1805 this in a moment when we expand those associated with scopes. */
1806 TREE_USED (var) = 1;
1807
1808 if (expand_now)
a45d3ce3 1809 expand_one_var (var, true, true);
1810
1811 next:
1812 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
78fa9ba7 1813 {
a45d3ce3 1814 rtx rtl = DECL_RTL_IF_SET (var);
1815
1816 /* Keep artificial non-ignored vars in cfun->local_decls
1817 chain until instantiate_decls. */
1818 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2ab2ce89 1819 add_local_decl (cfun, var);
257b4da1 1820 else if (rtl == NULL_RTX)
2ab2ce89 1821 /* If rtl isn't set yet, which can happen e.g. with
1822 -fstack-protector, retry before returning from this
1823 function. */
f1f41a6c 1824 maybe_local_decls.safe_push (var);
78fa9ba7 1825 }
60d03123 1826 }
60d03123 1827
2ab2ce89 1828 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1829
1830 +-----------------+-----------------+
1831 | ...processed... | ...duplicates...|
1832 +-----------------+-----------------+
1833 ^
1834 +-- LEN points here.
1835
1836 We just want the duplicates, as those are the artificial
1837 non-ignored vars that we want to keep until instantiate_decls.
1838 Move them down and truncate the array. */
f1f41a6c 1839 if (!vec_safe_is_empty (cfun->local_decls))
1840 cfun->local_decls->block_remove (0, len);
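  /* A small worked example (hypothetical decls): if LEN was 3 and the
     walk re-added two artificial decls D4 and D5, the vector is now
     [D1 D2 D3 D4 D5]; block_remove (0, 3) leaves [D4 D5], exactly the
     duplicates pictured above. */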
2ab2ce89 1841
60d03123 1842 /* At this point, all variables within the block tree with TREE_USED
1843 set are actually used by the optimized function. Lay them out. */
1844 expand_used_vars_for_block (outer_block, true);
1845
1846 if (stack_vars_num > 0)
1847 {
3c25489e 1848 add_scope_conflicts ();
60d03123 1849
a0c938f0 1850 /* If stack protection is enabled, we don't share space between
f1a0edff 1851 vulnerable data and non-vulnerable data. */
1852 if (flag_stack_protect)
1853 add_stack_protection_conflicts ();
1854
a0c938f0 1855 /* Now that we have collected all stack variables, and have computed a
60d03123 1856 minimal interference graph, attempt to save some stack space. */
1857 partition_stack_vars ();
1858 if (dump_file)
1859 dump_stack_var_partition ();
f1a0edff 1860 }
1861
b156ec37 1862 switch (flag_stack_protect)
1863 {
1864 case SPCT_FLAG_ALL:
1865 create_stack_guard ();
1866 break;
1867
1868 case SPCT_FLAG_STRONG:
1869 if (gen_stack_protect_signal
1870 || cfun->calls_alloca || has_protected_decls)
1871 create_stack_guard ();
1872 break;
1873
1874 case SPCT_FLAG_DEFAULT:
1875 if (cfun->calls_alloca || has_protected_decls)
9af5ce0c 1876 create_stack_guard ();
b156ec37 1877 break;
1878
1879 default:
1880 ;
1881 }
60d03123 1882
f1a0edff 1883 /* Assign rtl to each variable based on these partitions. */
1884 if (stack_vars_num > 0)
1885 {
3c919612 1886 struct stack_vars_data data;
1887
1e094109 1888 data.asan_vec = vNULL;
1889 data.asan_decl_vec = vNULL;
683539f6 1890 data.asan_base = NULL_RTX;
1891 data.asan_alignb = 0;
3c919612 1892
f1a0edff 1893 /* Reorder decls to be protected by iterating over the variables
1894 array multiple times, and allocating out of each phase in turn. */
a0c938f0 1895 /* ??? We could probably integrate this into the qsort we did
f1a0edff 1896 earlier, such that we naturally see these variables first,
1897 and thus naturally allocate things in the right order. */
1898 if (has_protected_decls)
1899 {
1900 /* Phase 1 contains only character arrays. */
3c919612 1901 expand_stack_vars (stack_protect_decl_phase_1, &data);
f1a0edff 1902
1903 /* Phase 2 contains other kinds of arrays. */
1904 if (flag_stack_protect == 2)
3c919612 1905 expand_stack_vars (stack_protect_decl_phase_2, &data);
f1a0edff 1906 }
1907
bf2b7c22 1908 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK)
3c919612 1909 /* Phase 3, any partitions that need asan protection
1910 in addition to phase 1 and 2. */
1911 expand_stack_vars (asan_decl_phase_3, &data);
1912
f1f41a6c 1913 if (!data.asan_vec.is_empty ())
3c919612 1914 {
1915 HOST_WIDE_INT prev_offset = frame_offset;
683539f6 1916 HOST_WIDE_INT offset, sz, redzonesz;
1917 redzonesz = ASAN_RED_ZONE_SIZE;
1918 sz = data.asan_vec[0] - prev_offset;
1919 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
1920 && data.asan_alignb <= 4096
c8c66351 1921 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
683539f6 1922 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
1923 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
1924 offset
1925 = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
f1f41a6c 1926 data.asan_vec.safe_push (prev_offset);
1927 data.asan_vec.safe_push (offset);
f89175bb 1928 /* Leave space for alignment if STRICT_ALIGNMENT. */
1929 if (STRICT_ALIGNMENT)
1930 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
1931 << ASAN_SHADOW_SHIFT)
1932 / BITS_PER_UNIT, 1);
3c919612 1933
1934 var_end_seq
1935 = asan_emit_stack_protection (virtual_stack_vars_rtx,
683539f6 1936 data.asan_base,
1937 data.asan_alignb,
f1f41a6c 1938 data.asan_vec.address (),
683539f6 1939 data.asan_decl_vec.address (),
f1f41a6c 1940 data.asan_vec.length ());
3c919612 1941 }
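  /* Worked example for the red-zone sizing above (assumed numbers):
     with ASAN_RED_ZONE_SIZE at 32, prev_offset = -72 and
     data.asan_vec[0] = -32 give sz = 40; for asan_alignb = 64 the
     guard 40 + 32 >= 64 holds, so
       redzonesz = ((40 + 32 + 63) & ~63) - 40 = 128 - 40 = 88
     and sz + redzonesz = 128, a multiple of the required alignment. */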
1942
1943 expand_stack_vars (NULL, &data);
1944
f1f41a6c 1945 data.asan_vec.release ();
1946 data.asan_decl_vec.release ();
60d03123 1947 }
1948
4fb07d00 1949 fini_vars_expansion ();
1950
257b4da1 1951 /* If there were any artificial non-ignored vars without rtl
1952 found earlier, see if deferred stack allocation hasn't assigned
1953 rtl to them. */
f1f41a6c 1954 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
257b4da1 1955 {
257b4da1 1956 rtx rtl = DECL_RTL_IF_SET (var);
1957
257b4da1 1958 /* Keep artificial non-ignored vars in cfun->local_decls
1959 chain until instantiate_decls. */
1960 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2ab2ce89 1961 add_local_decl (cfun, var);
257b4da1 1962 }
f1f41a6c 1963 maybe_local_decls.release ();
257b4da1 1964
60d03123 1965 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1966 if (STACK_ALIGNMENT_NEEDED)
1967 {
1968 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1969 if (!FRAME_GROWS_DOWNWARD)
1970 frame_offset += align - 1;
1971 frame_offset &= -align;
1972 }
3c919612 1973
1974 return var_end_seq;
280450fa 1975}
1976
1977
49377e21 1978/* If we need to produce a detailed dump, print the tree representation
1979 for STMT to the dump file. SINCE is the last RTX after which the RTL
1980 generated for STMT should have been appended. */
1981
1982static void
74a0cbc4 1983maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx_insn *since)
49377e21 1984{
1985 if (dump_file && (dump_flags & TDF_DETAILS))
1986 {
1987 fprintf (dump_file, "\n;; ");
9845d120 1988 print_gimple_stmt (dump_file, stmt, 0,
1989 TDF_SLIM | (dump_flags & TDF_LINENO));
49377e21 1990 fprintf (dump_file, "\n");
1991
1992 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1993 }
1994}
1995
6313ae8b 1996/* Maps the blocks that do not contain tree labels to rtx labels. */
1997
0699065d 1998static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
6313ae8b 1999
63f88450 2000/* Returns the label_rtx expression for a label starting basic block BB. */
2001
2002static rtx
75a70cf9 2003label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
63f88450 2004{
75a70cf9 2005 gimple_stmt_iterator gsi;
2006 tree lab;
63f88450 2007
2008 if (bb->flags & BB_RTL)
2009 return block_label (bb);
2010
0699065d 2011 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
6313ae8b 2012 if (elt)
5f8841a5 2013 return *elt;
6313ae8b 2014
2015 /* Find the tree label if it is present. */
48e1416a 2016
75a70cf9 2017 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
63f88450 2018 {
1a91d914 2019 glabel *lab_stmt;
2020
2021 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2022 if (!lab_stmt)
63f88450 2023 break;
2024
75a70cf9 2025 lab = gimple_label_label (lab_stmt);
63f88450 2026 if (DECL_NONLOCAL (lab))
2027 break;
2028
2029 return label_rtx (lab);
2030 }
2031
79f6a8ed 2032 rtx_code_label *l = gen_label_rtx ();
5f8841a5 2033 lab_rtx_for_bb->put (bb, l);
2034 return l;
63f88450 2035}
2036
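/* The get-or-create idiom above, reduced to a sketch with hypothetical
   names (map_get, map_put and new_label are stand-ins, not GCC API): */
#if 0
static label_t *
label_for (map_t *map, block_t *bb)
{
  label_t **slot = map_get (map, bb);
  if (slot)
    return *slot;		/* a forward branch already made one */
  label_t *l = new_label ();
  map_put (map, bb, l);		/* remember it for later references */
  return l;
}
#endif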
75a70cf9 2037
f800c469 2038/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2039 of a basic block where we just expanded the conditional at the end,
ee0f3895 2040 possibly clean up the CFG and instruction sequence. LAST is the
2041 last instruction before the just emitted jump sequence. */
f800c469 2042
2043static void
74a0cbc4 2044maybe_cleanup_end_of_block (edge e, rtx_insn *last)
f800c469 2045{
2046 /* Special case: when jumpif decides that the condition is
2047 trivial it emits an unconditional jump (and the necessary
2048 barrier). But we still have two edges, the fallthru one is
2049 wrong. purge_dead_edges would clean this up later. Unfortunately
2050 we have to insert insns (and split edges) before
2051 find_many_sub_basic_blocks and hence before purge_dead_edges.
2052 But splitting edges might create new blocks which depend on the
2053 fact that if there are two edges there's no barrier. So the
2054 barrier would get lost and verify_flow_info would ICE. Instead
2055 of auditing all edge splitters to care for the barrier (which
2056 normally isn't there in a cleaned CFG), fix it here. */
2057 if (BARRIER_P (get_last_insn ()))
2058 {
74a0cbc4 2059 rtx_insn *insn;
f800c469 2060 remove_edge (e);
2061 /* Now, we have a single successor block, if we have insns to
2062 insert on the remaining edge we potentially will insert
2063 it at the end of this block (if the dest block isn't feasible)
2064 in order to avoid splitting the edge. This insertion will take
2065 place in front of the last jump. But we might have emitted
2066 multiple jumps (conditional and one unconditional) to the
2067 same destination. Inserting in front of the last one then
2068 is a problem. See PR 40021. We fix this by deleting all
2069 jumps except the last unconditional one. */
2070 insn = PREV_INSN (get_last_insn ());
2071 /* Make sure we have an unconditional jump. Otherwise we're
2072 confused. */
2073 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
ee0f3895 2074 for (insn = PREV_INSN (insn); insn != last;)
f800c469 2075 {
2076 insn = PREV_INSN (insn);
2077 if (JUMP_P (NEXT_INSN (insn)))
2755d767 2078 {
46a5816d 2079 if (!any_condjump_p (NEXT_INSN (insn)))
2755d767 2080 {
2081 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2082 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2083 }
2084 delete_insn (NEXT_INSN (insn));
2085 }
f800c469 2086 }
2087 }
2088}
2089
75a70cf9 2090/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
3ced8962 2091 Returns a new basic block if we've terminated the current basic
2092 block and created a new one. */
2093
2094static basic_block
1a91d914 2095expand_gimple_cond (basic_block bb, gcond *stmt)
3ced8962 2096{
2097 basic_block new_bb, dest;
2098 edge new_edge;
2099 edge true_edge;
2100 edge false_edge;
74a0cbc4 2101 rtx_insn *last2, *last;
16c9337c 2102 enum tree_code code;
2103 tree op0, op1;
2104
2105 code = gimple_cond_code (stmt);
2106 op0 = gimple_cond_lhs (stmt);
2107 op1 = gimple_cond_rhs (stmt);
2108 /* We're sometimes presented with such code:
2109 D.123_1 = x < y;
2110 if (D.123_1 != 0)
2111 ...
2112 This would expand to two comparisons which then later might
2113 be cleaned up by combine. But some pattern matchers like if-conversion
 2114 work better when there's only one compare, so we make up for this
 2115 here, as a special exception, if TER would have made the same change. */
9532a315 2116 if (SA.values
16c9337c 2117 && TREE_CODE (op0) == SSA_NAME
9532a315 2118 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2119 && TREE_CODE (op1) == INTEGER_CST
2120 && ((gimple_cond_code (stmt) == NE_EXPR
2121 && integer_zerop (op1))
2122 || (gimple_cond_code (stmt) == EQ_EXPR
2123 && integer_onep (op1)))
16c9337c 2124 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2125 {
2126 gimple second = SSA_NAME_DEF_STMT (op0);
5905fb26 2127 if (gimple_code (second) == GIMPLE_ASSIGN)
16c9337c 2128 {
5905fb26 2129 enum tree_code code2 = gimple_assign_rhs_code (second);
2130 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2131 {
2132 code = code2;
2133 op0 = gimple_assign_rhs1 (second);
2134 op1 = gimple_assign_rhs2 (second);
2135 }
01ee997b 2136 /* If jumps are cheap and the target does not support conditional
2137 compare, turn some more codes into jumpy sequences. */
2138 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2139 && targetm.gen_ccmp_first == NULL)
5905fb26 2140 {
2141 if ((code2 == BIT_AND_EXPR
2142 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2143 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2144 || code2 == TRUTH_AND_EXPR)
2145 {
2146 code = TRUTH_ANDIF_EXPR;
2147 op0 = gimple_assign_rhs1 (second);
2148 op1 = gimple_assign_rhs2 (second);
2149 }
2150 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2151 {
2152 code = TRUTH_ORIF_EXPR;
2153 op0 = gimple_assign_rhs1 (second);
2154 op1 = gimple_assign_rhs2 (second);
2155 }
2156 }
16c9337c 2157 }
2158 }
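  /* E.g. (illustrative): the gimple pair
       _1 = a_2 < b_3;
       if (_1 != 0) goto L1; else goto L2;
     is expanded here as if it were
       if (a_2 < b_3) goto L1; else goto L2;
     yielding one compare-and-branch instead of a compare, a set, and
     a second compare against zero. */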
49377e21 2159
2160 last2 = last = get_last_insn ();
3ced8962 2161
2162 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5169661d 2163 set_curr_insn_location (gimple_location (stmt));
3ced8962 2164
2165 /* These flags have no purpose in RTL land. */
2166 true_edge->flags &= ~EDGE_TRUE_VALUE;
2167 false_edge->flags &= ~EDGE_FALSE_VALUE;
2168
2169 /* We can either have a pure conditional jump with one fallthru edge or
2170 two-way jump that needs to be decomposed into two basic blocks. */
63f88450 2171 if (false_edge->dest == bb->next_bb)
3ced8962 2172 {
79ab74cc 2173 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2174 true_edge->probability);
75a70cf9 2175 maybe_dump_rtl_for_gimple_stmt (stmt, last);
8e7408e3 2176 if (true_edge->goto_locus != UNKNOWN_LOCATION)
5169661d 2177 set_curr_insn_location (true_edge->goto_locus);
63f88450 2178 false_edge->flags |= EDGE_FALLTHRU;
ee0f3895 2179 maybe_cleanup_end_of_block (false_edge, last);
3ced8962 2180 return NULL;
2181 }
63f88450 2182 if (true_edge->dest == bb->next_bb)
3ced8962 2183 {
79ab74cc 2184 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2185 false_edge->probability);
75a70cf9 2186 maybe_dump_rtl_for_gimple_stmt (stmt, last);
8e7408e3 2187 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5169661d 2188 set_curr_insn_location (false_edge->goto_locus);
63f88450 2189 true_edge->flags |= EDGE_FALLTHRU;
ee0f3895 2190 maybe_cleanup_end_of_block (true_edge, last);
3ced8962 2191 return NULL;
2192 }
3ced8962 2193
79ab74cc 2194 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2195 true_edge->probability);
3ced8962 2196 last = get_last_insn ();
8e7408e3 2197 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5169661d 2198 set_curr_insn_location (false_edge->goto_locus);
63f88450 2199 emit_jump (label_rtx_for_bb (false_edge->dest));
3ced8962 2200
26bb3cb2 2201 BB_END (bb) = last;
3ced8962 2202 if (BARRIER_P (BB_END (bb)))
26bb3cb2 2203 BB_END (bb) = PREV_INSN (BB_END (bb));
3ced8962 2204 update_bb_for_insn (bb);
2205
2206 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2207 dest = false_edge->dest;
2208 redirect_edge_succ (false_edge, new_bb);
2209 false_edge->flags |= EDGE_FALLTHRU;
2210 new_bb->count = false_edge->count;
2211 new_bb->frequency = EDGE_FREQUENCY (false_edge);
b3083327 2212 add_bb_to_loop (new_bb, bb->loop_father);
3ced8962 2213 new_edge = make_edge (new_bb, dest, 0);
2214 new_edge->probability = REG_BR_PROB_BASE;
2215 new_edge->count = new_bb->count;
2216 if (BARRIER_P (BB_END (new_bb)))
26bb3cb2 2217 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
3ced8962 2218 update_bb_for_insn (new_bb);
2219
75a70cf9 2220 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
a0c938f0 2221
8e7408e3 2222 if (true_edge->goto_locus != UNKNOWN_LOCATION)
c4ad3297 2223 {
5169661d 2224 set_curr_insn_location (true_edge->goto_locus);
2225 true_edge->goto_locus = curr_insn_location ();
c4ad3297 2226 }
c4ad3297 2227
3ced8962 2228 return new_bb;
2229}
2230
4c0315d0 2231/* Mark all calls that can have a transaction restart. */
2232
2233static void
2234mark_transaction_restart_calls (gimple stmt)
2235{
2236 struct tm_restart_node dummy;
b7aa58e4 2237 tm_restart_node **slot;
4c0315d0 2238
2239 if (!cfun->gimple_df->tm_restart)
2240 return;
2241
2242 dummy.stmt = stmt;
b7aa58e4 2243 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
4c0315d0 2244 if (slot)
2245 {
b7aa58e4 2246 struct tm_restart_node *n = *slot;
4c0315d0 2247 tree list = n->label_or_list;
74a0cbc4 2248 rtx_insn *insn;
4c0315d0 2249
2250 for (insn = next_real_insn (get_last_insn ());
2251 !CALL_P (insn);
2252 insn = next_real_insn (insn))
2253 continue;
2254
2255 if (TREE_CODE (list) == LABEL_DECL)
2256 add_reg_note (insn, REG_TM, label_rtx (list));
2257 else
2258 for (; list ; list = TREE_CHAIN (list))
2259 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2260 }
2261}
2262
16c9337c 2263/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2264 statement STMT. */
2265
2266static void
1a91d914 2267expand_call_stmt (gcall *stmt)
16c9337c 2268{
fb049fba 2269 tree exp, decl, lhs;
facbb5c4 2270 bool builtin_p;
a967d5e5 2271 size_t i;
16c9337c 2272
fb049fba 2273 if (gimple_call_internal_p (stmt))
2274 {
2275 expand_internal_call (stmt);
2276 return;
2277 }
2278
0fcb889c 2279 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
bbc26dcc 2280
0fcb889c 2281 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
bbc26dcc 2282 decl = gimple_call_fndecl (stmt);
2283 builtin_p = decl && DECL_BUILT_IN (decl);
0fcb889c 2284
a967d5e5 2285 /* If this is not a builtin function, the function type through which the
2286 call is made may be different from the type of the function. */
2287 if (!builtin_p)
2288 CALL_EXPR_FN (exp)
317bd3b6 2289 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2290 CALL_EXPR_FN (exp));
a967d5e5 2291
16c9337c 2292 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2293 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2294
2295 for (i = 0; i < gimple_call_num_args (stmt); i++)
facbb5c4 2296 {
2297 tree arg = gimple_call_arg (stmt, i);
2298 gimple def;
2299 /* TER addresses into arguments of builtin functions so we have a
2300 chance to infer more correct alignment information. See PR39954. */
2301 if (builtin_p
2302 && TREE_CODE (arg) == SSA_NAME
2303 && (def = get_gimple_for_ssa_name (arg))
2304 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2305 arg = gimple_assign_rhs1 (def);
2306 CALL_EXPR_ARG (exp, i) = arg;
2307 }
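  /* E.g. (illustrative): given
       _1 = &a[4];
       __builtin_memcpy (_1, src_2, n_3);
     the ADDR_EXPR is substituted back into the argument list, so the
     expander sees &a[4] directly and can derive A's alignment instead
     of treating _1 as an opaque pointer. */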
16c9337c 2308
e1ac6f35 2309 if (gimple_has_side_effects (stmt))
16c9337c 2310 TREE_SIDE_EFFECTS (exp) = 1;
2311
e1ac6f35 2312 if (gimple_call_nothrow_p (stmt))
16c9337c 2313 TREE_NOTHROW (exp) = 1;
2314
2315 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2316 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
a882d754 2317 if (decl
2318 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
581bf1c2 2319 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2320 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
a882d754 2321 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2322 else
2323 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
16c9337c 2324 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2325 SET_EXPR_LOCATION (exp, gimple_location (stmt));
058a1b7a 2326 CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
16c9337c 2327
841424cc 2328 /* Ensure RTL is created for debug args. */
2329 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2330 {
f1f41a6c 2331 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
841424cc 2332 unsigned int ix;
2333 tree dtemp;
2334
2335 if (debug_args)
f1f41a6c 2336 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
841424cc 2337 {
2338 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2339 expand_debug_expr (dtemp);
2340 }
2341 }
2342
fb049fba 2343 lhs = gimple_call_lhs (stmt);
16c9337c 2344 if (lhs)
2345 expand_assignment (lhs, exp, false);
2346 else
a12f023f 2347 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4c0315d0 2348
2349 mark_transaction_restart_calls (stmt);
16c9337c 2350}
2351
0e80b01d 2352
2353/* Generate RTL for an asm statement (explicit assembler code).
2354 STRING is a STRING_CST node containing the assembler code text,
2355 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2356 insn is volatile; don't optimize it. */
2357
2358static void
2359expand_asm_loc (tree string, int vol, location_t locus)
2360{
2361 rtx body;
2362
2363 if (TREE_CODE (string) == ADDR_EXPR)
2364 string = TREE_OPERAND (string, 0);
2365
2366 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2367 ggc_strdup (TREE_STRING_POINTER (string)),
2368 locus);
2369
2370 MEM_VOLATILE_P (body) = vol;
2371
2372 emit_insn (body);
2373}
2374
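/* E.g. (illustrative): a basic asm such as

     asm volatile ("nop");

   arrives here with STRING holding "nop" and VOL nonzero; the whole
   template becomes a single operand-less ASM_INPUT body. */
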
2375/* Return the number of times character C occurs in string S. */
2376static int
2377n_occurrences (int c, const char *s)
2378{
2379 int n = 0;
2380 while (*s)
2381 n += (*s++ == c);
2382 return n;
2383}
2384
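/* For instance, n_occurrences (',', "=r,m") is 1, matching the one
   extra alternative in that two-alternative constraint string. */
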
2385/* A subroutine of expand_asm_operands. Check that all operands have
2386 the same number of alternatives. Return true if so. */
2387
2388static bool
2389check_operand_nalternatives (tree outputs, tree inputs)
2390{
2391 if (outputs || inputs)
2392 {
2393 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
2394 int nalternatives
2395 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
2396 tree next = inputs;
2397
2398 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2399 {
2400 error ("too many alternatives in %<asm%>");
2401 return false;
2402 }
2403
2404 tmp = outputs;
2405 while (tmp)
2406 {
2407 const char *constraint
2408 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2409
2410 if (n_occurrences (',', constraint) != nalternatives)
2411 {
2412 error ("operand constraints for %<asm%> differ "
2413 "in number of alternatives");
2414 return false;
2415 }
2416
2417 if (TREE_CHAIN (tmp))
2418 tmp = TREE_CHAIN (tmp);
2419 else
2420 tmp = next, next = 0;
2421 }
2422 }
2423
2424 return true;
2425}
2426
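/* Example (illustrative): in

     asm ("..." : "=r,m" (x) : "r,r" (y));

   both constraint strings contain one comma, so every operand has two
   alternatives and the check passes; pairing "=r,m" with a plain "r"
   would be rejected with the error below. */
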
2427/* Check for overlap between registers marked in CLOBBERED_REGS and
 2428 anything inappropriate in T. Emit an error and return TRUE
 2429 for a conflict, FALSE for ok. */
2430
2431static bool
2432tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2433{
2434 /* Conflicts between asm-declared register variables and the clobber
2435 list are not allowed. */
2436 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2437
2438 if (overlap)
2439 {
2440 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2441 DECL_NAME (overlap));
2442
2443 /* Reset registerness to stop multiple errors emitted for a single
2444 variable. */
2445 DECL_REGISTER (overlap) = 0;
2446 return true;
2447 }
2448
2449 return false;
2450}
2451
2452/* Generate RTL for an asm statement with arguments.
2453 STRING is the instruction template.
2454 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2455 Each output or input has an expression in the TREE_VALUE and
2456 a tree list in TREE_PURPOSE which in turn contains a constraint
2457 name in TREE_VALUE (or NULL_TREE) and a constraint string
2458 in TREE_PURPOSE.
2459 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2460 that is clobbered by this insn.
2461
2462 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2463 should be the fallthru basic block of the asm goto.
2464
2465 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2466 Some elements of OUTPUTS may be replaced with trees representing temporary
2467 values. The caller should copy those temporary values to the originally
2468 specified lvalues.
2469
2470 VOL nonzero means the insn is volatile; don't optimize it. */
2471
2472static void
2473expand_asm_operands (tree string, tree outputs, tree inputs,
2474 tree clobbers, tree labels, basic_block fallthru_bb,
2475 int vol, location_t locus)
2476{
2477 rtvec argvec, constraintvec, labelvec;
2478 rtx body;
2479 int ninputs = list_length (inputs);
2480 int noutputs = list_length (outputs);
2481 int nlabels = list_length (labels);
2482 int ninout;
2483 int nclobbers;
2484 HARD_REG_SET clobbered_regs;
2485 int clobber_conflict_found = 0;
2486 tree tail;
2487 tree t;
2488 int i;
2489 /* Vector of RTX's of evaluated output operands. */
2490 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
2491 int *inout_opnum = XALLOCAVEC (int, noutputs);
2492 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
3754d046 2493 machine_mode *inout_mode = XALLOCAVEC (machine_mode, noutputs);
0e80b01d 2494 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
2495 int old_generating_concat_p = generating_concat_p;
79f6a8ed 2496 rtx_code_label *fallthru_label = NULL;
0e80b01d 2497
2498 /* An ASM with no outputs needs to be treated as volatile, for now. */
2499 if (noutputs == 0)
2500 vol = 1;
2501
2502 if (! check_operand_nalternatives (outputs, inputs))
2503 return;
2504
2505 string = resolve_asm_operand_names (string, outputs, inputs, labels);
2506
2507 /* Collect constraints. */
2508 i = 0;
2509 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
2510 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2511 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
2512 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2513
2514 /* Sometimes we wish to automatically clobber registers across an asm.
2515 Case in point is when the i386 backend moved from cc0 to a hard reg --
2516 maintaining source-level compatibility means automatically clobbering
2517 the flags register. */
2518 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
2519
2520 /* Count the number of meaningful clobbered registers, ignoring what
2521 we would ignore later. */
2522 nclobbers = 0;
2523 CLEAR_HARD_REG_SET (clobbered_regs);
2524 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2525 {
2526 const char *regname;
2527 int nregs;
2528
2529 if (TREE_VALUE (tail) == error_mark_node)
2530 return;
2531 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2532
2533 i = decode_reg_name_and_count (regname, &nregs);
2534 if (i == -4)
2535 ++nclobbers;
2536 else if (i == -2)
2537 error ("unknown register name %qs in %<asm%>", regname);
2538
2539 /* Mark clobbered registers. */
2540 if (i >= 0)
2541 {
2542 int reg;
2543
2544 for (reg = i; reg < i + nregs; reg++)
2545 {
2546 ++nclobbers;
2547
2548 /* Clobbering the PIC register is an error. */
2549 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2550 {
2551 error ("PIC register clobbered by %qs in %<asm%>", regname);
2552 return;
2553 }
2554
2555 SET_HARD_REG_BIT (clobbered_regs, reg);
2556 }
2557 }
2558 }
2559
2560 /* First pass over inputs and outputs checks validity and sets
2561 mark_addressable if needed. */
2562
2563 ninout = 0;
2564 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2565 {
2566 tree val = TREE_VALUE (tail);
2567 tree type = TREE_TYPE (val);
2568 const char *constraint;
2569 bool is_inout;
2570 bool allows_reg;
2571 bool allows_mem;
2572
2573 /* If there's an erroneous arg, emit no insn. */
2574 if (type == error_mark_node)
2575 return;
2576
2577 /* Try to parse the output constraint. If that fails, there's
2578 no point in going further. */
2579 constraint = constraints[i];
2580 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2581 &allows_mem, &allows_reg, &is_inout))
2582 return;
2583
2584 if (! allows_reg
2585 && (allows_mem
2586 || is_inout
2587 || (DECL_P (val)
2588 && REG_P (DECL_RTL (val))
2589 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2590 mark_addressable (val);
2591
2592 if (is_inout)
2593 ninout++;
2594 }
2595
2596 ninputs += ninout;
2597 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
2598 {
2599 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2600 return;
2601 }
2602
2603 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
2604 {
2605 bool allows_reg, allows_mem;
2606 const char *constraint;
2607
2608 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
2609 would get VOIDmode and that could cause a crash in reload. */
2610 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
2611 return;
2612
2613 constraint = constraints[i + noutputs];
2614 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2615 constraints, &allows_mem, &allows_reg))
2616 return;
2617
2618 if (! allows_reg && allows_mem)
2619 mark_addressable (TREE_VALUE (tail));
2620 }
2621
2622 /* Second pass evaluates arguments. */
2623
2624 /* Make sure stack is consistent for asm goto. */
2625 if (nlabels > 0)
2626 do_pending_stack_adjust ();
2627
2628 ninout = 0;
2629 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2630 {
2631 tree val = TREE_VALUE (tail);
2632 tree type = TREE_TYPE (val);
2633 bool is_inout;
2634 bool allows_reg;
2635 bool allows_mem;
2636 rtx op;
2637 bool ok;
2638
2639 ok = parse_output_constraint (&constraints[i], i, ninputs,
2640 noutputs, &allows_mem, &allows_reg,
2641 &is_inout);
2642 gcc_assert (ok);
2643
2644 /* If an output operand is not a decl or indirect ref and our constraint
2645 allows a register, make a temporary to act as an intermediate.
2646 Make the asm insn write into that, then our caller will copy it to
2647 the real output operand. Likewise for promoted variables. */
2648
2649 generating_concat_p = 0;
2650
2651 real_output_rtx[i] = NULL_RTX;
2652 if ((TREE_CODE (val) == INDIRECT_REF
2653 && allows_mem)
2654 || (DECL_P (val)
2655 && (allows_mem || REG_P (DECL_RTL (val)))
2656 && ! (REG_P (DECL_RTL (val))
2657 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2658 || ! allows_reg
2659 || is_inout)
2660 {
2661 op = expand_expr (val, NULL_RTX, VOIDmode,
2662 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2663 if (MEM_P (op))
2664 op = validize_mem (op);
2665
2666 if (! allows_reg && !MEM_P (op))
2667 error ("output number %d not directly addressable", i);
2668 if ((! allows_mem && MEM_P (op))
2669 || GET_CODE (op) == CONCAT)
2670 {
2671 real_output_rtx[i] = op;
2672 op = gen_reg_rtx (GET_MODE (op));
2673 if (is_inout)
2674 emit_move_insn (op, real_output_rtx[i]);
2675 }
2676 }
2677 else
2678 {
2679 op = assign_temp (type, 0, 1);
2680 op = validize_mem (op);
2681 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
2682 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
2683 TREE_VALUE (tail) = make_tree (type, op);
2684 }
2685 output_rtx[i] = op;
2686
2687 generating_concat_p = old_generating_concat_p;
2688
2689 if (is_inout)
2690 {
2691 inout_mode[ninout] = TYPE_MODE (type);
2692 inout_opnum[ninout++] = i;
2693 }
2694
2695 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2696 clobber_conflict_found = 1;
2697 }
2698
2699 /* Make vectors for the expression-rtx, constraint strings,
2700 and named operands. */
2701
2702 argvec = rtvec_alloc (ninputs);
2703 constraintvec = rtvec_alloc (ninputs);
2704 labelvec = rtvec_alloc (nlabels);
2705
2706 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
2707 : GET_MODE (output_rtx[0])),
2708 ggc_strdup (TREE_STRING_POINTER (string)),
2709 empty_string, 0, argvec, constraintvec,
2710 labelvec, locus);
2711
2712 MEM_VOLATILE_P (body) = vol;
2713
2714 /* Eval the inputs and put them into ARGVEC.
2715 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
2716
2717 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
2718 {
2719 bool allows_reg, allows_mem;
2720 const char *constraint;
2721 tree val, type;
2722 rtx op;
2723 bool ok;
2724
2725 constraint = constraints[i + noutputs];
2726 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2727 constraints, &allows_mem, &allows_reg);
2728 gcc_assert (ok);
2729
2730 generating_concat_p = 0;
2731
2732 val = TREE_VALUE (tail);
2733 type = TREE_TYPE (val);
2734 /* EXPAND_INITIALIZER will not generate code for valid initializer
2735 constants, but will still generate code for other types of operand.
2736 This is the behavior we want for constant constraints. */
2737 op = expand_expr (val, NULL_RTX, VOIDmode,
2738 allows_reg ? EXPAND_NORMAL
2739 : allows_mem ? EXPAND_MEMORY
2740 : EXPAND_INITIALIZER);
2741
2742 /* Never pass a CONCAT to an ASM. */
2743 if (GET_CODE (op) == CONCAT)
2744 op = force_reg (GET_MODE (op), op);
2745 else if (MEM_P (op))
2746 op = validize_mem (op);
2747
2748 if (asm_operand_ok (op, constraint, NULL) <= 0)
2749 {
2750 if (allows_reg && TYPE_MODE (type) != BLKmode)
2751 op = force_reg (TYPE_MODE (type), op);
2752 else if (!allows_mem)
2753 warning (0, "asm operand %d probably doesn%'t match constraints",
2754 i + noutputs);
2755 else if (MEM_P (op))
2756 {
2757 /* We won't recognize either volatile memory or memory
 2758 with a queued address as a valid memory_operand
2759 at this point. Ignore it: clearly this *is* a memory. */
2760 }
2761 else
2762 gcc_unreachable ();
2763 }
2764
2765 generating_concat_p = old_generating_concat_p;
2766 ASM_OPERANDS_INPUT (body, i) = op;
2767
2768 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
c36804c0 2769 = gen_rtx_ASM_INPUT_loc (TYPE_MODE (type),
2770 ggc_strdup (constraints[i + noutputs]),
2771 locus);
0e80b01d 2772
2773 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2774 clobber_conflict_found = 1;
2775 }
2776
2777 /* Protect all the operands from the queue now that they have all been
2778 evaluated. */
2779
2780 generating_concat_p = 0;
2781
2782 /* For in-out operands, copy output rtx to input rtx. */
2783 for (i = 0; i < ninout; i++)
2784 {
2785 int j = inout_opnum[i];
2786 char buffer[16];
2787
2788 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
2789 = output_rtx[j];
2790
2791 sprintf (buffer, "%d", j);
2792 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
c36804c0 2793 = gen_rtx_ASM_INPUT_loc (inout_mode[i], ggc_strdup (buffer), locus);
0e80b01d 2794 }
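  /* E.g. (illustrative): for
       asm ("incl %0" : "+r" (x));
     the "+r" operand was recorded as in-out above; here its hidden
     input gets the matching-operand constraint "0" and reuses
     output_rtx[0] as its rtx. */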
2795
2796 /* Copy labels to the vector. */
2797 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
2798 {
2799 rtx r;
2800 /* If asm goto has any labels in the fallthru basic block, use
2801 a label that we emit immediately after the asm goto. Expansion
2802 may insert further instructions into the same basic block after
2803 asm goto and if we don't do this, insertion of instructions on
2804 the fallthru edge might misbehave. See PR58670. */
2805 if (fallthru_bb
2806 && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
2807 {
2808 if (fallthru_label == NULL_RTX)
2809 fallthru_label = gen_label_rtx ();
2810 r = fallthru_label;
2811 }
2812 else
2813 r = label_rtx (TREE_VALUE (tail));
2814 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
2815 }
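  /* E.g. (illustrative): in
       asm goto ("..." : : : : out);
       out: ;
     when OUT starts the fallthru block, its label operand is replaced
     by FALLTHRU_LABEL, emitted right after the asm below, so insns
     later inserted on the fallthru edge cannot land between the asm
     goto and its target (the PR58670 situation described above). */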
2816
2817 generating_concat_p = old_generating_concat_p;
2818
2819 /* Now, for each output, construct an rtx
2820 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
2821 ARGVEC CONSTRAINTS OPNAMES))
2822 If there is more than one, put them inside a PARALLEL. */
2823
2824 if (nlabels > 0 && nclobbers == 0)
2825 {
2826 gcc_assert (noutputs == 0);
2827 emit_jump_insn (body);
2828 }
2829 else if (noutputs == 0 && nclobbers == 0)
2830 {
2831 /* No output operands: put in a raw ASM_OPERANDS rtx. */
2832 emit_insn (body);
2833 }
2834 else if (noutputs == 1 && nclobbers == 0)
2835 {
2836 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
2837 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
2838 }
2839 else
2840 {
2841 rtx obody = body;
2842 int num = noutputs;
2843
2844 if (num == 0)
2845 num = 1;
2846
2847 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
2848
2849 /* For each output operand, store a SET. */
2850 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2851 {
2852 XVECEXP (body, 0, i)
2853 = gen_rtx_SET (VOIDmode,
2854 output_rtx[i],
2855 gen_rtx_ASM_OPERANDS
2856 (GET_MODE (output_rtx[i]),
2857 ggc_strdup (TREE_STRING_POINTER (string)),
2858 ggc_strdup (constraints[i]),
2859 i, argvec, constraintvec, labelvec, locus));
2860
2861 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
2862 }
2863
2864 /* If there are no outputs (but there are some clobbers)
2865 store the bare ASM_OPERANDS into the PARALLEL. */
2866
2867 if (i == 0)
2868 XVECEXP (body, 0, i++) = obody;
2869
2870 /* Store (clobber REG) for each clobbered register specified. */
2871
2872 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2873 {
2874 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2875 int reg, nregs;
2876 int j = decode_reg_name_and_count (regname, &nregs);
2877 rtx clobbered_reg;
2878
2879 if (j < 0)
2880 {
2881 if (j == -3) /* `cc', which is not a register */
2882 continue;
2883
2884 if (j == -4) /* `memory', don't cache memory across asm */
2885 {
2886 XVECEXP (body, 0, i++)
2887 = gen_rtx_CLOBBER (VOIDmode,
2888 gen_rtx_MEM
2889 (BLKmode,
2890 gen_rtx_SCRATCH (VOIDmode)));
2891 continue;
2892 }
2893
2894 /* Ignore unknown register, error already signaled. */
2895 continue;
2896 }
2897
2898 for (reg = j; reg < j + nregs; reg++)
2899 {
2900 /* Use QImode since that's guaranteed to clobber just
 2901 one reg. */
2902 clobbered_reg = gen_rtx_REG (QImode, reg);
2903
2904 /* Do sanity check for overlap between clobbers and
2905 respectively input and outputs that hasn't been
2906 handled. Such overlap should have been detected and
2907 reported above. */
2908 if (!clobber_conflict_found)
2909 {
2910 int opno;
2911
2912 /* We test the old body (obody) contents to avoid
2913 tripping over the under-construction body. */
2914 for (opno = 0; opno < noutputs; opno++)
2915 if (reg_overlap_mentioned_p (clobbered_reg,
2916 output_rtx[opno]))
2917 internal_error
2918 ("asm clobber conflict with output operand");
2919
2920 for (opno = 0; opno < ninputs - ninout; opno++)
2921 if (reg_overlap_mentioned_p (clobbered_reg,
2922 ASM_OPERANDS_INPUT (obody,
2923 opno)))
2924 internal_error
2925 ("asm clobber conflict with input operand");
2926 }
2927
2928 XVECEXP (body, 0, i++)
2929 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
2930 }
2931 }
2932
2933 if (nlabels > 0)
2934 emit_jump_insn (body);
2935 else
2936 emit_insn (body);
2937 }
2938
2939 if (fallthru_label)
2940 emit_label (fallthru_label);
2941
2942 /* For any outputs that needed reloading into registers, spill them
2943 back to where they belong. */
2944 for (i = 0; i < noutputs; ++i)
2945 if (real_output_rtx[i])
2946 emit_move_insn (real_output_rtx[i], output_rtx[i]);
2947
2948 crtl->has_asm_statement = 1;
2949 free_temp_slots ();
2950}
2951
2952
2953static void
1a91d914 2954expand_asm_stmt (gasm *stmt)
0e80b01d 2955{
2956 int noutputs;
2957 tree outputs, tail, t;
2958 tree *o;
2959 size_t i, n;
2960 const char *s;
2961 tree str, out, in, cl, labels;
2962 location_t locus = gimple_location (stmt);
2963 basic_block fallthru_bb = NULL;
2964
2965 /* Meh... convert the gimple asm operands into real tree lists.
2966 Eventually we should make all routines work on the vectors instead
2967 of relying on TREE_CHAIN. */
2968 out = NULL_TREE;
2969 n = gimple_asm_noutputs (stmt);
2970 if (n > 0)
2971 {
2972 t = out = gimple_asm_output_op (stmt, 0);
2973 for (i = 1; i < n; i++)
2974 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
2975 }
2976
2977 in = NULL_TREE;
2978 n = gimple_asm_ninputs (stmt);
2979 if (n > 0)
2980 {
2981 t = in = gimple_asm_input_op (stmt, 0);
2982 for (i = 1; i < n; i++)
2983 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
2984 }
2985
2986 cl = NULL_TREE;
2987 n = gimple_asm_nclobbers (stmt);
2988 if (n > 0)
2989 {
2990 t = cl = gimple_asm_clobber_op (stmt, 0);
2991 for (i = 1; i < n; i++)
2992 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
2993 }
2994
2995 labels = NULL_TREE;
2996 n = gimple_asm_nlabels (stmt);
2997 if (n > 0)
2998 {
2999 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3000 if (fallthru)
3001 fallthru_bb = fallthru->dest;
3002 t = labels = gimple_asm_label_op (stmt, 0);
3003 for (i = 1; i < n; i++)
3004 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
3005 }
3006
3007 s = gimple_asm_string (stmt);
3008 str = build_string (strlen (s), s);
3009
3010 if (gimple_asm_input_p (stmt))
3011 {
3012 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
3013 return;
3014 }
3015
3016 outputs = out;
3017 noutputs = gimple_asm_noutputs (stmt);
3018 /* o[I] is the place that output number I should be written. */
3019 o = (tree *) alloca (noutputs * sizeof (tree));
3020
3021 /* Record the contents of OUTPUTS before it is modified. */
3022 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
3023 o[i] = TREE_VALUE (tail);
3024
3025 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
3026 OUTPUTS some trees for where the values were actually stored. */
3027 expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
3028 gimple_asm_volatile_p (stmt), locus);
3029
3030 /* Copy all the intermediate outputs into the specified outputs. */
3031 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
3032 {
3033 if (o[i] != TREE_VALUE (tail))
3034 {
3035 expand_assignment (o[i], TREE_VALUE (tail), false);
3036 free_temp_slots ();
3037
3038 /* Restore the original value so that it's correct the next
3039 time we expand this function. */
3040 TREE_VALUE (tail) = o[i];
3041 }
3042 }
3043}
3044
3045/* Emit code to jump to the address
3046 specified by the pointer expression EXP. */
3047
3048static void
3049expand_computed_goto (tree exp)
3050{
3051 rtx x = expand_normal (exp);
3052
3053 x = convert_memory_address (Pmode, x);
3054
3055 do_pending_stack_adjust ();
3056 emit_indirect_jump (x);
3057}
3058
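/* E.g. (illustrative user code): a GNU C computed goto such as

     void *labels[] = { &&a, &&b };
     goto *labels[i];

   reaches this point with EXP being the pointer expression; it is
   converted to Pmode and emitted as one indirect jump. */
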
3059/* Generate RTL code for a `goto' statement with target label LABEL.
3060 LABEL should be a LABEL_DECL tree node that was or will later be
3061 defined with `expand_label'. */
3062
3063static void
3064expand_goto (tree label)
3065{
3066#ifdef ENABLE_CHECKING
3067 /* Check for a nonlocal goto to a containing function. Should have
3068 gotten translated to __builtin_nonlocal_goto. */
3069 tree context = decl_function_context (label);
3070 gcc_assert (!context || context == current_function_decl);
3071#endif
3072
3073 emit_jump (label_rtx (label));
3074}
3075
3076/* Output a return with no value. */
3077
3078static void
3079expand_null_return_1 (void)
3080{
3081 clear_pending_stack_adjust ();
3082 do_pending_stack_adjust ();
3083 emit_jump (return_label);
3084}
3085
3086/* Generate RTL to return from the current function, with no value.
3087 (That is, we do not do anything about returning any value.) */
3088
3089void
3090expand_null_return (void)
3091{
3092 /* If this function was declared to return a value, but we
3093 didn't, clobber the return registers so that they are not
3094 propagated live to the rest of the function. */
3095 clobber_return_register ();
3096
3097 expand_null_return_1 ();
3098}
3099
3100/* Generate RTL to return from the current function, with value VAL. */
3101
3102static void
3103expand_value_return (rtx val)
3104{
3105 /* Copy the value to the return location unless it's already there. */
3106
3107 tree decl = DECL_RESULT (current_function_decl);
3108 rtx return_reg = DECL_RTL (decl);
3109 if (return_reg != val)
3110 {
3111 tree funtype = TREE_TYPE (current_function_decl);
3112 tree type = TREE_TYPE (decl);
3113 int unsignedp = TYPE_UNSIGNED (type);
3754d046 3114 machine_mode old_mode = DECL_MODE (decl);
3115 machine_mode mode;
0e80b01d 3116 if (DECL_BY_REFERENCE (decl))
3117 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3118 else
3119 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3120
3121 if (mode != old_mode)
3122 val = convert_modes (mode, old_mode, val, unsignedp);
3123
3124 if (GET_CODE (return_reg) == PARALLEL)
3125 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3126 else
3127 emit_move_insn (return_reg, val);
3128 }
3129
3130 expand_null_return_1 ();
3131}
3132
3133/* Generate RTL to evaluate the expression RETVAL and return it
3134 from the current function. */
3135
3136static void
058a1b7a 3137expand_return (tree retval, tree bounds)
0e80b01d 3138{
3139 rtx result_rtl;
3140 rtx val = 0;
3141 tree retval_rhs;
058a1b7a 3142 rtx bounds_rtl;
0e80b01d 3143
3144 /* If function wants no value, give it none. */
3145 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3146 {
3147 expand_normal (retval);
3148 expand_null_return ();
3149 return;
3150 }
3151
3152 if (retval == error_mark_node)
3153 {
3154 /* Treat this like a return of no value from a function that
3155 returns a value. */
3156 expand_null_return ();
3157 return;
3158 }
3159 else if ((TREE_CODE (retval) == MODIFY_EXPR
3160 || TREE_CODE (retval) == INIT_EXPR)
3161 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3162 retval_rhs = TREE_OPERAND (retval, 1);
3163 else
3164 retval_rhs = retval;
3165
3166 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3167
058a1b7a 3168 /* Put returned bounds to the right place. */
3169 bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3170 if (bounds_rtl)
3171 {
3172 rtx addr, bnd;
3173
3174 if (bounds)
3175 {
3176 bnd = expand_normal (bounds);
3177 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3178 }
3179 else if (REG_P (bounds_rtl))
3180 {
3181 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3182 addr = gen_rtx_MEM (Pmode, addr);
3183 bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3184 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3185 }
3186 else
3187 {
3188 int n;
3189
3190 gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3191
3192 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3193 addr = gen_rtx_MEM (Pmode, addr);
3194
3195 for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3196 {
3197 rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3198 rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3199 rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3200 rtx bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3201 targetm.calls.store_returned_bounds (slot, bnd);
3202 }
3203 }
3204 }
3205 else if (chkp_function_instrumented_p (current_function_decl)
3206 && !BOUNDED_P (retval_rhs)
3207 && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3208 && TREE_CODE (retval_rhs) != RESULT_DECL)
3209 {
3210 rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3211 addr = gen_rtx_MEM (Pmode, addr);
3212
3213 gcc_assert (MEM_P (result_rtl));
3214
3215 chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3216 }
3217
0e80b01d 3218 /* If we are returning the RESULT_DECL, then the value has already
3219 been stored into it, so we don't have to do anything special. */
3220 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3221 expand_value_return (result_rtl);
3222
3223 /* If the result is an aggregate that is being returned in one (or more)
3224 registers, load the registers here. */
3225
3226 else if (retval_rhs != 0
3227 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3228 && REG_P (result_rtl))
3229 {
3230 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3231 if (val)
3232 {
3233 /* Use the mode of the result value on the return register. */
3234 PUT_MODE (result_rtl, GET_MODE (val));
3235 expand_value_return (val);
3236 }
3237 else
3238 expand_null_return ();
3239 }
3240 else if (retval_rhs != 0
3241 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3242 && (REG_P (result_rtl)
3243 || (GET_CODE (result_rtl) == PARALLEL)))
3244 {
9f495e8d 3245 /* Compute the return value into a temporary (usually a pseudo reg). */
3246 val
3247 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
0e80b01d 3248 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3249 val = force_not_mem (val);
0e80b01d 3250 expand_value_return (val);
3251 }
3252 else
3253 {
3254 /* No hard reg used; calculate value into hard return reg. */
3255 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3256 expand_value_return (result_rtl);
3257 }
3258}
3259
16c9337c 3260/* A subroutine of expand_gimple_stmt, expanding one gimple statement
3261 STMT that doesn't require special handling for outgoing edges. That
3262 is no tailcalls and no GIMPLE_COND. */
3263
3264static void
3265expand_gimple_stmt_1 (gimple stmt)
3266{
3267 tree op0;
8c593757 3268
5169661d 3269 set_curr_insn_location (gimple_location (stmt));
8c593757 3270
16c9337c 3271 switch (gimple_code (stmt))
3272 {
3273 case GIMPLE_GOTO:
3274 op0 = gimple_goto_dest (stmt);
3275 if (TREE_CODE (op0) == LABEL_DECL)
3276 expand_goto (op0);
3277 else
3278 expand_computed_goto (op0);
3279 break;
3280 case GIMPLE_LABEL:
1a91d914 3281 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
16c9337c 3282 break;
3283 case GIMPLE_NOP:
3284 case GIMPLE_PREDICT:
3285 break;
16c9337c 3286 case GIMPLE_SWITCH:
1a91d914 3287 expand_case (as_a <gswitch *> (stmt));
16c9337c 3288 break;
3289 case GIMPLE_ASM:
1a91d914 3290 expand_asm_stmt (as_a <gasm *> (stmt));
16c9337c 3291 break;
3292 case GIMPLE_CALL:
1a91d914 3293 expand_call_stmt (as_a <gcall *> (stmt));
16c9337c 3294 break;
3295
3296 case GIMPLE_RETURN:
1a91d914 3297 op0 = gimple_return_retval (as_a <greturn *> (stmt));
16c9337c 3298
3299 if (op0 && op0 != error_mark_node)
3300 {
3301 tree result = DECL_RESULT (current_function_decl);
3302
3303 /* If we are not returning the current function's RESULT_DECL,
3304 build an assignment to it. */
3305 if (op0 != result)
3306 {
3307 /* I believe that a function's RESULT_DECL is unique. */
3308 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3309
3310 /* ??? We'd like to use simply expand_assignment here,
3311 but this fails if the value is of BLKmode but the return
3312 decl is a register. expand_return has special handling
3313 for this combination, which eventually should move
3314 to common code. See comments there. Until then, let's
3315 build a modify expression :-/ */
3316 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3317 result, op0);
3318 }
3319 }
3320 if (!op0)
3321 expand_null_return ();
3322 else
058a1b7a 3323 expand_return (op0, gimple_return_retbnd (stmt));
16c9337c 3324 break;
3325
3326 case GIMPLE_ASSIGN:
3327 {
1a91d914 3328 gassign *assign_stmt = as_a <gassign *> (stmt);
3329 tree lhs = gimple_assign_lhs (assign_stmt);
16c9337c 3330
3331 /* Tree expand used to fiddle with |= and &= of two bitfield
3332 COMPONENT_REFs here. This can't happen with gimple, the LHS
3333 of binary assigns must be a gimple reg. */
3334
3335 if (TREE_CODE (lhs) != SSA_NAME
3336 || get_gimple_rhs_class (gimple_expr_code (stmt))
3337 == GIMPLE_SINGLE_RHS)
3338 {
1a91d914 3339 tree rhs = gimple_assign_rhs1 (assign_stmt);
16c9337c 3340 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3341 == GIMPLE_SINGLE_RHS);
3342 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
3343 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3c25489e 3344 if (TREE_CLOBBER_P (rhs))
3345 /* This is a clobber to mark the going out of scope for
3346 this LHS. */
3347 ;
3348 else
3349 expand_assignment (lhs, rhs,
1a91d914 3350 gimple_assign_nontemporal_move_p (
3351 assign_stmt));
16c9337c 3352 }
3353 else
3354 {
3355 rtx target, temp;
1a91d914 3356 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
16c9337c 3357 struct separate_ops ops;
3358 bool promoted = false;
3359
3360 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3361 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3362 promoted = true;
3363
1a91d914 3364 ops.code = gimple_assign_rhs_code (assign_stmt);
16c9337c 3365 ops.type = TREE_TYPE (lhs);
3366 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
3367 {
00f4f705 3368 case GIMPLE_TERNARY_RHS:
1a91d914 3369 ops.op2 = gimple_assign_rhs3 (assign_stmt);
00f4f705 3370 /* Fallthru */
16c9337c 3371 case GIMPLE_BINARY_RHS:
1a91d914 3372 ops.op1 = gimple_assign_rhs2 (assign_stmt);
16c9337c 3373 /* Fallthru */
3374 case GIMPLE_UNARY_RHS:
1a91d914 3375 ops.op0 = gimple_assign_rhs1 (assign_stmt);
16c9337c 3376 break;
3377 default:
3378 gcc_unreachable ();
3379 }
3380 ops.location = gimple_location (stmt);
3381
3382	  /* If we want to use a nontemporal store, force the value into
3383	     a register first.  If we store into a promoted register,
3384	     don't directly expand to target.  */
3385 temp = nontemporal || promoted ? NULL_RTX : target;
3386 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3387 EXPAND_NORMAL);
3388
3389 if (temp == target)
3390 ;
3391 else if (promoted)
3392 {
e8629f9e 3393 int unsignedp = SUBREG_PROMOTED_SIGN (target);
16c9337c 3394 /* If TEMP is a VOIDmode constant, use convert_modes to make
3395 sure that we properly convert it. */
3396 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3397 {
3398 temp = convert_modes (GET_MODE (target),
3399 TYPE_MODE (ops.type),
088c4b7b 3400 temp, unsignedp);
16c9337c 3401 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
088c4b7b 3402 GET_MODE (target), temp, unsignedp);
16c9337c 3403 }
3404
5a9ccd1b 3405 convert_move (SUBREG_REG (target), temp, unsignedp);
16c9337c 3406 }
3407 else if (nontemporal && emit_storent_insn (target, temp))
3408 ;
3409 else
3410 {
3411 temp = force_operand (temp, target);
3412 if (temp != target)
3413 emit_move_insn (target, temp);
3414 }
3415 }
3416 }
3417 break;
3418
3419 default:
3420 gcc_unreachable ();
3421 }
3422}
3423
3424/* Expand one gimple statement STMT and return the last RTL instruction
3425 before any of the newly generated ones.
3426
3427 In addition to generating the necessary RTL instructions this also
3428 sets REG_EH_REGION notes if necessary and sets the current source
3429 location for diagnostics. */
3430
74a0cbc4 3431static rtx_insn *
16c9337c 3432expand_gimple_stmt (gimple stmt)
3433{
16c9337c 3434 location_t saved_location = input_location;
74a0cbc4 3435 rtx_insn *last = get_last_insn ();
8c593757 3436 int lp_nr;
16c9337c 3437
16c9337c 3438 gcc_assert (cfun);
3439
8c593757 3440 /* We need to save and restore the current source location so that errors
3441 discovered during expansion are emitted with the right location. But
3442 it would be better if the diagnostic routines used the source location
3443 embedded in the tree nodes rather than globals. */
16c9337c 3444 if (gimple_has_location (stmt))
8c593757 3445 input_location = gimple_location (stmt);
16c9337c 3446
3447 expand_gimple_stmt_1 (stmt);
8c593757 3448
16c9337c 3449 /* Free any temporaries used to evaluate this statement. */
3450 free_temp_slots ();
3451
3452 input_location = saved_location;
3453
3454 /* Mark all insns that may trap. */
e38def9c 3455 lp_nr = lookup_stmt_eh_lp (stmt);
3456 if (lp_nr)
16c9337c 3457 {
74a0cbc4 3458 rtx_insn *insn;
16c9337c 3459 for (insn = next_real_insn (last); insn;
3460 insn = next_real_insn (insn))
3461 {
3462 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3463 /* If we want exceptions for non-call insns, any
3464 may_trap_p instruction may throw. */
3465 && GET_CODE (PATTERN (insn)) != CLOBBER
3466 && GET_CODE (PATTERN (insn)) != USE
e38def9c 3467 && insn_could_throw_p (insn))
3468 make_reg_eh_region_note (insn, 0, lp_nr);
16c9337c 3469 }
3470 }
3471
3472 return last;
3473}
3474
75a70cf9 3475/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
17ceb1d5 3476 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3477 generated a tail call (something that might be denied by the ABI
c578459e 3478 rules governing the call; see calls.c).
3479
3480 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3481 can still reach the rest of BB. The case here is __builtin_sqrt,
3482 where the NaN result goes through the external function (with a
3483 tailcall) and the normal result happens via a sqrt instruction. */
3ced8962 3484
3485static basic_block
1a91d914 3486expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3ced8962 3487{
74a0cbc4 3488 rtx_insn *last2, *last;
17ceb1d5 3489 edge e;
cd665a06 3490 edge_iterator ei;
17ceb1d5 3491 int probability;
3492 gcov_type count;
3ced8962 3493
16c9337c 3494 last2 = last = expand_gimple_stmt (stmt);
3ced8962 3495
3496 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
17ceb1d5 3497 if (CALL_P (last) && SIBLING_CALL_P (last))
3498 goto found;
3ced8962 3499
75a70cf9 3500 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
49377e21 3501
c578459e 3502 *can_fallthru = true;
17ceb1d5 3503 return NULL;
3ced8962 3504
17ceb1d5 3505 found:
3506 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3507 Any instructions emitted here are about to be deleted. */
3508 do_pending_stack_adjust ();
3509
3510 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3511 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3512 EH or abnormal edges, we shouldn't have created a tail call in
3513 the first place. So it seems to me we should just be removing
3514 all edges here, or redirecting the existing fallthru edge to
3515 the exit block. */
3516
17ceb1d5 3517 probability = 0;
3518 count = 0;
17ceb1d5 3519
cd665a06 3520 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3521 {
17ceb1d5 3522 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3523 {
34154e27 3524 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3ced8962 3525 {
17ceb1d5 3526 e->dest->count -= e->count;
3527 e->dest->frequency -= EDGE_FREQUENCY (e);
3528 if (e->dest->count < 0)
a0c938f0 3529 e->dest->count = 0;
17ceb1d5 3530 if (e->dest->frequency < 0)
a0c938f0 3531 e->dest->frequency = 0;
3ced8962 3532 }
17ceb1d5 3533 count += e->count;
3534 probability += e->probability;
3535 remove_edge (e);
3ced8962 3536 }
cd665a06 3537 else
3538 ei_next (&ei);
3ced8962 3539 }
3540
17ceb1d5 3541 /* This is somewhat ugly: the call_expr expander often emits instructions
3542 after the sibcall (to perform the function return). These confuse the
794d8e3f 3543     find_many_sub_basic_blocks code, so we need to get rid of them.  */
17ceb1d5 3544 last = NEXT_INSN (last);
cc636d56 3545 gcc_assert (BARRIER_P (last));
c578459e 3546
3547 *can_fallthru = false;
17ceb1d5 3548 while (NEXT_INSN (last))
3549 {
3550	  /* For instance, an sqrt builtin expander expands an `if' with a
3551	     sibcall in the `then' arm and a label for the `else' arm.  */
3552 if (LABEL_P (NEXT_INSN (last)))
c578459e 3553 {
3554 *can_fallthru = true;
3555 break;
3556 }
17ceb1d5 3557 delete_insn (NEXT_INSN (last));
3558 }
3559
34154e27 3560 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3561 | EDGE_SIBCALL);
17ceb1d5 3562 e->probability += probability;
3563 e->count += count;
26bb3cb2 3564 BB_END (bb) = last;
17ceb1d5 3565 update_bb_for_insn (bb);
3566
3567 if (NEXT_INSN (last))
3568 {
3569 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3570
3571 last = BB_END (bb);
3572 if (BARRIER_P (last))
26bb3cb2 3573 BB_END (bb) = PREV_INSN (last);
17ceb1d5 3574 }
3575
75a70cf9 3576 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
49377e21 3577
17ceb1d5 3578 return bb;
3ced8962 3579}
3580
9845d120 3581/* Return the difference between the floor and the truncated result of
3582 a signed division by OP1 with remainder MOD. */
3583static rtx
3754d046 3584floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
9845d120 3585{
3586 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3587 return gen_rtx_IF_THEN_ELSE
3588 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3589 gen_rtx_IF_THEN_ELSE
3590 (mode, gen_rtx_LT (BImode,
3591 gen_rtx_DIV (mode, op1, mod),
3592 const0_rtx),
3593 constm1_rtx, const0_rtx),
3594 const0_rtx);
3595}
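/* Worked example (illustrative): for -7 / 2, truncating division gives
   -3 with remainder MOD = -1, while the floor result is -4.  In the
   expression above, MOD != 0 and OP1 / MOD = 2 / -1 = -2 < 0, so it
   yields -1, which added to the truncated quotient gives the floor.  */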
3596
3597/* Return the difference between the ceil and the truncated result of
3598 a signed division by OP1 with remainder MOD. */
3599static rtx
3754d046 3600ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
9845d120 3601{
3602 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3603 return gen_rtx_IF_THEN_ELSE
3604 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3605 gen_rtx_IF_THEN_ELSE
3606 (mode, gen_rtx_GT (BImode,
3607 gen_rtx_DIV (mode, op1, mod),
3608 const0_rtx),
3609 const1_rtx, const0_rtx),
3610 const0_rtx);
3611}
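/* Worked example (illustrative): for 7 / 2, truncating division gives
   3 with remainder MOD = 1, while the ceiling result is 4.  MOD != 0
   and OP1 / MOD = 2 / 1 = 2 > 0, so the expression yields +1.  For
   -7 / 2 the remainder is -1 and OP1 / MOD = -2 is not > 0, so no
   adjustment is made, matching ceil (-3.5) = -3.  */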
3612
3613/* Return the difference between the ceil and the truncated result of
3614 an unsigned division by OP1 with remainder MOD. */
3615static rtx
3754d046 3616ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
9845d120 3617{
3618 /* (mod != 0 ? 1 : 0) */
3619 return gen_rtx_IF_THEN_ELSE
3620 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3621 const1_rtx, const0_rtx);
3622}
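/* Worked example (illustrative): for 7u / 2u, truncation gives 3 with
   remainder 1; since MOD != 0 the expression yields +1, giving the
   ceiling result 4.  */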
3623
3624/* Return the difference between the rounded and the truncated result
3625 of a signed division by OP1 with remainder MOD. Halfway cases are
3626 rounded away from zero, rather than to the nearest even number. */
3627static rtx
3754d046 3628round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
9845d120 3629{
3630 /* (abs (mod) >= abs (op1) - abs (mod)
3631 ? (op1 / mod > 0 ? 1 : -1)
3632 : 0) */
3633 return gen_rtx_IF_THEN_ELSE
3634 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3635 gen_rtx_MINUS (mode,
3636 gen_rtx_ABS (mode, op1),
3637 gen_rtx_ABS (mode, mod))),
3638 gen_rtx_IF_THEN_ELSE
3639 (mode, gen_rtx_GT (BImode,
3640 gen_rtx_DIV (mode, op1, mod),
3641 const0_rtx),
3642 const1_rtx, constm1_rtx),
3643 const0_rtx);
3644}
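/* Worked example (illustrative): for 7 / 2 = 3.5, MOD = 1 and
   abs (MOD) = 1 >= abs (OP1) - abs (MOD) = 1, so we are at or past the
   halfway point; OP1 / MOD = 2 > 0 selects +1, rounding the truncated
   3 up to 4.  For -7 / 2, MOD = -1 and OP1 / MOD = -2 selects -1,
   rounding -3 to -4, i.e. away from zero.  */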
3645
3646/* Return the difference between the rounded and the truncated result
3647   of an unsigned division by OP1 with remainder MOD.  Halfway cases
3648 are rounded away from zero, rather than to the nearest even
3649 number. */
3650static rtx
3754d046 3651round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
9845d120 3652{
3653 /* (mod >= op1 - mod ? 1 : 0) */
3654 return gen_rtx_IF_THEN_ELSE
3655 (mode, gen_rtx_GE (BImode, mod,
3656 gen_rtx_MINUS (mode, op1, mod)),
3657 const1_rtx, const0_rtx);
3658}
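/* Worked example (illustrative): for 7u / 2u, MOD = 1 and
   OP1 - MOD = 1, so MOD >= OP1 - MOD holds and the expression yields
   +1, rounding the truncated 3 up to 4.  */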
3659
d89c81d6 3660/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3661 any rtl. */
3662
3663static rtx
3754d046 3664convert_debug_memory_address (machine_mode mode, rtx x,
cd799492 3665 addr_space_t as)
d89c81d6 3666{
3754d046 3667 machine_mode xmode = GET_MODE (x);
d89c81d6 3668
3669#ifndef POINTERS_EXTEND_UNSIGNED
cd799492 3670 gcc_assert (mode == Pmode
3671 || mode == targetm.addr_space.address_mode (as));
d89c81d6 3672 gcc_assert (xmode == mode || xmode == VOIDmode);
3673#else
cd799492 3674 rtx temp;
cd799492 3675
df7f3935 3676 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
d89c81d6 3677
3678 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3679 return x;
3680
995b44f5 3681 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
d89c81d6 3682 x = simplify_gen_subreg (mode, x, xmode,
3683 subreg_lowpart_offset
3684 (mode, xmode));
3685 else if (POINTERS_EXTEND_UNSIGNED > 0)
3686 x = gen_rtx_ZERO_EXTEND (mode, x);
3687 else if (!POINTERS_EXTEND_UNSIGNED)
3688 x = gen_rtx_SIGN_EXTEND (mode, x);
3689 else
cd799492 3690 {
3691 switch (GET_CODE (x))
3692 {
3693 case SUBREG:
3694 if ((SUBREG_PROMOTED_VAR_P (x)
3695 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3696 || (GET_CODE (SUBREG_REG (x)) == PLUS
3697 && REG_P (XEXP (SUBREG_REG (x), 0))
3698 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3699 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3700 && GET_MODE (SUBREG_REG (x)) == mode)
3701 return SUBREG_REG (x);
3702 break;
3703 case LABEL_REF:
b49f2e4b 3704 temp = gen_rtx_LABEL_REF (mode, LABEL_REF_LABEL (x));
cd799492 3705 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3706 return temp;
3707 case SYMBOL_REF:
3708 temp = shallow_copy_rtx (x);
3709 PUT_MODE (temp, mode);
3710 return temp;
3711 case CONST:
3712 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3713 if (temp)
3714 temp = gen_rtx_CONST (mode, temp);
3715 return temp;
3716 case PLUS:
3717 case MINUS:
3718 if (CONST_INT_P (XEXP (x, 1)))
3719 {
3720 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3721 if (temp)
3722 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3723 }
3724 break;
3725 default:
3726 break;
3727 }
3728 /* Don't know how to express ptr_extend as operation in debug info. */
3729 return NULL;
3730 }
d89c81d6 3731#endif /* POINTERS_EXTEND_UNSIGNED */
3732
3733 return x;
3734}
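/* For instance (an illustrative sketch, not a statement about any
   particular port): on a target where ptr_mode is SImode, Pmode is
   DImode and POINTERS_EXTEND_UNSIGNED > 0, widening an SImode address
   X to Pmode above yields (zero_extend:DI X) without emitting any
   insns.  */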
3735
8ee59e4e 3736/* Return an RTX equivalent to the value of the parameter DECL. */
3737
3738static rtx
3739expand_debug_parm_decl (tree decl)
3740{
3741 rtx incoming = DECL_INCOMING_RTL (decl);
3742
3743 if (incoming
3744 && GET_MODE (incoming) != BLKmode
3745 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3746 || (MEM_P (incoming)
3747 && REG_P (XEXP (incoming, 0))
3748 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3749 {
3750 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3751
3752#ifdef HAVE_window_save
3753 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3754 If the target machine has an explicit window save instruction, the
3755 actual entry value is the corresponding OUTGOING_REGNO instead. */
3756 if (REG_P (incoming)
3757 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
3758 incoming
3759 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
3760 OUTGOING_REGNO (REGNO (incoming)), 0);
3761 else if (MEM_P (incoming))
3762 {
3763 rtx reg = XEXP (incoming, 0);
3764 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
3765 {
3766 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
3767 incoming = replace_equiv_address_nv (incoming, reg);
3768 }
848d0536 3769 else
3770 incoming = copy_rtx (incoming);
8ee59e4e 3771 }
3772#endif
3773
3774 ENTRY_VALUE_EXP (rtl) = incoming;
3775 return rtl;
3776 }
3777
3778 if (incoming
3779 && GET_MODE (incoming) != BLKmode
3780 && !TREE_ADDRESSABLE (decl)
3781 && MEM_P (incoming)
3782 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3783 || (GET_CODE (XEXP (incoming, 0)) == PLUS
3784 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
3785 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
848d0536 3786 return copy_rtx (incoming);
8ee59e4e 3787
3788 return NULL_RTX;
3789}
3790
3791/* Return an RTX equivalent to the value of the tree expression EXP. */
9845d120 3792
3793static rtx
3794expand_debug_expr (tree exp)
3795{
3796 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3754d046 3797 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3798 machine_mode inner_mode = VOIDmode;
9845d120 3799 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
bd1a81f7 3800 addr_space_t as;
9845d120 3801
3802 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3803 {
3804 case tcc_expression:
3805 switch (TREE_CODE (exp))
3806 {
3807 case COND_EXPR:
b54ee9da 3808 case DOT_PROD_EXPR:
a2287001 3809 case SAD_EXPR:
00f4f705 3810 case WIDEN_MULT_PLUS_EXPR:
3811 case WIDEN_MULT_MINUS_EXPR:
156f51b9 3812 case FMA_EXPR:
9845d120 3813 goto ternary;
3814
3815 case TRUTH_ANDIF_EXPR:
3816 case TRUTH_ORIF_EXPR:
3817 case TRUTH_AND_EXPR:
3818 case TRUTH_OR_EXPR:
3819 case TRUTH_XOR_EXPR:
3820 goto binary;
3821
3822 case TRUTH_NOT_EXPR:
3823 goto unary;
3824
3825 default:
3826 break;
3827 }
3828 break;
3829
3830 ternary:
3831 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
3832 if (!op2)
3833 return NULL_RTX;
3834 /* Fall through. */
3835
3836 binary:
3837 case tcc_binary:
3838 case tcc_comparison:
3839 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3840 if (!op1)
3841 return NULL_RTX;
3842 /* Fall through. */
3843
3844 unary:
3845 case tcc_unary:
9ecadf14 3846 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9845d120 3847 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3848 if (!op0)
3849 return NULL_RTX;
3850 break;
3851
3852 case tcc_type:
3853 case tcc_statement:
3854 gcc_unreachable ();
3855
3856 case tcc_constant:
3857 case tcc_exceptional:
3858 case tcc_declaration:
3859 case tcc_reference:
3860 case tcc_vl_exp:
3861 break;
3862 }
3863
3864 switch (TREE_CODE (exp))
3865 {
3866 case STRING_CST:
3867 if (!lookup_constant_def (exp))
3868 {
0f89d483 3869 if (strlen (TREE_STRING_POINTER (exp)) + 1
3870 != (size_t) TREE_STRING_LENGTH (exp))
3871 return NULL_RTX;
9845d120 3872 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
3873 op0 = gen_rtx_MEM (BLKmode, op0);
3874 set_mem_attributes (op0, exp, 0);
3875 return op0;
3876 }
3877 /* Fall through... */
3878
3879 case INTEGER_CST:
3880 case REAL_CST:
3881 case FIXED_CST:
3882 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
3883 return op0;
3884
3885 case COMPLEX_CST:
3886 gcc_assert (COMPLEX_MODE_P (mode));
3887 op0 = expand_debug_expr (TREE_REALPART (exp));
9845d120 3888 op1 = expand_debug_expr (TREE_IMAGPART (exp));
9845d120 3889 return gen_rtx_CONCAT (mode, op0, op1);
3890
688ff29b 3891 case DEBUG_EXPR_DECL:
3892 op0 = DECL_RTL_IF_SET (exp);
3893
3894 if (op0)
3895 return op0;
3896
3897 op0 = gen_rtx_DEBUG_EXPR (mode);
23dd51cb 3898 DEBUG_EXPR_TREE_DECL (op0) = exp;
688ff29b 3899 SET_DECL_RTL (exp, op0);
3900
3901 return op0;
3902
9845d120 3903 case VAR_DECL:
3904 case PARM_DECL:
3905 case FUNCTION_DECL:
3906 case LABEL_DECL:
3907 case CONST_DECL:
3908 case RESULT_DECL:
3909 op0 = DECL_RTL_IF_SET (exp);
3910
3911 /* This decl was probably optimized away. */
3912 if (!op0)
0f89d483 3913 {
3914 if (TREE_CODE (exp) != VAR_DECL
3915 || DECL_EXTERNAL (exp)
3916 || !TREE_STATIC (exp)
3917 || !DECL_NAME (exp)
a5653528 3918 || DECL_HARD_REGISTER (exp)
1cdbcae1 3919 || DECL_IN_CONSTANT_POOL (exp)
a5653528 3920 || mode == VOIDmode)
0f89d483 3921 return NULL;
3922
e6db644e 3923 op0 = make_decl_rtl_for_debug (exp);
0f89d483 3924 if (!MEM_P (op0)
3925 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
3926 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
3927 return NULL;
3928 }
3929 else
3930 op0 = copy_rtx (op0);
9845d120 3931
5d713e67 3932 if (GET_MODE (op0) == BLKmode
3933	  /* If op0 is not BLKmode, but mode is BLKmode, adjust_mode
3934 below would ICE. While it is likely a FE bug,
3935 try to be robust here. See PR43166. */
0f18e023 3936 || mode == BLKmode
3937 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
9845d120 3938 {
3939 gcc_assert (MEM_P (op0));
3940 op0 = adjust_address_nv (op0, mode, 0);
3941 return op0;
3942 }
3943
3944 /* Fall through. */
3945
3946 adjust_mode:
3947 case PAREN_EXPR:
d09ef31a 3948 CASE_CONVERT:
9845d120 3949 {
9ecadf14 3950 inner_mode = GET_MODE (op0);
9845d120 3951
3952 if (mode == inner_mode)
3953 return op0;
3954
3955 if (inner_mode == VOIDmode)
3956 {
3c800ea7 3957 if (TREE_CODE (exp) == SSA_NAME)
3958 inner_mode = TYPE_MODE (TREE_TYPE (exp));
3959 else
3960 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9845d120 3961 if (mode == inner_mode)
3962 return op0;
3963 }
3964
3965 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3966 {
3967 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3968 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3969 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3970 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3971 else
3972 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3973 }
3974 else if (FLOAT_MODE_P (mode))
3975 {
3c800ea7 3976 gcc_assert (TREE_CODE (exp) != SSA_NAME);
9845d120 3977 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3978 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
3979 else
3980 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
3981 }
3982 else if (FLOAT_MODE_P (inner_mode))
3983 {
3984 if (unsignedp)
3985 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3986 else
3987 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3988 }
3989 else if (CONSTANT_P (op0)
995b44f5 3990 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
9845d120 3991 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3992 subreg_lowpart_offset (mode,
3993 inner_mode));
f84ead57 3994 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
3995 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
3996 : unsignedp)
9ecadf14 3997 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
9845d120 3998 else
9ecadf14 3999 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
9845d120 4000
4001 return op0;
4002 }
4003
182cf5a9 4004 case MEM_REF:
e488c25f 4005 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4006 {
4007 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4008 TREE_OPERAND (exp, 0),
4009 TREE_OPERAND (exp, 1));
4010 if (newexp)
4011 return expand_debug_expr (newexp);
4012 }
4013 /* FALLTHROUGH */
9845d120 4014 case INDIRECT_REF:
29c05e22 4015 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9845d120 4016 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4017 if (!op0)
4018 return NULL;
4019
7ef770fa 4020 if (TREE_CODE (exp) == MEM_REF)
4021 {
f87ea39e 4022 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4023 || (GET_CODE (op0) == PLUS
4024 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4025 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4026 Instead just use get_inner_reference. */
4027 goto component_ref;
4028
7ef770fa 4029 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4030 if (!op1 || !CONST_INT_P (op1))
4031 return NULL;
4032
29c05e22 4033 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
7ef770fa 4034 }
4035
14a3093e 4036 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9845d120 4037
cd799492 4038 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4039 op0, as);
4040 if (op0 == NULL_RTX)
4041 return NULL;
9845d120 4042
cd799492 4043 op0 = gen_rtx_MEM (mode, op0);
9845d120 4044 set_mem_attributes (op0, exp, 0);
e488c25f 4045 if (TREE_CODE (exp) == MEM_REF
4046 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4047 set_mem_expr (op0, NULL_TREE);
bd1a81f7 4048 set_mem_addr_space (op0, as);
9845d120 4049
4050 return op0;
4051
4052 case TARGET_MEM_REF:
28daba6f 4053 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4054 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
9845d120 4055 return NULL;
4056
4057 op0 = expand_debug_expr
8d8150c8 4058 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
9845d120 4059 if (!op0)
4060 return NULL;
4061
27628c9e 4062 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
cd799492 4063 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4064 op0, as);
4065 if (op0 == NULL_RTX)
4066 return NULL;
9845d120 4067
4068 op0 = gen_rtx_MEM (mode, op0);
4069
4070 set_mem_attributes (op0, exp, 0);
bd1a81f7 4071 set_mem_addr_space (op0, as);
9845d120 4072
4073 return op0;
4074
f87ea39e 4075 component_ref:
9845d120 4076 case ARRAY_REF:
4077 case ARRAY_RANGE_REF:
4078 case COMPONENT_REF:
4079 case BIT_FIELD_REF:
4080 case REALPART_EXPR:
4081 case IMAGPART_EXPR:
4082 case VIEW_CONVERT_EXPR:
4083 {
3754d046 4084 machine_mode mode1;
9845d120 4085 HOST_WIDE_INT bitsize, bitpos;
4086 tree offset;
4087 int volatilep = 0;
4088 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
dc317fc8 4089 &mode1, &unsignedp, &volatilep, false);
9845d120 4090 rtx orig_op0;
4091
9e3c8673 4092 if (bitsize == 0)
4093 return NULL;
4094
9845d120 4095 orig_op0 = op0 = expand_debug_expr (tem);
4096
4097 if (!op0)
4098 return NULL;
4099
4100 if (offset)
4101 {
3754d046 4102 machine_mode addrmode, offmode;
d89c81d6 4103
f4b490ea 4104 if (!MEM_P (op0))
4105 return NULL;
9845d120 4106
d89c81d6 4107 op0 = XEXP (op0, 0);
4108 addrmode = GET_MODE (op0);
4109 if (addrmode == VOIDmode)
4110 addrmode = Pmode;
4111
9845d120 4112 op1 = expand_debug_expr (offset);
4113 if (!op1)
4114 return NULL;
4115
d89c81d6 4116 offmode = GET_MODE (op1);
4117 if (offmode == VOIDmode)
4118 offmode = TYPE_MODE (TREE_TYPE (offset));
4119
4120 if (addrmode != offmode)
4121 op1 = simplify_gen_subreg (addrmode, op1, offmode,
4122 subreg_lowpart_offset (addrmode,
4123 offmode));
4124
4125	    /* Don't use offset_address here; we don't need a
4126 recognizable address, and we don't want to generate
4127 code. */
9ecadf14 4128 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4129 op0, op1));
9845d120 4130 }
4131
4132 if (MEM_P (op0))
4133 {
9e3c8673 4134 if (mode1 == VOIDmode)
4135 /* Bitfield. */
4136 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
9845d120 4137 if (bitpos >= BITS_PER_UNIT)
4138 {
4139 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4140 bitpos %= BITS_PER_UNIT;
4141 }
4142 else if (bitpos < 0)
4143 {
9e3c8673 4144 HOST_WIDE_INT units
4145 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
9845d120 4146 op0 = adjust_address_nv (op0, mode1, units);
4147 bitpos += units * BITS_PER_UNIT;
4148 }
4149 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4150 op0 = adjust_address_nv (op0, mode, 0);
4151 else if (GET_MODE (op0) != mode1)
4152 op0 = adjust_address_nv (op0, mode1, 0);
4153 else
4154 op0 = copy_rtx (op0);
4155 if (op0 == orig_op0)
4156 op0 = shallow_copy_rtx (op0);
4157 set_mem_attributes (op0, exp, 0);
4158 }
4159
4160 if (bitpos == 0 && mode == GET_MODE (op0))
4161 return op0;
4162
5bd71193 4163 if (bitpos < 0)
4164 return NULL;
4165
37cd7b09 4166 if (GET_MODE (op0) == BLKmode)
4167 return NULL;
4168
9845d120 4169 if ((bitpos % BITS_PER_UNIT) == 0
4170 && bitsize == GET_MODE_BITSIZE (mode1))
4171 {
3754d046 4172 machine_mode opmode = GET_MODE (op0);
9845d120 4173
9845d120 4174 if (opmode == VOIDmode)
c8b13e49 4175 opmode = TYPE_MODE (TREE_TYPE (tem));
9845d120 4176
4177 /* This condition may hold if we're expanding the address
4178 right past the end of an array that turned out not to
4179 be addressable (i.e., the address was only computed in
4180 debug stmts). The gen_subreg below would rightfully
4181 crash, and the address doesn't really exist, so just
4182 drop it. */
4183 if (bitpos >= GET_MODE_BITSIZE (opmode))
4184 return NULL;
4185
406aed5d 4186 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4187 return simplify_gen_subreg (mode, op0, opmode,
4188 bitpos / BITS_PER_UNIT);
9845d120 4189 }
4190
4191 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4192 && TYPE_UNSIGNED (TREE_TYPE (exp))
4193 ? SIGN_EXTRACT
4194 : ZERO_EXTRACT, mode,
4195 GET_MODE (op0) != VOIDmode
c8b13e49 4196 ? GET_MODE (op0)
4197 : TYPE_MODE (TREE_TYPE (tem)),
9845d120 4198 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4199 }
4200
9845d120 4201 case ABS_EXPR:
9ecadf14 4202 return simplify_gen_unary (ABS, mode, op0, mode);
9845d120 4203
4204 case NEGATE_EXPR:
9ecadf14 4205 return simplify_gen_unary (NEG, mode, op0, mode);
9845d120 4206
4207 case BIT_NOT_EXPR:
9ecadf14 4208 return simplify_gen_unary (NOT, mode, op0, mode);
9845d120 4209
4210 case FLOAT_EXPR:
9ecadf14 4211 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4212 0)))
4213 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4214 inner_mode);
9845d120 4215
4216 case FIX_TRUNC_EXPR:
9ecadf14 4217 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4218 inner_mode);
9845d120 4219
4220 case POINTER_PLUS_EXPR:
af3d13d6 4221 /* For the rare target where pointers are not the same size as
4222 size_t, we need to check for mis-matched modes and correct
4223 the addend. */
4224 if (op0 && op1
4225 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4226 && GET_MODE (op0) != GET_MODE (op1))
4227 {
5acc9e33 4228 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4229 /* If OP0 is a partial mode, then we must truncate, even if it has
4230 the same bitsize as OP1 as GCC's representation of partial modes
4231 is opaque. */
4232 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4233 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
9ecadf14 4234 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4235 GET_MODE (op1));
af3d13d6 4236 else
4237 /* We always sign-extend, regardless of the signedness of
4238 the operand, because the operand is always unsigned
4239 here even if the original C expression is signed. */
9ecadf14 4240 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4241 GET_MODE (op1));
af3d13d6 4242 }
4243 /* Fall through. */
9845d120 4244 case PLUS_EXPR:
9ecadf14 4245 return simplify_gen_binary (PLUS, mode, op0, op1);
9845d120 4246
4247 case MINUS_EXPR:
9ecadf14 4248 return simplify_gen_binary (MINUS, mode, op0, op1);
9845d120 4249
4250 case MULT_EXPR:
9ecadf14 4251 return simplify_gen_binary (MULT, mode, op0, op1);
9845d120 4252
4253 case RDIV_EXPR:
4254 case TRUNC_DIV_EXPR:
4255 case EXACT_DIV_EXPR:
4256 if (unsignedp)
9ecadf14 4257 return simplify_gen_binary (UDIV, mode, op0, op1);
9845d120 4258 else
9ecadf14 4259 return simplify_gen_binary (DIV, mode, op0, op1);
9845d120 4260
4261 case TRUNC_MOD_EXPR:
9ecadf14 4262 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
9845d120 4263
4264 case FLOOR_DIV_EXPR:
4265 if (unsignedp)
9ecadf14 4266 return simplify_gen_binary (UDIV, mode, op0, op1);
9845d120 4267 else
4268 {
9ecadf14 4269 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4270 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
9845d120 4271 rtx adj = floor_sdiv_adjust (mode, mod, op1);
9ecadf14 4272 return simplify_gen_binary (PLUS, mode, div, adj);
9845d120 4273 }
4274
4275 case FLOOR_MOD_EXPR:
4276 if (unsignedp)
9ecadf14 4277 return simplify_gen_binary (UMOD, mode, op0, op1);
9845d120 4278 else
4279 {
9ecadf14 4280 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
9845d120 4281 rtx adj = floor_sdiv_adjust (mode, mod, op1);
9ecadf14 4282 adj = simplify_gen_unary (NEG, mode,
4283 simplify_gen_binary (MULT, mode, adj, op1),
4284 mode);
4285 return simplify_gen_binary (PLUS, mode, mod, adj);
9845d120 4286 }
4287
4288 case CEIL_DIV_EXPR:
4289 if (unsignedp)
4290 {
9ecadf14 4291 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4292 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
9845d120 4293 rtx adj = ceil_udiv_adjust (mode, mod, op1);
9ecadf14 4294 return simplify_gen_binary (PLUS, mode, div, adj);
9845d120 4295 }
4296 else
4297 {
9ecadf14 4298 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4299 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
9845d120 4300 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
9ecadf14 4301 return simplify_gen_binary (PLUS, mode, div, adj);
9845d120 4302 }
4303
4304 case CEIL_MOD_EXPR:
4305 if (unsignedp)
4306 {
9ecadf14 4307 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
9845d120 4308 rtx adj = ceil_udiv_adjust (mode, mod, op1);
9ecadf14 4309 adj = simplify_gen_unary (NEG, mode,
4310 simplify_gen_binary (MULT, mode, adj, op1),
4311 mode);
4312 return simplify_gen_binary (PLUS, mode, mod, adj);
9845d120 4313 }
4314 else
4315 {
9ecadf14 4316 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
9845d120 4317 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
9ecadf14 4318 adj = simplify_gen_unary (NEG, mode,
4319 simplify_gen_binary (MULT, mode, adj, op1),
4320 mode);
4321 return simplify_gen_binary (PLUS, mode, mod, adj);
9845d120 4322 }
4323
4324 case ROUND_DIV_EXPR:
4325 if (unsignedp)
4326 {
9ecadf14 4327 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4328 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
9845d120 4329 rtx adj = round_udiv_adjust (mode, mod, op1);
9ecadf14 4330 return simplify_gen_binary (PLUS, mode, div, adj);
9845d120 4331 }
4332 else
4333 {
9ecadf14 4334 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4335 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
9845d120 4336 rtx adj = round_sdiv_adjust (mode, mod, op1);
9ecadf14 4337 return simplify_gen_binary (PLUS, mode, div, adj);
9845d120 4338 }
4339
4340 case ROUND_MOD_EXPR:
4341 if (unsignedp)
4342 {
9ecadf14 4343 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
9845d120 4344 rtx adj = round_udiv_adjust (mode, mod, op1);
9ecadf14 4345 adj = simplify_gen_unary (NEG, mode,
4346 simplify_gen_binary (MULT, mode, adj, op1),
4347 mode);
4348 return simplify_gen_binary (PLUS, mode, mod, adj);
9845d120 4349 }
4350 else
4351 {
9ecadf14 4352 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
9845d120 4353 rtx adj = round_sdiv_adjust (mode, mod, op1);
9ecadf14 4354 adj = simplify_gen_unary (NEG, mode,
4355 simplify_gen_binary (MULT, mode, adj, op1),
4356 mode);
4357 return simplify_gen_binary (PLUS, mode, mod, adj);
9845d120 4358 }
4359
4360 case LSHIFT_EXPR:
9ecadf14 4361 return simplify_gen_binary (ASHIFT, mode, op0, op1);
9845d120 4362
4363 case RSHIFT_EXPR:
4364 if (unsignedp)
9ecadf14 4365 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
9845d120 4366 else
9ecadf14 4367 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
9845d120 4368
4369 case LROTATE_EXPR:
9ecadf14 4370 return simplify_gen_binary (ROTATE, mode, op0, op1);
9845d120 4371
4372 case RROTATE_EXPR:
9ecadf14 4373 return simplify_gen_binary (ROTATERT, mode, op0, op1);
9845d120 4374
4375 case MIN_EXPR:
9ecadf14 4376 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
9845d120 4377
4378 case MAX_EXPR:
9ecadf14 4379 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
9845d120 4380
4381 case BIT_AND_EXPR:
4382 case TRUTH_AND_EXPR:
9ecadf14 4383 return simplify_gen_binary (AND, mode, op0, op1);
9845d120 4384
4385 case BIT_IOR_EXPR:
4386 case TRUTH_OR_EXPR:
9ecadf14 4387 return simplify_gen_binary (IOR, mode, op0, op1);
9845d120 4388
4389 case BIT_XOR_EXPR:
4390 case TRUTH_XOR_EXPR:
9ecadf14 4391 return simplify_gen_binary (XOR, mode, op0, op1);
9845d120 4392
4393 case TRUTH_ANDIF_EXPR:
4394 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4395
4396 case TRUTH_ORIF_EXPR:
4397 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4398
4399 case TRUTH_NOT_EXPR:
9ecadf14 4400 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
9845d120 4401
4402 case LT_EXPR:
9ecadf14 4403 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4404 op0, op1);
9845d120 4405
4406 case LE_EXPR:
9ecadf14 4407 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4408 op0, op1);
9845d120 4409
4410 case GT_EXPR:
9ecadf14 4411 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4412 op0, op1);
9845d120 4413
4414 case GE_EXPR:
9ecadf14 4415 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4416 op0, op1);
9845d120 4417
4418 case EQ_EXPR:
9ecadf14 4419 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
9845d120 4420
4421 case NE_EXPR:
9ecadf14 4422 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
9845d120 4423
4424 case UNORDERED_EXPR:
9ecadf14 4425 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
9845d120 4426
4427 case ORDERED_EXPR:
9ecadf14 4428 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
9845d120 4429
4430 case UNLT_EXPR:
9ecadf14 4431 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
9845d120 4432
4433 case UNLE_EXPR:
9ecadf14 4434 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
9845d120 4435
4436 case UNGT_EXPR:
9ecadf14 4437 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
9845d120 4438
4439 case UNGE_EXPR:
9ecadf14 4440 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
9845d120 4441
4442 case UNEQ_EXPR:
9ecadf14 4443 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
9845d120 4444
4445 case LTGT_EXPR:
9ecadf14 4446 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
9845d120 4447
4448 case COND_EXPR:
4449 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4450
4451 case COMPLEX_EXPR:
4452 gcc_assert (COMPLEX_MODE_P (mode));
4453 if (GET_MODE (op0) == VOIDmode)
4454 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4455 if (GET_MODE (op1) == VOIDmode)
4456 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4457 return gen_rtx_CONCAT (mode, op0, op1);
4458
4e6677f8 4459 case CONJ_EXPR:
4460 if (GET_CODE (op0) == CONCAT)
4461 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
9ecadf14 4462 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4463 XEXP (op0, 1),
4464 GET_MODE_INNER (mode)));
4e6677f8 4465 else
4466 {
3754d046 4467 machine_mode imode = GET_MODE_INNER (mode);
4e6677f8 4468 rtx re, im;
4469
4470 if (MEM_P (op0))
4471 {
4472 re = adjust_address_nv (op0, imode, 0);
4473 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4474 }
4475 else
4476 {
3754d046 4477 machine_mode ifmode = int_mode_for_mode (mode);
4478 machine_mode ihmode = int_mode_for_mode (imode);
4e6677f8 4479 rtx halfsize;
4480 if (ifmode == BLKmode || ihmode == BLKmode)
4481 return NULL;
4482 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4483 re = op0;
4484 if (mode != ifmode)
4485 re = gen_rtx_SUBREG (ifmode, re, 0);
4486 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4487 if (imode != ihmode)
4488 re = gen_rtx_SUBREG (imode, re, 0);
4489 im = copy_rtx (op0);
4490 if (mode != ifmode)
4491 im = gen_rtx_SUBREG (ifmode, im, 0);
4492 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4493 if (imode != ihmode)
4494 im = gen_rtx_SUBREG (imode, im, 0);
4495 }
4496 im = gen_rtx_NEG (imode, im);
4497 return gen_rtx_CONCAT (mode, re, im);
4498 }
4499
9845d120 4500 case ADDR_EXPR:
4501 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4502 if (!op0 || !MEM_P (op0))
f9c61ef7 4503 {
4504 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4505 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4506 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
88f2e16b 4507 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4508 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
f9c61ef7 4509 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4510
4511 if (handled_component_p (TREE_OPERAND (exp, 0)))
4512 {
4513 HOST_WIDE_INT bitoffset, bitsize, maxsize;
4514 tree decl
4515 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
4516 &bitoffset, &bitsize, &maxsize);
4517 if ((TREE_CODE (decl) == VAR_DECL
4518 || TREE_CODE (decl) == PARM_DECL
4519 || TREE_CODE (decl) == RESULT_DECL)
88f2e16b 4520 && (!TREE_ADDRESSABLE (decl)
4521 || target_for_debug_bind (decl))
f9c61ef7 4522 && (bitoffset % BITS_PER_UNIT) == 0
4523 && bitsize > 0
4524 && bitsize == maxsize)
29c05e22 4525 {
4526 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4527 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4528 }
f9c61ef7 4529 }
4530
8afb7c4b 4531 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4532 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4533 == ADDR_EXPR)
4534 {
4535 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4536 0));
4537 if (op0 != NULL
4538 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4539 || (GET_CODE (op0) == PLUS
4540 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4541 && CONST_INT_P (XEXP (op0, 1)))))
4542 {
4543 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4544 1));
4545 if (!op1 || !CONST_INT_P (op1))
4546 return NULL;
4547
4548 return plus_constant (mode, op0, INTVAL (op1));
4549 }
4550 }
4551
f9c61ef7 4552 return NULL;
4553 }
9845d120 4554
14a3093e 4555 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
cd799492 4556 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
d89c81d6 4557
4558 return op0;
9845d120 4559
4560 case VECTOR_CST:
fadf62f4 4561 {
4562 unsigned i;
4563
4564 op0 = gen_rtx_CONCATN
4565 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4566
4567 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4568 {
4569 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4570 if (!op1)
4571 return NULL;
4572 XVECEXP (op0, 0, i) = op1;
4573 }
4574
4575 return op0;
4576 }
9845d120 4577
4578 case CONSTRUCTOR:
3c25489e 4579 if (TREE_CLOBBER_P (exp))
4580 return NULL;
4581 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
9845d120 4582 {
4583 unsigned i;
4584 tree val;
4585
4586 op0 = gen_rtx_CONCATN
4587 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4588
4589 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4590 {
4591 op1 = expand_debug_expr (val);
4592 if (!op1)
4593 return NULL;
4594 XVECEXP (op0, 0, i) = op1;
4595 }
4596
4597 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4598 {
4599 op1 = expand_debug_expr
385f3f36 4600 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
9845d120 4601
4602 if (!op1)
4603 return NULL;
4604
4605 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4606 XVECEXP (op0, 0, i) = op1;
4607 }
4608
4609 return op0;
4610 }
4611 else
4612 goto flag_unsupported;
4613
4614 case CALL_EXPR:
4615 /* ??? Maybe handle some builtins? */
4616 return NULL;
4617
4618 case SSA_NAME:
4619 {
3c800ea7 4620 gimple g = get_gimple_for_ssa_name (exp);
4621 if (g)
4622 {
4623 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
4624 if (!op0)
4625 return NULL;
4626 }
4627 else
4628 {
4629 int part = var_to_partition (SA.map, exp);
9845d120 4630
3c800ea7 4631 if (part == NO_PARTITION)
a5701bde 4632 {
4633		  /* If this is a reference to an incoming value of a parameter
4634		     that is never used in the code, or where the incoming
4635		     value is never used in the code, use the PARM_DECL's
4636		     DECL_RTL if set.  */
4637 if (SSA_NAME_IS_DEFAULT_DEF (exp)
4638 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
4639 {
8ee59e4e 4640 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4641 if (op0)
4642 goto adjust_mode;
a5701bde 4643 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
8ee59e4e 4644 if (op0)
4645 goto adjust_mode;
a5701bde 4646 }
4647 return NULL;
4648 }
9845d120 4649
3c800ea7 4650 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
9845d120 4651
ce6d059c 4652 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3c800ea7 4653 }
9845d120 4654 goto adjust_mode;
4655 }
4656
4657 case ERROR_MARK:
4658 return NULL;
4659
b54ee9da 4660    /* Vector stuff.  For most of these tree codes there is no corresponding rtl code.  */
4661 case REALIGN_LOAD_EXPR:
4662 case REDUC_MAX_EXPR:
4663 case REDUC_MIN_EXPR:
4664 case REDUC_PLUS_EXPR:
4665 case VEC_COND_EXPR:
b54ee9da 4666 case VEC_PACK_FIX_TRUNC_EXPR:
4667 case VEC_PACK_SAT_EXPR:
4668 case VEC_PACK_TRUNC_EXPR:
b54ee9da 4669 case VEC_UNPACK_FLOAT_HI_EXPR:
4670 case VEC_UNPACK_FLOAT_LO_EXPR:
4671 case VEC_UNPACK_HI_EXPR:
4672 case VEC_UNPACK_LO_EXPR:
4673 case VEC_WIDEN_MULT_HI_EXPR:
4674 case VEC_WIDEN_MULT_LO_EXPR:
79a78f7f 4675 case VEC_WIDEN_MULT_EVEN_EXPR:
4676 case VEC_WIDEN_MULT_ODD_EXPR:
6083c152 4677 case VEC_WIDEN_LSHIFT_HI_EXPR:
4678 case VEC_WIDEN_LSHIFT_LO_EXPR:
3557cb99 4679 case VEC_PERM_EXPR:
b54ee9da 4680 return NULL;
4681
96504875 4682 /* Misc codes. */
b54ee9da 4683 case ADDR_SPACE_CONVERT_EXPR:
4684 case FIXED_CONVERT_EXPR:
4685 case OBJ_TYPE_REF:
4686 case WITH_SIZE_EXPR:
4687 return NULL;
4688
4689 case DOT_PROD_EXPR:
4690 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4691 && SCALAR_INT_MODE_P (mode))
4692 {
9ecadf14 4693 op0
4694 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4695 0)))
4696 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4697 inner_mode);
4698 op1
4699 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4700 1)))
4701 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
4702 inner_mode);
4703 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4704 return simplify_gen_binary (PLUS, mode, op0, op2);
b54ee9da 4705 }
4706 return NULL;
4707
4708 case WIDEN_MULT_EXPR:
00f4f705 4709 case WIDEN_MULT_PLUS_EXPR:
4710 case WIDEN_MULT_MINUS_EXPR:
b54ee9da 4711 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4712 && SCALAR_INT_MODE_P (mode))
4713 {
9ecadf14 4714 inner_mode = GET_MODE (op0);
b54ee9da 4715 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
62be004c 4716 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
b54ee9da 4717 else
62be004c 4718 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
b54ee9da 4719 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
62be004c 4720 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
b54ee9da 4721 else
62be004c 4722 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
9ecadf14 4723 op0 = simplify_gen_binary (MULT, mode, op0, op1);
00f4f705 4724 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
4725 return op0;
4726 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
9ecadf14 4727 return simplify_gen_binary (PLUS, mode, op0, op2);
00f4f705 4728 else
9ecadf14 4729 return simplify_gen_binary (MINUS, mode, op2, op0);
b54ee9da 4730 }
4731 return NULL;
4732
96504875 4733 case MULT_HIGHPART_EXPR:
4734 /* ??? Similar to the above. */
4735 return NULL;
4736
b54ee9da 4737 case WIDEN_SUM_EXPR:
3557cb99 4738 case WIDEN_LSHIFT_EXPR:
b54ee9da 4739 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4740 && SCALAR_INT_MODE_P (mode))
4741 {
9ecadf14 4742 op0
4743 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4744 0)))
4745 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4746 inner_mode);
3557cb99 4747 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
4748 ? ASHIFT : PLUS, mode, op0, op1);
b54ee9da 4749 }
4750 return NULL;
4751
156f51b9 4752 case FMA_EXPR:
9ecadf14 4753 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
156f51b9 4754
9845d120 4755 default:
4756 flag_unsupported:
4757#ifdef ENABLE_CHECKING
4758 debug_tree (exp);
4759 gcc_unreachable ();
4760#else
4761 return NULL;
4762#endif
4763 }
4764}
4765
841424cc 4766/* Return an RTX equivalent to the source bind value of the tree expression
4767 EXP. */
4768
4769static rtx
4770expand_debug_source_expr (tree exp)
4771{
4772 rtx op0 = NULL_RTX;
3754d046 4773 machine_mode mode = VOIDmode, inner_mode;
841424cc 4774
4775 switch (TREE_CODE (exp))
4776 {
4777 case PARM_DECL:
4778 {
841424cc 4779 mode = DECL_MODE (exp);
8ee59e4e 4780 op0 = expand_debug_parm_decl (exp);
4781 if (op0)
4782 break;
841424cc 4783	/* Check whether this is an argument that has been completely
4784 optimized out. */
4785 if (!DECL_RTL_SET_P (exp)
8ee59e4e 4786 && !DECL_INCOMING_RTL (exp)
841424cc 4787 && DECL_ABSTRACT_ORIGIN (current_function_decl))
4788 {
42c442a9 4789 tree aexp = DECL_ORIGIN (exp);
841424cc 4790 if (DECL_CONTEXT (aexp)
4791 == DECL_ABSTRACT_ORIGIN (current_function_decl))
4792 {
f1f41a6c 4793 vec<tree, va_gc> **debug_args;
841424cc 4794 unsigned int ix;
4795 tree ddecl;
841424cc 4796 debug_args = decl_debug_args_lookup (current_function_decl);
4797 if (debug_args != NULL)
4798 {
f1f41a6c 4799 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
841424cc 4800 ix += 2)
4801 if (ddecl == aexp)
4802 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
4803 }
4804 }
4805 }
4806 break;
4807 }
4808 default:
4809 break;
4810 }
4811
4812 if (op0 == NULL_RTX)
4813 return NULL_RTX;
4814
4815 inner_mode = GET_MODE (op0);
4816 if (mode == inner_mode)
4817 return op0;
4818
4819 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4820 {
4821 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4822 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4823 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4824 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4825 else
4826 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4827 }
4828 else if (FLOAT_MODE_P (mode))
4829 gcc_unreachable ();
4830 else if (FLOAT_MODE_P (inner_mode))
4831 {
4832 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4833 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4834 else
4835 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4836 }
4837 else if (CONSTANT_P (op0)
4838 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
4839 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4840 subreg_lowpart_offset (mode, inner_mode));
4841 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4842 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4843 else
4844 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4845
4846 return op0;
4847}
4848
848d0536 4849/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
4850 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
4851 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
4852
4853static void
74a0cbc4 4854avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
848d0536 4855{
4856 rtx exp = *exp_p;
4857
4858 if (exp == NULL_RTX)
4859 return;
4860
4861 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
4862 return;
4863
4864 if (depth == 4)
4865 {
4866 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
4867 rtx dval = make_debug_expr_from_rtl (exp);
4868
4869 /* Emit a debug bind insn before INSN. */
4870 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
4871 DEBUG_EXPR_TREE_DECL (dval), exp,
4872 VAR_INIT_STATUS_INITIALIZED);
4873
4874 emit_debug_insn_before (bind, insn);
4875 *exp_p = dval;
4876 return;
4877 }
4878
4879 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
4880 int i, j;
4881 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
4882 switch (*format_ptr++)
4883 {
4884 case 'e':
4885 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
4886 break;
4887
4888 case 'E':
4889 case 'V':
4890 for (j = 0; j < XVECLEN (exp, i); j++)
4891 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
4892 break;
4893
4894 default:
4895 break;
4896 }
4897}
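/* For instance (an illustrative sketch): with a location such as
     (plus (mult (plus (mult (plus A B) C) D) E) F)
   the innermost (plus A B), reached at depth 4, is split out: it is
   bound to a fresh DEBUG_EXPR by a debug bind insn emitted before
   INSN, and the outer location refers to that DEBUG_EXPR instead,
   bounding the overall rtl nesting depth.  */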
4898
9845d120 4899/* Expand the _LOCs in debug insns. We run this after expanding all
4900 regular insns, so that any variables referenced in the function
4901 will have their DECL_RTLs set. */
4902
4903static void
4904expand_debug_locations (void)
4905{
74a0cbc4 4906 rtx_insn *insn;
4907 rtx_insn *last = get_last_insn ();
9845d120 4908 int save_strict_alias = flag_strict_aliasing;
4909
4910 /* New alias sets while setting up memory attributes cause
4911     -fcompare-debug failures, even though they don't bring about any
4912 codegen changes. */
4913 flag_strict_aliasing = 0;
4914
4915 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4916 if (DEBUG_INSN_P (insn))
4917 {
4918 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
74a0cbc4 4919 rtx val;
4920 rtx_insn *prev_insn, *insn2;
3754d046 4921 machine_mode mode;
9845d120 4922
4923 if (value == NULL_TREE)
4924 val = NULL_RTX;
4925 else
4926 {
841424cc 4927 if (INSN_VAR_LOCATION_STATUS (insn)
4928 == VAR_INIT_STATUS_UNINITIALIZED)
4929 val = expand_debug_source_expr (value);
4930 else
4931 val = expand_debug_expr (value);
9845d120 4932 gcc_assert (last == get_last_insn ());
4933 }
4934
4935 if (!val)
4936 val = gen_rtx_UNKNOWN_VAR_LOC ();
4937 else
4938 {
4939 mode = GET_MODE (INSN_VAR_LOCATION (insn));
4940
4941 gcc_assert (mode == GET_MODE (val)
4942 || (GET_MODE (val) == VOIDmode
efa08fc2 4943 && (CONST_SCALAR_INT_P (val)
9845d120 4944 || GET_CODE (val) == CONST_FIXED
9845d120 4945 || GET_CODE (val) == LABEL_REF)));
4946 }
4947
4948 INSN_VAR_LOCATION_LOC (insn) = val;
848d0536 4949 prev_insn = PREV_INSN (insn);
4950 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
4951 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
9845d120 4952 }
4953
4954 flag_strict_aliasing = save_strict_alias;
4955}
4956
0ec80471 4957/* Expand basic block BB from GIMPLE trees to RTL. */
4958
4959static basic_block
3c919612 4960expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
0ec80471 4961{
75a70cf9 4962 gimple_stmt_iterator gsi;
4963 gimple_seq stmts;
4964 gimple stmt = NULL;
cef3d8ad 4965 rtx_note *note;
74a0cbc4 4966 rtx_insn *last;
0ec80471 4967 edge e;
cd665a06 4968 edge_iterator ei;
0ec80471 4969
4970 if (dump_file)
75a70cf9 4971 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
4972 bb->index);
4973
4974 /* Note that since we are now transitioning from GIMPLE to RTL, we
4975 cannot use the gsi_*_bb() routines because they expect the basic
4976 block to be in GIMPLE, instead of RTL. Therefore, we need to
4977 access the BB sequence directly. */
4978 stmts = bb_seq (bb);
924c4c71 4979 bb->il.gimple.seq = NULL;
4980 bb->il.gimple.phi_nodes = NULL;
7dfb44a0 4981 rtl_profile_for_bb (bb);
e0dde8f8 4982 init_rtl_bb_info (bb);
4983 bb->flags |= BB_RTL;
4984
63f88450 4985  /* Remove the RETURN_EXPR if we may fall through to the exit
4986 instead. */
75a70cf9 4987 gsi = gsi_last (stmts);
4988 if (!gsi_end_p (gsi)
4989 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
63f88450 4990 {
1a91d914 4991 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
63f88450 4992
4993 gcc_assert (single_succ_p (bb));
34154e27 4994 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
63f88450 4995
34154e27 4996 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
75a70cf9 4997 && !gimple_return_retval (ret_stmt))
63f88450 4998 {
75a70cf9 4999 gsi_remove (&gsi, false);
63f88450 5000 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5001 }
5002 }
5003
75a70cf9 5004 gsi = gsi_start (stmts);
5005 if (!gsi_end_p (gsi))
6313ae8b 5006 {
75a70cf9 5007 stmt = gsi_stmt (gsi);
5008 if (gimple_code (stmt) != GIMPLE_LABEL)
5009 stmt = NULL;
6313ae8b 5010 }
0ec80471 5011
0699065d 5012 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
6313ae8b 5013
5014 if (stmt || elt)
0ec80471 5015 {
5016 last = get_last_insn ();
5017
6313ae8b 5018 if (stmt)
5019 {
16c9337c 5020 expand_gimple_stmt (stmt);
75a70cf9 5021 gsi_next (&gsi);
6313ae8b 5022 }
5023
5024 if (elt)
5f8841a5 5025 emit_label (*elt);
0ec80471 5026
491e04ef 5027      /* Java emits line number notes at the top of labels.
a0c938f0 5028 ??? Make this go away once line number notes are obsoleted. */
26bb3cb2 5029 BB_HEAD (bb) = NEXT_INSN (last);
6d7dc5b9 5030 if (NOTE_P (BB_HEAD (bb)))
26bb3cb2 5031 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
0ec80471 5032 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
49377e21 5033
75a70cf9 5034 maybe_dump_rtl_for_gimple_stmt (stmt, last);
0ec80471 5035 }
5036 else
26bb3cb2 5037 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
0ec80471 5038
5039 NOTE_BASIC_BLOCK (note) = bb;
5040
75a70cf9 5041 for (; !gsi_end_p (gsi); gsi_next (&gsi))
0ec80471 5042 {
c578459e 5043 basic_block new_bb;
0ec80471 5044
9845d120 5045 stmt = gsi_stmt (gsi);
3c800ea7 5046
5047 /* If this statement is a non-debug one, and we generate debug
5048 insns, then this one might be the last real use of a TERed
5049 SSA_NAME, but where there are still some debug uses further
5050 down. Expanding the current SSA name in such further debug
5051 uses by their RHS might lead to wrong debug info, as coalescing
5052 might make the operands of such RHS be placed into the same
5053 pseudo as something else. Like so:
5054 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5055 use(a_1);
5056 a_2 = ...
5057 #DEBUG ... => a_1
5058 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5059 If we now would expand a_1 by it's RHS (a_0 + 1) in the debug use,
5060 the write to a_2 would actually have clobbered the place which
5061 formerly held a_0.
5062
5063 So, instead of that, we recognize the situation, and generate
5064 debug temporaries at the last real use of TERed SSA names:
5065 a_1 = a_0 + 1;
5066 #DEBUG #D1 => a_1
5067 use(a_1);
5068 a_2 = ...
5069 #DEBUG ... => #D1
5070 */
5071 if (MAY_HAVE_DEBUG_INSNS
5072 && SA.values
5073 && !is_gimple_debug (stmt))
5074 {
5075 ssa_op_iter iter;
5076 tree op;
5077 gimple def;
5078
5169661d 5079 location_t sloc = curr_insn_location ();
3c800ea7 5080
5081 /* Look for SSA names that have their last use here (TERed
5082 names always have only one real use). */
5083 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5084 if ((def = get_gimple_for_ssa_name (op)))
5085 {
5086 imm_use_iterator imm_iter;
5087 use_operand_p use_p;
5088 bool have_debug_uses = false;
5089
5090 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5091 {
5092 if (gimple_debug_bind_p (USE_STMT (use_p)))
5093 {
5094 have_debug_uses = true;
5095 break;
5096 }
5097 }
5098
5099 if (have_debug_uses)
5100 {
5101 /* OP is a TERed SSA name, with DEF it's defining
5102 statement, and where OP is used in further debug
5103 instructions. Generate a debug temporary, and
5104 replace all uses of OP in debug insns with that
5105 temporary. */
5106 gimple debugstmt;
5107 tree value = gimple_assign_rhs_to_tree (def);
5108 tree vexpr = make_node (DEBUG_EXPR_DECL);
5109 rtx val;
3754d046 5110 machine_mode mode;
3c800ea7 5111
5169661d 5112 set_curr_insn_location (gimple_location (def));
3c800ea7 5113
5114 DECL_ARTIFICIAL (vexpr) = 1;
5115 TREE_TYPE (vexpr) = TREE_TYPE (value);
5116 if (DECL_P (value))
5117 mode = DECL_MODE (value);
5118 else
5119 mode = TYPE_MODE (TREE_TYPE (value));
5120 DECL_MODE (vexpr) = mode;
5121
5122 val = gen_rtx_VAR_LOCATION
5123 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5124
3e549002 5125 emit_debug_insn (val);
3c800ea7 5126
5127 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5128 {
5129 if (!gimple_debug_bind_p (debugstmt))
5130 continue;
5131
5132 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5133 SET_USE (use_p, vexpr);
5134
5135 update_stmt (debugstmt);
5136 }
5137 }
5138 }
5169661d 5139 set_curr_insn_location (sloc);
3c800ea7 5140 }
5141
8cee8dc0 5142 currently_expanding_gimple_stmt = stmt;
9845d120 5143
0ec80471 5144 /* Expand this statement, then evaluate the resulting RTL and
5145 fixup the CFG accordingly. */
75a70cf9 5146 if (gimple_code (stmt) == GIMPLE_COND)
c578459e 5147 {
1a91d914 5148 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
c578459e 5149 if (new_bb)
5150 return new_bb;
5151 }
9845d120 5152 else if (gimple_debug_bind_p (stmt))
5153 {
5169661d 5154 location_t sloc = curr_insn_location ();
9845d120 5155 gimple_stmt_iterator nsi = gsi;
5156
5157 for (;;)
5158 {
5159 tree var = gimple_debug_bind_get_var (stmt);
5160 tree value;
5161 rtx val;
3754d046 5162 machine_mode mode;
9845d120 5163
9bae88bc 5164 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5165 && TREE_CODE (var) != LABEL_DECL
5166 && !target_for_debug_bind (var))
5167 goto delink_debug_stmt;
5168
9845d120 5169 if (gimple_debug_bind_has_value_p (stmt))
5170 value = gimple_debug_bind_get_value (stmt);
5171 else
5172 value = NULL_TREE;
5173
5174 last = get_last_insn ();
5175
5169661d 5176 set_curr_insn_location (gimple_location (stmt));
9845d120 5177
5178 if (DECL_P (var))
5179 mode = DECL_MODE (var);
5180 else
5181 mode = TYPE_MODE (TREE_TYPE (var));
5182
5183 val = gen_rtx_VAR_LOCATION
5184 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5185
1084097d 5186 emit_debug_insn (val);
9845d120 5187
5188 if (dump_file && (dump_flags & TDF_DETAILS))
5189 {
5190 /* We can't dump the insn with a TREE where an RTX
5191 is expected. */
3e549002 5192 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
9845d120 5193 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3e549002 5194 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
9845d120 5195 }
5196
9bae88bc 5197 delink_debug_stmt:
3c800ea7 5198 /* In order not to generate too many debug temporaries,
5199 we delink all uses appearing in debug statements we have
5200 already expanded. Therefore debug statements between the
5201 definition and real use of TERed SSA names will continue
5202 to use the SSA name, and not be replaced with debug temps. */
5203 delink_stmt_imm_use (stmt);
5204
9845d120 5205 gsi = nsi;
5206 gsi_next (&nsi);
5207 if (gsi_end_p (nsi))
5208 break;
5209 stmt = gsi_stmt (nsi);
5210 if (!gimple_debug_bind_p (stmt))
5211 break;
5212 }
5213
5169661d 5214 set_curr_insn_location (sloc);
841424cc 5215 }
5216 else if (gimple_debug_source_bind_p (stmt))
5217 {
5169661d 5218 location_t sloc = curr_insn_location ();
841424cc 5219 tree var = gimple_debug_source_bind_get_var (stmt);
5220 tree value = gimple_debug_source_bind_get_value (stmt);
5221 rtx val;
3754d046 5222 machine_mode mode;
841424cc 5223
5224 last = get_last_insn ();
5225
5169661d 5226 set_curr_insn_location (gimple_location (stmt));
841424cc 5227
5228 mode = DECL_MODE (var);
5229
5230 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5231 VAR_INIT_STATUS_UNINITIALIZED);
5232
5233 emit_debug_insn (val);
5234
5235 if (dump_file && (dump_flags & TDF_DETAILS))
5236 {
5237 /* We can't dump the insn with a TREE where an RTX
5238 is expected. */
5239 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5240 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5241 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5242 }
5243
5169661d 5244 set_curr_insn_location (sloc);
9845d120 5245 }
3ced8962 5246 else
0ec80471 5247 {
1a91d914 5248 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5249 if (call_stmt
5250 && gimple_call_tail_p (call_stmt)
3c919612 5251 && disable_tail_calls)
1a91d914 5252 gimple_call_set_tail (call_stmt, false);
3c919612 5253
1a91d914 5254 if (call_stmt && gimple_call_tail_p (call_stmt))
c578459e 5255 {
5256 bool can_fallthru;
1a91d914 5257 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
c578459e 5258 if (new_bb)
5259 {
5260 if (can_fallthru)
5261 bb = new_bb;
5262 else
5263 return new_bb;
5264 }
5265 }
2a3ebafa 5266 else
49377e21 5267 {
a8dd994c 5268 def_operand_p def_p;
a8dd994c 5269 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5270
5271 if (def_p != NULL)
5272 {
5273 /* Ignore this stmt if it is in the list of
5274 replaceable expressions. */
5275 if (SA.values
48e1416a 5276 && bitmap_bit_p (SA.values,
dfdbf3fd 5277 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
a8dd994c 5278 continue;
5279 }
16c9337c 5280 last = expand_gimple_stmt (stmt);
75a70cf9 5281 maybe_dump_rtl_for_gimple_stmt (stmt, last);
49377e21 5282 }
0ec80471 5283 }
5284 }
5285
8cee8dc0 5286 currently_expanding_gimple_stmt = NULL;
5287
9c388755 5288 /* Expand implicit gotos and convert goto_locus. */
63f88450 5289 FOR_EACH_EDGE (e, ei, bb->succs)
5290 {
8e7408e3 5291 if (e->goto_locus != UNKNOWN_LOCATION)
5169661d 5292 set_curr_insn_location (e->goto_locus);
9c388755 5293 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5294 {
5295 emit_jump (label_rtx_for_bb (e->dest));
5296 e->flags &= ~EDGE_FALLTHRU;
5297 }
63f88450 5298 }
5299
8a9ad55b 5300 /* Expanded RTL can create a jump in the last instruction of a block.
5301 This jump might later be assumed to be a jump to the successor and
5302 break edge insertion. We need to insert a dummy move to prevent this. PR41440. */
5303 if (single_succ_p (bb)
5304 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5305 && (last = get_last_insn ())
5306 && JUMP_P (last))
5307 {
5308 rtx dummy = gen_reg_rtx (SImode);
5309 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5310 }
5311
0ec80471 5312 do_pending_stack_adjust ();
5313
822e391f 5314 /* Find the block tail. The last insn in the block is the insn
0ec80471 5315 before a barrier and/or table jump insn. */
5316 last = get_last_insn ();
6d7dc5b9 5317 if (BARRIER_P (last))
0ec80471 5318 last = PREV_INSN (last);
5319 if (JUMP_TABLE_DATA_P (last))
5320 last = PREV_INSN (PREV_INSN (last));
26bb3cb2 5321 BB_END (bb) = last;
491e04ef 5322
0ec80471 5323 update_bb_for_insn (bb);
3ced8962 5324
0ec80471 5325 return bb;
5326}
5327
5328
5329/* Create a basic block for initialization code. */
5330
5331static basic_block
5332construct_init_block (void)
5333{
5334 basic_block init_block, first_block;
9a755727 5335 edge e = NULL;
5336 int flags;
e20bf721 5337
9a755727 5338 /* Multiple entry points not supported yet. */
34154e27 5339 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5340 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5341 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5342 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5343 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
0ec80471 5344
34154e27 5345 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
e20bf721 5346
9a755727 5347 /* When the entry edge points to the first basic block, we don't need
5348 a jump; otherwise we have to jump to the proper target. */
34154e27 5349 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
9a755727 5350 {
75a70cf9 5351 tree label = gimple_block_label (e->dest);
9a755727 5352
5353 emit_jump (label_rtx (label));
5354 flags = 0;
e20bf721 5355 }
9a755727 5356 else
5357 flags = EDGE_FALLTHRU;
0ec80471 5358
5359 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5360 get_last_insn (),
34154e27 5361 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5362 init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5363 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
b3083327 5364 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
0ec80471 5365 if (e)
5366 {
5367 first_block = e->dest;
5368 redirect_edge_succ (e, init_block);
9a755727 5369 e = make_edge (init_block, first_block, flags);
0ec80471 5370 }
5371 else
34154e27 5372 e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
0ec80471 5373 e->probability = REG_BR_PROB_BASE;
34154e27 5374 e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
0ec80471 5375
5376 update_bb_for_insn (init_block);
5377 return init_block;
5378}
5379
375c1c8a 5380/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5381 found in the block tree. */
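/* An illustrative sketch (hypothetical block tree, not taken from any
   source): given

     BLOCK A
       BLOCK B    <- BLOCK_SUBBLOCKS (A)
         BLOCK D  <- BLOCK_SUBBLOCKS (B)
       BLOCK C    <- BLOCK_CHAIN (B)

   set_block_levels (A, 0) assigns BLOCK_NUMBER 0 to A, 1 to B and C,
   and 2 to D.  */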
5382
5383static void
5384set_block_levels (tree block, int level)
5385{
5386 while (block)
5387 {
5388 BLOCK_NUMBER (block) = level;
5389 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5390 block = BLOCK_CHAIN (block);
5391 }
5392}
0ec80471 5393
5394/* Create a block containing landing pads and similar stuff. */
5395
5396static void
5397construct_exit_block (void)
5398{
74a0cbc4 5399 rtx_insn *head = get_last_insn ();
5400 rtx_insn *end;
0ec80471 5401 basic_block exit_block;
cd665a06 5402 edge e, e2;
5403 unsigned ix;
5404 edge_iterator ei;
04e7d9cb 5405 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
74a0cbc4 5406 rtx_insn *orig_end = BB_END (prev_bb);
0ec80471 5407
34154e27 5408 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
7dfb44a0 5409
491e04ef 5410 /* Make sure the locus is set to the end of the function, so that
0ec80471 5411 epilogue line numbers and warnings are set properly. */
8e7408e3 5412 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
0ec80471 5413 input_location = cfun->function_end_locus;
5414
0ec80471 5415 /* Generate rtl for function exit. */
5416 expand_function_end ();
5417
5418 end = get_last_insn ();
5419 if (head == end)
5420 return;
04e7d9cb 5421 /* While emitting the function end we could have moved the end of the
5422 last basic block. */
26bb3cb2 5423 BB_END (prev_bb) = orig_end;
6d7dc5b9 5424 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
0ec80471 5425 head = NEXT_INSN (head);
04e7d9cb 5426 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5427 bb frequency counting will be confused. Any instructions before that
5428 label are emitted for the case where PREV_BB falls through into the
5429 exit block, so append those instructions to prev_bb in that case. */
5430 if (NEXT_INSN (head) != return_label)
5431 {
5432 while (NEXT_INSN (head) != return_label)
5433 {
5434 if (!NOTE_P (NEXT_INSN (head)))
26bb3cb2 5435 BB_END (prev_bb) = NEXT_INSN (head);
04e7d9cb 5436 head = NEXT_INSN (head);
5437 }
5438 }
5439 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
34154e27 5440 exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5441 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
b3083327 5442 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
cd665a06 5443
5444 ix = 0;
34154e27 5445 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
0ec80471 5446 {
34154e27 5447 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
0ec80471 5448 if (!(e->flags & EDGE_ABNORMAL))
cd665a06 5449 redirect_edge_succ (e, exit_block);
5450 else
5451 ix++;
0ec80471 5452 }
cd665a06 5453
34154e27 5454 e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
0ec80471 5455 e->probability = REG_BR_PROB_BASE;
34154e27 5456 e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5457 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
0ec80471 5458 if (e2 != e)
5459 {
a0c938f0 5460 e->count -= e2->count;
0ec80471 5461 exit_block->count -= e2->count;
5462 exit_block->frequency -= EDGE_FREQUENCY (e2);
5463 }
5464 if (e->count < 0)
5465 e->count = 0;
5466 if (exit_block->count < 0)
5467 exit_block->count = 0;
5468 if (exit_block->frequency < 0)
5469 exit_block->frequency = 0;
5470 update_bb_for_insn (exit_block);
5471}
5472
a0c938f0 5473/* Helper function for discover_nonconstant_array_refs.
9d5aa3bd 5474 Look for ARRAY_REF nodes with non-constant indexes and mark them
5475 addressable. */
5476
5477static tree
5478discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5479 void *data ATTRIBUTE_UNUSED)
5480{
5481 tree t = *tp;
5482
5483 if (IS_TYPE_OR_DECL_P (t))
5484 *walk_subtrees = 0;
5485 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5486 {
5487 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5488 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5489 && (!TREE_OPERAND (t, 2)
5490 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5491 || (TREE_CODE (t) == COMPONENT_REF
5492 && (!TREE_OPERAND (t,2)
5493 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5494 || TREE_CODE (t) == BIT_FIELD_REF
5495 || TREE_CODE (t) == REALPART_EXPR
5496 || TREE_CODE (t) == IMAGPART_EXPR
5497 || TREE_CODE (t) == VIEW_CONVERT_EXPR
72dd6141 5498 || CONVERT_EXPR_P (t))
9d5aa3bd 5499 t = TREE_OPERAND (t, 0);
5500
5501 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5502 {
5503 t = get_base_address (t);
9a60c3b9 5504 if (t && DECL_P (t)
5505 && DECL_MODE (t) != BLKmode)
9d5aa3bd 5506 TREE_ADDRESSABLE (t) = 1;
5507 }
5508
5509 *walk_subtrees = 0;
5510 }
5511
5512 return NULL_TREE;
5513}
5514
5515 /* RTL expansion cannot compile array references with variable
5516 offsets when the array is stored in a single register. Discover
5517 such expressions and mark the variables addressable to avoid this
5518 scenario. */
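/* A minimal sketch of the problem (hypothetical example, assuming 'a'
   would otherwise be coalesced into a single register):

     int
     foo (int i)
     {
       int a[2] = { 1, 2 };
       return a[i];
     }

   The variable-index ARRAY_REF a[i] cannot be expanded while 'a' lives
   in a register; marking 'a' TREE_ADDRESSABLE gives it a stack slot,
   so the reference expands as a memory access.  */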
5519
5520static void
5521discover_nonconstant_array_refs (void)
5522{
5523 basic_block bb;
75a70cf9 5524 gimple_stmt_iterator gsi;
9d5aa3bd 5525
fc00614f 5526 FOR_EACH_BB_FN (bb, cfun)
75a70cf9 5527 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5528 {
5529 gimple stmt = gsi_stmt (gsi);
f4b490ea 5530 if (!is_gimple_debug (stmt))
5531 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
75a70cf9 5532 }
9d5aa3bd 5533}
5534
27a7a23a 5535/* This function sets crtl->args.internal_arg_pointer to a virtual
5536 register if DRAP is needed. Local register allocator will replace
5537 virtual_incoming_args_rtx with the virtual register. */
5538
5539static void
5540expand_stack_alignment (void)
5541{
5542 rtx drap_rtx;
9e1c1bf0 5543 unsigned int preferred_stack_boundary;
27a7a23a 5544
5545 if (! SUPPORTS_STACK_ALIGNMENT)
5546 return;
48e1416a 5547
27a7a23a 5548 if (cfun->calls_alloca
5549 || cfun->has_nonlocal_label
5550 || crtl->has_nonlocal_goto)
5551 crtl->need_drap = true;
5552
c0a05dc0 5553 /* Call update_stack_boundary here again to update incoming stack
5554 boundary. It may set incoming stack alignment to a different
5555 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
5556 use the minimum incoming stack alignment to check if it is OK
5557 to perform sibcall optimization since sibcall optimization will
5558 only align the outgoing stack to incoming stack boundary. */
5559 if (targetm.calls.update_stack_boundary)
5560 targetm.calls.update_stack_boundary ();
5561
5562 /* The incoming stack frame has to be aligned at least at
5563 parm_stack_boundary. */
5564 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
27a7a23a 5565
27a7a23a 5566 /* Update crtl->stack_alignment_estimated and use it later to align
5567 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
5568 exceptions since callgraph doesn't collect incoming stack alignment
5569 in this case. */
cbeb677e 5570 if (cfun->can_throw_non_call_exceptions
27a7a23a 5571 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
5572 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5573 else
5574 preferred_stack_boundary = crtl->preferred_stack_boundary;
5575 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
5576 crtl->stack_alignment_estimated = preferred_stack_boundary;
5577 if (preferred_stack_boundary > crtl->stack_alignment_needed)
5578 crtl->stack_alignment_needed = preferred_stack_boundary;
5579
c0a05dc0 5580 gcc_assert (crtl->stack_alignment_needed
5581 <= crtl->stack_alignment_estimated);
5582
27a7a23a 5583 crtl->stack_realign_needed
9e1c1bf0 5584 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
7b70fdf7 5585 crtl->stack_realign_tried = crtl->stack_realign_needed;
27a7a23a 5586
5587 crtl->stack_realign_processed = true;
5588
5589 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
5590 alignment. */
5591 gcc_assert (targetm.calls.get_drap_rtx != NULL);
48e1416a 5592 drap_rtx = targetm.calls.get_drap_rtx ();
27a7a23a 5593
f6754469 5594 /* stack_realign_drap and drap_rtx must match. */
5595 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
5596
27a7a23a 5597 /* Do nothing if NULL is returned, which means DRAP is not needed. */
5598 if (NULL != drap_rtx)
5599 {
5600 crtl->args.internal_arg_pointer = drap_rtx;
5601
5602 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
5603 needed. */
5604 fixup_tail_calls ();
5605 }
5606}
0e80b01d 5607\f
5608
5609static void
5610expand_main_function (void)
5611{
5612#if (defined(INVOKE__main) \
5613 || (!defined(HAS_INIT_SECTION) \
5614 && !defined(INIT_SECTION_ASM_OP) \
5615 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
5616 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
5617#endif
5618}
5619\f
5620
5621/* Expand code to initialize the stack_protect_guard. This is invoked at
5622 the beginning of a function to be protected. */
5623
5624#ifndef HAVE_stack_protect_set
5625# define HAVE_stack_protect_set 0
5626# define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
5627#endif
5628
5629static void
5630stack_protect_prologue (void)
5631{
5632 tree guard_decl = targetm.stack_protect_guard ();
5633 rtx x, y;
5634
5635 x = expand_normal (crtl->stack_protect_guard);
5636 y = expand_normal (guard_decl);
5637
5638 /* Allow the target to copy from Y to X without leaking Y into a
5639 register. */
5640 if (HAVE_stack_protect_set)
5641 {
5642 rtx insn = gen_stack_protect_set (x, y);
5643 if (insn)
5644 {
5645 emit_insn (insn);
5646 return;
5647 }
5648 }
5649
5650 /* Otherwise do a straight move. */
5651 emit_move_insn (x, y);
5652}
27a7a23a 5653
0ec80471 5654/* Translate the intermediate representation contained in the CFG
5655 from GIMPLE trees to RTL.
5656
5657 We do conversion per basic block and preserve/update the tree CFG.
5658 This implies we have to do some magic as the CFG can simultaneously
5659 consist of basic blocks containing RTL and GIMPLE trees. This can
2c763ed4 5660 confuse the CFG hooks, so be careful not to manipulate the CFG during
0ec80471 5661 the expansion. */
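/* As an illustrative sketch (the exact RTL depends on the target, on
   modes, and on TER/coalescing decisions), a GIMPLE statement such as

     a_2 = b_1 + 4;

   might be expanded to RTL along the lines of

     (set (reg:SI 60 [ a ]) (plus:SI (reg:SI 59 [ b ]) (const_int 4)))

   with the GIMPLE CFG carried over to the RTL side block by block.  */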
5662
65b0537f 5663namespace {
5664
5665const pass_data pass_data_expand =
5666{
5667 RTL_PASS, /* type */
5668 "expand", /* name */
5669 OPTGROUP_NONE, /* optinfo_flags */
65b0537f 5670 TV_EXPAND, /* tv_id */
5671 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
5672 | PROP_gimple_lcx
5673 | PROP_gimple_lvec ), /* properties_required */
5674 PROP_rtl, /* properties_provided */
5675 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
8b88439e 5676 0, /* todo_flags_start */
65b0537f 5677 0, /* todo_flags_finish */
5678};
5679
5680class pass_expand : public rtl_opt_pass
5681{
5682public:
5683 pass_expand (gcc::context *ctxt)
5684 : rtl_opt_pass (pass_data_expand, ctxt)
5685 {}
5686
5687 /* opt_pass methods: */
5688 virtual unsigned int execute (function *);
5689
5690}; // class pass_expand
5691
5692unsigned int
5693pass_expand::execute (function *fun)
0ec80471 5694{
5695 basic_block bb, init_block;
5696 sbitmap blocks;
ea06d49f 5697 edge_iterator ei;
5698 edge e;
74a0cbc4 5699 rtx_insn *var_seq, *var_ret_seq;
a8dd994c 5700 unsigned i;
5701
e2050933 5702 timevar_push (TV_OUT_OF_SSA);
a8dd994c 5703 rewrite_out_of_ssa (&SA);
e2050933 5704 timevar_pop (TV_OUT_OF_SSA);
ed7e2206 5705 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
0ec80471 5706
212dddd3 5707 /* Make sure all values used by the optimization passes have sane
5708 defaults. */
5709 reg_renumber = 0;
5710
723c0ee7 5711 /* Some backends want to know that we are expanding to RTL. */
5712 currently_expanding_to_rtl = 1;
821ac701 5713 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
5714 free_dominance_info (CDI_DOMINATORS);
723c0ee7 5715
65b0537f 5716 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
7dfb44a0 5717
058a1b7a 5718 if (chkp_function_instrumented_p (current_function_decl))
5719 chkp_reset_rtl_bounds ();
5720
5169661d 5721 insn_locations_init ();
c3771ec9 5722 if (!DECL_IS_BUILTIN (current_function_decl))
30099c0c 5723 {
5724 /* Eventually, all FEs should explicitly set function_start_locus. */
65b0537f 5725 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
5726 set_curr_insn_location
5727 (DECL_SOURCE_LOCATION (current_function_decl));
30099c0c 5728 else
65b0537f 5729 set_curr_insn_location (fun->function_start_locus);
30099c0c 5730 }
0aecb55e 5731 else
5169661d 5732 set_curr_insn_location (UNKNOWN_LOCATION);
5733 prologue_location = curr_insn_location ();
375c1c8a 5734
fdc86f97 5735#ifdef INSN_SCHEDULING
5736 init_sched_attrs ();
5737#endif
5738
375c1c8a 5739 /* Make sure the first insn is a note even if we don't want linenums.
5740 This guarantees the first insn will never be deleted.
5741 Also, final expects a note to appear there. */
5742 emit_note (NOTE_INSN_DELETED);
656047bf 5743
9d5aa3bd 5744 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
5745 discover_nonconstant_array_refs ();
5746
bc5e6ea1 5747 targetm.expand_to_rtl_hook ();
edb7afe8 5748 crtl->stack_alignment_needed = STACK_BOUNDARY;
27a7a23a 5749 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
c0a05dc0 5750 crtl->stack_alignment_estimated = 0;
edb7afe8 5751 crtl->preferred_stack_boundary = STACK_BOUNDARY;
65b0537f 5752 fun->cfg->max_jumptable_ents = 0;
edb7afe8 5753
b8a89e7e 5754 /* Resolve the function section. Some targets, like ARM EABI, rely on
5755 knowledge of the function section at expansion time to predict call distances. */
5756 resolve_unique_section (current_function_decl, 0, flag_function_sections);
5757
280450fa 5758 /* Expand the variables recorded during gimple lowering. */
e2050933 5759 timevar_push (TV_VAR_EXPAND);
5be42b39 5760 start_sequence ();
5761
3c919612 5762 var_ret_seq = expand_used_vars ();
5be42b39 5763
5764 var_seq = get_insns ();
5765 end_sequence ();
e2050933 5766 timevar_pop (TV_VAR_EXPAND);
0ec80471 5767
f1a0edff 5768 /* Honor stack protection warnings. */
5769 if (warn_stack_protect)
5770 {
65b0537f 5771 if (fun->calls_alloca)
48e1416a 5772 warning (OPT_Wstack_protector,
b15b8239 5773 "stack protector not protecting local variables: "
65b0537f 5774 "variable length buffer");
edb7afe8 5775 if (has_short_buffer && !crtl->stack_protect_guard)
48e1416a 5776 warning (OPT_Wstack_protector,
b15b8239 5777 "stack protector not protecting function: "
65b0537f 5778 "all local arrays are less than %d bytes long",
f1a0edff 5779 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
5780 }
5781
0ec80471 5782 /* Set up parameters and prepare for return, for the function. */
82aa4bd5 5783 expand_function_start (current_function_decl);
0ec80471 5784
5be42b39 5785 /* If we emitted any instructions for setting up the variables,
5786 emit them before the FUNCTION_START note. */
5787 if (var_seq)
5788 {
5789 emit_insn_before (var_seq, parm_birth_insn);
5790
5791 /* In expand_function_end we'll insert the alloca save/restore
5792 before parm_birth_insn. We've just inserted an alloca call.
5793 Adjust the pointer to match. */
5794 parm_birth_insn = var_seq;
5795 }
5796
a8dd994c 5797 /* Now that we also have the parameter RTXs, copy them over to our
5798 partitions. */
5799 for (i = 0; i < SA.map->num_partitions; i++)
5800 {
5801 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
5802
5803 if (TREE_CODE (var) != VAR_DECL
5804 && !SA.partition_to_pseudo[i])
5805 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
5806 gcc_assert (SA.partition_to_pseudo[i]);
e32b531f 5807
5808 /* If this decl was marked as living in multiple places, reset
65b0537f 5809 this now to NULL. */
e32b531f 5810 if (DECL_RTL_IF_SET (var) == pc_rtx)
5811 SET_DECL_RTL (var, NULL);
5812
a8dd994c 5813 /* Some RTL parts really want to look at DECL_RTL(x) when x
65b0537f 5814 was a decl marked in REG_ATTR or MEM_ATTR. We could use
a8dd994c 5815 SET_DECL_RTL here to make this available, but that would mean
5816 selecting one of the potentially many RTLs for one DECL. Instead
5817 of doing that we simply reset the MEM_EXPR of the RTL in question,
5818 then nobody can get at it and hence nobody can call DECL_RTL on it. */
5819 if (!DECL_RTL_SET_P (var))
5820 {
5821 if (MEM_P (SA.partition_to_pseudo[i]))
5822 set_mem_expr (SA.partition_to_pseudo[i], NULL);
5823 }
5824 }
5825
f2ca19b4 5826 /* If a coalescing class contains differently aligned pointers,
5827 we need to merge their alignments into the corresponding RTL
5828 pointer alignment. */
5829 for (i = 1; i < num_ssa_names; i++)
5830 {
5831 tree name = ssa_name (i);
5832 int part;
5833 rtx r;
5834
5835 if (!name
f2ca19b4 5836 /* We might have generated new SSA names in
5837 update_alias_info_with_stack_vars. They will have NULL
5838 defining statements, and won't be part of the partitioning,
5839 so ignore those. */
5840 || !SSA_NAME_DEF_STMT (name))
5841 continue;
5842 part = var_to_partition (SA.map, name);
5843 if (part == NO_PARTITION)
5844 continue;
ec11736b 5845
5846 /* Adjust all partition members to get the underlying decl of
5847 the representative which we might have created in expand_one_var. */
5848 if (SSA_NAME_VAR (name) == NULL_TREE)
5849 {
5850 tree leader = partition_to_var (SA.map, part);
5851 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
5852 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
5853 }
5854 if (!POINTER_TYPE_P (TREE_TYPE (name)))
5855 continue;
5856
f2ca19b4 5857 r = SA.partition_to_pseudo[part];
5858 if (REG_P (r))
5859 mark_reg_pointer (r, get_pointer_alignment (name));
5860 }
5861
0ec80471 5862 /* If this function is `main', emit a call to `__main'
5863 to run global initializers, etc. */
5864 if (DECL_NAME (current_function_decl)
5865 && MAIN_NAME_P (DECL_NAME (current_function_decl))
5866 && DECL_FILE_SCOPE_P (current_function_decl))
5867 expand_main_function ();
5868
f1a0edff 5869 /* Initialize the stack_protect_guard field. This must happen after the
5870 call to __main (if any) so that the external decl is initialized. */
edb7afe8 5871 if (crtl->stack_protect_guard)
f1a0edff 5872 stack_protect_prologue ();
5873
a8dd994c 5874 expand_phi_nodes (&SA);
5875
011e6b51 5876 /* Register rtl specific functions for cfg. */
0ec80471 5877 rtl_register_cfg_hooks ();
5878
5879 init_block = construct_init_block ();
5880
ea06d49f 5881 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
a8dd994c 5882 remaining edges later. */
65b0537f 5883 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
ea06d49f 5884 e->flags &= ~EDGE_EXECUTABLE;
5885
0699065d 5886 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
65b0537f 5887 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
34154e27 5888 next_bb)
3c919612 5889 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
7dfb44a0 5890
9845d120 5891 if (MAY_HAVE_DEBUG_INSNS)
5892 expand_debug_locations ();
5893
3db65b62 5894 /* Free stuff we no longer need after GIMPLE optimizations. */
5895 free_dominance_info (CDI_DOMINATORS);
5896 free_dominance_info (CDI_POST_DOMINATORS);
5897 delete_tree_cfg_annotations ();
5898
e2050933 5899 timevar_push (TV_OUT_OF_SSA);
a8dd994c 5900 finish_out_of_ssa (&SA);
e2050933 5901 timevar_pop (TV_OUT_OF_SSA);
a8dd994c 5902
e2050933 5903 timevar_push (TV_POST_EXPAND);
67817f0f 5904 /* We are no longer in SSA form. */
65b0537f 5905 fun->gimple_df->in_ssa_p = false;
b3083327 5906 loops_state_clear (LOOP_CLOSED_SSA);
67817f0f 5907
7dfb44a0 5908 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
5909 conservatively to true until they are all profile aware. */
5f8841a5 5910 delete lab_rtx_for_bb;
edb7afe8 5911 free_histograms ();
0ec80471 5912
5913 construct_exit_block ();
5169661d 5914 insn_locations_finalize ();
0ec80471 5915
3c919612 5916 if (var_ret_seq)
5917 {
4cd001d5 5918 rtx_insn *after = return_label;
74a0cbc4 5919 rtx_insn *next = NEXT_INSN (after);
3c919612 5920 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
5921 after = next;
5922 emit_insn_after (var_ret_seq, after);
5923 }
5924
e38def9c 5925 /* Zap the tree EH table. */
65b0537f 5926 set_eh_throw_stmt_table (fun, NULL);
0ec80471 5927
409e049a 5928 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
5929 to split edges, which edge insertions might do. */
0ec80471 5930 rebuild_jump_labels (get_insns ());
0ec80471 5931
65b0537f 5932 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
5933 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
a8dd994c 5934 {
5935 edge e;
5936 edge_iterator ei;
5937 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5938 {
5939 if (e->insns.r)
4547eca6 5940 {
ae5e6486 5941 rebuild_jump_labels_chain (e->insns.r);
d699f73a 5942 /* Put insns after parm birth, but before
5943 NOTE_INSN_FUNCTION_BEG. */
65b0537f 5944 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
5945 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
4547eca6 5946 {
ae5e6486 5947 rtx_insn *insns = e->insns.r;
5948 e->insns.r = NULL;
d699f73a 5949 if (NOTE_P (parm_birth_insn)
5950 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
5951 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
5952 else
5953 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4547eca6 5954 }
5955 else
5956 commit_one_edge_insertion (e);
5957 }
a8dd994c 5958 else
5959 ei_next (&ei);
5960 }
5961 }
5962
5963 /* We're done expanding trees to RTL. */
5964 currently_expanding_to_rtl = 0;
5965
65b0537f 5966 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
5967 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
a8dd994c 5968 {
5969 edge e;
5970 edge_iterator ei;
5971 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5972 {
5973 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
5974 e->flags &= ~EDGE_EXECUTABLE;
5975
5976 /* At the moment not all abnormal edges match the RTL
5977 representation. It is safe to remove them here as
5978 find_many_sub_basic_blocks will rediscover them.
5979 In the future we should get this fixed properly. */
5980 if ((e->flags & EDGE_ABNORMAL)
5981 && !(e->flags & EDGE_SIBCALL))
5982 remove_edge (e);
5983 else
5984 ei_next (&ei);
5985 }
5986 }
5987
65b0537f 5988 blocks = sbitmap_alloc (last_basic_block_for_fn (fun));
53c5d9d4 5989 bitmap_ones (blocks);
0ec80471 5990 find_many_sub_basic_blocks (blocks);
0ec80471 5991 sbitmap_free (blocks);
a8dd994c 5992 purge_all_dead_edges ();
0ec80471 5993
27a7a23a 5994 expand_stack_alignment ();
5995
212dddd3 5996 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
5997 function. */
5998 if (crtl->tail_call_emit)
5999 fixup_tail_calls ();
6000
1dd4980f 6001 /* After initial rtl generation, call back to finish generating
6002 exception support code. We need to do this before cleaning up
6003 the CFG as the code does not expect dead landing pads. */
65b0537f 6004 if (fun->eh->region_tree != NULL)
1dd4980f 6005 finish_eh_generation ();
6006
6007 /* Remove unreachable blocks, otherwise we cannot compute dominators
6008 which are needed for loop state verification. As a side-effect
6009 this also compacts blocks.
6010 ??? We cannot remove trivially dead insns here as for example
6011 the DRAP reg on i?86 is not magically live at this point.
6012 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6013 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6014
0ec80471 6015#ifdef ENABLE_CHECKING
d4473c84 6016 verify_flow_info ();
0ec80471 6017#endif
0f9005dd 6018
212dddd3 6019 /* Initialize pseudos allocated for hard registers. */
6020 emit_initial_value_sets ();
6021
6022 /* And finally unshare all RTL. */
6023 unshare_all_rtl ();
6024
0f9005dd 6025 /* There's no need to defer outputting this function any more; we
6026 know we want to output it. */
6027 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6028
6029 /* Now that we're done expanding trees to RTL, we shouldn't have any
6030 more CONCATs anywhere. */
6031 generating_concat_p = 0;
6032
49377e21 6033 if (dump_file)
6034 {
6035 fprintf (dump_file,
6036 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6037 /* And the pass manager will dump RTL for us. */
6038 }
77fce4cd 6039
6040 /* If we're emitting a nested function, make sure its parent gets
6041 emitted as well. Doing otherwise confuses debug info. */
65b0537f 6042 {
6043 tree parent;
6044 for (parent = DECL_CONTEXT (current_function_decl);
6045 parent != NULL_TREE;
6046 parent = get_containing_scope (parent))
6047 if (TREE_CODE (parent) == FUNCTION_DECL)
6048 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6049 }
a0c938f0 6050
77fce4cd 6051 /* We are now committed to emitting code for this function. Do any
6052 preparation, such as emitting abstract debug info for the inline
6053 before it gets mangled by optimization. */
6054 if (cgraph_function_possibly_inlined_p (current_function_decl))
6055 (*debug_hooks->outlining_inline_function) (current_function_decl);
6056
6057 TREE_ASM_WRITTEN (current_function_decl) = 1;
1a56c787 6058
6059 /* After expanding, the return labels are no longer needed. */
6060 return_label = NULL;
6061 naked_return_label = NULL;
4c0315d0 6062
6063 /* After expanding, the tm_restart map is no longer needed. */
65b0537f 6064 if (fun->gimple_df->tm_restart)
b7aa58e4 6065 fun->gimple_df->tm_restart = NULL;
4c0315d0 6066
375c1c8a 6067 /* Tag the blocks with a depth number so that change_scope can find
6068 the common parent easily. */
65b0537f 6069 set_block_levels (DECL_INITIAL (fun->decl), 0);
7dfb44a0 6070 default_rtl_profile ();
212dddd3 6071
e2050933 6072 timevar_pop (TV_POST_EXPAND);
212dddd3 6073
2a1990e9 6074 return 0;
0ec80471 6075}
6076
cbe8bda8 6077} // anon namespace
6078
6079rtl_opt_pass *
6080make_pass_expand (gcc::context *ctxt)
6081{
6082 return new pass_expand (ctxt);
6083}