/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "tree.h"
#include "stringpool.h"
#include "varasm.h"
#include "stor-layout.h"
#include "stmt.h"
#include "print-tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "bitmap.h"
#include "pointer-set.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "sbitmap.h"
#include "cfgloop.h"
#include "regs.h" /* For reg_renumber.  */
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "asan.h"
#include "tree-ssa-address.h"
#include "recog.h"
#include "output.h"

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */
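/* For example, for a GIMPLE assignment a_1 = b_2 + c_3 this builds a
   PLUS_EXPR with operands b_2 and c_3 and the type of a_1.  */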

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}


#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static struct pointer_map_t *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
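/* For example, with a preferred stack boundary of 16 bytes and a
   STARTING_FRAME_OFFSET of 4, expand_used_vars below computes a phase
   of 12; alloc_stack_frame_space strips and re-applies this phase so
   that its rounding is relative to the preferred boundary.  */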
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
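  /* In both branches below, NEW_FRAME_OFFSET &= -ALIGN relies on two's
     complement masking to round down to a multiple of ALIGN.  The frame
     phase is stripped before rounding and re-applied afterwards so the
     rounding happens relative to the preferred stack boundary.  */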
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = pointer_map_create ();

  v = &stack_vars[stack_vars_num];
  * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;

  v->decl = decl;
  v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v =
        (size_t *) pointer_map_contains (decl_to_stack_part, op);
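      /* bitmap_set_bit returns true only if the bit was previously clear,
         so conflicts for this partition are recorded just once, the first
         time it becomes active.  */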
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}

/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = (size_t *)
                  pointer_map_contains (decl_to_stack_part, lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}

/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we, for instance, moved an
     address-taken operation upward, without also moving a dereference
     to it upwards.  But it's conservatively correct as a variable can
     never hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}


/* If the points-to solution *PI points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/
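/* For example, given non-conflicting objects of sizes 32, 16 and 8 bytes,
   the loop below merges all three into the partition of the 32-byte
   object, so a single suitably aligned slot ends up backing all of them.  */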

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          HOST_WIDE_INT jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && isize != jsize
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is, we generate stack slots only accidentally, so it isn't
         as important; we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
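      /* OFFSET & -OFFSET isolates the lowest set bit of OFFSET, i.e. the
         largest power of two dividing it, which is the alignment the
         chosen offset actually guarantees.  */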
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}

struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reverse order: highest offset pairs come first.  */
  vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          base = virtual_stack_vars_rtx;
          if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && pred)
            {
              HOST_WIDE_INT prev_offset = frame_offset;
              tree repr_decl = NULL_TREE;

              offset
                = alloc_stack_frame_space (stack_vars[i].size
                                           + ASAN_RED_ZONE_SIZE,
                                           MAX (alignb, ASAN_RED_ZONE_SIZE));
              data->asan_vec.safe_push (prev_offset);
              data->asan_vec.safe_push (offset + stack_vars[i].size);
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);
              data->asan_alignb = MAX (data->asan_alignb, alignb);
              if (data->asan_base == NULL)
                data->asan_base = gen_reg_rtx (Pmode);
              base = data->asan_base;

              if (!STRICT_ALIGNMENT)
                base_align = crtl->max_used_stack_slot_alignment;
              else
                base_align = MAX (crtl->max_used_stack_slot_alignment,
                                  GET_MODE_ALIGNMENT (SImode)
                                  << ASAN_SHADOW_SHIFT);
            }
          else
            {
              offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
              base_align = crtl->max_used_stack_slot_alignment;
            }
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (var)));
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = ((HOST_WIDE_INT) tree_to_uhwi (DECL_SIZE_UNIT (var))
       < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK))
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.
*/

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  Similarly for
         SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
          || DECL_EXTERNAL (var)
          || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that the in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_to_uhwi (DECL_SIZE_UNIT (var));
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3
};

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
            len = max;
          else
            len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}

/* And helper function that checks for asan phase (with stack protector
   it is phase 3).  This is used as callback for expand_stack_vars.
   Returns true if any of the vars in the partition need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
        return true;
      i = stack_vars[i].next;
    }
  return false;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = pointer_map_create ();

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  pointer_map_destroy (decl_to_stack_part);
  decl_to_stack_part = NULL;
}

/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return size;
}

/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
        tree field_type = TREE_TYPE (f);
        if (RECORD_OR_UNION_TYPE_P (field_type)
            && record_or_union_type_has_array_p (field_type))
          return 1;
        if (TREE_CODE (field_type) == ARRAY_TYPE)
          return 1;
      }
  return 0;
}
1613
1f6d3a08 1614/* Expand all variables used in the function. */
727a31fa 1615
f3ddd692 1616static rtx
727a31fa
RH
1617expand_used_vars (void)
1618{
c021f10b 1619 tree var, outer_block = DECL_INITIAL (current_function_decl);
6e1aa848 1620 vec<tree> maybe_local_decls = vNULL;
f3ddd692 1621 rtx var_end_seq = NULL_RTX;
70b5e7dc 1622 struct pointer_map_t *ssa_name_decls;
4e3825db 1623 unsigned i;
c021f10b 1624 unsigned len;
f6bc1c4a 1625 bool gen_stack_protect_signal = false;
727a31fa 1626
1f6d3a08
RH
1627 /* Compute the phase of the stack frame for this function. */
1628 {
1629 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1630 int off = STARTING_FRAME_OFFSET % align;
1631 frame_phase = off ? align - off : 0;
1632 }
727a31fa 1633
3f9b14ff
SB
1634 /* Set TREE_USED on all variables in the local_decls. */
1635 FOR_EACH_LOCAL_DECL (cfun, i, var)
1636 TREE_USED (var) = 1;
1637 /* Clear TREE_USED on all variables associated with a block scope. */
1638 clear_tree_used (DECL_INITIAL (current_function_decl));
1639
ff28a94d 1640 init_vars_expansion ();
7d69de61 1641
70b5e7dc 1642 ssa_name_decls = pointer_map_create ();
4e3825db
MM
1643 for (i = 0; i < SA.map->num_partitions; i++)
1644 {
1645 tree var = partition_to_var (SA.map, i);
1646
ea057359 1647 gcc_assert (!virtual_operand_p (var));
70b5e7dc
RG
1648
1649 /* Assign decls to each SSA name partition, share decls for partitions
1650 we could have coalesced (those with the same type). */
1651 if (SSA_NAME_VAR (var) == NULL_TREE)
1652 {
1653 void **slot = pointer_map_insert (ssa_name_decls, TREE_TYPE (var));
1654 if (!*slot)
1655 *slot = (void *) create_tmp_reg (TREE_TYPE (var), NULL);
1656 replace_ssa_name_symbol (var, (tree) *slot);
1657 }
1658
cfb9edba
EB
1659 /* Always allocate space for partitions based on VAR_DECLs. But for
1660 those based on PARM_DECLs or RESULT_DECLs and which matter for the
1661 debug info, there is no need to do so if optimization is disabled
1662 because all the SSA_NAMEs based on these DECLs have been coalesced
1663 into a single partition, which is thus assigned the canonical RTL
5525ed38
JJ
 1664	 location of the DECLs.  If in_lto_p, we can't rely on optimize;
1665 a function could be compiled with -O1 -flto first and only the
1666 link performed at -O0. */
4e3825db
MM
1667 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1668 expand_one_var (var, true, true);
5525ed38 1669 else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize || in_lto_p)
4e3825db
MM
1670 {
1671 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1672 contain the default def (representing the parm or result itself)
 1673	     we don't do anything here.  But for those which don't contain the
1674 default def (representing a temporary based on the parm/result)
1675 we need to allocate space just like for normal VAR_DECLs. */
1676 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1677 {
1678 expand_one_var (var, true, true);
1679 gcc_assert (SA.partition_to_pseudo[i]);
1680 }
1681 }
1682 }
70b5e7dc 1683 pointer_map_destroy (ssa_name_decls);
4e3825db 1684
f6bc1c4a
HS
1685 if (flag_stack_protect == SPCT_FLAG_STRONG)
1686 FOR_EACH_LOCAL_DECL (cfun, i, var)
1687 if (!is_global_var (var))
1688 {
1689 tree var_type = TREE_TYPE (var);
 1690	/* Examine referenced local variables that have their addresses taken,
1691 contain an array, or are arrays. */
1692 if (TREE_CODE (var) == VAR_DECL
1693 && (TREE_CODE (var_type) == ARRAY_TYPE
1694 || TREE_ADDRESSABLE (var)
1695 || (RECORD_OR_UNION_TYPE_P (var_type)
1696 && record_or_union_type_has_array_p (var_type))))
1697 {
1698 gen_stack_protect_signal = true;
1699 break;
1700 }
1701 }
1702
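  /* Editorial note (not part of the original source): the scan above is what
     gives -fstack-protector-strong its wider coverage.  Roughly, any of these
     locals would set gen_stack_protect_signal for the function:

       char buf[32];                       array type
       int x;  ...  use (&x);              address taken
       struct { char b[8]; } s;            aggregate containing an array  */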
cb91fab0 1703 /* At this point all variables on the local_decls with TREE_USED
1f6d3a08 1704 set are not associated with any block scope. Lay them out. */
c021f10b 1705
9771b263 1706 len = vec_safe_length (cfun->local_decls);
c021f10b 1707 FOR_EACH_LOCAL_DECL (cfun, i, var)
1f6d3a08 1708 {
1f6d3a08
RH
1709 bool expand_now = false;
1710
4e3825db
MM
1711 /* Expanded above already. */
1712 if (is_gimple_reg (var))
eb7adebc
MM
1713 {
1714 TREE_USED (var) = 0;
3adcf52c 1715 goto next;
eb7adebc 1716 }
1f6d3a08
RH
1717 /* We didn't set a block for static or extern because it's hard
1718 to tell the difference between a global variable (re)declared
1719 in a local scope, and one that's really declared there to
1720 begin with. And it doesn't really matter much, since we're
1721 not giving them stack space. Expand them now. */
4e3825db 1722 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1f6d3a08
RH
1723 expand_now = true;
1724
ee2e8462
EB
1725 /* Expand variables not associated with any block now. Those created by
1726 the optimizers could be live anywhere in the function. Those that
1727 could possibly have been scoped originally and detached from their
1728 block will have their allocation deferred so we coalesce them with
1729 others when optimization is enabled. */
1f6d3a08
RH
1730 else if (TREE_USED (var))
1731 expand_now = true;
1732
1733 /* Finally, mark all variables on the list as used. We'll use
1734 this in a moment when we expand those associated with scopes. */
1735 TREE_USED (var) = 1;
1736
1737 if (expand_now)
3adcf52c
JM
1738 expand_one_var (var, true, true);
1739
1740 next:
1741 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
802e9f8e 1742 {
3adcf52c
JM
1743 rtx rtl = DECL_RTL_IF_SET (var);
1744
1745 /* Keep artificial non-ignored vars in cfun->local_decls
1746 chain until instantiate_decls. */
1747 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 1748 add_local_decl (cfun, var);
6c6366f6 1749 else if (rtl == NULL_RTX)
c021f10b
NF
1750 /* If rtl isn't set yet, which can happen e.g. with
1751 -fstack-protector, retry before returning from this
1752 function. */
9771b263 1753 maybe_local_decls.safe_push (var);
802e9f8e 1754 }
1f6d3a08 1755 }
1f6d3a08 1756
c021f10b
NF
1757 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1758
1759 +-----------------+-----------------+
1760 | ...processed... | ...duplicates...|
1761 +-----------------+-----------------+
1762 ^
1763 +-- LEN points here.
1764
1765 We just want the duplicates, as those are the artificial
1766 non-ignored vars that we want to keep until instantiate_decls.
1767 Move them down and truncate the array. */
9771b263
DN
1768 if (!vec_safe_is_empty (cfun->local_decls))
1769 cfun->local_decls->block_remove (0, len);
c021f10b 1770
1f6d3a08
RH
1771 /* At this point, all variables within the block tree with TREE_USED
1772 set are actually used by the optimized function. Lay them out. */
1773 expand_used_vars_for_block (outer_block, true);
1774
1775 if (stack_vars_num > 0)
1776 {
47598145 1777 add_scope_conflicts ();
1f6d3a08 1778
c22cacf3 1779 /* If stack protection is enabled, we don't share space between
7d69de61
RH
1780 vulnerable data and non-vulnerable data. */
1781 if (flag_stack_protect)
1782 add_stack_protection_conflicts ();
1783
c22cacf3 1784 /* Now that we have collected all stack variables, and have computed a
1f6d3a08
RH
1785 minimal interference graph, attempt to save some stack space. */
1786 partition_stack_vars ();
1787 if (dump_file)
1788 dump_stack_var_partition ();
7d69de61
RH
1789 }
1790
f6bc1c4a
HS
1791 switch (flag_stack_protect)
1792 {
1793 case SPCT_FLAG_ALL:
1794 create_stack_guard ();
1795 break;
1796
1797 case SPCT_FLAG_STRONG:
1798 if (gen_stack_protect_signal
1799 || cfun->calls_alloca || has_protected_decls)
1800 create_stack_guard ();
1801 break;
1802
1803 case SPCT_FLAG_DEFAULT:
1804 if (cfun->calls_alloca || has_protected_decls)
c3284718 1805 create_stack_guard ();
f6bc1c4a
HS
1806 break;
1807
1808 default:
1809 ;
1810 }
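  /* Editorial note (not part of the original source), assuming the usual
     option mapping: SPCT_FLAG_ALL corresponds to -fstack-protector-all
     (always guard), SPCT_FLAG_STRONG to -fstack-protector-strong (guard when
     the scan earlier found a risky local, or on alloca/protected decls), and
     SPCT_FLAG_DEFAULT to plain -fstack-protector (guard only on alloca or
     explicitly protected decls).  */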
1f6d3a08 1811
7d69de61
RH
1812 /* Assign rtl to each variable based on these partitions. */
1813 if (stack_vars_num > 0)
1814 {
f3ddd692
JJ
1815 struct stack_vars_data data;
1816
6e1aa848
DN
1817 data.asan_vec = vNULL;
1818 data.asan_decl_vec = vNULL;
e361382f
JJ
1819 data.asan_base = NULL_RTX;
1820 data.asan_alignb = 0;
f3ddd692 1821
7d69de61
RH
1822 /* Reorder decls to be protected by iterating over the variables
1823 array multiple times, and allocating out of each phase in turn. */
c22cacf3 1824 /* ??? We could probably integrate this into the qsort we did
7d69de61
RH
1825 earlier, such that we naturally see these variables first,
1826 and thus naturally allocate things in the right order. */
1827 if (has_protected_decls)
1828 {
1829 /* Phase 1 contains only character arrays. */
f3ddd692 1830 expand_stack_vars (stack_protect_decl_phase_1, &data);
7d69de61
RH
1831
1832 /* Phase 2 contains other kinds of arrays. */
1833 if (flag_stack_protect == 2)
f3ddd692 1834 expand_stack_vars (stack_protect_decl_phase_2, &data);
7d69de61
RH
1835 }
1836
b5ebc991 1837 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK)
f3ddd692
JJ
1838 /* Phase 3, any partitions that need asan protection
1839 in addition to phase 1 and 2. */
1840 expand_stack_vars (asan_decl_phase_3, &data);
1841
9771b263 1842 if (!data.asan_vec.is_empty ())
f3ddd692
JJ
1843 {
1844 HOST_WIDE_INT prev_offset = frame_offset;
e361382f
JJ
1845 HOST_WIDE_INT offset, sz, redzonesz;
1846 redzonesz = ASAN_RED_ZONE_SIZE;
1847 sz = data.asan_vec[0] - prev_offset;
1848 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
1849 && data.asan_alignb <= 4096
3dc87cc0 1850 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
e361382f
JJ
1851 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
1852 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
1853 offset
1854 = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
9771b263
DN
1855 data.asan_vec.safe_push (prev_offset);
1856 data.asan_vec.safe_push (offset);
e5dcd695
LZ
1857 /* Leave space for alignment if STRICT_ALIGNMENT. */
1858 if (STRICT_ALIGNMENT)
1859 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
1860 << ASAN_SHADOW_SHIFT)
1861 / BITS_PER_UNIT, 1);
f3ddd692
JJ
1862
1863 var_end_seq
1864 = asan_emit_stack_protection (virtual_stack_vars_rtx,
e361382f
JJ
1865 data.asan_base,
1866 data.asan_alignb,
9771b263 1867 data.asan_vec.address (),
e361382f 1868 data.asan_decl_vec.address (),
9771b263 1869 data.asan_vec.length ());
f3ddd692
JJ
1870 }
1871
1872 expand_stack_vars (NULL, &data);
1873
9771b263
DN
1874 data.asan_vec.release ();
1875 data.asan_decl_vec.release ();
1f6d3a08
RH
1876 }
1877
3f9b14ff
SB
1878 fini_vars_expansion ();
1879
6c6366f6
JJ
1880 /* If there were any artificial non-ignored vars without rtl
1881 found earlier, see if deferred stack allocation hasn't assigned
1882 rtl to them. */
9771b263 1883 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
6c6366f6 1884 {
6c6366f6
JJ
1885 rtx rtl = DECL_RTL_IF_SET (var);
1886
6c6366f6
JJ
1887 /* Keep artificial non-ignored vars in cfun->local_decls
1888 chain until instantiate_decls. */
1889 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 1890 add_local_decl (cfun, var);
6c6366f6 1891 }
9771b263 1892 maybe_local_decls.release ();
6c6366f6 1893
1f6d3a08
RH
1894 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1895 if (STACK_ALIGNMENT_NEEDED)
1896 {
1897 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1898 if (!FRAME_GROWS_DOWNWARD)
1899 frame_offset += align - 1;
1900 frame_offset &= -align;
1901 }
f3ddd692
JJ
1902
1903 return var_end_seq;
727a31fa
RH
1904}
1905
1906
b7211528
SB
1907/* If we need to produce a detailed dump, print the tree representation
1908 for STMT to the dump file. SINCE is the last RTX after which the RTL
1909 generated for STMT should have been appended. */
1910
1911static void
726a989a 1912maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
b7211528
SB
1913{
1914 if (dump_file && (dump_flags & TDF_DETAILS))
1915 {
1916 fprintf (dump_file, "\n;; ");
b5b8b0ac
AO
1917 print_gimple_stmt (dump_file, stmt, 0,
1918 TDF_SLIM | (dump_flags & TDF_LINENO));
b7211528
SB
1919 fprintf (dump_file, "\n");
1920
1921 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1922 }
1923}
1924
8b11009b
ZD
1925/* Maps the blocks that do not contain tree labels to rtx labels. */
1926
1927static struct pointer_map_t *lab_rtx_for_bb;
1928
a9b77cd1
ZD
1929/* Returns the label_rtx expression for a label starting basic block BB. */
1930
1931static rtx
726a989a 1932label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
a9b77cd1 1933{
726a989a
RB
1934 gimple_stmt_iterator gsi;
1935 tree lab;
1936 gimple lab_stmt;
8b11009b 1937 void **elt;
a9b77cd1
ZD
1938
1939 if (bb->flags & BB_RTL)
1940 return block_label (bb);
1941
8b11009b
ZD
1942 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1943 if (elt)
ae50c0cb 1944 return (rtx) *elt;
8b11009b
ZD
1945
1946 /* Find the tree label if it is present. */
b8698a0f 1947
726a989a 1948 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
a9b77cd1 1949 {
726a989a
RB
1950 lab_stmt = gsi_stmt (gsi);
1951 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
a9b77cd1
ZD
1952 break;
1953
726a989a 1954 lab = gimple_label_label (lab_stmt);
a9b77cd1
ZD
1955 if (DECL_NONLOCAL (lab))
1956 break;
1957
1958 return label_rtx (lab);
1959 }
1960
8b11009b
ZD
1961 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1962 *elt = gen_label_rtx ();
ae50c0cb 1963 return (rtx) *elt;
a9b77cd1
ZD
1964}
1965
726a989a 1966
529ff441
MM
1967/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1968 of a basic block where we just expanded the conditional at the end,
315adeda
MM
1969 possibly clean up the CFG and instruction sequence. LAST is the
1970 last instruction before the just emitted jump sequence. */
529ff441
MM
1971
1972static void
315adeda 1973maybe_cleanup_end_of_block (edge e, rtx last)
529ff441
MM
1974{
1975 /* Special case: when jumpif decides that the condition is
1976 trivial it emits an unconditional jump (and the necessary
1977 barrier). But we still have two edges, the fallthru one is
1978 wrong. purge_dead_edges would clean this up later. Unfortunately
1979 we have to insert insns (and split edges) before
1980 find_many_sub_basic_blocks and hence before purge_dead_edges.
1981 But splitting edges might create new blocks which depend on the
1982 fact that if there are two edges there's no barrier. So the
1983 barrier would get lost and verify_flow_info would ICE. Instead
1984 of auditing all edge splitters to care for the barrier (which
1985 normally isn't there in a cleaned CFG), fix it here. */
1986 if (BARRIER_P (get_last_insn ()))
1987 {
529ff441
MM
1988 rtx insn;
1989 remove_edge (e);
 1990      /* Now we have a single successor block; if we have insns to
 1991	 insert on the remaining edge, we will potentially insert
 1992	 them at the end of this block (if the dest block isn't feasible)
1993 in order to avoid splitting the edge. This insertion will take
1994 place in front of the last jump. But we might have emitted
1995 multiple jumps (conditional and one unconditional) to the
1996 same destination. Inserting in front of the last one then
1997 is a problem. See PR 40021. We fix this by deleting all
1998 jumps except the last unconditional one. */
1999 insn = PREV_INSN (get_last_insn ());
2000 /* Make sure we have an unconditional jump. Otherwise we're
2001 confused. */
2002 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
315adeda 2003 for (insn = PREV_INSN (insn); insn != last;)
529ff441
MM
2004 {
2005 insn = PREV_INSN (insn);
2006 if (JUMP_P (NEXT_INSN (insn)))
90eb3e33 2007 {
8a269cb7 2008 if (!any_condjump_p (NEXT_INSN (insn)))
90eb3e33
JJ
2009 {
2010 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2011 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2012 }
2013 delete_insn (NEXT_INSN (insn));
2014 }
529ff441
MM
2015 }
2016 }
2017}
2018
726a989a 2019/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
80c7a9eb
RH
2020 Returns a new basic block if we've terminated the current basic
2021 block and created a new one. */
2022
2023static basic_block
726a989a 2024expand_gimple_cond (basic_block bb, gimple stmt)
80c7a9eb
RH
2025{
2026 basic_block new_bb, dest;
2027 edge new_edge;
2028 edge true_edge;
2029 edge false_edge;
b7211528 2030 rtx last2, last;
28ed065e
MM
2031 enum tree_code code;
2032 tree op0, op1;
2033
2034 code = gimple_cond_code (stmt);
2035 op0 = gimple_cond_lhs (stmt);
2036 op1 = gimple_cond_rhs (stmt);
2037 /* We're sometimes presented with such code:
2038 D.123_1 = x < y;
2039 if (D.123_1 != 0)
2040 ...
2041 This would expand to two comparisons which then later might
2042 be cleaned up by combine. But some pattern matchers like if-conversion
2043 work better when there's only one compare, so make up for this
 2044     here as a special exception if TER would have made the same change.  */
31348d52 2045 if (SA.values
28ed065e 2046 && TREE_CODE (op0) == SSA_NAME
31348d52
RB
2047 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2048 && TREE_CODE (op1) == INTEGER_CST
2049 && ((gimple_cond_code (stmt) == NE_EXPR
2050 && integer_zerop (op1))
2051 || (gimple_cond_code (stmt) == EQ_EXPR
2052 && integer_onep (op1)))
28ed065e
MM
2053 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2054 {
2055 gimple second = SSA_NAME_DEF_STMT (op0);
e83f4b68 2056 if (gimple_code (second) == GIMPLE_ASSIGN)
28ed065e 2057 {
e83f4b68
MM
2058 enum tree_code code2 = gimple_assign_rhs_code (second);
2059 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2060 {
2061 code = code2;
2062 op0 = gimple_assign_rhs1 (second);
2063 op1 = gimple_assign_rhs2 (second);
2064 }
 2065	  /* If jumps are cheap, turn some more codes into
2066 jumpy sequences. */
2067 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
2068 {
2069 if ((code2 == BIT_AND_EXPR
2070 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2071 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2072 || code2 == TRUTH_AND_EXPR)
2073 {
2074 code = TRUTH_ANDIF_EXPR;
2075 op0 = gimple_assign_rhs1 (second);
2076 op1 = gimple_assign_rhs2 (second);
2077 }
2078 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2079 {
2080 code = TRUTH_ORIF_EXPR;
2081 op0 = gimple_assign_rhs1 (second);
2082 op1 = gimple_assign_rhs2 (second);
2083 }
2084 }
28ed065e
MM
2085 }
2086 }
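  /* Editorial example (not part of the original source): with cheap branches
     the BIT_AND_EXPR case above means a sequence like

       D.1 = a < b;
       D.2 = c < d;
       D.3 = D.1 & D.2;
       if (D.3 != 0) goto L;

     is expanded roughly as the short-circuit form `if (a < b && c < d)'
     (TRUTH_ANDIF_EXPR), i.e. two conditional jumps instead of materializing
     the boolean values first.  */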
b7211528
SB
2087
2088 last2 = last = get_last_insn ();
80c7a9eb
RH
2089
2090 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5368224f 2091 set_curr_insn_location (gimple_location (stmt));
80c7a9eb
RH
2092
2093 /* These flags have no purpose in RTL land. */
2094 true_edge->flags &= ~EDGE_TRUE_VALUE;
2095 false_edge->flags &= ~EDGE_FALSE_VALUE;
2096
2097 /* We can either have a pure conditional jump with one fallthru edge or
 2098     a two-way jump that needs to be decomposed into two basic blocks.  */
a9b77cd1 2099 if (false_edge->dest == bb->next_bb)
80c7a9eb 2100 {
40e90eac
JJ
2101 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2102 true_edge->probability);
726a989a 2103 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2f13f2de 2104 if (true_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2105 set_curr_insn_location (true_edge->goto_locus);
a9b77cd1 2106 false_edge->flags |= EDGE_FALLTHRU;
315adeda 2107 maybe_cleanup_end_of_block (false_edge, last);
80c7a9eb
RH
2108 return NULL;
2109 }
a9b77cd1 2110 if (true_edge->dest == bb->next_bb)
80c7a9eb 2111 {
40e90eac
JJ
2112 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2113 false_edge->probability);
726a989a 2114 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2f13f2de 2115 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2116 set_curr_insn_location (false_edge->goto_locus);
a9b77cd1 2117 true_edge->flags |= EDGE_FALLTHRU;
315adeda 2118 maybe_cleanup_end_of_block (true_edge, last);
80c7a9eb
RH
2119 return NULL;
2120 }
80c7a9eb 2121
40e90eac
JJ
2122 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2123 true_edge->probability);
80c7a9eb 2124 last = get_last_insn ();
2f13f2de 2125 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2126 set_curr_insn_location (false_edge->goto_locus);
a9b77cd1 2127 emit_jump (label_rtx_for_bb (false_edge->dest));
80c7a9eb
RH
2128
2129 BB_END (bb) = last;
2130 if (BARRIER_P (BB_END (bb)))
2131 BB_END (bb) = PREV_INSN (BB_END (bb));
2132 update_bb_for_insn (bb);
2133
2134 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2135 dest = false_edge->dest;
2136 redirect_edge_succ (false_edge, new_bb);
2137 false_edge->flags |= EDGE_FALLTHRU;
2138 new_bb->count = false_edge->count;
2139 new_bb->frequency = EDGE_FREQUENCY (false_edge);
7d776ee2
RG
2140 if (current_loops && bb->loop_father)
2141 add_bb_to_loop (new_bb, bb->loop_father);
80c7a9eb
RH
2142 new_edge = make_edge (new_bb, dest, 0);
2143 new_edge->probability = REG_BR_PROB_BASE;
2144 new_edge->count = new_bb->count;
2145 if (BARRIER_P (BB_END (new_bb)))
2146 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2147 update_bb_for_insn (new_bb);
2148
726a989a 2149 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
c22cacf3 2150
2f13f2de 2151 if (true_edge->goto_locus != UNKNOWN_LOCATION)
7787b4aa 2152 {
5368224f
DC
2153 set_curr_insn_location (true_edge->goto_locus);
2154 true_edge->goto_locus = curr_insn_location ();
7787b4aa 2155 }
7787b4aa 2156
80c7a9eb
RH
2157 return new_bb;
2158}
2159
0a35513e
AH
2160/* Mark all calls that can have a transaction restart. */
2161
2162static void
2163mark_transaction_restart_calls (gimple stmt)
2164{
2165 struct tm_restart_node dummy;
2166 void **slot;
2167
2168 if (!cfun->gimple_df->tm_restart)
2169 return;
2170
2171 dummy.stmt = stmt;
2172 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
2173 if (slot)
2174 {
2175 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
2176 tree list = n->label_or_list;
2177 rtx insn;
2178
2179 for (insn = next_real_insn (get_last_insn ());
2180 !CALL_P (insn);
2181 insn = next_real_insn (insn))
2182 continue;
2183
2184 if (TREE_CODE (list) == LABEL_DECL)
2185 add_reg_note (insn, REG_TM, label_rtx (list));
2186 else
2187 for (; list ; list = TREE_CHAIN (list))
2188 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2189 }
2190}
2191
28ed065e
MM
2192/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2193 statement STMT. */
2194
2195static void
2196expand_call_stmt (gimple stmt)
2197{
25583c4f 2198 tree exp, decl, lhs;
e23817b3 2199 bool builtin_p;
e7925582 2200 size_t i;
28ed065e 2201
25583c4f
RS
2202 if (gimple_call_internal_p (stmt))
2203 {
2204 expand_internal_call (stmt);
2205 return;
2206 }
2207
01156003 2208 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
089d1227 2209
01156003 2210 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
089d1227
IE
2211 decl = gimple_call_fndecl (stmt);
2212 builtin_p = decl && DECL_BUILT_IN (decl);
01156003 2213
e7925582
EB
2214 /* If this is not a builtin function, the function type through which the
2215 call is made may be different from the type of the function. */
2216 if (!builtin_p)
2217 CALL_EXPR_FN (exp)
b25aa0e8
EB
2218 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2219 CALL_EXPR_FN (exp));
e7925582 2220
28ed065e
MM
2221 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2222 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2223
2224 for (i = 0; i < gimple_call_num_args (stmt); i++)
e23817b3
RG
2225 {
2226 tree arg = gimple_call_arg (stmt, i);
2227 gimple def;
 2228      /* TER forwards ADDR_EXPRs into the arguments of builtin functions so
 2229	 we have a chance to infer more accurate alignment information.  See PR39954.  */
2230 if (builtin_p
2231 && TREE_CODE (arg) == SSA_NAME
2232 && (def = get_gimple_for_ssa_name (arg))
2233 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2234 arg = gimple_assign_rhs1 (def);
2235 CALL_EXPR_ARG (exp, i) = arg;
2236 }
28ed065e 2237
93f28ca7 2238 if (gimple_has_side_effects (stmt))
28ed065e
MM
2239 TREE_SIDE_EFFECTS (exp) = 1;
2240
93f28ca7 2241 if (gimple_call_nothrow_p (stmt))
28ed065e
MM
2242 TREE_NOTHROW (exp) = 1;
2243
2244 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2245 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
63d2a353
MM
2246 if (decl
2247 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13e49da9
TV
2248 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2249 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
63d2a353
MM
2250 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2251 else
2252 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
28ed065e
MM
2253 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2254 SET_EXPR_LOCATION (exp, gimple_location (stmt));
28ed065e 2255
ddb555ed
JJ
2256 /* Ensure RTL is created for debug args. */
2257 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2258 {
9771b263 2259 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
ddb555ed
JJ
2260 unsigned int ix;
2261 tree dtemp;
2262
2263 if (debug_args)
9771b263 2264 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
ddb555ed
JJ
2265 {
2266 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2267 expand_debug_expr (dtemp);
2268 }
2269 }
2270
25583c4f 2271 lhs = gimple_call_lhs (stmt);
28ed065e
MM
2272 if (lhs)
2273 expand_assignment (lhs, exp, false);
2274 else
4c437f02 2275 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
0a35513e
AH
2276
2277 mark_transaction_restart_calls (stmt);
28ed065e
MM
2278}
2279
862d0b35
DN
2280
2281/* Generate RTL for an asm statement (explicit assembler code).
2282 STRING is a STRING_CST node containing the assembler code text,
2283 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2284 insn is volatile; don't optimize it. */
2285
2286static void
2287expand_asm_loc (tree string, int vol, location_t locus)
2288{
2289 rtx body;
2290
2291 if (TREE_CODE (string) == ADDR_EXPR)
2292 string = TREE_OPERAND (string, 0);
2293
2294 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2295 ggc_strdup (TREE_STRING_POINTER (string)),
2296 locus);
2297
2298 MEM_VOLATILE_P (body) = vol;
2299
2300 emit_insn (body);
2301}
2302
2303/* Return the number of times character C occurs in string S. */
2304static int
2305n_occurrences (int c, const char *s)
2306{
2307 int n = 0;
2308 while (*s)
2309 n += (*s++ == c);
2310 return n;
2311}
2312
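/* Editorial example (not part of the original source):
   n_occurrences (',', "=r,m") is 1, which is how the check below derives
   "two alternatives" from a constraint string.  */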
2313/* A subroutine of expand_asm_operands. Check that all operands have
2314 the same number of alternatives. Return true if so. */
2315
2316static bool
2317check_operand_nalternatives (tree outputs, tree inputs)
2318{
2319 if (outputs || inputs)
2320 {
2321 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
2322 int nalternatives
2323 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
2324 tree next = inputs;
2325
2326 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2327 {
2328 error ("too many alternatives in %<asm%>");
2329 return false;
2330 }
2331
2332 tmp = outputs;
2333 while (tmp)
2334 {
2335 const char *constraint
2336 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2337
2338 if (n_occurrences (',', constraint) != nalternatives)
2339 {
2340 error ("operand constraints for %<asm%> differ "
2341 "in number of alternatives");
2342 return false;
2343 }
2344
2345 if (TREE_CHAIN (tmp))
2346 tmp = TREE_CHAIN (tmp);
2347 else
2348 tmp = next, next = 0;
2349 }
2350 }
2351
2352 return true;
2353}
2354
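/* Editorial example (not part of the original source): an asm such as

     asm ("..." : "=r,m" (x) : "r,r" (y));

   passes the check above (two alternatives for every operand), whereas
   pairing "=r,m" with a single-alternative input constraint like "r" is
   rejected with "operand constraints for 'asm' differ in number of
   alternatives".  */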
2355/* Check for overlap between registers marked in CLOBBERED_REGS and
 2356    anything inappropriate in T.  Emit an error and return true if a
 2357    conflict is found, false if everything is OK.  */
2358
2359static bool
2360tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2361{
2362 /* Conflicts between asm-declared register variables and the clobber
2363 list are not allowed. */
2364 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2365
2366 if (overlap)
2367 {
2368 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2369 DECL_NAME (overlap));
2370
2371 /* Reset registerness to stop multiple errors emitted for a single
2372 variable. */
2373 DECL_REGISTER (overlap) = 0;
2374 return true;
2375 }
2376
2377 return false;
2378}
2379
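/* Editorial example (not part of the original source): the typical conflict
   caught above is an asm-declared register variable that is also named in
   the clobber list, e.g.

     register int x asm ("ebx");
     asm ("..." : "=r" (x) : : "ebx");

   which is diagnosed as an asm-specifier conflicting with the clobber
   list.  */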
2380/* Generate RTL for an asm statement with arguments.
2381 STRING is the instruction template.
2382 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2383 Each output or input has an expression in the TREE_VALUE and
2384 a tree list in TREE_PURPOSE which in turn contains a constraint
2385 name in TREE_VALUE (or NULL_TREE) and a constraint string
2386 in TREE_PURPOSE.
2387 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2388 that is clobbered by this insn.
2389
2390 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2391 should be the fallthru basic block of the asm goto.
2392
2393 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2394 Some elements of OUTPUTS may be replaced with trees representing temporary
2395 values. The caller should copy those temporary values to the originally
2396 specified lvalues.
2397
2398 VOL nonzero means the insn is volatile; don't optimize it. */
2399
2400static void
2401expand_asm_operands (tree string, tree outputs, tree inputs,
2402 tree clobbers, tree labels, basic_block fallthru_bb,
2403 int vol, location_t locus)
2404{
2405 rtvec argvec, constraintvec, labelvec;
2406 rtx body;
2407 int ninputs = list_length (inputs);
2408 int noutputs = list_length (outputs);
2409 int nlabels = list_length (labels);
2410 int ninout;
2411 int nclobbers;
2412 HARD_REG_SET clobbered_regs;
2413 int clobber_conflict_found = 0;
2414 tree tail;
2415 tree t;
2416 int i;
2417 /* Vector of RTX's of evaluated output operands. */
2418 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
2419 int *inout_opnum = XALLOCAVEC (int, noutputs);
2420 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
2421 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
2422 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
2423 int old_generating_concat_p = generating_concat_p;
2424 rtx fallthru_label = NULL_RTX;
2425
2426 /* An ASM with no outputs needs to be treated as volatile, for now. */
2427 if (noutputs == 0)
2428 vol = 1;
2429
2430 if (! check_operand_nalternatives (outputs, inputs))
2431 return;
2432
2433 string = resolve_asm_operand_names (string, outputs, inputs, labels);
2434
2435 /* Collect constraints. */
2436 i = 0;
2437 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
2438 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2439 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
2440 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2441
2442 /* Sometimes we wish to automatically clobber registers across an asm.
2443 Case in point is when the i386 backend moved from cc0 to a hard reg --
2444 maintaining source-level compatibility means automatically clobbering
2445 the flags register. */
2446 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
2447
2448 /* Count the number of meaningful clobbered registers, ignoring what
2449 we would ignore later. */
2450 nclobbers = 0;
2451 CLEAR_HARD_REG_SET (clobbered_regs);
2452 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2453 {
2454 const char *regname;
2455 int nregs;
2456
2457 if (TREE_VALUE (tail) == error_mark_node)
2458 return;
2459 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2460
2461 i = decode_reg_name_and_count (regname, &nregs);
2462 if (i == -4)
2463 ++nclobbers;
2464 else if (i == -2)
2465 error ("unknown register name %qs in %<asm%>", regname);
2466
2467 /* Mark clobbered registers. */
2468 if (i >= 0)
2469 {
2470 int reg;
2471
2472 for (reg = i; reg < i + nregs; reg++)
2473 {
2474 ++nclobbers;
2475
2476 /* Clobbering the PIC register is an error. */
2477 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2478 {
2479 error ("PIC register clobbered by %qs in %<asm%>", regname);
2480 return;
2481 }
2482
2483 SET_HARD_REG_BIT (clobbered_regs, reg);
2484 }
2485 }
2486 }
2487
 2488  /* First pass over inputs and outputs checks validity and calls
 2489     mark_addressable where needed.  */
2490
2491 ninout = 0;
2492 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2493 {
2494 tree val = TREE_VALUE (tail);
2495 tree type = TREE_TYPE (val);
2496 const char *constraint;
2497 bool is_inout;
2498 bool allows_reg;
2499 bool allows_mem;
2500
2501 /* If there's an erroneous arg, emit no insn. */
2502 if (type == error_mark_node)
2503 return;
2504
2505 /* Try to parse the output constraint. If that fails, there's
2506 no point in going further. */
2507 constraint = constraints[i];
2508 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2509 &allows_mem, &allows_reg, &is_inout))
2510 return;
2511
2512 if (! allows_reg
2513 && (allows_mem
2514 || is_inout
2515 || (DECL_P (val)
2516 && REG_P (DECL_RTL (val))
2517 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2518 mark_addressable (val);
2519
2520 if (is_inout)
2521 ninout++;
2522 }
2523
2524 ninputs += ninout;
2525 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
2526 {
2527 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2528 return;
2529 }
2530
2531 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
2532 {
2533 bool allows_reg, allows_mem;
2534 const char *constraint;
2535
2536 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
2537 would get VOIDmode and that could cause a crash in reload. */
2538 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
2539 return;
2540
2541 constraint = constraints[i + noutputs];
2542 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2543 constraints, &allows_mem, &allows_reg))
2544 return;
2545
2546 if (! allows_reg && allows_mem)
2547 mark_addressable (TREE_VALUE (tail));
2548 }
2549
2550 /* Second pass evaluates arguments. */
2551
2552 /* Make sure stack is consistent for asm goto. */
2553 if (nlabels > 0)
2554 do_pending_stack_adjust ();
2555
2556 ninout = 0;
2557 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2558 {
2559 tree val = TREE_VALUE (tail);
2560 tree type = TREE_TYPE (val);
2561 bool is_inout;
2562 bool allows_reg;
2563 bool allows_mem;
2564 rtx op;
2565 bool ok;
2566
2567 ok = parse_output_constraint (&constraints[i], i, ninputs,
2568 noutputs, &allows_mem, &allows_reg,
2569 &is_inout);
2570 gcc_assert (ok);
2571
2572 /* If an output operand is not a decl or indirect ref and our constraint
2573 allows a register, make a temporary to act as an intermediate.
 2574	 Make the asm insn write into that; then our caller will copy it to
2575 the real output operand. Likewise for promoted variables. */
2576
2577 generating_concat_p = 0;
2578
2579 real_output_rtx[i] = NULL_RTX;
2580 if ((TREE_CODE (val) == INDIRECT_REF
2581 && allows_mem)
2582 || (DECL_P (val)
2583 && (allows_mem || REG_P (DECL_RTL (val)))
2584 && ! (REG_P (DECL_RTL (val))
2585 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2586 || ! allows_reg
2587 || is_inout)
2588 {
2589 op = expand_expr (val, NULL_RTX, VOIDmode,
2590 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2591 if (MEM_P (op))
2592 op = validize_mem (op);
2593
2594 if (! allows_reg && !MEM_P (op))
2595 error ("output number %d not directly addressable", i);
2596 if ((! allows_mem && MEM_P (op))
2597 || GET_CODE (op) == CONCAT)
2598 {
2599 real_output_rtx[i] = op;
2600 op = gen_reg_rtx (GET_MODE (op));
2601 if (is_inout)
2602 emit_move_insn (op, real_output_rtx[i]);
2603 }
2604 }
2605 else
2606 {
2607 op = assign_temp (type, 0, 1);
2608 op = validize_mem (op);
2609 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
2610 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
2611 TREE_VALUE (tail) = make_tree (type, op);
2612 }
2613 output_rtx[i] = op;
2614
2615 generating_concat_p = old_generating_concat_p;
2616
2617 if (is_inout)
2618 {
2619 inout_mode[ninout] = TYPE_MODE (type);
2620 inout_opnum[ninout++] = i;
2621 }
2622
2623 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2624 clobber_conflict_found = 1;
2625 }
2626
2627 /* Make vectors for the expression-rtx, constraint strings,
2628 and named operands. */
2629
2630 argvec = rtvec_alloc (ninputs);
2631 constraintvec = rtvec_alloc (ninputs);
2632 labelvec = rtvec_alloc (nlabels);
2633
2634 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
2635 : GET_MODE (output_rtx[0])),
2636 ggc_strdup (TREE_STRING_POINTER (string)),
2637 empty_string, 0, argvec, constraintvec,
2638 labelvec, locus);
2639
2640 MEM_VOLATILE_P (body) = vol;
2641
2642 /* Eval the inputs and put them into ARGVEC.
2643 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
2644
2645 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
2646 {
2647 bool allows_reg, allows_mem;
2648 const char *constraint;
2649 tree val, type;
2650 rtx op;
2651 bool ok;
2652
2653 constraint = constraints[i + noutputs];
2654 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2655 constraints, &allows_mem, &allows_reg);
2656 gcc_assert (ok);
2657
2658 generating_concat_p = 0;
2659
2660 val = TREE_VALUE (tail);
2661 type = TREE_TYPE (val);
2662 /* EXPAND_INITIALIZER will not generate code for valid initializer
2663 constants, but will still generate code for other types of operand.
2664 This is the behavior we want for constant constraints. */
2665 op = expand_expr (val, NULL_RTX, VOIDmode,
2666 allows_reg ? EXPAND_NORMAL
2667 : allows_mem ? EXPAND_MEMORY
2668 : EXPAND_INITIALIZER);
2669
2670 /* Never pass a CONCAT to an ASM. */
2671 if (GET_CODE (op) == CONCAT)
2672 op = force_reg (GET_MODE (op), op);
2673 else if (MEM_P (op))
2674 op = validize_mem (op);
2675
2676 if (asm_operand_ok (op, constraint, NULL) <= 0)
2677 {
2678 if (allows_reg && TYPE_MODE (type) != BLKmode)
2679 op = force_reg (TYPE_MODE (type), op);
2680 else if (!allows_mem)
2681 warning (0, "asm operand %d probably doesn%'t match constraints",
2682 i + noutputs);
2683 else if (MEM_P (op))
2684 {
 2685	      /* We won't recognize either volatile memory or memory
 2686		 with a queued address as a valid memory_operand
 2687		 at this point.  Ignore it: clearly this *is* a memory.  */
2688 }
2689 else
2690 gcc_unreachable ();
2691 }
2692
2693 generating_concat_p = old_generating_concat_p;
2694 ASM_OPERANDS_INPUT (body, i) = op;
2695
2696 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
e2fc3b4f
BE
2697 = gen_rtx_ASM_INPUT_loc (TYPE_MODE (type),
2698 ggc_strdup (constraints[i + noutputs]),
2699 locus);
862d0b35
DN
2700
2701 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2702 clobber_conflict_found = 1;
2703 }
2704
2705 /* Protect all the operands from the queue now that they have all been
2706 evaluated. */
2707
2708 generating_concat_p = 0;
2709
2710 /* For in-out operands, copy output rtx to input rtx. */
2711 for (i = 0; i < ninout; i++)
2712 {
2713 int j = inout_opnum[i];
2714 char buffer[16];
2715
2716 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
2717 = output_rtx[j];
2718
2719 sprintf (buffer, "%d", j);
2720 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
e2fc3b4f 2721 = gen_rtx_ASM_INPUT_loc (inout_mode[i], ggc_strdup (buffer), locus);
862d0b35
DN
2722 }
2723
2724 /* Copy labels to the vector. */
2725 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
2726 {
2727 rtx r;
2728 /* If asm goto has any labels in the fallthru basic block, use
2729 a label that we emit immediately after the asm goto. Expansion
2730 may insert further instructions into the same basic block after
2731 asm goto and if we don't do this, insertion of instructions on
2732 the fallthru edge might misbehave. See PR58670. */
2733 if (fallthru_bb
2734 && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
2735 {
2736 if (fallthru_label == NULL_RTX)
2737 fallthru_label = gen_label_rtx ();
2738 r = fallthru_label;
2739 }
2740 else
2741 r = label_rtx (TREE_VALUE (tail));
2742 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
2743 }
2744
2745 generating_concat_p = old_generating_concat_p;
2746
2747 /* Now, for each output, construct an rtx
2748 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
2749 ARGVEC CONSTRAINTS OPNAMES))
2750 If there is more than one, put them inside a PARALLEL. */
2751
2752 if (nlabels > 0 && nclobbers == 0)
2753 {
2754 gcc_assert (noutputs == 0);
2755 emit_jump_insn (body);
2756 }
2757 else if (noutputs == 0 && nclobbers == 0)
2758 {
2759 /* No output operands: put in a raw ASM_OPERANDS rtx. */
2760 emit_insn (body);
2761 }
2762 else if (noutputs == 1 && nclobbers == 0)
2763 {
2764 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
2765 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
2766 }
2767 else
2768 {
2769 rtx obody = body;
2770 int num = noutputs;
2771
2772 if (num == 0)
2773 num = 1;
2774
2775 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
2776
2777 /* For each output operand, store a SET. */
2778 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2779 {
2780 XVECEXP (body, 0, i)
2781 = gen_rtx_SET (VOIDmode,
2782 output_rtx[i],
2783 gen_rtx_ASM_OPERANDS
2784 (GET_MODE (output_rtx[i]),
2785 ggc_strdup (TREE_STRING_POINTER (string)),
2786 ggc_strdup (constraints[i]),
2787 i, argvec, constraintvec, labelvec, locus));
2788
2789 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
2790 }
2791
2792 /* If there are no outputs (but there are some clobbers)
2793 store the bare ASM_OPERANDS into the PARALLEL. */
2794
2795 if (i == 0)
2796 XVECEXP (body, 0, i++) = obody;
2797
2798 /* Store (clobber REG) for each clobbered register specified. */
2799
2800 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2801 {
2802 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2803 int reg, nregs;
2804 int j = decode_reg_name_and_count (regname, &nregs);
2805 rtx clobbered_reg;
2806
2807 if (j < 0)
2808 {
2809 if (j == -3) /* `cc', which is not a register */
2810 continue;
2811
2812 if (j == -4) /* `memory', don't cache memory across asm */
2813 {
2814 XVECEXP (body, 0, i++)
2815 = gen_rtx_CLOBBER (VOIDmode,
2816 gen_rtx_MEM
2817 (BLKmode,
2818 gen_rtx_SCRATCH (VOIDmode)));
2819 continue;
2820 }
2821
2822 /* Ignore unknown register, error already signaled. */
2823 continue;
2824 }
2825
2826 for (reg = j; reg < j + nregs; reg++)
2827 {
2828 /* Use QImode since that's guaranteed to clobber just
2829 * one reg. */
2830 clobbered_reg = gen_rtx_REG (QImode, reg);
2831
2832 /* Do sanity check for overlap between clobbers and
2833 respectively input and outputs that hasn't been
2834 handled. Such overlap should have been detected and
2835 reported above. */
2836 if (!clobber_conflict_found)
2837 {
2838 int opno;
2839
2840 /* We test the old body (obody) contents to avoid
2841 tripping over the under-construction body. */
2842 for (opno = 0; opno < noutputs; opno++)
2843 if (reg_overlap_mentioned_p (clobbered_reg,
2844 output_rtx[opno]))
2845 internal_error
2846 ("asm clobber conflict with output operand");
2847
2848 for (opno = 0; opno < ninputs - ninout; opno++)
2849 if (reg_overlap_mentioned_p (clobbered_reg,
2850 ASM_OPERANDS_INPUT (obody,
2851 opno)))
2852 internal_error
2853 ("asm clobber conflict with input operand");
2854 }
2855
2856 XVECEXP (body, 0, i++)
2857 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
2858 }
2859 }
2860
2861 if (nlabels > 0)
2862 emit_jump_insn (body);
2863 else
2864 emit_insn (body);
2865 }
2866
2867 if (fallthru_label)
2868 emit_label (fallthru_label);
2869
2870 /* For any outputs that needed reloading into registers, spill them
2871 back to where they belong. */
2872 for (i = 0; i < noutputs; ++i)
2873 if (real_output_rtx[i])
2874 emit_move_insn (real_output_rtx[i], output_rtx[i]);
2875
2876 crtl->has_asm_statement = 1;
2877 free_temp_slots ();
2878}
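/* Editorial note (not part of the original source): an output written with
   a "+" constraint is handled above as an output plus an extra matching
   input.  The ninout loop copies output_rtx[j] into the input slot and
   gives it the numeric constraint "j", which is roughly how a source-level
   "+r" (x) becomes output "=r" (x) paired with a matching input "0" (x).  */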
2879
2880
2881static void
2882expand_asm_stmt (gimple stmt)
2883{
2884 int noutputs;
2885 tree outputs, tail, t;
2886 tree *o;
2887 size_t i, n;
2888 const char *s;
2889 tree str, out, in, cl, labels;
2890 location_t locus = gimple_location (stmt);
2891 basic_block fallthru_bb = NULL;
2892
2893 /* Meh... convert the gimple asm operands into real tree lists.
2894 Eventually we should make all routines work on the vectors instead
2895 of relying on TREE_CHAIN. */
2896 out = NULL_TREE;
2897 n = gimple_asm_noutputs (stmt);
2898 if (n > 0)
2899 {
2900 t = out = gimple_asm_output_op (stmt, 0);
2901 for (i = 1; i < n; i++)
2902 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
2903 }
2904
2905 in = NULL_TREE;
2906 n = gimple_asm_ninputs (stmt);
2907 if (n > 0)
2908 {
2909 t = in = gimple_asm_input_op (stmt, 0);
2910 for (i = 1; i < n; i++)
2911 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
2912 }
2913
2914 cl = NULL_TREE;
2915 n = gimple_asm_nclobbers (stmt);
2916 if (n > 0)
2917 {
2918 t = cl = gimple_asm_clobber_op (stmt, 0);
2919 for (i = 1; i < n; i++)
2920 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
2921 }
2922
2923 labels = NULL_TREE;
2924 n = gimple_asm_nlabels (stmt);
2925 if (n > 0)
2926 {
2927 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
2928 if (fallthru)
2929 fallthru_bb = fallthru->dest;
2930 t = labels = gimple_asm_label_op (stmt, 0);
2931 for (i = 1; i < n; i++)
2932 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
2933 }
2934
2935 s = gimple_asm_string (stmt);
2936 str = build_string (strlen (s), s);
2937
2938 if (gimple_asm_input_p (stmt))
2939 {
2940 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
2941 return;
2942 }
2943
2944 outputs = out;
2945 noutputs = gimple_asm_noutputs (stmt);
2946 /* o[I] is the place that output number I should be written. */
2947 o = (tree *) alloca (noutputs * sizeof (tree));
2948
2949 /* Record the contents of OUTPUTS before it is modified. */
2950 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2951 o[i] = TREE_VALUE (tail);
2952
2953 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
2954 OUTPUTS some trees for where the values were actually stored. */
2955 expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
2956 gimple_asm_volatile_p (stmt), locus);
2957
2958 /* Copy all the intermediate outputs into the specified outputs. */
2959 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2960 {
2961 if (o[i] != TREE_VALUE (tail))
2962 {
2963 expand_assignment (o[i], TREE_VALUE (tail), false);
2964 free_temp_slots ();
2965
2966 /* Restore the original value so that it's correct the next
2967 time we expand this function. */
2968 TREE_VALUE (tail) = o[i];
2969 }
2970 }
2971}
2972
2973/* Emit code to jump to the address
2974 specified by the pointer expression EXP. */
2975
2976static void
2977expand_computed_goto (tree exp)
2978{
2979 rtx x = expand_normal (exp);
2980
2981 x = convert_memory_address (Pmode, x);
2982
2983 do_pending_stack_adjust ();
2984 emit_indirect_jump (x);
2985}
2986
2987/* Generate RTL code for a `goto' statement with target label LABEL.
2988 LABEL should be a LABEL_DECL tree node that was or will later be
2989 defined with `expand_label'. */
2990
2991static void
2992expand_goto (tree label)
2993{
2994#ifdef ENABLE_CHECKING
2995 /* Check for a nonlocal goto to a containing function. Should have
2996 gotten translated to __builtin_nonlocal_goto. */
2997 tree context = decl_function_context (label);
2998 gcc_assert (!context || context == current_function_decl);
2999#endif
3000
3001 emit_jump (label_rtx (label));
3002}
3003
3004/* Output a return with no value. */
3005
3006static void
3007expand_null_return_1 (void)
3008{
3009 clear_pending_stack_adjust ();
3010 do_pending_stack_adjust ();
3011 emit_jump (return_label);
3012}
3013
3014/* Generate RTL to return from the current function, with no value.
3015 (That is, we do not do anything about returning any value.) */
3016
3017void
3018expand_null_return (void)
3019{
3020 /* If this function was declared to return a value, but we
3021 didn't, clobber the return registers so that they are not
3022 propagated live to the rest of the function. */
3023 clobber_return_register ();
3024
3025 expand_null_return_1 ();
3026}
3027
3028/* Generate RTL to return from the current function, with value VAL. */
3029
3030static void
3031expand_value_return (rtx val)
3032{
3033 /* Copy the value to the return location unless it's already there. */
3034
3035 tree decl = DECL_RESULT (current_function_decl);
3036 rtx return_reg = DECL_RTL (decl);
3037 if (return_reg != val)
3038 {
3039 tree funtype = TREE_TYPE (current_function_decl);
3040 tree type = TREE_TYPE (decl);
3041 int unsignedp = TYPE_UNSIGNED (type);
3042 enum machine_mode old_mode = DECL_MODE (decl);
3043 enum machine_mode mode;
3044 if (DECL_BY_REFERENCE (decl))
3045 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3046 else
3047 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3048
3049 if (mode != old_mode)
3050 val = convert_modes (mode, old_mode, val, unsignedp);
3051
3052 if (GET_CODE (return_reg) == PARALLEL)
3053 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3054 else
3055 emit_move_insn (return_reg, val);
3056 }
3057
3058 expand_null_return_1 ();
3059}
3060
3061/* Generate RTL to evaluate the expression RETVAL and return it
3062 from the current function. */
3063
3064static void
3065expand_return (tree retval)
3066{
3067 rtx result_rtl;
3068 rtx val = 0;
3069 tree retval_rhs;
3070
3071 /* If function wants no value, give it none. */
3072 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3073 {
3074 expand_normal (retval);
3075 expand_null_return ();
3076 return;
3077 }
3078
3079 if (retval == error_mark_node)
3080 {
3081 /* Treat this like a return of no value from a function that
3082 returns a value. */
3083 expand_null_return ();
3084 return;
3085 }
3086 else if ((TREE_CODE (retval) == MODIFY_EXPR
3087 || TREE_CODE (retval) == INIT_EXPR)
3088 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3089 retval_rhs = TREE_OPERAND (retval, 1);
3090 else
3091 retval_rhs = retval;
3092
3093 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3094
3095 /* If we are returning the RESULT_DECL, then the value has already
3096 been stored into it, so we don't have to do anything special. */
3097 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3098 expand_value_return (result_rtl);
3099
3100 /* If the result is an aggregate that is being returned in one (or more)
3101 registers, load the registers here. */
3102
3103 else if (retval_rhs != 0
3104 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3105 && REG_P (result_rtl))
3106 {
3107 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3108 if (val)
3109 {
3110 /* Use the mode of the result value on the return register. */
3111 PUT_MODE (result_rtl, GET_MODE (val));
3112 expand_value_return (val);
3113 }
3114 else
3115 expand_null_return ();
3116 }
3117 else if (retval_rhs != 0
3118 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3119 && (REG_P (result_rtl)
3120 || (GET_CODE (result_rtl) == PARALLEL)))
3121 {
9ee5337d
EB
3122 /* Compute the return value into a temporary (usually a pseudo reg). */
3123 val
3124 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
862d0b35
DN
3125 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3126 val = force_not_mem (val);
862d0b35
DN
3127 expand_value_return (val);
3128 }
3129 else
3130 {
3131 /* No hard reg used; calculate value into hard return reg. */
3132 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3133 expand_value_return (result_rtl);
3134 }
3135}
3136
28ed065e
MM
3137/* A subroutine of expand_gimple_stmt, expanding one gimple statement
3138 STMT that doesn't require special handling for outgoing edges. That
3139 is no tailcalls and no GIMPLE_COND. */
3140
3141static void
3142expand_gimple_stmt_1 (gimple stmt)
3143{
3144 tree op0;
c82fee88 3145
5368224f 3146 set_curr_insn_location (gimple_location (stmt));
c82fee88 3147
28ed065e
MM
3148 switch (gimple_code (stmt))
3149 {
3150 case GIMPLE_GOTO:
3151 op0 = gimple_goto_dest (stmt);
3152 if (TREE_CODE (op0) == LABEL_DECL)
3153 expand_goto (op0);
3154 else
3155 expand_computed_goto (op0);
3156 break;
3157 case GIMPLE_LABEL:
3158 expand_label (gimple_label_label (stmt));
3159 break;
3160 case GIMPLE_NOP:
3161 case GIMPLE_PREDICT:
3162 break;
28ed065e
MM
3163 case GIMPLE_SWITCH:
3164 expand_case (stmt);
3165 break;
3166 case GIMPLE_ASM:
3167 expand_asm_stmt (stmt);
3168 break;
3169 case GIMPLE_CALL:
3170 expand_call_stmt (stmt);
3171 break;
3172
3173 case GIMPLE_RETURN:
3174 op0 = gimple_return_retval (stmt);
3175
3176 if (op0 && op0 != error_mark_node)
3177 {
3178 tree result = DECL_RESULT (current_function_decl);
3179
3180 /* If we are not returning the current function's RESULT_DECL,
3181 build an assignment to it. */
3182 if (op0 != result)
3183 {
3184 /* I believe that a function's RESULT_DECL is unique. */
3185 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3186
 3187	    /* ??? We'd like to simply use expand_assignment here,
3188 but this fails if the value is of BLKmode but the return
3189 decl is a register. expand_return has special handling
3190 for this combination, which eventually should move
3191 to common code. See comments there. Until then, let's
3192 build a modify expression :-/ */
3193 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3194 result, op0);
3195 }
3196 }
3197 if (!op0)
3198 expand_null_return ();
3199 else
3200 expand_return (op0);
3201 break;
3202
3203 case GIMPLE_ASSIGN:
3204 {
3205 tree lhs = gimple_assign_lhs (stmt);
3206
3207 /* Tree expand used to fiddle with |= and &= of two bitfield
 3208	   COMPONENT_REFs here.  This can't happen with gimple; the LHS
3209 of binary assigns must be a gimple reg. */
3210
3211 if (TREE_CODE (lhs) != SSA_NAME
3212 || get_gimple_rhs_class (gimple_expr_code (stmt))
3213 == GIMPLE_SINGLE_RHS)
3214 {
3215 tree rhs = gimple_assign_rhs1 (stmt);
3216 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3217 == GIMPLE_SINGLE_RHS);
3218 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
3219 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
47598145
MM
3220 if (TREE_CLOBBER_P (rhs))
3221 /* This is a clobber to mark the going out of scope for
3222 this LHS. */
3223 ;
3224 else
3225 expand_assignment (lhs, rhs,
3226 gimple_assign_nontemporal_move_p (stmt));
28ed065e
MM
3227 }
3228 else
3229 {
3230 rtx target, temp;
3231 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
3232 struct separate_ops ops;
3233 bool promoted = false;
3234
3235 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3236 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3237 promoted = true;
3238
3239 ops.code = gimple_assign_rhs_code (stmt);
3240 ops.type = TREE_TYPE (lhs);
3241 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
3242 {
0354c0c7
BS
3243 case GIMPLE_TERNARY_RHS:
3244 ops.op2 = gimple_assign_rhs3 (stmt);
3245 /* Fallthru */
28ed065e
MM
3246 case GIMPLE_BINARY_RHS:
3247 ops.op1 = gimple_assign_rhs2 (stmt);
3248 /* Fallthru */
3249 case GIMPLE_UNARY_RHS:
3250 ops.op0 = gimple_assign_rhs1 (stmt);
3251 break;
3252 default:
3253 gcc_unreachable ();
3254 }
3255 ops.location = gimple_location (stmt);
3256
3257 /* If we want to use a nontemporal store, force the value to
3258 register first. If we store into a promoted register,
3259 don't directly expand to target. */
3260 temp = nontemporal || promoted ? NULL_RTX : target;
3261 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3262 EXPAND_NORMAL);
3263
3264 if (temp == target)
3265 ;
3266 else if (promoted)
3267 {
4e18a7d4 3268 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
28ed065e
MM
3269 /* If TEMP is a VOIDmode constant, use convert_modes to make
3270 sure that we properly convert it. */
3271 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3272 {
3273 temp = convert_modes (GET_MODE (target),
3274 TYPE_MODE (ops.type),
4e18a7d4 3275 temp, unsignedp);
28ed065e 3276 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4e18a7d4 3277 GET_MODE (target), temp, unsignedp);
28ed065e
MM
3278 }
3279
4e18a7d4 3280 convert_move (SUBREG_REG (target), temp, unsignedp);
28ed065e
MM
3281 }
3282 else if (nontemporal && emit_storent_insn (target, temp))
3283 ;
3284 else
3285 {
3286 temp = force_operand (temp, target);
3287 if (temp != target)
3288 emit_move_insn (target, temp);
3289 }
3290 }
3291 }
3292 break;
3293
3294 default:
3295 gcc_unreachable ();
3296 }
3297}
3298
3299/* Expand one gimple statement STMT and return the last RTL instruction
3300 before any of the newly generated ones.
3301
3302 In addition to generating the necessary RTL instructions this also
3303 sets REG_EH_REGION notes if necessary and sets the current source
3304 location for diagnostics. */
3305
3306static rtx
3307expand_gimple_stmt (gimple stmt)
3308{
28ed065e 3309 location_t saved_location = input_location;
c82fee88
EB
3310 rtx last = get_last_insn ();
3311 int lp_nr;
28ed065e 3312
28ed065e
MM
3313 gcc_assert (cfun);
3314
c82fee88
EB
3315 /* We need to save and restore the current source location so that errors
3316 discovered during expansion are emitted with the right location. But
3317 it would be better if the diagnostic routines used the source location
3318 embedded in the tree nodes rather than globals. */
28ed065e 3319 if (gimple_has_location (stmt))
c82fee88 3320 input_location = gimple_location (stmt);
28ed065e
MM
3321
3322 expand_gimple_stmt_1 (stmt);
c82fee88 3323
28ed065e
MM
3324 /* Free any temporaries used to evaluate this statement. */
3325 free_temp_slots ();
3326
3327 input_location = saved_location;
3328
3329 /* Mark all insns that may trap. */
1d65f45c
RH
3330 lp_nr = lookup_stmt_eh_lp (stmt);
3331 if (lp_nr)
28ed065e
MM
3332 {
3333 rtx insn;
3334 for (insn = next_real_insn (last); insn;
3335 insn = next_real_insn (insn))
3336 {
3337 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3338 /* If we want exceptions for non-call insns, any
3339 may_trap_p instruction may throw. */
3340 && GET_CODE (PATTERN (insn)) != CLOBBER
3341 && GET_CODE (PATTERN (insn)) != USE
1d65f45c
RH
3342 && insn_could_throw_p (insn))
3343 make_reg_eh_region_note (insn, 0, lp_nr);
28ed065e
MM
3344 }
3345 }
3346
3347 return last;
3348}
3349
726a989a 3350/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
224e770b
RH
3351 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3352 generated a tail call (something that might be denied by the ABI
cea49550
RH
3353 rules governing the call; see calls.c).
3354
3355 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3356 can still reach the rest of BB. The case here is __builtin_sqrt,
3357 where the NaN result goes through the external function (with a
3358 tailcall) and the normal result happens via a sqrt instruction. */
80c7a9eb
RH
3359
3360static basic_block
726a989a 3361expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
80c7a9eb 3362{
b7211528 3363 rtx last2, last;
224e770b 3364 edge e;
628f6a4e 3365 edge_iterator ei;
224e770b
RH
3366 int probability;
3367 gcov_type count;
80c7a9eb 3368
28ed065e 3369 last2 = last = expand_gimple_stmt (stmt);
80c7a9eb
RH
3370
3371 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
3372 if (CALL_P (last) && SIBLING_CALL_P (last))
3373 goto found;
80c7a9eb 3374
726a989a 3375 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 3376
cea49550 3377 *can_fallthru = true;
224e770b 3378 return NULL;
80c7a9eb 3379
224e770b
RH
3380 found:
3381 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3382 Any instructions emitted here are about to be deleted. */
3383 do_pending_stack_adjust ();
3384
3385 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3386 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3387 EH or abnormal edges, we shouldn't have created a tail call in
3388 the first place. So it seems to me we should just be removing
3389 all edges here, or redirecting the existing fallthru edge to
3390 the exit block. */
3391
224e770b
RH
3392 probability = 0;
3393 count = 0;
224e770b 3394
628f6a4e
BE
3395 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3396 {
224e770b
RH
3397 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3398 {
fefa31b5 3399 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
80c7a9eb 3400 {
224e770b
RH
3401 e->dest->count -= e->count;
3402 e->dest->frequency -= EDGE_FREQUENCY (e);
3403 if (e->dest->count < 0)
c22cacf3 3404 e->dest->count = 0;
224e770b 3405 if (e->dest->frequency < 0)
c22cacf3 3406 e->dest->frequency = 0;
80c7a9eb 3407 }
224e770b
RH
3408 count += e->count;
3409 probability += e->probability;
3410 remove_edge (e);
80c7a9eb 3411 }
628f6a4e
BE
3412 else
3413 ei_next (&ei);
80c7a9eb
RH
3414 }
3415
224e770b
RH
3416 /* This is somewhat ugly: the call_expr expander often emits instructions
3417 after the sibcall (to perform the function return). These confuse the
12eff7b7 3418 find_many_sub_basic_blocks code, so we need to get rid of these. */
224e770b 3419 last = NEXT_INSN (last);
341c100f 3420 gcc_assert (BARRIER_P (last));
cea49550
RH
3421
3422 *can_fallthru = false;
224e770b
RH
3423 while (NEXT_INSN (last))
3424 {
3425	      /* For instance an sqrt builtin expander expands a conditional with a
3426		 sibcall in the then-arm and a label for the else-arm.  */
3427 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
3428 {
3429 *can_fallthru = true;
3430 break;
3431 }
224e770b
RH
3432 delete_insn (NEXT_INSN (last));
3433 }
3434
fefa31b5
DM
3435 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3436 | EDGE_SIBCALL);
224e770b
RH
3437 e->probability += probability;
3438 e->count += count;
3439 BB_END (bb) = last;
3440 update_bb_for_insn (bb);
3441
3442 if (NEXT_INSN (last))
3443 {
3444 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3445
3446 last = BB_END (bb);
3447 if (BARRIER_P (last))
3448 BB_END (bb) = PREV_INSN (last);
3449 }
3450
726a989a 3451 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 3452
224e770b 3453 return bb;
80c7a9eb
RH
3454}
3455
b5b8b0ac
AO
3456/* Return the difference between the floor and the truncated result of
3457 a signed division by OP1 with remainder MOD. */
3458static rtx
3459floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3460{
3461 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3462 return gen_rtx_IF_THEN_ELSE
3463 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3464 gen_rtx_IF_THEN_ELSE
3465 (mode, gen_rtx_LT (BImode,
3466 gen_rtx_DIV (mode, op1, mod),
3467 const0_rtx),
3468 constm1_rtx, const0_rtx),
3469 const0_rtx);
3470}
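/* A worked example of the adjustment above: for -7 / 2, truncating
   division gives quotient -3 with remainder MOD = -1.  OP1 / MOD
   = 2 / -1 is negative, so the adjustment is -1 and -3 + -1 = -4
   is the floor result.  If MOD is zero or the operands have the
   same sign, the adjustment is 0.  */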
3471
3472/* Return the difference between the ceil and the truncated result of
3473 a signed division by OP1 with remainder MOD. */
3474static rtx
3475ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3476{
3477 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3478 return gen_rtx_IF_THEN_ELSE
3479 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3480 gen_rtx_IF_THEN_ELSE
3481 (mode, gen_rtx_GT (BImode,
3482 gen_rtx_DIV (mode, op1, mod),
3483 const0_rtx),
3484 const1_rtx, const0_rtx),
3485 const0_rtx);
3486}
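/* A worked example of the adjustment above: for 7 / 2, truncating
   division gives quotient 3 with remainder MOD = 1.  OP1 / MOD
   = 2 / 1 is positive, so the adjustment is 1 and 3 + 1 = 4 is
   the ceiling result.  */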
3487
3488/* Return the difference between the ceil and the truncated result of
3489 an unsigned division by OP1 with remainder MOD. */
3490static rtx
3491ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3492{
3493 /* (mod != 0 ? 1 : 0) */
3494 return gen_rtx_IF_THEN_ELSE
3495 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3496 const1_rtx, const0_rtx);
3497}
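/* A worked example of the adjustment above: for unsigned 7 / 2 the
   remainder MOD = 1 is nonzero, so the adjustment is 1 and the
   ceiling result is 3 + 1 = 4; an exact division gets adjustment 0.  */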
3498
3499/* Return the difference between the rounded and the truncated result
3500 of a signed division by OP1 with remainder MOD. Halfway cases are
3501 rounded away from zero, rather than to the nearest even number. */
3502static rtx
3503round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3504{
3505 /* (abs (mod) >= abs (op1) - abs (mod)
3506 ? (op1 / mod > 0 ? 1 : -1)
3507 : 0) */
3508 return gen_rtx_IF_THEN_ELSE
3509 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3510 gen_rtx_MINUS (mode,
3511 gen_rtx_ABS (mode, op1),
3512 gen_rtx_ABS (mode, mod))),
3513 gen_rtx_IF_THEN_ELSE
3514 (mode, gen_rtx_GT (BImode,
3515 gen_rtx_DIV (mode, op1, mod),
3516 const0_rtx),
3517 const1_rtx, constm1_rtx),
3518 const0_rtx);
3519}
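/* A worked example of the adjustment above: for 7 / 2, MOD = 1 and
   abs (MOD) >= abs (OP1) - abs (MOD) holds (1 >= 1), so the halfway
   case 7 / 2 = 3.5 is adjusted by +1 to 4, away from zero.  For
   -7 / 2, OP1 / MOD is negative, so the adjustment is -1 and the
   rounded result is -4.  */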
3520
3521/* Return the difference between the rounded and the truncated result
3522	   of an unsigned division by OP1 with remainder MOD.  Halfway cases
3523 are rounded away from zero, rather than to the nearest even
3524 number. */
3525static rtx
3526round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3527{
3528 /* (mod >= op1 - mod ? 1 : 0) */
3529 return gen_rtx_IF_THEN_ELSE
3530 (mode, gen_rtx_GE (BImode, mod,
3531 gen_rtx_MINUS (mode, op1, mod)),
3532 const1_rtx, const0_rtx);
3533}
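/* A worked example of the adjustment above: for unsigned 7 / 2,
   MOD = 1 >= OP1 - MOD = 1, so the adjustment is 1 and 3.5 rounds
   up to 4; for 5 / 3, MOD = 2 >= 1, so 1.67 rounds up to 2.  */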
3534
dda2da58
AO
3535/* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
3536 any rtl. */
3537
3538static rtx
f61c6f34
JJ
3539convert_debug_memory_address (enum machine_mode mode, rtx x,
3540 addr_space_t as)
dda2da58
AO
3541{
3542 enum machine_mode xmode = GET_MODE (x);
3543
3544#ifndef POINTERS_EXTEND_UNSIGNED
f61c6f34
JJ
3545 gcc_assert (mode == Pmode
3546 || mode == targetm.addr_space.address_mode (as));
dda2da58
AO
3547 gcc_assert (xmode == mode || xmode == VOIDmode);
3548#else
f61c6f34 3549 rtx temp;
f61c6f34 3550
639d4bb8 3551 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
dda2da58
AO
3552
3553 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3554 return x;
3555
69660a70 3556 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
dda2da58
AO
3557 x = simplify_gen_subreg (mode, x, xmode,
3558 subreg_lowpart_offset
3559 (mode, xmode));
3560 else if (POINTERS_EXTEND_UNSIGNED > 0)
3561 x = gen_rtx_ZERO_EXTEND (mode, x);
3562 else if (!POINTERS_EXTEND_UNSIGNED)
3563 x = gen_rtx_SIGN_EXTEND (mode, x);
3564 else
f61c6f34
JJ
3565 {
3566 switch (GET_CODE (x))
3567 {
3568 case SUBREG:
3569 if ((SUBREG_PROMOTED_VAR_P (x)
3570 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3571 || (GET_CODE (SUBREG_REG (x)) == PLUS
3572 && REG_P (XEXP (SUBREG_REG (x), 0))
3573 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3574 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3575 && GET_MODE (SUBREG_REG (x)) == mode)
3576 return SUBREG_REG (x);
3577 break;
3578 case LABEL_REF:
3579 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
3580 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3581 return temp;
3582 case SYMBOL_REF:
3583 temp = shallow_copy_rtx (x);
3584 PUT_MODE (temp, mode);
3585 return temp;
3586 case CONST:
3587 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3588 if (temp)
3589 temp = gen_rtx_CONST (mode, temp);
3590 return temp;
3591 case PLUS:
3592 case MINUS:
3593 if (CONST_INT_P (XEXP (x, 1)))
3594 {
3595 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3596 if (temp)
3597 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3598 }
3599 break;
3600 default:
3601 break;
3602 }
3603	  /* Don't know how to express ptr_extend as an operation in debug info.  */
3604 return NULL;
3605 }
dda2da58
AO
3606#endif /* POINTERS_EXTEND_UNSIGNED */
3607
3608 return x;
3609}
3610
12c5ffe5
EB
3611/* Return an RTX equivalent to the value of the parameter DECL. */
3612
3613static rtx
3614expand_debug_parm_decl (tree decl)
3615{
3616 rtx incoming = DECL_INCOMING_RTL (decl);
3617
3618 if (incoming
3619 && GET_MODE (incoming) != BLKmode
3620 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3621 || (MEM_P (incoming)
3622 && REG_P (XEXP (incoming, 0))
3623 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3624 {
3625 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3626
3627#ifdef HAVE_window_save
3628 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3629 If the target machine has an explicit window save instruction, the
3630 actual entry value is the corresponding OUTGOING_REGNO instead. */
3631 if (REG_P (incoming)
3632 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
3633 incoming
3634 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
3635 OUTGOING_REGNO (REGNO (incoming)), 0);
3636 else if (MEM_P (incoming))
3637 {
3638 rtx reg = XEXP (incoming, 0);
3639 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
3640 {
3641 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
3642 incoming = replace_equiv_address_nv (incoming, reg);
3643 }
6cfa417f
JJ
3644 else
3645 incoming = copy_rtx (incoming);
12c5ffe5
EB
3646 }
3647#endif
3648
3649 ENTRY_VALUE_EXP (rtl) = incoming;
3650 return rtl;
3651 }
3652
3653 if (incoming
3654 && GET_MODE (incoming) != BLKmode
3655 && !TREE_ADDRESSABLE (decl)
3656 && MEM_P (incoming)
3657 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3658 || (GET_CODE (XEXP (incoming, 0)) == PLUS
3659 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
3660 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
6cfa417f 3661 return copy_rtx (incoming);
12c5ffe5
EB
3662
3663 return NULL_RTX;
3664}
3665
3666/* Return an RTX equivalent to the value of the tree expression EXP. */
b5b8b0ac
AO
3667
3668static rtx
3669expand_debug_expr (tree exp)
3670{
3671 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3672 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2ba172e0 3673 enum machine_mode inner_mode = VOIDmode;
b5b8b0ac 3674 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
09e881c9 3675 addr_space_t as;
b5b8b0ac
AO
3676
3677 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3678 {
3679 case tcc_expression:
3680 switch (TREE_CODE (exp))
3681 {
3682 case COND_EXPR:
7ece48b1 3683 case DOT_PROD_EXPR:
0354c0c7
BS
3684 case WIDEN_MULT_PLUS_EXPR:
3685 case WIDEN_MULT_MINUS_EXPR:
0f59b812 3686 case FMA_EXPR:
b5b8b0ac
AO
3687 goto ternary;
3688
3689 case TRUTH_ANDIF_EXPR:
3690 case TRUTH_ORIF_EXPR:
3691 case TRUTH_AND_EXPR:
3692 case TRUTH_OR_EXPR:
3693 case TRUTH_XOR_EXPR:
3694 goto binary;
3695
3696 case TRUTH_NOT_EXPR:
3697 goto unary;
3698
3699 default:
3700 break;
3701 }
3702 break;
3703
3704 ternary:
3705 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
3706 if (!op2)
3707 return NULL_RTX;
3708 /* Fall through. */
3709
3710 binary:
3711 case tcc_binary:
3712 case tcc_comparison:
3713 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3714 if (!op1)
3715 return NULL_RTX;
3716 /* Fall through. */
3717
3718 unary:
3719 case tcc_unary:
2ba172e0 3720 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
3721 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3722 if (!op0)
3723 return NULL_RTX;
3724 break;
3725
3726 case tcc_type:
3727 case tcc_statement:
3728 gcc_unreachable ();
3729
3730 case tcc_constant:
3731 case tcc_exceptional:
3732 case tcc_declaration:
3733 case tcc_reference:
3734 case tcc_vl_exp:
3735 break;
3736 }
3737
3738 switch (TREE_CODE (exp))
3739 {
3740 case STRING_CST:
3741 if (!lookup_constant_def (exp))
3742 {
e1b243a8
JJ
3743 if (strlen (TREE_STRING_POINTER (exp)) + 1
3744 != (size_t) TREE_STRING_LENGTH (exp))
3745 return NULL_RTX;
b5b8b0ac
AO
3746 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
3747 op0 = gen_rtx_MEM (BLKmode, op0);
3748 set_mem_attributes (op0, exp, 0);
3749 return op0;
3750 }
3751 /* Fall through... */
3752
3753 case INTEGER_CST:
3754 case REAL_CST:
3755 case FIXED_CST:
3756 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
3757 return op0;
3758
3759 case COMPLEX_CST:
3760 gcc_assert (COMPLEX_MODE_P (mode));
3761 op0 = expand_debug_expr (TREE_REALPART (exp));
b5b8b0ac 3762 op1 = expand_debug_expr (TREE_IMAGPART (exp));
b5b8b0ac
AO
3763 return gen_rtx_CONCAT (mode, op0, op1);
3764
0ca5af51
AO
3765 case DEBUG_EXPR_DECL:
3766 op0 = DECL_RTL_IF_SET (exp);
3767
3768 if (op0)
3769 return op0;
3770
3771 op0 = gen_rtx_DEBUG_EXPR (mode);
e4fb38bd 3772 DEBUG_EXPR_TREE_DECL (op0) = exp;
0ca5af51
AO
3773 SET_DECL_RTL (exp, op0);
3774
3775 return op0;
3776
b5b8b0ac
AO
3777 case VAR_DECL:
3778 case PARM_DECL:
3779 case FUNCTION_DECL:
3780 case LABEL_DECL:
3781 case CONST_DECL:
3782 case RESULT_DECL:
3783 op0 = DECL_RTL_IF_SET (exp);
3784
3785 /* This decl was probably optimized away. */
3786 if (!op0)
e1b243a8
JJ
3787 {
3788 if (TREE_CODE (exp) != VAR_DECL
3789 || DECL_EXTERNAL (exp)
3790 || !TREE_STATIC (exp)
3791 || !DECL_NAME (exp)
0fba566c 3792 || DECL_HARD_REGISTER (exp)
7d5fc814 3793 || DECL_IN_CONSTANT_POOL (exp)
0fba566c 3794 || mode == VOIDmode)
e1b243a8
JJ
3795 return NULL;
3796
b1aa0655 3797 op0 = make_decl_rtl_for_debug (exp);
e1b243a8
JJ
3798 if (!MEM_P (op0)
3799 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
3800 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
3801 return NULL;
3802 }
3803 else
3804 op0 = copy_rtx (op0);
b5b8b0ac 3805
06796564
JJ
3806 if (GET_MODE (op0) == BLKmode
3807	  /* If op0 is not BLKmode but mode is BLKmode, adjust_mode
3808 below would ICE. While it is likely a FE bug,
3809 try to be robust here. See PR43166. */
132b4e82
JJ
3810 || mode == BLKmode
3811 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
b5b8b0ac
AO
3812 {
3813 gcc_assert (MEM_P (op0));
3814 op0 = adjust_address_nv (op0, mode, 0);
3815 return op0;
3816 }
3817
3818 /* Fall through. */
3819
3820 adjust_mode:
3821 case PAREN_EXPR:
3822 case NOP_EXPR:
3823 case CONVERT_EXPR:
3824 {
2ba172e0 3825 inner_mode = GET_MODE (op0);
b5b8b0ac
AO
3826
3827 if (mode == inner_mode)
3828 return op0;
3829
3830 if (inner_mode == VOIDmode)
3831 {
2a8e30fb
MM
3832 if (TREE_CODE (exp) == SSA_NAME)
3833 inner_mode = TYPE_MODE (TREE_TYPE (exp));
3834 else
3835 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
3836 if (mode == inner_mode)
3837 return op0;
3838 }
3839
3840 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3841 {
3842 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3843 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3844 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3845 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3846 else
3847 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3848 }
3849 else if (FLOAT_MODE_P (mode))
3850 {
2a8e30fb 3851 gcc_assert (TREE_CODE (exp) != SSA_NAME);
b5b8b0ac
AO
3852 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3853 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
3854 else
3855 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
3856 }
3857 else if (FLOAT_MODE_P (inner_mode))
3858 {
3859 if (unsignedp)
3860 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3861 else
3862 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3863 }
3864 else if (CONSTANT_P (op0)
69660a70 3865 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
b5b8b0ac
AO
3866 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3867 subreg_lowpart_offset (mode,
3868 inner_mode));
1b47fe3f
JJ
3869 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
3870 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
3871 : unsignedp)
2ba172e0 3872 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
b5b8b0ac 3873 else
2ba172e0 3874 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
b5b8b0ac
AO
3875
3876 return op0;
3877 }
3878
70f34814 3879 case MEM_REF:
71f3a3f5
JJ
3880 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
3881 {
3882 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
3883 TREE_OPERAND (exp, 0),
3884 TREE_OPERAND (exp, 1));
3885 if (newexp)
3886 return expand_debug_expr (newexp);
3887 }
3888 /* FALLTHROUGH */
b5b8b0ac 3889 case INDIRECT_REF:
0a81f074 3890 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
3891 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3892 if (!op0)
3893 return NULL;
3894
cb115041
JJ
3895 if (TREE_CODE (exp) == MEM_REF)
3896 {
583ac69c
JJ
3897 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
3898 || (GET_CODE (op0) == PLUS
3899 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
3900 /* (mem (debug_implicit_ptr)) might confuse aliasing.
3901 Instead just use get_inner_reference. */
3902 goto component_ref;
3903
cb115041
JJ
3904 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3905 if (!op1 || !CONST_INT_P (op1))
3906 return NULL;
3907
0a81f074 3908 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
cb115041
JJ
3909 }
3910
09e881c9 3911 if (POINTER_TYPE_P (TREE_TYPE (exp)))
75421dcd 3912 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
09e881c9 3913 else
75421dcd 3914 as = ADDR_SPACE_GENERIC;
b5b8b0ac 3915
f61c6f34
JJ
3916 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
3917 op0, as);
3918 if (op0 == NULL_RTX)
3919 return NULL;
b5b8b0ac 3920
f61c6f34 3921 op0 = gen_rtx_MEM (mode, op0);
b5b8b0ac 3922 set_mem_attributes (op0, exp, 0);
71f3a3f5
JJ
3923 if (TREE_CODE (exp) == MEM_REF
3924 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
3925 set_mem_expr (op0, NULL_TREE);
09e881c9 3926 set_mem_addr_space (op0, as);
b5b8b0ac
AO
3927
3928 return op0;
3929
3930 case TARGET_MEM_REF:
4d948885
RG
3931 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
3932 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
b5b8b0ac
AO
3933 return NULL;
3934
3935 op0 = expand_debug_expr
4e25ca6b 3936 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
b5b8b0ac
AO
3937 if (!op0)
3938 return NULL;
3939
f61c6f34
JJ
3940 if (POINTER_TYPE_P (TREE_TYPE (exp)))
3941 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
3942 else
3943 as = ADDR_SPACE_GENERIC;
3944
3945 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
3946 op0, as);
3947 if (op0 == NULL_RTX)
3948 return NULL;
b5b8b0ac
AO
3949
3950 op0 = gen_rtx_MEM (mode, op0);
3951
3952 set_mem_attributes (op0, exp, 0);
09e881c9 3953 set_mem_addr_space (op0, as);
b5b8b0ac
AO
3954
3955 return op0;
3956
583ac69c 3957 component_ref:
b5b8b0ac
AO
3958 case ARRAY_REF:
3959 case ARRAY_RANGE_REF:
3960 case COMPONENT_REF:
3961 case BIT_FIELD_REF:
3962 case REALPART_EXPR:
3963 case IMAGPART_EXPR:
3964 case VIEW_CONVERT_EXPR:
3965 {
3966 enum machine_mode mode1;
3967 HOST_WIDE_INT bitsize, bitpos;
3968 tree offset;
3969 int volatilep = 0;
3970 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
b3ecff82 3971 &mode1, &unsignedp, &volatilep, false);
b5b8b0ac
AO
3972 rtx orig_op0;
3973
4f2a9af8
JJ
3974 if (bitsize == 0)
3975 return NULL;
3976
b5b8b0ac
AO
3977 orig_op0 = op0 = expand_debug_expr (tem);
3978
3979 if (!op0)
3980 return NULL;
3981
3982 if (offset)
3983 {
dda2da58
AO
3984 enum machine_mode addrmode, offmode;
3985
aa847cc8
JJ
3986 if (!MEM_P (op0))
3987 return NULL;
b5b8b0ac 3988
dda2da58
AO
3989 op0 = XEXP (op0, 0);
3990 addrmode = GET_MODE (op0);
3991 if (addrmode == VOIDmode)
3992 addrmode = Pmode;
3993
b5b8b0ac
AO
3994 op1 = expand_debug_expr (offset);
3995 if (!op1)
3996 return NULL;
3997
dda2da58
AO
3998 offmode = GET_MODE (op1);
3999 if (offmode == VOIDmode)
4000 offmode = TYPE_MODE (TREE_TYPE (offset));
4001
4002 if (addrmode != offmode)
4003 op1 = simplify_gen_subreg (addrmode, op1, offmode,
4004 subreg_lowpart_offset (addrmode,
4005 offmode));
4006
4007	    /* Don't use offset_address here; we don't need a
4008 recognizable address, and we don't want to generate
4009 code. */
2ba172e0
JJ
4010 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4011 op0, op1));
b5b8b0ac
AO
4012 }
4013
4014 if (MEM_P (op0))
4015 {
4f2a9af8
JJ
4016 if (mode1 == VOIDmode)
4017 /* Bitfield. */
4018 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
b5b8b0ac
AO
4019 if (bitpos >= BITS_PER_UNIT)
4020 {
4021 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4022 bitpos %= BITS_PER_UNIT;
4023 }
4024 else if (bitpos < 0)
4025 {
4f2a9af8
JJ
4026 HOST_WIDE_INT units
4027 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
b5b8b0ac
AO
4028 op0 = adjust_address_nv (op0, mode1, units);
4029 bitpos += units * BITS_PER_UNIT;
4030 }
4031 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4032 op0 = adjust_address_nv (op0, mode, 0);
4033 else if (GET_MODE (op0) != mode1)
4034 op0 = adjust_address_nv (op0, mode1, 0);
4035 else
4036 op0 = copy_rtx (op0);
4037 if (op0 == orig_op0)
4038 op0 = shallow_copy_rtx (op0);
4039 set_mem_attributes (op0, exp, 0);
4040 }
4041
4042 if (bitpos == 0 && mode == GET_MODE (op0))
4043 return op0;
4044
2d3fc6aa
JJ
4045 if (bitpos < 0)
4046 return NULL;
4047
88c04a5d
JJ
4048 if (GET_MODE (op0) == BLKmode)
4049 return NULL;
4050
b5b8b0ac
AO
4051 if ((bitpos % BITS_PER_UNIT) == 0
4052 && bitsize == GET_MODE_BITSIZE (mode1))
4053 {
4054 enum machine_mode opmode = GET_MODE (op0);
4055
b5b8b0ac 4056 if (opmode == VOIDmode)
9712cba0 4057 opmode = TYPE_MODE (TREE_TYPE (tem));
b5b8b0ac
AO
4058
4059 /* This condition may hold if we're expanding the address
4060 right past the end of an array that turned out not to
4061 be addressable (i.e., the address was only computed in
4062 debug stmts). The gen_subreg below would rightfully
4063 crash, and the address doesn't really exist, so just
4064 drop it. */
4065 if (bitpos >= GET_MODE_BITSIZE (opmode))
4066 return NULL;
4067
7d5d39bb
JJ
4068 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4069 return simplify_gen_subreg (mode, op0, opmode,
4070 bitpos / BITS_PER_UNIT);
b5b8b0ac
AO
4071 }
4072
4073 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4074 && TYPE_UNSIGNED (TREE_TYPE (exp))
4075 ? SIGN_EXTRACT
4076 : ZERO_EXTRACT, mode,
4077 GET_MODE (op0) != VOIDmode
9712cba0
JJ
4078 ? GET_MODE (op0)
4079 : TYPE_MODE (TREE_TYPE (tem)),
b5b8b0ac
AO
4080 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4081 }
4082
b5b8b0ac 4083 case ABS_EXPR:
2ba172e0 4084 return simplify_gen_unary (ABS, mode, op0, mode);
b5b8b0ac
AO
4085
4086 case NEGATE_EXPR:
2ba172e0 4087 return simplify_gen_unary (NEG, mode, op0, mode);
b5b8b0ac
AO
4088
4089 case BIT_NOT_EXPR:
2ba172e0 4090 return simplify_gen_unary (NOT, mode, op0, mode);
b5b8b0ac
AO
4091
4092 case FLOAT_EXPR:
2ba172e0
JJ
4093 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4094 0)))
4095 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4096 inner_mode);
b5b8b0ac
AO
4097
4098 case FIX_TRUNC_EXPR:
2ba172e0
JJ
4099 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4100 inner_mode);
b5b8b0ac
AO
4101
4102 case POINTER_PLUS_EXPR:
576319a7
DD
4103 /* For the rare target where pointers are not the same size as
4104 size_t, we need to check for mis-matched modes and correct
4105 the addend. */
4106 if (op0 && op1
4107 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4108 && GET_MODE (op0) != GET_MODE (op1))
4109 {
8369f38a
DD
4110 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4111 /* If OP0 is a partial mode, then we must truncate, even if it has
4112 the same bitsize as OP1 as GCC's representation of partial modes
4113 is opaque. */
4114 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4115 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
2ba172e0
JJ
4116 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4117 GET_MODE (op1));
576319a7
DD
4118 else
4119 /* We always sign-extend, regardless of the signedness of
4120 the operand, because the operand is always unsigned
4121 here even if the original C expression is signed. */
2ba172e0
JJ
4122 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4123 GET_MODE (op1));
576319a7
DD
4124 }
4125 /* Fall through. */
b5b8b0ac 4126 case PLUS_EXPR:
2ba172e0 4127 return simplify_gen_binary (PLUS, mode, op0, op1);
b5b8b0ac
AO
4128
4129 case MINUS_EXPR:
2ba172e0 4130 return simplify_gen_binary (MINUS, mode, op0, op1);
b5b8b0ac
AO
4131
4132 case MULT_EXPR:
2ba172e0 4133 return simplify_gen_binary (MULT, mode, op0, op1);
b5b8b0ac
AO
4134
4135 case RDIV_EXPR:
4136 case TRUNC_DIV_EXPR:
4137 case EXACT_DIV_EXPR:
4138 if (unsignedp)
2ba172e0 4139 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac 4140 else
2ba172e0 4141 return simplify_gen_binary (DIV, mode, op0, op1);
b5b8b0ac
AO
4142
4143 case TRUNC_MOD_EXPR:
2ba172e0 4144 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
b5b8b0ac
AO
4145
4146 case FLOOR_DIV_EXPR:
4147 if (unsignedp)
2ba172e0 4148 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac
AO
4149 else
4150 {
2ba172e0
JJ
4151 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4152 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4153 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0 4154 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4155 }
4156
4157 case FLOOR_MOD_EXPR:
4158 if (unsignedp)
2ba172e0 4159 return simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac
AO
4160 else
4161 {
2ba172e0 4162 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4163 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4164 adj = simplify_gen_unary (NEG, mode,
4165 simplify_gen_binary (MULT, mode, adj, op1),
4166 mode);
4167 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4168 }
4169
4170 case CEIL_DIV_EXPR:
4171 if (unsignedp)
4172 {
2ba172e0
JJ
4173 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4174 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4175 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0 4176 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4177 }
4178 else
4179 {
2ba172e0
JJ
4180 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4181 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4182 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0 4183 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4184 }
4185
4186 case CEIL_MOD_EXPR:
4187 if (unsignedp)
4188 {
2ba172e0 4189 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4190 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
4191 adj = simplify_gen_unary (NEG, mode,
4192 simplify_gen_binary (MULT, mode, adj, op1),
4193 mode);
4194 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4195 }
4196 else
4197 {
2ba172e0 4198 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4199 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4200 adj = simplify_gen_unary (NEG, mode,
4201 simplify_gen_binary (MULT, mode, adj, op1),
4202 mode);
4203 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4204 }
4205
4206 case ROUND_DIV_EXPR:
4207 if (unsignedp)
4208 {
2ba172e0
JJ
4209 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4210 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4211 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0 4212 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4213 }
4214 else
4215 {
2ba172e0
JJ
4216 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4217 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4218 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0 4219 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4220 }
4221
4222 case ROUND_MOD_EXPR:
4223 if (unsignedp)
4224 {
2ba172e0 4225 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4226 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
4227 adj = simplify_gen_unary (NEG, mode,
4228 simplify_gen_binary (MULT, mode, adj, op1),
4229 mode);
4230 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4231 }
4232 else
4233 {
2ba172e0 4234 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4235 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4236 adj = simplify_gen_unary (NEG, mode,
4237 simplify_gen_binary (MULT, mode, adj, op1),
4238 mode);
4239 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4240 }
4241
4242 case LSHIFT_EXPR:
2ba172e0 4243 return simplify_gen_binary (ASHIFT, mode, op0, op1);
b5b8b0ac
AO
4244
4245 case RSHIFT_EXPR:
4246 if (unsignedp)
2ba172e0 4247 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
b5b8b0ac 4248 else
2ba172e0 4249 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
b5b8b0ac
AO
4250
4251 case LROTATE_EXPR:
2ba172e0 4252 return simplify_gen_binary (ROTATE, mode, op0, op1);
b5b8b0ac
AO
4253
4254 case RROTATE_EXPR:
2ba172e0 4255 return simplify_gen_binary (ROTATERT, mode, op0, op1);
b5b8b0ac
AO
4256
4257 case MIN_EXPR:
2ba172e0 4258 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
b5b8b0ac
AO
4259
4260 case MAX_EXPR:
2ba172e0 4261 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
b5b8b0ac
AO
4262
4263 case BIT_AND_EXPR:
4264 case TRUTH_AND_EXPR:
2ba172e0 4265 return simplify_gen_binary (AND, mode, op0, op1);
b5b8b0ac
AO
4266
4267 case BIT_IOR_EXPR:
4268 case TRUTH_OR_EXPR:
2ba172e0 4269 return simplify_gen_binary (IOR, mode, op0, op1);
b5b8b0ac
AO
4270
4271 case BIT_XOR_EXPR:
4272 case TRUTH_XOR_EXPR:
2ba172e0 4273 return simplify_gen_binary (XOR, mode, op0, op1);
b5b8b0ac
AO
4274
4275 case TRUTH_ANDIF_EXPR:
4276 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4277
4278 case TRUTH_ORIF_EXPR:
4279 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4280
4281 case TRUTH_NOT_EXPR:
2ba172e0 4282 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
b5b8b0ac
AO
4283
4284 case LT_EXPR:
2ba172e0
JJ
4285 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4286 op0, op1);
b5b8b0ac
AO
4287
4288 case LE_EXPR:
2ba172e0
JJ
4289 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4290 op0, op1);
b5b8b0ac
AO
4291
4292 case GT_EXPR:
2ba172e0
JJ
4293 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4294 op0, op1);
b5b8b0ac
AO
4295
4296 case GE_EXPR:
2ba172e0
JJ
4297 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4298 op0, op1);
b5b8b0ac
AO
4299
4300 case EQ_EXPR:
2ba172e0 4301 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4302
4303 case NE_EXPR:
2ba172e0 4304 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4305
4306 case UNORDERED_EXPR:
2ba172e0 4307 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4308
4309 case ORDERED_EXPR:
2ba172e0 4310 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4311
4312 case UNLT_EXPR:
2ba172e0 4313 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4314
4315 case UNLE_EXPR:
2ba172e0 4316 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4317
4318 case UNGT_EXPR:
2ba172e0 4319 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4320
4321 case UNGE_EXPR:
2ba172e0 4322 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4323
4324 case UNEQ_EXPR:
2ba172e0 4325 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4326
4327 case LTGT_EXPR:
2ba172e0 4328 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4329
4330 case COND_EXPR:
4331 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4332
4333 case COMPLEX_EXPR:
4334 gcc_assert (COMPLEX_MODE_P (mode));
4335 if (GET_MODE (op0) == VOIDmode)
4336 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4337 if (GET_MODE (op1) == VOIDmode)
4338 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4339 return gen_rtx_CONCAT (mode, op0, op1);
4340
d02a5a4b
JJ
4341 case CONJ_EXPR:
4342 if (GET_CODE (op0) == CONCAT)
4343 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2ba172e0
JJ
4344 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4345 XEXP (op0, 1),
4346 GET_MODE_INNER (mode)));
d02a5a4b
JJ
4347 else
4348 {
4349 enum machine_mode imode = GET_MODE_INNER (mode);
4350 rtx re, im;
4351
4352 if (MEM_P (op0))
4353 {
4354 re = adjust_address_nv (op0, imode, 0);
4355 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4356 }
4357 else
4358 {
4359 enum machine_mode ifmode = int_mode_for_mode (mode);
4360 enum machine_mode ihmode = int_mode_for_mode (imode);
4361 rtx halfsize;
4362 if (ifmode == BLKmode || ihmode == BLKmode)
4363 return NULL;
4364 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4365 re = op0;
4366 if (mode != ifmode)
4367 re = gen_rtx_SUBREG (ifmode, re, 0);
4368 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4369 if (imode != ihmode)
4370 re = gen_rtx_SUBREG (imode, re, 0);
4371 im = copy_rtx (op0);
4372 if (mode != ifmode)
4373 im = gen_rtx_SUBREG (ifmode, im, 0);
4374 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4375 if (imode != ihmode)
4376 im = gen_rtx_SUBREG (imode, im, 0);
4377 }
4378 im = gen_rtx_NEG (imode, im);
4379 return gen_rtx_CONCAT (mode, re, im);
4380 }
4381
b5b8b0ac
AO
4382 case ADDR_EXPR:
4383 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4384 if (!op0 || !MEM_P (op0))
c8a27c40
JJ
4385 {
4386 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4387 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4388 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
f8cca67b
JJ
4389 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4390 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
c8a27c40
JJ
4391 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4392
4393 if (handled_component_p (TREE_OPERAND (exp, 0)))
4394 {
4395 HOST_WIDE_INT bitoffset, bitsize, maxsize;
4396 tree decl
4397 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
4398 &bitoffset, &bitsize, &maxsize);
4399 if ((TREE_CODE (decl) == VAR_DECL
4400 || TREE_CODE (decl) == PARM_DECL
4401 || TREE_CODE (decl) == RESULT_DECL)
f8cca67b
JJ
4402 && (!TREE_ADDRESSABLE (decl)
4403 || target_for_debug_bind (decl))
c8a27c40
JJ
4404 && (bitoffset % BITS_PER_UNIT) == 0
4405 && bitsize > 0
4406 && bitsize == maxsize)
0a81f074
RS
4407 {
4408 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4409 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4410 }
c8a27c40
JJ
4411 }
4412
9430b7ba
JJ
4413 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4414 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4415 == ADDR_EXPR)
4416 {
4417 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4418 0));
4419 if (op0 != NULL
4420 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4421 || (GET_CODE (op0) == PLUS
4422 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4423 && CONST_INT_P (XEXP (op0, 1)))))
4424 {
4425 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4426 1));
4427 if (!op1 || !CONST_INT_P (op1))
4428 return NULL;
4429
4430 return plus_constant (mode, op0, INTVAL (op1));
4431 }
4432 }
4433
c8a27c40
JJ
4434 return NULL;
4435 }
b5b8b0ac 4436
f61c6f34
JJ
4437 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
4438 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
dda2da58
AO
4439
4440 return op0;
b5b8b0ac
AO
4441
4442 case VECTOR_CST:
d2a12ae7
RG
4443 {
4444 unsigned i;
4445
4446 op0 = gen_rtx_CONCATN
4447 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4448
4449 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4450 {
4451 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4452 if (!op1)
4453 return NULL;
4454 XVECEXP (op0, 0, i) = op1;
4455 }
4456
4457 return op0;
4458 }
b5b8b0ac
AO
4459
4460 case CONSTRUCTOR:
47598145
MM
4461 if (TREE_CLOBBER_P (exp))
4462 return NULL;
4463 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
b5b8b0ac
AO
4464 {
4465 unsigned i;
4466 tree val;
4467
4468 op0 = gen_rtx_CONCATN
4469 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4470
4471 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4472 {
4473 op1 = expand_debug_expr (val);
4474 if (!op1)
4475 return NULL;
4476 XVECEXP (op0, 0, i) = op1;
4477 }
4478
4479 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4480 {
4481 op1 = expand_debug_expr
e8160c9a 4482 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
b5b8b0ac
AO
4483
4484 if (!op1)
4485 return NULL;
4486
4487 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4488 XVECEXP (op0, 0, i) = op1;
4489 }
4490
4491 return op0;
4492 }
4493 else
4494 goto flag_unsupported;
4495
4496 case CALL_EXPR:
4497 /* ??? Maybe handle some builtins? */
4498 return NULL;
4499
4500 case SSA_NAME:
4501 {
2a8e30fb
MM
4502 gimple g = get_gimple_for_ssa_name (exp);
4503 if (g)
4504 {
4505 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
4506 if (!op0)
4507 return NULL;
4508 }
4509 else
4510 {
4511 int part = var_to_partition (SA.map, exp);
b5b8b0ac 4512
2a8e30fb 4513 if (part == NO_PARTITION)
a58a8e4b
JJ
4514 {
4515	      /* If this is a reference to the incoming value of a parameter
4516	         that is never used in the code, or where the incoming
4517	         value itself is never used in the code, use the PARM_DECL's
4518	         DECL_RTL if set.  */
4519 if (SSA_NAME_IS_DEFAULT_DEF (exp)
4520 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
4521 {
12c5ffe5
EB
4522 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4523 if (op0)
4524 goto adjust_mode;
a58a8e4b 4525 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
12c5ffe5
EB
4526 if (op0)
4527 goto adjust_mode;
a58a8e4b
JJ
4528 }
4529 return NULL;
4530 }
b5b8b0ac 4531
2a8e30fb 4532 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
b5b8b0ac 4533
abfea58d 4534 op0 = copy_rtx (SA.partition_to_pseudo[part]);
2a8e30fb 4535 }
b5b8b0ac
AO
4536 goto adjust_mode;
4537 }
4538
4539 case ERROR_MARK:
4540 return NULL;
4541
7ece48b1
JJ
4542	    /* Vector stuff.  For most of these tree codes there are no corresponding rtl codes.  */
4543 case REALIGN_LOAD_EXPR:
4544 case REDUC_MAX_EXPR:
4545 case REDUC_MIN_EXPR:
4546 case REDUC_PLUS_EXPR:
4547 case VEC_COND_EXPR:
7ece48b1
JJ
4548 case VEC_LSHIFT_EXPR:
4549 case VEC_PACK_FIX_TRUNC_EXPR:
4550 case VEC_PACK_SAT_EXPR:
4551 case VEC_PACK_TRUNC_EXPR:
4552 case VEC_RSHIFT_EXPR:
4553 case VEC_UNPACK_FLOAT_HI_EXPR:
4554 case VEC_UNPACK_FLOAT_LO_EXPR:
4555 case VEC_UNPACK_HI_EXPR:
4556 case VEC_UNPACK_LO_EXPR:
4557 case VEC_WIDEN_MULT_HI_EXPR:
4558 case VEC_WIDEN_MULT_LO_EXPR:
3f30a9a6
RH
4559 case VEC_WIDEN_MULT_EVEN_EXPR:
4560 case VEC_WIDEN_MULT_ODD_EXPR:
36ba4aae
IR
4561 case VEC_WIDEN_LSHIFT_HI_EXPR:
4562 case VEC_WIDEN_LSHIFT_LO_EXPR:
3f3af9df 4563 case VEC_PERM_EXPR:
7ece48b1
JJ
4564 return NULL;
4565
98449720 4566 /* Misc codes. */
7ece48b1
JJ
4567 case ADDR_SPACE_CONVERT_EXPR:
4568 case FIXED_CONVERT_EXPR:
4569 case OBJ_TYPE_REF:
4570 case WITH_SIZE_EXPR:
4571 return NULL;
4572
4573 case DOT_PROD_EXPR:
4574 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4575 && SCALAR_INT_MODE_P (mode))
4576 {
2ba172e0
JJ
4577 op0
4578 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4579 0)))
4580 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4581 inner_mode);
4582 op1
4583 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4584 1)))
4585 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
4586 inner_mode);
4587 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4588 return simplify_gen_binary (PLUS, mode, op0, op2);
7ece48b1
JJ
4589 }
4590 return NULL;
4591
4592 case WIDEN_MULT_EXPR:
0354c0c7
BS
4593 case WIDEN_MULT_PLUS_EXPR:
4594 case WIDEN_MULT_MINUS_EXPR:
7ece48b1
JJ
4595 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4596 && SCALAR_INT_MODE_P (mode))
4597 {
2ba172e0 4598 inner_mode = GET_MODE (op0);
7ece48b1 4599 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5b58b39b 4600 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
7ece48b1 4601 else
5b58b39b 4602 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
7ece48b1 4603 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5b58b39b 4604 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
7ece48b1 4605 else
5b58b39b 4606 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
2ba172e0 4607 op0 = simplify_gen_binary (MULT, mode, op0, op1);
0354c0c7
BS
4608 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
4609 return op0;
4610 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
2ba172e0 4611 return simplify_gen_binary (PLUS, mode, op0, op2);
0354c0c7 4612 else
2ba172e0 4613 return simplify_gen_binary (MINUS, mode, op2, op0);
7ece48b1
JJ
4614 }
4615 return NULL;
4616
98449720
RH
4617 case MULT_HIGHPART_EXPR:
4618 /* ??? Similar to the above. */
4619 return NULL;
4620
7ece48b1 4621 case WIDEN_SUM_EXPR:
3f3af9df 4622 case WIDEN_LSHIFT_EXPR:
7ece48b1
JJ
4623 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4624 && SCALAR_INT_MODE_P (mode))
4625 {
2ba172e0
JJ
4626 op0
4627 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4628 0)))
4629 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4630 inner_mode);
3f3af9df
JJ
4631 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
4632 ? ASHIFT : PLUS, mode, op0, op1);
7ece48b1
JJ
4633 }
4634 return NULL;
4635
0f59b812 4636 case FMA_EXPR:
2ba172e0 4637 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
0f59b812 4638
b5b8b0ac
AO
4639 default:
4640 flag_unsupported:
4641#ifdef ENABLE_CHECKING
4642 debug_tree (exp);
4643 gcc_unreachable ();
4644#else
4645 return NULL;
4646#endif
4647 }
4648}
4649
ddb555ed
JJ
4650/* Return an RTX equivalent to the source bind value of the tree expression
4651 EXP. */
4652
4653static rtx
4654expand_debug_source_expr (tree exp)
4655{
4656 rtx op0 = NULL_RTX;
4657 enum machine_mode mode = VOIDmode, inner_mode;
4658
4659 switch (TREE_CODE (exp))
4660 {
4661 case PARM_DECL:
4662 {
ddb555ed 4663 mode = DECL_MODE (exp);
12c5ffe5
EB
4664 op0 = expand_debug_parm_decl (exp);
4665 if (op0)
4666 break;
ddb555ed
JJ
4667 /* See if this isn't an argument that has been completely
4668 optimized out. */
4669 if (!DECL_RTL_SET_P (exp)
12c5ffe5 4670 && !DECL_INCOMING_RTL (exp)
ddb555ed
JJ
4671 && DECL_ABSTRACT_ORIGIN (current_function_decl))
4672 {
7b575cfa 4673 tree aexp = DECL_ORIGIN (exp);
ddb555ed
JJ
4674 if (DECL_CONTEXT (aexp)
4675 == DECL_ABSTRACT_ORIGIN (current_function_decl))
4676 {
9771b263 4677 vec<tree, va_gc> **debug_args;
ddb555ed
JJ
4678 unsigned int ix;
4679 tree ddecl;
ddb555ed
JJ
4680 debug_args = decl_debug_args_lookup (current_function_decl);
4681 if (debug_args != NULL)
4682 {
9771b263 4683 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
ddb555ed
JJ
4684 ix += 2)
4685 if (ddecl == aexp)
4686 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
4687 }
4688 }
4689 }
4690 break;
4691 }
4692 default:
4693 break;
4694 }
4695
4696 if (op0 == NULL_RTX)
4697 return NULL_RTX;
4698
4699 inner_mode = GET_MODE (op0);
4700 if (mode == inner_mode)
4701 return op0;
4702
4703 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4704 {
4705 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4706 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4707 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4708 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4709 else
4710 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4711 }
4712 else if (FLOAT_MODE_P (mode))
4713 gcc_unreachable ();
4714 else if (FLOAT_MODE_P (inner_mode))
4715 {
4716 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4717 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4718 else
4719 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4720 }
4721 else if (CONSTANT_P (op0)
4722 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
4723 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4724 subreg_lowpart_offset (mode, inner_mode));
4725 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4726 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4727 else
4728 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4729
4730 return op0;
4731}
4732
6cfa417f
JJ
4733/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
4734 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
4735 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
4736
4737static void
4738avoid_complex_debug_insns (rtx insn, rtx *exp_p, int depth)
4739{
4740 rtx exp = *exp_p;
4741
4742 if (exp == NULL_RTX)
4743 return;
4744
4745 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
4746 return;
4747
4748 if (depth == 4)
4749 {
4750 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
4751 rtx dval = make_debug_expr_from_rtl (exp);
4752
4753 /* Emit a debug bind insn before INSN. */
4754 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
4755 DEBUG_EXPR_TREE_DECL (dval), exp,
4756 VAR_INIT_STATUS_INITIALIZED);
4757
4758 emit_debug_insn_before (bind, insn);
4759 *exp_p = dval;
4760 return;
4761 }
4762
4763 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
4764 int i, j;
4765 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
4766 switch (*format_ptr++)
4767 {
4768 case 'e':
4769 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
4770 break;
4771
4772 case 'E':
4773 case 'V':
4774 for (j = 0; j < XVECLEN (exp, i); j++)
4775 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
4776 break;
4777
4778 default:
4779 break;
4780 }
4781}
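/* Illustrative sketch of the effect above (hypothetical expression, for
   exposition only): if INSN_VAR_LOCATION_LOC (insn) were something like
     (plus (mult (plus (mult (plus (reg A) (const_int 1))
                             (const_int 4))
                       (reg B))
                 (const_int 2))
           (reg C))
   the innermost (plus (reg A) (const_int 1)) sits at depth 4, so it is
   replaced by a fresh DEBUG_EXPR whose value is bound by a debug insn
   emitted just before INSN; leaves such as registers and constants are
   left alone.  */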
4782
b5b8b0ac
AO
4783/* Expand the _LOCs in debug insns. We run this after expanding all
4784 regular insns, so that any variables referenced in the function
4785 will have their DECL_RTLs set. */
4786
4787static void
4788expand_debug_locations (void)
4789{
4790 rtx insn;
4791 rtx last = get_last_insn ();
4792 int save_strict_alias = flag_strict_aliasing;
4793
4794 /* New alias sets while setting up memory attributes cause
4795	     -fcompare-debug failures, even though they don't bring about any
4796 codegen changes. */
4797 flag_strict_aliasing = 0;
4798
4799 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4800 if (DEBUG_INSN_P (insn))
4801 {
4802 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
6cfa417f 4803 rtx val, prev_insn, insn2;
b5b8b0ac
AO
4804 enum machine_mode mode;
4805
4806 if (value == NULL_TREE)
4807 val = NULL_RTX;
4808 else
4809 {
ddb555ed
JJ
4810 if (INSN_VAR_LOCATION_STATUS (insn)
4811 == VAR_INIT_STATUS_UNINITIALIZED)
4812 val = expand_debug_source_expr (value);
4813 else
4814 val = expand_debug_expr (value);
b5b8b0ac
AO
4815 gcc_assert (last == get_last_insn ());
4816 }
4817
4818 if (!val)
4819 val = gen_rtx_UNKNOWN_VAR_LOC ();
4820 else
4821 {
4822 mode = GET_MODE (INSN_VAR_LOCATION (insn));
4823
4824 gcc_assert (mode == GET_MODE (val)
4825 || (GET_MODE (val) == VOIDmode
33ffb5c5 4826 && (CONST_SCALAR_INT_P (val)
b5b8b0ac 4827 || GET_CODE (val) == CONST_FIXED
b5b8b0ac
AO
4828 || GET_CODE (val) == LABEL_REF)));
4829 }
4830
4831 INSN_VAR_LOCATION_LOC (insn) = val;
6cfa417f
JJ
4832 prev_insn = PREV_INSN (insn);
4833 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
4834 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
b5b8b0ac
AO
4835 }
4836
4837 flag_strict_aliasing = save_strict_alias;
4838}
4839
242229bb
JH
4840/* Expand basic block BB from GIMPLE trees to RTL. */
4841
4842static basic_block
f3ddd692 4843expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
242229bb 4844{
726a989a
RB
4845 gimple_stmt_iterator gsi;
4846 gimple_seq stmts;
4847 gimple stmt = NULL;
242229bb
JH
4848 rtx note, last;
4849 edge e;
628f6a4e 4850 edge_iterator ei;
8b11009b 4851 void **elt;
242229bb
JH
4852
4853 if (dump_file)
726a989a
RB
4854 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
4855 bb->index);
4856
4857 /* Note that since we are now transitioning from GIMPLE to RTL, we
4858 cannot use the gsi_*_bb() routines because they expect the basic
4859 block to be in GIMPLE, instead of RTL. Therefore, we need to
4860 access the BB sequence directly. */
4861 stmts = bb_seq (bb);
3e8b732e
MM
4862 bb->il.gimple.seq = NULL;
4863 bb->il.gimple.phi_nodes = NULL;
bf08ebeb 4864 rtl_profile_for_bb (bb);
5e2d947c
JH
4865 init_rtl_bb_info (bb);
4866 bb->flags |= BB_RTL;
4867
a9b77cd1
ZD
4868	  /* Remove the RETURN_EXPR if we may fall through to the exit
4869 instead. */
726a989a
RB
4870 gsi = gsi_last (stmts);
4871 if (!gsi_end_p (gsi)
4872 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
a9b77cd1 4873 {
726a989a 4874 gimple ret_stmt = gsi_stmt (gsi);
a9b77cd1
ZD
4875
4876 gcc_assert (single_succ_p (bb));
fefa31b5 4877 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
a9b77cd1 4878
fefa31b5 4879 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
726a989a 4880 && !gimple_return_retval (ret_stmt))
a9b77cd1 4881 {
726a989a 4882 gsi_remove (&gsi, false);
a9b77cd1
ZD
4883 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
4884 }
4885 }
4886
726a989a
RB
4887 gsi = gsi_start (stmts);
4888 if (!gsi_end_p (gsi))
8b11009b 4889 {
726a989a
RB
4890 stmt = gsi_stmt (gsi);
4891 if (gimple_code (stmt) != GIMPLE_LABEL)
4892 stmt = NULL;
8b11009b 4893 }
242229bb 4894
8b11009b
ZD
4895 elt = pointer_map_contains (lab_rtx_for_bb, bb);
4896
4897 if (stmt || elt)
242229bb
JH
4898 {
4899 last = get_last_insn ();
4900
8b11009b
ZD
4901 if (stmt)
4902 {
28ed065e 4903 expand_gimple_stmt (stmt);
726a989a 4904 gsi_next (&gsi);
8b11009b
ZD
4905 }
4906
4907 if (elt)
ae50c0cb 4908 emit_label ((rtx) *elt);
242229bb 4909
caf93cb0 4910	      /* Java emits line number notes at the top of labels.
c22cacf3 4911 ??? Make this go away once line number notes are obsoleted. */
242229bb 4912 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 4913 if (NOTE_P (BB_HEAD (bb)))
242229bb 4914 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
242229bb 4915 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528 4916
726a989a 4917 maybe_dump_rtl_for_gimple_stmt (stmt, last);
242229bb
JH
4918 }
4919 else
4920 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
4921
4922 NOTE_BASIC_BLOCK (note) = bb;
4923
726a989a 4924 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 4925 {
cea49550 4926 basic_block new_bb;
242229bb 4927
b5b8b0ac 4928 stmt = gsi_stmt (gsi);
2a8e30fb
MM
4929
4930 /* If this statement is a non-debug one, and we generate debug
4931 insns, then this one might be the last real use of a TERed
4932 SSA_NAME, but where there are still some debug uses further
4933 down. Expanding the current SSA name in such further debug
4934 uses by their RHS might lead to wrong debug info, as coalescing
4935 might make the operands of such RHS be placed into the same
4936 pseudo as something else. Like so:
4937 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
4938 use(a_1);
4939 a_2 = ...
4940 #DEBUG ... => a_1
4941 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
4942	 If we now were to expand a_1 by its RHS (a_0 + 1) in the debug use,
4943 the write to a_2 would actually have clobbered the place which
4944 formerly held a_0.
4945
4946 So, instead of that, we recognize the situation, and generate
4947 debug temporaries at the last real use of TERed SSA names:
4948 a_1 = a_0 + 1;
4949 #DEBUG #D1 => a_1
4950 use(a_1);
4951 a_2 = ...
4952 #DEBUG ... => #D1
4953 */
4954 if (MAY_HAVE_DEBUG_INSNS
4955 && SA.values
4956 && !is_gimple_debug (stmt))
4957 {
4958 ssa_op_iter iter;
4959 tree op;
4960 gimple def;
4961
5368224f 4962 location_t sloc = curr_insn_location ();
2a8e30fb
MM
4963
4964 /* Look for SSA names that have their last use here (TERed
4965 names always have only one real use). */
4966 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4967 if ((def = get_gimple_for_ssa_name (op)))
4968 {
4969 imm_use_iterator imm_iter;
4970 use_operand_p use_p;
4971 bool have_debug_uses = false;
4972
4973 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
4974 {
4975 if (gimple_debug_bind_p (USE_STMT (use_p)))
4976 {
4977 have_debug_uses = true;
4978 break;
4979 }
4980 }
4981
4982 if (have_debug_uses)
4983 {
4984	 	      /* OP is a TERed SSA name, with DEF its defining
4985 statement, and where OP is used in further debug
4986 instructions. Generate a debug temporary, and
4987 replace all uses of OP in debug insns with that
4988 temporary. */
4989 gimple debugstmt;
4990 tree value = gimple_assign_rhs_to_tree (def);
4991 tree vexpr = make_node (DEBUG_EXPR_DECL);
4992 rtx val;
4993 enum machine_mode mode;
4994
5368224f 4995 set_curr_insn_location (gimple_location (def));
2a8e30fb
MM
4996
4997 DECL_ARTIFICIAL (vexpr) = 1;
4998 TREE_TYPE (vexpr) = TREE_TYPE (value);
4999 if (DECL_P (value))
5000 mode = DECL_MODE (value);
5001 else
5002 mode = TYPE_MODE (TREE_TYPE (value));
5003 DECL_MODE (vexpr) = mode;
5004
5005 val = gen_rtx_VAR_LOCATION
5006 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5007
e8c6bb74 5008 emit_debug_insn (val);
2a8e30fb
MM
5009
5010 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5011 {
5012 if (!gimple_debug_bind_p (debugstmt))
5013 continue;
5014
5015 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5016 SET_USE (use_p, vexpr);
5017
5018 update_stmt (debugstmt);
5019 }
5020 }
5021 }
5368224f 5022 set_curr_insn_location (sloc);
2a8e30fb
MM
5023 }
5024
a5883ba0 5025 currently_expanding_gimple_stmt = stmt;
b5b8b0ac 5026
242229bb
JH
5027 /* Expand this statement, then evaluate the resulting RTL and
5028 fixup the CFG accordingly. */
726a989a 5029 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 5030 {
726a989a 5031 new_bb = expand_gimple_cond (bb, stmt);
cea49550
RH
5032 if (new_bb)
5033 return new_bb;
5034 }
b5b8b0ac
AO
5035 else if (gimple_debug_bind_p (stmt))
5036 {
5368224f 5037 location_t sloc = curr_insn_location ();
b5b8b0ac
AO
5038 gimple_stmt_iterator nsi = gsi;
5039
5040 for (;;)
5041 {
5042 tree var = gimple_debug_bind_get_var (stmt);
5043 tree value;
5044 rtx val;
5045 enum machine_mode mode;
5046
ec8c1492
JJ
5047 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5048 && TREE_CODE (var) != LABEL_DECL
5049 && !target_for_debug_bind (var))
5050 goto delink_debug_stmt;
5051
b5b8b0ac
AO
5052 if (gimple_debug_bind_has_value_p (stmt))
5053 value = gimple_debug_bind_get_value (stmt);
5054 else
5055 value = NULL_TREE;
5056
5057 last = get_last_insn ();
5058
5368224f 5059 set_curr_insn_location (gimple_location (stmt));
b5b8b0ac
AO
5060
5061 if (DECL_P (var))
5062 mode = DECL_MODE (var);
5063 else
5064 mode = TYPE_MODE (TREE_TYPE (var));
5065
5066 val = gen_rtx_VAR_LOCATION
5067 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5068
e16b6fd0 5069 emit_debug_insn (val);
b5b8b0ac
AO
5070
5071 if (dump_file && (dump_flags & TDF_DETAILS))
5072 {
5073 /* We can't dump the insn with a TREE where an RTX
5074 is expected. */
e8c6bb74 5075 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
b5b8b0ac 5076 maybe_dump_rtl_for_gimple_stmt (stmt, last);
e8c6bb74 5077 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
b5b8b0ac
AO
5078 }
5079
ec8c1492 5080 delink_debug_stmt:
2a8e30fb
MM
5081 /* In order not to generate too many debug temporaries,
5082 we delink all uses of debug statements we already expanded.
5083 Therefore debug statements between definition and real
5084 use of TERed SSA names will continue to use the SSA name,
5085 and not be replaced with debug temps. */
5086 delink_stmt_imm_use (stmt);
5087
b5b8b0ac
AO
5088 gsi = nsi;
5089 gsi_next (&nsi);
5090 if (gsi_end_p (nsi))
5091 break;
5092 stmt = gsi_stmt (nsi);
5093 if (!gimple_debug_bind_p (stmt))
5094 break;
5095 }
5096
5368224f 5097 set_curr_insn_location (sloc);
ddb555ed
JJ
5098 }
5099 else if (gimple_debug_source_bind_p (stmt))
5100 {
5368224f 5101 location_t sloc = curr_insn_location ();
ddb555ed
JJ
5102 tree var = gimple_debug_source_bind_get_var (stmt);
5103 tree value = gimple_debug_source_bind_get_value (stmt);
5104 rtx val;
5105 enum machine_mode mode;
5106
5107 last = get_last_insn ();
5108
5368224f 5109 set_curr_insn_location (gimple_location (stmt));
ddb555ed
JJ
5110
5111 mode = DECL_MODE (var);
5112
5113 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5114 VAR_INIT_STATUS_UNINITIALIZED);
5115
5116 emit_debug_insn (val);
5117
5118 if (dump_file && (dump_flags & TDF_DETAILS))
5119 {
5120 /* We can't dump the insn with a TREE where an RTX
5121 is expected. */
5122 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5123 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5124 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5125 }
5126
5368224f 5127 set_curr_insn_location (sloc);
b5b8b0ac 5128 }
80c7a9eb 5129 else
242229bb 5130 {
f3ddd692
JJ
5131 if (is_gimple_call (stmt)
5132 && gimple_call_tail_p (stmt)
5133 && disable_tail_calls)
5134 gimple_call_set_tail (stmt, false);
5135
726a989a 5136 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
cea49550
RH
5137 {
5138 bool can_fallthru;
5139 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
5140 if (new_bb)
5141 {
5142 if (can_fallthru)
5143 bb = new_bb;
5144 else
5145 return new_bb;
5146 }
5147 }
4d7a65ea 5148 else
b7211528 5149 {
4e3825db 5150 def_operand_p def_p;
4e3825db
MM
5151 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5152
5153 if (def_p != NULL)
5154 {
5155 /* Ignore this stmt if it is in the list of
5156 replaceable expressions. */
5157 if (SA.values
b8698a0f 5158 && bitmap_bit_p (SA.values,
e97809c6 5159 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4e3825db
MM
5160 continue;
5161 }
28ed065e 5162 last = expand_gimple_stmt (stmt);
726a989a 5163 maybe_dump_rtl_for_gimple_stmt (stmt, last);
b7211528 5164 }
242229bb
JH
5165 }
5166 }
5167
a5883ba0
MM
5168 currently_expanding_gimple_stmt = NULL;
5169
7241571e 5170 /* Expand implicit goto and convert goto_locus. */
a9b77cd1
ZD
5171 FOR_EACH_EDGE (e, ei, bb->succs)
5172 {
2f13f2de 5173 if (e->goto_locus != UNKNOWN_LOCATION)
5368224f 5174 set_curr_insn_location (e->goto_locus);
7241571e
JJ
5175 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5176 {
5177 emit_jump (label_rtx_for_bb (e->dest));
5178 e->flags &= ~EDGE_FALLTHRU;
5179 }
a9b77cd1
ZD
5180 }
5181
ae761c45
AH
5182 /* Expanded RTL can create a jump in the last instruction of a block.
5183 That jump might later be assumed to target the successor and break edge insertion.
5184 We need to insert a dummy move to prevent this. PR41440. */
5185 if (single_succ_p (bb)
5186 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5187 && (last = get_last_insn ())
5188 && JUMP_P (last))
5189 {
5190 rtx dummy = gen_reg_rtx (SImode);
5191 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5192 }
5193
242229bb
JH
5194 do_pending_stack_adjust ();
5195
3f117656 5196 /* Find the block tail. The last insn in the block is the insn
242229bb
JH
5197 before a barrier and/or table jump insn. */
5198 last = get_last_insn ();
4b4bf941 5199 if (BARRIER_P (last))
242229bb
JH
5200 last = PREV_INSN (last);
5201 if (JUMP_TABLE_DATA_P (last))
5202 last = PREV_INSN (PREV_INSN (last));
5203 BB_END (bb) = last;
caf93cb0 5204
242229bb 5205 update_bb_for_insn (bb);
80c7a9eb 5206
242229bb
JH
5207 return bb;
5208}
5209
5210
5211/* Create a basic block for initialization code. */
5212
5213static basic_block
5214construct_init_block (void)
5215{
5216 basic_block init_block, first_block;
fd44f634
JH
5217 edge e = NULL;
5218 int flags;
275a4187 5219
fd44f634 5220 /* Multiple entry points not supported yet. */
fefa31b5
DM
5221 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5222 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5223 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5224 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5225 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
242229bb 5226
fefa31b5 5227 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
275a4187 5228
fd44f634
JH
5229 /* When the entry edge points to the first basic block, we don't need a jump;
5230 otherwise we have to jump to the proper target. */
fefa31b5 5231 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
fd44f634 5232 {
726a989a 5233 tree label = gimple_block_label (e->dest);
fd44f634
JH
5234
5235 emit_jump (label_rtx (label));
5236 flags = 0;
275a4187 5237 }
fd44f634
JH
5238 else
5239 flags = EDGE_FALLTHRU;
242229bb
JH
5240
5241 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5242 get_last_insn (),
fefa31b5
DM
5243 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5244 init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5245 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5246 if (current_loops && ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father)
5247 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
242229bb
JH
5248 if (e)
5249 {
5250 first_block = e->dest;
5251 redirect_edge_succ (e, init_block);
fd44f634 5252 e = make_edge (init_block, first_block, flags);
242229bb
JH
5253 }
5254 else
fefa31b5 5255 e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
242229bb 5256 e->probability = REG_BR_PROB_BASE;
fefa31b5 5257 e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
242229bb
JH
5258
5259 update_bb_for_insn (init_block);
5260 return init_block;
5261}
5262
55e092c4
JH
5263/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5264 found in the block tree. */
5265
5266static void
5267set_block_levels (tree block, int level)
5268{
5269 while (block)
5270 {
5271 BLOCK_NUMBER (block) = level;
5272 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5273 block = BLOCK_CHAIN (block);
5274 }
5275}
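/* Editor's sketch (illustrative, not part of the original source): for a
   body whose lexical scopes nest like

     void f (void)
     {
       { ... }      // BLOCK at some level N
       {            // sibling BLOCK via BLOCK_CHAIN, same level N
         { ... }    // nested BLOCK via BLOCK_SUBBLOCKS, level N + 1
       }
     }

   set_block_levels gives BLOCK_CHAIN siblings the same BLOCK_NUMBER and
   BLOCK_SUBBLOCKS one more, so BLOCK_NUMBER records the lexical nesting
   depth that change_scope later uses to find a common parent.  */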
242229bb
JH
5276
5277/* Create a block containing landing pads and similar stuff. */
5278
5279static void
5280construct_exit_block (void)
5281{
5282 rtx head = get_last_insn ();
5283 rtx end;
5284 basic_block exit_block;
628f6a4e
BE
5285 edge e, e2;
5286 unsigned ix;
5287 edge_iterator ei;
79c7fda6
JJ
5288 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5289 rtx orig_end = BB_END (prev_bb);
242229bb 5290
fefa31b5 5291 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
bf08ebeb 5292
caf93cb0 5293 /* Make sure the locus is set to the end of the function, so that
242229bb 5294 epilogue line numbers and warnings are set properly. */
2f13f2de 5295 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
242229bb
JH
5296 input_location = cfun->function_end_locus;
5297
242229bb
JH
5298 /* Generate rtl for function exit. */
5299 expand_function_end ();
5300
5301 end = get_last_insn ();
5302 if (head == end)
5303 return;
79c7fda6
JJ
5304 /* While emitting the function end we could move end of the last basic
5305 block. */
5306 BB_END (prev_bb) = orig_end;
4b4bf941 5307 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 5308 head = NEXT_INSN (head);
79c7fda6
JJ
5309 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5310 bb frequency counting will be confused. Any instructions before that
5311 label are emitted for the case where PREV_BB falls through into the
5312 exit block, so append those instructions to prev_bb in that case. */
5313 if (NEXT_INSN (head) != return_label)
5314 {
5315 while (NEXT_INSN (head) != return_label)
5316 {
5317 if (!NOTE_P (NEXT_INSN (head)))
5318 BB_END (prev_bb) = NEXT_INSN (head);
5319 head = NEXT_INSN (head);
5320 }
5321 }
5322 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
fefa31b5
DM
5323 exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5324 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5325 if (current_loops && EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father)
5326 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
628f6a4e
BE
5327
5328 ix = 0;
fefa31b5 5329 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
242229bb 5330 {
fefa31b5 5331 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
242229bb 5332 if (!(e->flags & EDGE_ABNORMAL))
628f6a4e
BE
5333 redirect_edge_succ (e, exit_block);
5334 else
5335 ix++;
242229bb 5336 }
628f6a4e 5337
fefa31b5 5338 e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
242229bb 5339 e->probability = REG_BR_PROB_BASE;
fefa31b5
DM
5340 e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5341 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
242229bb
JH
5342 if (e2 != e)
5343 {
c22cacf3 5344 e->count -= e2->count;
242229bb
JH
5345 exit_block->count -= e2->count;
5346 exit_block->frequency -= EDGE_FREQUENCY (e2);
5347 }
5348 if (e->count < 0)
5349 e->count = 0;
5350 if (exit_block->count < 0)
5351 exit_block->count = 0;
5352 if (exit_block->frequency < 0)
5353 exit_block->frequency = 0;
5354 update_bb_for_insn (exit_block);
5355}
5356
c22cacf3 5357/* Helper function for discover_nonconstant_array_refs.
a1b23b2f
UW
5358 Look for ARRAY_REF nodes with non-constant indexes and mark them
5359 addressable. */
5360
5361static tree
5362discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5363 void *data ATTRIBUTE_UNUSED)
5364{
5365 tree t = *tp;
5366
5367 if (IS_TYPE_OR_DECL_P (t))
5368 *walk_subtrees = 0;
5369 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5370 {
5371 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5372 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5373 && (!TREE_OPERAND (t, 2)
5374 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5375 || (TREE_CODE (t) == COMPONENT_REF
5376 && (!TREE_OPERAND (t,2)
5377 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5378 || TREE_CODE (t) == BIT_FIELD_REF
5379 || TREE_CODE (t) == REALPART_EXPR
5380 || TREE_CODE (t) == IMAGPART_EXPR
5381 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1043771b 5382 || CONVERT_EXPR_P (t))
a1b23b2f
UW
5383 t = TREE_OPERAND (t, 0);
5384
5385 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5386 {
5387 t = get_base_address (t);
6f11d690
RG
5388 if (t && DECL_P (t)
5389 && DECL_MODE (t) != BLKmode)
a1b23b2f
UW
5390 TREE_ADDRESSABLE (t) = 1;
5391 }
5392
5393 *walk_subtrees = 0;
5394 }
5395
5396 return NULL_TREE;
5397}
5398
5399/* RTL expansion is not able to compile array references with variable
5400 offsets for arrays stored in a single register. Discover such
5401 expressions and mark variables as addressable to avoid this
5402 scenario. */
5403
5404static void
5405discover_nonconstant_array_refs (void)
5406{
5407 basic_block bb;
726a989a 5408 gimple_stmt_iterator gsi;
a1b23b2f 5409
11cd3bed 5410 FOR_EACH_BB_FN (bb, cfun)
726a989a
RB
5411 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5412 {
5413 gimple stmt = gsi_stmt (gsi);
aa847cc8
JJ
5414 if (!is_gimple_debug (stmt))
5415 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
726a989a 5416 }
a1b23b2f
UW
5417}
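/* Editor's sketch (illustrative, not part of the original source): the
   kind of reference this pass guards against is a small aggregate being
   indexed with a runtime value, e.g.

     int f (int i)
     {
       struct { int a[2]; } s = { { 1, 2 } };
       return s.a[i];   // non-constant index into an object with register mode
     }

   The walker above reaches the ARRAY_REF with the non-constant index,
   takes its base address (the decl s) and sets TREE_ADDRESSABLE on it,
   forcing s into memory so the indexed access can be expanded.  */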
5418
2e3f842f
L
5419/* This function sets crtl->args.internal_arg_pointer to a virtual
5420 register if DRAP is needed. Local register allocator will replace
5421 virtual_incoming_args_rtx with the virtual register. */
5422
5423static void
5424expand_stack_alignment (void)
5425{
5426 rtx drap_rtx;
e939805b 5427 unsigned int preferred_stack_boundary;
2e3f842f
L
5428
5429 if (! SUPPORTS_STACK_ALIGNMENT)
5430 return;
b8698a0f 5431
2e3f842f
L
5432 if (cfun->calls_alloca
5433 || cfun->has_nonlocal_label
5434 || crtl->has_nonlocal_goto)
5435 crtl->need_drap = true;
5436
890b9b96
L
5437 /* Call update_stack_boundary here again to update incoming stack
5438 boundary. It may set incoming stack alignment to a different
5439 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
5440 use the minimum incoming stack alignment to check if it is OK
5441 to perform sibcall optimization since sibcall optimization will
5442 only align the outgoing stack to incoming stack boundary. */
5443 if (targetm.calls.update_stack_boundary)
5444 targetm.calls.update_stack_boundary ();
5445
5446 /* The incoming stack frame has to be aligned at least at
5447 parm_stack_boundary. */
5448 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
2e3f842f 5449
2e3f842f
L
5450 /* Update crtl->stack_alignment_estimated and use it later to align
5451 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
5452 exceptions since callgraph doesn't collect incoming stack alignment
5453 in this case. */
8f4f502f 5454 if (cfun->can_throw_non_call_exceptions
2e3f842f
L
5455 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
5456 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5457 else
5458 preferred_stack_boundary = crtl->preferred_stack_boundary;
5459 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
5460 crtl->stack_alignment_estimated = preferred_stack_boundary;
5461 if (preferred_stack_boundary > crtl->stack_alignment_needed)
5462 crtl->stack_alignment_needed = preferred_stack_boundary;
5463
890b9b96
L
5464 gcc_assert (crtl->stack_alignment_needed
5465 <= crtl->stack_alignment_estimated);
5466
2e3f842f 5467 crtl->stack_realign_needed
e939805b 5468 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
d2d93c32 5469 crtl->stack_realign_tried = crtl->stack_realign_needed;
2e3f842f
L
5470
5471 crtl->stack_realign_processed = true;
5472
5473 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
5474 alignment. */
5475 gcc_assert (targetm.calls.get_drap_rtx != NULL);
b8698a0f 5476 drap_rtx = targetm.calls.get_drap_rtx ();
2e3f842f 5477
d015f7cc
L
5478 /* stack_realign_drap and drap_rtx must match. */
5479 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
5480
2e3f842f
L
5481 /* Do nothing if NULL is returned, which means DRAP is not needed. */
5482 if (NULL != drap_rtx)
5483 {
5484 crtl->args.internal_arg_pointer = drap_rtx;
5485
5486 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
5487 needed. */
5488 fixup_tail_calls ();
5489 }
5490}
862d0b35
DN
5491\f
5492
5493static void
5494expand_main_function (void)
5495{
5496#if (defined(INVOKE__main) \
5497 || (!defined(HAS_INIT_SECTION) \
5498 && !defined(INIT_SECTION_ASM_OP) \
5499 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
5500 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
5501#endif
5502}
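/* Editor's note (illustrative, not part of the original source): on
   targets where the #if above holds, the effect is that main begins
   roughly as if the user had written

     extern void __main (void);
     ...
     __main ();   // run global initializers before any user code

   matching the later check that emits this call only for main itself.  */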
5503\f
5504
5505/* Expand code to initialize the stack_protect_guard. This is invoked at
5506 the beginning of a function to be protected. */
5507
5508#ifndef HAVE_stack_protect_set
5509# define HAVE_stack_protect_set 0
5510# define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
5511#endif
5512
5513static void
5514stack_protect_prologue (void)
5515{
5516 tree guard_decl = targetm.stack_protect_guard ();
5517 rtx x, y;
5518
5519 x = expand_normal (crtl->stack_protect_guard);
5520 y = expand_normal (guard_decl);
5521
5522 /* Allow the target to copy from Y to X without leaking Y into a
5523 register. */
5524 if (HAVE_stack_protect_set)
5525 {
5526 rtx insn = gen_stack_protect_set (x, y);
5527 if (insn)
5528 {
5529 emit_insn (insn);
5530 return;
5531 }
5532 }
5533
5534 /* Otherwise do a straight move. */
5535 emit_move_insn (x, y);
5536}
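/* Editor's note (illustrative, not part of the original source):
   conceptually the prologue emitted above performs

     <frame canary slot> = <guard variable>;

   using the target's stack_protect_set pattern when it exists, so the
   guard value is copied without being left live in a scratch register,
   and falling back to an ordinary move otherwise.  */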
2e3f842f 5537
242229bb
JH
5538/* Translate the intermediate representation contained in the CFG
5539 from GIMPLE trees to RTL.
5540
5541 We do conversion per basic block and preserve/update the tree CFG.
5542 This implies we have to do some magic as the CFG can simultaneously
5543 consist of basic blocks containing RTL and GIMPLE trees. This can
61ada8ae 5544 confuse the CFG hooks, so be careful to not manipulate CFG during
242229bb
JH
5545 the expansion. */
5546
be55bfe6
TS
5547namespace {
5548
5549const pass_data pass_data_expand =
5550{
5551 RTL_PASS, /* type */
5552 "expand", /* name */
5553 OPTGROUP_NONE, /* optinfo_flags */
5554 true, /* has_execute */
5555 TV_EXPAND, /* tv_id */
5556 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
5557 | PROP_gimple_lcx
5558 | PROP_gimple_lvec ), /* properties_required */
5559 PROP_rtl, /* properties_provided */
5560 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
3bea341f 5561 0, /* todo_flags_start */
be55bfe6
TS
5562 0, /* todo_flags_finish */
5563};
5564
5565class pass_expand : public rtl_opt_pass
5566{
5567public:
5568 pass_expand (gcc::context *ctxt)
5569 : rtl_opt_pass (pass_data_expand, ctxt)
5570 {}
5571
5572 /* opt_pass methods: */
5573 virtual unsigned int execute (function *);
5574
5575}; // class pass_expand
5576
5577unsigned int
5578pass_expand::execute (function *fun)
242229bb
JH
5579{
5580 basic_block bb, init_block;
5581 sbitmap blocks;
0ef90296
ZD
5582 edge_iterator ei;
5583 edge e;
f3ddd692 5584 rtx var_seq, var_ret_seq;
4e3825db
MM
5585 unsigned i;
5586
f029db69 5587 timevar_push (TV_OUT_OF_SSA);
4e3825db 5588 rewrite_out_of_ssa (&SA);
f029db69 5589 timevar_pop (TV_OUT_OF_SSA);
c302207e 5590 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
242229bb 5591
be147e84
RG
5592 /* Make sure all values used by the optimization passes have sane
5593 defaults. */
5594 reg_renumber = 0;
5595
4586b4ca
SB
5596 /* Some backends want to know that we are expanding to RTL. */
5597 currently_expanding_to_rtl = 1;
cd7d9fd7
RG
5598 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
5599 free_dominance_info (CDI_DOMINATORS);
4586b4ca 5600
be55bfe6 5601 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
bf08ebeb 5602
5368224f 5603 insn_locations_init ();
fe8a7779 5604 if (!DECL_IS_BUILTIN (current_function_decl))
1751ecd6
AH
5605 {
5606 /* Eventually, all FEs should explicitly set function_start_locus. */
be55bfe6
TS
5607 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
5608 set_curr_insn_location
5609 (DECL_SOURCE_LOCATION (current_function_decl));
1751ecd6 5610 else
be55bfe6 5611 set_curr_insn_location (fun->function_start_locus);
1751ecd6 5612 }
9ff70652 5613 else
5368224f
DC
5614 set_curr_insn_location (UNKNOWN_LOCATION);
5615 prologue_location = curr_insn_location ();
55e092c4 5616
2b21299c
JJ
5617#ifdef INSN_SCHEDULING
5618 init_sched_attrs ();
5619#endif
5620
55e092c4
JH
5621 /* Make sure first insn is a note even if we don't want linenums.
5622 This makes sure the first insn will never be deleted.
5623 Also, final expects a note to appear there. */
5624 emit_note (NOTE_INSN_DELETED);
6429e3be 5625
a1b23b2f
UW
5626 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
5627 discover_nonconstant_array_refs ();
5628
e41b2a33 5629 targetm.expand_to_rtl_hook ();
cb91fab0 5630 crtl->stack_alignment_needed = STACK_BOUNDARY;
2e3f842f 5631 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
890b9b96 5632 crtl->stack_alignment_estimated = 0;
cb91fab0 5633 crtl->preferred_stack_boundary = STACK_BOUNDARY;
be55bfe6 5634 fun->cfg->max_jumptable_ents = 0;
cb91fab0 5635
ae9fd6b7
JH
5636 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
5637 of the function section at expansion time to predict the distance of calls. */
5638 resolve_unique_section (current_function_decl, 0, flag_function_sections);
5639
727a31fa 5640 /* Expand the variables recorded during gimple lowering. */
f029db69 5641 timevar_push (TV_VAR_EXPAND);
3a42502d
RH
5642 start_sequence ();
5643
f3ddd692 5644 var_ret_seq = expand_used_vars ();
3a42502d
RH
5645
5646 var_seq = get_insns ();
5647 end_sequence ();
f029db69 5648 timevar_pop (TV_VAR_EXPAND);
242229bb 5649
7d69de61
RH
5650 /* Honor stack protection warnings. */
5651 if (warn_stack_protect)
5652 {
be55bfe6 5653 if (fun->calls_alloca)
b8698a0f 5654 warning (OPT_Wstack_protector,
3b123595 5655 "stack protector not protecting local variables: "
be55bfe6 5656 "variable length buffer");
cb91fab0 5657 if (has_short_buffer && !crtl->stack_protect_guard)
b8698a0f 5658 warning (OPT_Wstack_protector,
3b123595 5659 "stack protector not protecting function: "
be55bfe6 5660 "all local arrays are less than %d bytes long",
7d69de61
RH
5661 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
5662 }
5663
242229bb 5664 /* Set up parameters and prepare for return, for the function. */
b79c5284 5665 expand_function_start (current_function_decl);
242229bb 5666
3a42502d
RH
5667 /* If we emitted any instructions for setting up the variables,
5668 emit them before the FUNCTION_START note. */
5669 if (var_seq)
5670 {
5671 emit_insn_before (var_seq, parm_birth_insn);
5672
5673 /* In expand_function_end we'll insert the alloca save/restore
5674 before parm_birth_insn. We've just inserted an alloca call.
5675 Adjust the pointer to match. */
5676 parm_birth_insn = var_seq;
5677 }
5678
4e3825db
MM
5679 /* Now that we also have the parameter RTXs, copy them over to our
5680 partitions. */
5681 for (i = 0; i < SA.map->num_partitions; i++)
5682 {
5683 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
5684
5685 if (TREE_CODE (var) != VAR_DECL
5686 && !SA.partition_to_pseudo[i])
5687 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
5688 gcc_assert (SA.partition_to_pseudo[i]);
eb7adebc
MM
5689
5690 /* If this decl was marked as living in multiple places, reset
be55bfe6 5691 this now to NULL. */
eb7adebc
MM
5692 if (DECL_RTL_IF_SET (var) == pc_rtx)
5693 SET_DECL_RTL (var, NULL);
5694
4e3825db 5695 /* Some RTL parts really want to look at DECL_RTL(x) when x
be55bfe6 5696 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4e3825db
MM
5697 SET_DECL_RTL here, making this available, but that would mean
5698 selecting one of the potentially many RTLs for one DECL. Instead
5699 of doing that we simply reset the MEM_EXPR of the RTL in question,
5700 then nobody can get at it and hence nobody can call DECL_RTL on it. */
5701 if (!DECL_RTL_SET_P (var))
5702 {
5703 if (MEM_P (SA.partition_to_pseudo[i]))
5704 set_mem_expr (SA.partition_to_pseudo[i], NULL);
5705 }
5706 }
5707
d466b407
MM
5708 /* If we have a class containing differently aligned pointers
5709 we need to merge those into the corresponding RTL pointer
5710 alignment. */
5711 for (i = 1; i < num_ssa_names; i++)
5712 {
5713 tree name = ssa_name (i);
5714 int part;
5715 rtx r;
5716
5717 if (!name
d466b407
MM
5718 /* We might have generated new SSA names in
5719 update_alias_info_with_stack_vars. They will have a NULL
5720 defining statements, and won't be part of the partitioning,
5721 so ignore those. */
5722 || !SSA_NAME_DEF_STMT (name))
5723 continue;
5724 part = var_to_partition (SA.map, name);
5725 if (part == NO_PARTITION)
5726 continue;
70b5e7dc
RG
5727
5728 /* Adjust all partition members to get the underlying decl of
5729 the representative which we might have created in expand_one_var. */
5730 if (SSA_NAME_VAR (name) == NULL_TREE)
5731 {
5732 tree leader = partition_to_var (SA.map, part);
5733 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
5734 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
5735 }
5736 if (!POINTER_TYPE_P (TREE_TYPE (name)))
5737 continue;
5738
d466b407
MM
5739 r = SA.partition_to_pseudo[part];
5740 if (REG_P (r))
5741 mark_reg_pointer (r, get_pointer_alignment (name));
5742 }
5743
242229bb
JH
5744 /* If this function is `main', emit a call to `__main'
5745 to run global initializers, etc. */
5746 if (DECL_NAME (current_function_decl)
5747 && MAIN_NAME_P (DECL_NAME (current_function_decl))
5748 && DECL_FILE_SCOPE_P (current_function_decl))
5749 expand_main_function ();
5750
7d69de61
RH
5751 /* Initialize the stack_protect_guard field. This must happen after the
5752 call to __main (if any) so that the external decl is initialized. */
cb91fab0 5753 if (crtl->stack_protect_guard)
7d69de61
RH
5754 stack_protect_prologue ();
5755
4e3825db
MM
5756 expand_phi_nodes (&SA);
5757
3fbd86b1 5758 /* Register rtl specific functions for cfg. */
242229bb
JH
5759 rtl_register_cfg_hooks ();
5760
5761 init_block = construct_init_block ();
5762
0ef90296 5763 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4e3825db 5764 remaining edges later. */
be55bfe6 5765 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
0ef90296
ZD
5766 e->flags &= ~EDGE_EXECUTABLE;
5767
8b11009b 5768 lab_rtx_for_bb = pointer_map_create ();
be55bfe6 5769 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
fefa31b5 5770 next_bb)
f3ddd692 5771 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
bf08ebeb 5772
b5b8b0ac
AO
5773 if (MAY_HAVE_DEBUG_INSNS)
5774 expand_debug_locations ();
5775
452aa9c5
RG
5776 /* Free stuff we no longer need after GIMPLE optimizations. */
5777 free_dominance_info (CDI_DOMINATORS);
5778 free_dominance_info (CDI_POST_DOMINATORS);
5779 delete_tree_cfg_annotations ();
5780
f029db69 5781 timevar_push (TV_OUT_OF_SSA);
4e3825db 5782 finish_out_of_ssa (&SA);
f029db69 5783 timevar_pop (TV_OUT_OF_SSA);
4e3825db 5784
f029db69 5785 timevar_push (TV_POST_EXPAND);
91753e21 5786 /* We are no longer in SSA form. */
be55bfe6 5787 fun->gimple_df->in_ssa_p = false;
7d776ee2
RG
5788 if (current_loops)
5789 loops_state_clear (LOOP_CLOSED_SSA);
91753e21 5790
bf08ebeb
JH
5791 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
5792 conservatively to true until they are all profile aware. */
8b11009b 5793 pointer_map_destroy (lab_rtx_for_bb);
cb91fab0 5794 free_histograms ();
242229bb
JH
5795
5796 construct_exit_block ();
5368224f 5797 insn_locations_finalize ();
242229bb 5798
f3ddd692
JJ
5799 if (var_ret_seq)
5800 {
5801 rtx after = return_label;
5802 rtx next = NEXT_INSN (after);
5803 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
5804 after = next;
5805 emit_insn_after (var_ret_seq, after);
5806 }
5807
1d65f45c 5808 /* Zap the tree EH table. */
be55bfe6 5809 set_eh_throw_stmt_table (fun, NULL);
242229bb 5810
42821aff
MM
5811 /* We need JUMP_LABEL be set in order to redirect jumps, and hence
5812 split edges which edge insertions might do. */
242229bb 5813 rebuild_jump_labels (get_insns ());
242229bb 5814
be55bfe6
TS
5815 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
5816 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
4e3825db
MM
5817 {
5818 edge e;
5819 edge_iterator ei;
5820 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5821 {
5822 if (e->insns.r)
bc470c24 5823 {
42821aff 5824 rebuild_jump_labels_chain (e->insns.r);
e40191f1
TV
5825 /* Put insns after parm birth, but before
5826 NOTE_INSN_FUNCTION_BEG. */
be55bfe6
TS
5827 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
5828 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
bc470c24
JJ
5829 {
5830 rtx insns = e->insns.r;
5831 e->insns.r = NULL_RTX;
e40191f1
TV
5832 if (NOTE_P (parm_birth_insn)
5833 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
5834 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
5835 else
5836 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
bc470c24
JJ
5837 }
5838 else
5839 commit_one_edge_insertion (e);
5840 }
4e3825db
MM
5841 else
5842 ei_next (&ei);
5843 }
5844 }
5845
5846 /* We're done expanding trees to RTL. */
5847 currently_expanding_to_rtl = 0;
5848
be55bfe6
TS
5849 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
5850 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
4e3825db
MM
5851 {
5852 edge e;
5853 edge_iterator ei;
5854 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5855 {
5856 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
5857 e->flags &= ~EDGE_EXECUTABLE;
5858
5859 /* At the moment not all abnormal edges match the RTL
5860 representation. It is safe to remove them here as
5861 find_many_sub_basic_blocks will rediscover them.
5862 In the future we should get this fixed properly. */
5863 if ((e->flags & EDGE_ABNORMAL)
5864 && !(e->flags & EDGE_SIBCALL))
5865 remove_edge (e);
5866 else
5867 ei_next (&ei);
5868 }
5869 }
5870
be55bfe6 5871 blocks = sbitmap_alloc (last_basic_block_for_fn (fun));
f61e445a 5872 bitmap_ones (blocks);
242229bb 5873 find_many_sub_basic_blocks (blocks);
242229bb 5874 sbitmap_free (blocks);
4e3825db 5875 purge_all_dead_edges ();
242229bb 5876
2e3f842f
L
5877 expand_stack_alignment ();
5878
be147e84
RG
5879 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
5880 function. */
5881 if (crtl->tail_call_emit)
5882 fixup_tail_calls ();
5883
dac1fbf8
RG
5884 /* After initial rtl generation, call back to finish generating
5885 exception support code. We need to do this before cleaning up
5886 the CFG as the code does not expect dead landing pads. */
be55bfe6 5887 if (fun->eh->region_tree != NULL)
dac1fbf8
RG
5888 finish_eh_generation ();
5889
5890 /* Remove unreachable blocks, otherwise we cannot compute dominators
5891 which are needed for loop state verification. As a side-effect
5892 this also compacts blocks.
5893 ??? We cannot remove trivially dead insns here as for example
5894 the DRAP reg on i?86 is not magically live at this point.
5895 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
5896 cleanup_cfg (CLEANUP_NO_INSN_DEL);
5897
242229bb 5898#ifdef ENABLE_CHECKING
62e5bf5d 5899 verify_flow_info ();
242229bb 5900#endif
9f8628ba 5901
be147e84
RG
5902 /* Initialize pseudos allocated for hard registers. */
5903 emit_initial_value_sets ();
5904
5905 /* And finally unshare all RTL. */
5906 unshare_all_rtl ();
5907
9f8628ba
PB
5908 /* There's no need to defer outputting this function any more; we
5909 know we want to output it. */
5910 DECL_DEFER_OUTPUT (current_function_decl) = 0;
5911
5912 /* Now that we're done expanding trees to RTL, we shouldn't have any
5913 more CONCATs anywhere. */
5914 generating_concat_p = 0;
5915
b7211528
SB
5916 if (dump_file)
5917 {
5918 fprintf (dump_file,
5919 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
5920 /* And the pass manager will dump RTL for us. */
5921 }
ef330312
PB
5922
5923 /* If we're emitting a nested function, make sure its parent gets
5924 emitted as well. Doing otherwise confuses debug info. */
be55bfe6
TS
5925 {
5926 tree parent;
5927 for (parent = DECL_CONTEXT (current_function_decl);
5928 parent != NULL_TREE;
5929 parent = get_containing_scope (parent))
5930 if (TREE_CODE (parent) == FUNCTION_DECL)
5931 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
5932 }
c22cacf3 5933
ef330312
PB
5934 /* We are now committed to emitting code for this function. Do any
5935 preparation, such as emitting abstract debug info for the inline
5936 before it gets mangled by optimization. */
5937 if (cgraph_function_possibly_inlined_p (current_function_decl))
5938 (*debug_hooks->outlining_inline_function) (current_function_decl);
5939
5940 TREE_ASM_WRITTEN (current_function_decl) = 1;
4bb1e037
AP
5941
5942 /* After expanding, the return labels are no longer needed. */
5943 return_label = NULL;
5944 naked_return_label = NULL;
0a35513e
AH
5945
5946 /* After expanding, the tm_restart map is no longer needed. */
be55bfe6 5947 if (fun->gimple_df->tm_restart)
0a35513e 5948 {
be55bfe6
TS
5949 htab_delete (fun->gimple_df->tm_restart);
5950 fun->gimple_df->tm_restart = NULL;
0a35513e
AH
5951 }
5952
55e092c4
JH
5953 /* Tag the blocks with a depth number so that change_scope can find
5954 the common parent easily. */
be55bfe6 5955 set_block_levels (DECL_INITIAL (fun->decl), 0);
bf08ebeb 5956 default_rtl_profile ();
be147e84 5957
f029db69 5958 timevar_pop (TV_POST_EXPAND);
be147e84 5959
c2924966 5960 return 0;
242229bb
JH
5961}
5962
27a4cd48
DM
5963} // anon namespace
5964
5965rtl_opt_pass *
5966make_pass_expand (gcc::context *ctxt)
5967{
5968 return new pass_expand (ctxt);
5969}