/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tm.h"
242229bb 24#include "rtl.h"
862d0b35
DN
25#include "hard-reg-set.h"
26#include "tree.h"
242229bb
JH
27#include "tm_p.h"
28#include "basic-block.h"
29#include "function.h"
30#include "expr.h"
31#include "langhooks.h"
442b4905
AM
32#include "bitmap.h"
33#include "gimple.h"
5be5c238
AM
34#include "gimple-iterator.h"
35#include "gimple-walk.h"
442b4905
AM
36#include "gimple-ssa.h"
37#include "cgraph.h"
38#include "tree-cfg.h"
39#include "tree-phinodes.h"
40#include "ssa-iterators.h"
41#include "tree-ssanames.h"
42#include "tree-dfa.h"
7a300452 43#include "tree-ssa.h"
242229bb
JH
44#include "tree-pass.h"
45#include "except.h"
46#include "flags.h"
1f6d3a08 47#include "diagnostic.h"
cf835838 48#include "gimple-pretty-print.h"
1f6d3a08 49#include "toplev.h"
ef330312 50#include "debug.h"
7d69de61 51#include "params.h"
ff28a94d 52#include "tree-inline.h"
6946b3f7 53#include "value-prof.h"
e41b2a33 54#include "target.h"
8e9055ae 55#include "tree-ssa-live.h"
78bca40d 56#include "tree-outof-ssa.h"
7a8cba34 57#include "sbitmap.h"
7d776ee2 58#include "cfgloop.h"
be147e84 59#include "regs.h" /* For reg_renumber. */
2b21299c 60#include "insn-attr.h" /* For INSN_SCHEDULING. */
f3ddd692 61#include "asan.h"
4484a35a 62#include "tree-ssa-address.h"
862d0b35
DN
63#include "recog.h"
64#include "output.h"
726a989a 65
8a6ce562
JBG
66/* Some systems use __main in a way incompatible with its use in gcc, in these
67 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
68 give the same symbol without quotes for an alternative entry point. You
69 must define both, or neither. */
70#ifndef NAME__MAIN
71#define NAME__MAIN "__main"
72#endif
73
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

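/* End-of-chain marker for the NEXT links in struct stack_var partitions.  */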
#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static struct pointer_map_t *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices into stack_vars, sorted by stack_var_cmp:
   objects with "large" (unsupported) alignment come first, then
   sizes are non-increasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = pointer_map_create ();

  v = &stack_vars[stack_vars_num];
  * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;

  v->decl = decl;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v =
        (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}

/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  bool (*visit)(gimple, tree, void *);

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

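  /* Start with the cheap visitor that merely records which partitions are
     live; when FOR_CONFLICT is set it is swapped for visit_conflict at the
     first real statement below.  */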
  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = (size_t *)
                  pointer_map_contains (decl_to_stack_part, lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}

/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we for instance moved an address-taken
     operation upward, without also moving a dereference to it upwards.
     But it's conservatively correct as a variable never can hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB (bb)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block);
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

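  /* Iterate in reverse post-order until the per-block sets of live
     partitions stop changing.  */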
  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK (rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB (bb)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB (bb)
    BITMAP_FREE (bb->aux);
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, decreasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}

/* If the points-to solution PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          HOST_WIDE_INT jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if ((flag_sanitize & SANITIZE_ADDRESS) && isize != jsize
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
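      /* The largest power of two dividing OFFSET is the byte alignment we
         can guarantee for this slot; it is capped by BASE_ALIGN below.  */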
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}

struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reversed order, so the highest offset pairs come
     first.  */
  vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  vec<tree> asan_decl_vec;
};

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          if ((flag_sanitize & SANITIZE_ADDRESS) && pred)
            {
              HOST_WIDE_INT prev_offset = frame_offset;
              tree repr_decl = NULL_TREE;

              offset
                = alloc_stack_frame_space (stack_vars[i].size
                                           + ASAN_RED_ZONE_SIZE,
                                           MAX (alignb, ASAN_RED_ZONE_SIZE));
              data->asan_vec.safe_push (prev_offset);
              data->asan_vec.safe_push (offset + stack_vars[i].size);
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);
            }
          else
            offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = (tree_low_cst (DECL_SIZE_UNIT (var), 1)
       < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || (flag_sanitize & SANITIZE_ADDRESS))
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3
};

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY 1
#define SPCT_HAS_SMALL_CHAR_ARRAY 2
#define SPCT_HAS_ARRAY 4
#define SPCT_HAS_AGGREGATE 8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}

/* And a helper function that checks for the asan phase (with stack protector
   it is phase 3).  This is used as a callback for expand_stack_vars.
   Returns true if any of the vars in the partition need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
        return true;
      i = stack_vars[i].next;
    }
  return false;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = pointer_map_create ();

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  pointer_map_destroy (decl_to_stack_part);
  decl_to_stack_part = NULL;
}

/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return size;
}

/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
        tree field_type = TREE_TYPE (f);
        if (RECORD_OR_UNION_TYPE_P (field_type)
            && record_or_union_type_has_array_p (field_type))
          return 1;
        if (TREE_CODE (field_type) == ARRAY_TYPE)
          return 1;
      }
  return 0;
}

1f6d3a08 1581/* Expand all variables used in the function. */
727a31fa 1582
f3ddd692 1583static rtx
727a31fa
RH
1584expand_used_vars (void)
1585{
c021f10b 1586 tree var, outer_block = DECL_INITIAL (current_function_decl);
6e1aa848 1587 vec<tree> maybe_local_decls = vNULL;
f3ddd692 1588 rtx var_end_seq = NULL_RTX;
70b5e7dc 1589 struct pointer_map_t *ssa_name_decls;
4e3825db 1590 unsigned i;
c021f10b 1591 unsigned len;
f6bc1c4a 1592 bool gen_stack_protect_signal = false;
727a31fa 1593
1f6d3a08
RH
1594 /* Compute the phase of the stack frame for this function. */
1595 {
1596 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1597 int off = STARTING_FRAME_OFFSET % align;
1598 frame_phase = off ? align - off : 0;
1599 }
727a31fa 1600
3f9b14ff
SB
1601 /* Set TREE_USED on all variables in the local_decls. */
1602 FOR_EACH_LOCAL_DECL (cfun, i, var)
1603 TREE_USED (var) = 1;
1604 /* Clear TREE_USED on all variables associated with a block scope. */
1605 clear_tree_used (DECL_INITIAL (current_function_decl));
1606
ff28a94d 1607 init_vars_expansion ();
7d69de61 1608
70b5e7dc 1609 ssa_name_decls = pointer_map_create ();
4e3825db
MM
1610 for (i = 0; i < SA.map->num_partitions; i++)
1611 {
1612 tree var = partition_to_var (SA.map, i);
1613
ea057359 1614 gcc_assert (!virtual_operand_p (var));
70b5e7dc
RG
1615
1616 /* Assign decls to each SSA name partition, share decls for partitions
1617 we could have coalesced (those with the same type). */
1618 if (SSA_NAME_VAR (var) == NULL_TREE)
1619 {
1620 void **slot = pointer_map_insert (ssa_name_decls, TREE_TYPE (var));
1621 if (!*slot)
1622 *slot = (void *) create_tmp_reg (TREE_TYPE (var), NULL);
1623 replace_ssa_name_symbol (var, (tree) *slot);
1624 }
1625
cfb9edba
EB
1626 /* Always allocate space for partitions based on VAR_DECLs. But for
1627 those based on PARM_DECLs or RESULT_DECLs and which matter for the
1628 debug info, there is no need to do so if optimization is disabled
1629 because all the SSA_NAMEs based on these DECLs have been coalesced
1630 into a single partition, which is thus assigned the canonical RTL
1631 location of the DECLs. */
4e3825db
MM
1632 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1633 expand_one_var (var, true, true);
cfb9edba 1634 else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize)
4e3825db
MM
1635 {
1636 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1637 contain the default def (representing the parm or result itself)
1638 we don't do anything here. But those which don't contain the
1639 default def (representing a temporary based on the parm/result)
1640 we need to allocate space just like for normal VAR_DECLs. */
1641 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1642 {
1643 expand_one_var (var, true, true);
1644 gcc_assert (SA.partition_to_pseudo[i]);
1645 }
1646 }
1647 }
70b5e7dc 1648 pointer_map_destroy (ssa_name_decls);
4e3825db 1649
f6bc1c4a
HS
1650 if (flag_stack_protect == SPCT_FLAG_STRONG)
1651 FOR_EACH_LOCAL_DECL (cfun, i, var)
1652 if (!is_global_var (var))
1653 {
1654 tree var_type = TREE_TYPE (var);
1655 /* Examine local referenced variables that have their addresses taken,
1656 contain an array, or are arrays. */
1657 if (TREE_CODE (var) == VAR_DECL
1658 && (TREE_CODE (var_type) == ARRAY_TYPE
1659 || TREE_ADDRESSABLE (var)
1660 || (RECORD_OR_UNION_TYPE_P (var_type)
1661 && record_or_union_type_has_array_p (var_type))))
1662 {
1663 gen_stack_protect_signal = true;
1664 break;
1665 }
1666 }
1667
cb91fab0 1668 /* At this point all variables on the local_decls list with TREE_USED
1f6d3a08 1669 set are not associated with any block scope. Lay them out. */
c021f10b 1670
9771b263 1671 len = vec_safe_length (cfun->local_decls);
c021f10b 1672 FOR_EACH_LOCAL_DECL (cfun, i, var)
1f6d3a08 1673 {
1f6d3a08
RH
1674 bool expand_now = false;
1675
4e3825db
MM
1676 /* Expanded above already. */
1677 if (is_gimple_reg (var))
eb7adebc
MM
1678 {
1679 TREE_USED (var) = 0;
3adcf52c 1680 goto next;
eb7adebc 1681 }
1f6d3a08
RH
1682 /* We didn't set a block for static or extern because it's hard
1683 to tell the difference between a global variable (re)declared
1684 in a local scope, and one that's really declared there to
1685 begin with. And it doesn't really matter much, since we're
1686 not giving them stack space. Expand them now. */
4e3825db 1687 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1f6d3a08
RH
1688 expand_now = true;
1689
ee2e8462
EB
1690 /* Expand variables not associated with any block now. Those created by
1691 the optimizers could be live anywhere in the function. Those that
1692 could possibly have been scoped originally and detached from their
1693 block will have their allocation deferred so we coalesce them with
1694 others when optimization is enabled. */
1f6d3a08
RH
1695 else if (TREE_USED (var))
1696 expand_now = true;
1697
1698 /* Finally, mark all variables on the list as used. We'll use
1699 this in a moment when we expand those associated with scopes. */
1700 TREE_USED (var) = 1;
1701
1702 if (expand_now)
3adcf52c
JM
1703 expand_one_var (var, true, true);
1704
1705 next:
1706 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
802e9f8e 1707 {
3adcf52c
JM
1708 rtx rtl = DECL_RTL_IF_SET (var);
1709
1710 /* Keep artificial non-ignored vars in cfun->local_decls
1711 chain until instantiate_decls. */
1712 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 1713 add_local_decl (cfun, var);
6c6366f6 1714 else if (rtl == NULL_RTX)
c021f10b
NF
1715 /* If rtl isn't set yet, which can happen e.g. with
1716 -fstack-protector, retry before returning from this
1717 function. */
9771b263 1718 maybe_local_decls.safe_push (var);
802e9f8e 1719 }
1f6d3a08 1720 }
1f6d3a08 1721
c021f10b
NF
1722 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1723
1724 +-----------------+-----------------+
1725 | ...processed... | ...duplicates...|
1726 +-----------------+-----------------+
1727 ^
1728 +-- LEN points here.
1729
1730 We just want the duplicates, as those are the artificial
1731 non-ignored vars that we want to keep until instantiate_decls.
1732 Move them down and truncate the array. */
9771b263
DN
1733 if (!vec_safe_is_empty (cfun->local_decls))
1734 cfun->local_decls->block_remove (0, len);
c021f10b 1735
1f6d3a08
RH
1736 /* At this point, all variables within the block tree with TREE_USED
1737 set are actually used by the optimized function. Lay them out. */
1738 expand_used_vars_for_block (outer_block, true);
1739
1740 if (stack_vars_num > 0)
1741 {
47598145 1742 add_scope_conflicts ();
1f6d3a08 1743
c22cacf3 1744 /* If stack protection is enabled, we don't share space between
7d69de61
RH
1745 vulnerable data and non-vulnerable data. */
1746 if (flag_stack_protect)
1747 add_stack_protection_conflicts ();
1748
c22cacf3 1749 /* Now that we have collected all stack variables, and have computed a
1f6d3a08
RH
1750 minimal interference graph, attempt to save some stack space. */
1751 partition_stack_vars ();
1752 if (dump_file)
1753 dump_stack_var_partition ();
7d69de61
RH
1754 }
1755
f6bc1c4a
HS
1756 switch (flag_stack_protect)
1757 {
1758 case SPCT_FLAG_ALL:
1759 create_stack_guard ();
1760 break;
1761
1762 case SPCT_FLAG_STRONG:
1763 if (gen_stack_protect_signal
1764 || cfun->calls_alloca || has_protected_decls)
1765 create_stack_guard ();
1766 break;
1767
1768 case SPCT_FLAG_DEFAULT:
1769 if (cfun->calls_alloca || has_protected_decls)
c3284718 1770 create_stack_guard ();
f6bc1c4a
HS
1771 break;
1772
1773 default:
1774 ;
1775 }
1f6d3a08 1776
7d69de61
RH
1777 /* Assign rtl to each variable based on these partitions. */
1778 if (stack_vars_num > 0)
1779 {
f3ddd692
JJ
1780 struct stack_vars_data data;
1781
6e1aa848
DN
1782 data.asan_vec = vNULL;
1783 data.asan_decl_vec = vNULL;
f3ddd692 1784
7d69de61
RH
1785 /* Reorder decls to be protected by iterating over the variables
1786 array multiple times, and allocating out of each phase in turn. */
c22cacf3 1787 /* ??? We could probably integrate this into the qsort we did
7d69de61
RH
1788 earlier, such that we naturally see these variables first,
1789 and thus naturally allocate things in the right order. */
1790 if (has_protected_decls)
1791 {
1792 /* Phase 1 contains only character arrays. */
f3ddd692 1793 expand_stack_vars (stack_protect_decl_phase_1, &data);
7d69de61
RH
1794
1795 /* Phase 2 contains other kinds of arrays. */
1796 if (flag_stack_protect == 2)
f3ddd692 1797 expand_stack_vars (stack_protect_decl_phase_2, &data);
7d69de61
RH
1798 }
1799
de5a5fa1 1800 if (flag_sanitize & SANITIZE_ADDRESS)
f3ddd692
JJ
1801 /* Phase 3, any partitions that need asan protection
1802 in addition to phase 1 and 2. */
1803 expand_stack_vars (asan_decl_phase_3, &data);
1804
9771b263 1805 if (!data.asan_vec.is_empty ())
f3ddd692
JJ
1806 {
1807 HOST_WIDE_INT prev_offset = frame_offset;
1808 HOST_WIDE_INT offset
1809 = alloc_stack_frame_space (ASAN_RED_ZONE_SIZE,
1810 ASAN_RED_ZONE_SIZE);
9771b263
DN
1811 data.asan_vec.safe_push (prev_offset);
1812 data.asan_vec.safe_push (offset);
f3ddd692
JJ
1813
1814 var_end_seq
1815 = asan_emit_stack_protection (virtual_stack_vars_rtx,
9771b263 1816 data.asan_vec.address (),
c3284718 1817 data.asan_decl_vec.address (),
9771b263 1818 data.asan_vec.length ());
f3ddd692
JJ
1819 }
1820
1821 expand_stack_vars (NULL, &data);
1822
9771b263
DN
1823 data.asan_vec.release ();
1824 data.asan_decl_vec.release ();
1f6d3a08
RH
1825 }
1826
3f9b14ff
SB
1827 fini_vars_expansion ();
1828
6c6366f6
JJ
1829 /* If there were any artificial non-ignored vars without rtl
1830 found earlier, see if deferred stack allocation hasn't assigned
1831 rtl to them. */
9771b263 1832 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
6c6366f6 1833 {
6c6366f6
JJ
1834 rtx rtl = DECL_RTL_IF_SET (var);
1835
6c6366f6
JJ
1836 /* Keep artificial non-ignored vars in cfun->local_decls
1837 chain until instantiate_decls. */
1838 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 1839 add_local_decl (cfun, var);
6c6366f6 1840 }
9771b263 1841 maybe_local_decls.release ();
6c6366f6 1842
1f6d3a08
RH
1843 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1844 if (STACK_ALIGNMENT_NEEDED)
1845 {
1846 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1847 if (!FRAME_GROWS_DOWNWARD)
1848 frame_offset += align - 1;
1849 frame_offset &= -align;
1850 }
f3ddd692
JJ
1851
1852 return var_end_seq;
727a31fa
RH
1853}
1854
1855
b7211528
SB
1856/* If we need to produce a detailed dump, print the tree representation
1857 for STMT to the dump file. SINCE is the last RTX after which the RTL
1858 generated for STMT should have been appended. */
1859
1860static void
726a989a 1861maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
b7211528
SB
1862{
1863 if (dump_file && (dump_flags & TDF_DETAILS))
1864 {
1865 fprintf (dump_file, "\n;; ");
b5b8b0ac
AO
1866 print_gimple_stmt (dump_file, stmt, 0,
1867 TDF_SLIM | (dump_flags & TDF_LINENO));
b7211528
SB
1868 fprintf (dump_file, "\n");
1869
1870 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1871 }
1872}
1873
8b11009b
ZD
1874/* Maps the blocks that do not contain tree labels to rtx labels. */
1875
1876static struct pointer_map_t *lab_rtx_for_bb;
1877
a9b77cd1
ZD
1878/* Returns the label_rtx expression for a label starting basic block BB. */
1879
1880static rtx
726a989a 1881label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
a9b77cd1 1882{
726a989a
RB
1883 gimple_stmt_iterator gsi;
1884 tree lab;
1885 gimple lab_stmt;
8b11009b 1886 void **elt;
a9b77cd1
ZD
1887
1888 if (bb->flags & BB_RTL)
1889 return block_label (bb);
1890
8b11009b
ZD
1891 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1892 if (elt)
ae50c0cb 1893 return (rtx) *elt;
8b11009b
ZD
1894
1895 /* Find the tree label if it is present. */
b8698a0f 1896
726a989a 1897 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
a9b77cd1 1898 {
726a989a
RB
1899 lab_stmt = gsi_stmt (gsi);
1900 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
a9b77cd1
ZD
1901 break;
1902
726a989a 1903 lab = gimple_label_label (lab_stmt);
a9b77cd1
ZD
1904 if (DECL_NONLOCAL (lab))
1905 break;
1906
1907 return label_rtx (lab);
1908 }
1909
8b11009b
ZD
1910 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1911 *elt = gen_label_rtx ();
ae50c0cb 1912 return (rtx) *elt;
a9b77cd1
ZD
1913}
1914
726a989a 1915
529ff441
MM
1916/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1917 of a basic block where we just expanded the conditional at the end,
315adeda
MM
1918 possibly clean up the CFG and instruction sequence. LAST is the
1919 last instruction before the just emitted jump sequence. */
529ff441
MM
1920
1921static void
315adeda 1922maybe_cleanup_end_of_block (edge e, rtx last)
529ff441
MM
1923{
1924 /* Special case: when jumpif decides that the condition is
1925 trivial it emits an unconditional jump (and the necessary
1926 barrier). But we still have two edges, the fallthru one is
1927 wrong. purge_dead_edges would clean this up later. Unfortunately
1928 we have to insert insns (and split edges) before
1929 find_many_sub_basic_blocks and hence before purge_dead_edges.
1930 But splitting edges might create new blocks which depend on the
1931 fact that if there are two edges there's no barrier. So the
1932 barrier would get lost and verify_flow_info would ICE. Instead
1933 of auditing all edge splitters to care for the barrier (which
1934 normally isn't there in a cleaned CFG), fix it here. */
1935 if (BARRIER_P (get_last_insn ()))
1936 {
529ff441
MM
1937 rtx insn;
1938 remove_edge (e);
1939 /* Now, we have a single successor block, if we have insns to
1940 insert on the remaining edge we potentially will insert
1941 it at the end of this block (if the dest block isn't feasible)
1942 in order to avoid splitting the edge. This insertion will take
1943 place in front of the last jump. But we might have emitted
1944 multiple jumps (conditional and one unconditional) to the
1945 same destination. Inserting in front of the last one then
1946 is a problem. See PR 40021. We fix this by deleting all
1947 jumps except the last unconditional one. */
1948 insn = PREV_INSN (get_last_insn ());
1949 /* Make sure we have an unconditional jump. Otherwise we're
1950 confused. */
1951 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
315adeda 1952 for (insn = PREV_INSN (insn); insn != last;)
529ff441
MM
1953 {
1954 insn = PREV_INSN (insn);
1955 if (JUMP_P (NEXT_INSN (insn)))
90eb3e33 1956 {
8a269cb7 1957 if (!any_condjump_p (NEXT_INSN (insn)))
90eb3e33
JJ
1958 {
1959 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1960 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1961 }
1962 delete_insn (NEXT_INSN (insn));
1963 }
529ff441
MM
1964 }
1965 }
1966}
1967
726a989a 1968/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
80c7a9eb
RH
1969 Returns a new basic block if we've terminated the current basic
1970 block and created a new one. */
1971
1972static basic_block
726a989a 1973expand_gimple_cond (basic_block bb, gimple stmt)
80c7a9eb
RH
1974{
1975 basic_block new_bb, dest;
1976 edge new_edge;
1977 edge true_edge;
1978 edge false_edge;
b7211528 1979 rtx last2, last;
28ed065e
MM
1980 enum tree_code code;
1981 tree op0, op1;
1982
1983 code = gimple_cond_code (stmt);
1984 op0 = gimple_cond_lhs (stmt);
1985 op1 = gimple_cond_rhs (stmt);
1986 /* We're sometimes presented with such code:
1987 D.123_1 = x < y;
1988 if (D.123_1 != 0)
1989 ...
1990 This would expand to two comparisons which then later might
1991 be cleaned up by combine. But some pattern matchers like if-conversion
1992 work better when there's only one compare, so make up for this
 1993 here as a special exception if TER would have made the same change. */
31348d52 1994 if (SA.values
28ed065e 1995 && TREE_CODE (op0) == SSA_NAME
31348d52
RB
1996 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
1997 && TREE_CODE (op1) == INTEGER_CST
1998 && ((gimple_cond_code (stmt) == NE_EXPR
1999 && integer_zerop (op1))
2000 || (gimple_cond_code (stmt) == EQ_EXPR
2001 && integer_onep (op1)))
28ed065e
MM
2002 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2003 {
2004 gimple second = SSA_NAME_DEF_STMT (op0);
e83f4b68 2005 if (gimple_code (second) == GIMPLE_ASSIGN)
28ed065e 2006 {
e83f4b68
MM
2007 enum tree_code code2 = gimple_assign_rhs_code (second);
2008 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2009 {
2010 code = code2;
2011 op0 = gimple_assign_rhs1 (second);
2012 op1 = gimple_assign_rhs2 (second);
2013 }
2014 /* If jumps are cheap turn some more codes into
2015 jumpy sequences. */
2016 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
2017 {
2018 if ((code2 == BIT_AND_EXPR
2019 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2020 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2021 || code2 == TRUTH_AND_EXPR)
2022 {
2023 code = TRUTH_ANDIF_EXPR;
2024 op0 = gimple_assign_rhs1 (second);
2025 op1 = gimple_assign_rhs2 (second);
2026 }
2027 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2028 {
2029 code = TRUTH_ORIF_EXPR;
2030 op0 = gimple_assign_rhs1 (second);
2031 op1 = gimple_assign_rhs2 (second);
2032 }
2033 }
28ed065e
MM
2034 }
2035 }
b7211528
SB
2036
2037 last2 = last = get_last_insn ();
80c7a9eb
RH
2038
2039 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5368224f 2040 set_curr_insn_location (gimple_location (stmt));
80c7a9eb
RH
2041
2042 /* These flags have no purpose in RTL land. */
2043 true_edge->flags &= ~EDGE_TRUE_VALUE;
2044 false_edge->flags &= ~EDGE_FALSE_VALUE;
2045
2046 /* We can either have a pure conditional jump with one fallthru edge or
2047 two-way jump that needs to be decomposed into two basic blocks. */
a9b77cd1 2048 if (false_edge->dest == bb->next_bb)
80c7a9eb 2049 {
40e90eac
JJ
2050 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2051 true_edge->probability);
726a989a 2052 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2f13f2de 2053 if (true_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2054 set_curr_insn_location (true_edge->goto_locus);
a9b77cd1 2055 false_edge->flags |= EDGE_FALLTHRU;
315adeda 2056 maybe_cleanup_end_of_block (false_edge, last);
80c7a9eb
RH
2057 return NULL;
2058 }
a9b77cd1 2059 if (true_edge->dest == bb->next_bb)
80c7a9eb 2060 {
40e90eac
JJ
2061 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2062 false_edge->probability);
726a989a 2063 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2f13f2de 2064 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2065 set_curr_insn_location (false_edge->goto_locus);
a9b77cd1 2066 true_edge->flags |= EDGE_FALLTHRU;
315adeda 2067 maybe_cleanup_end_of_block (true_edge, last);
80c7a9eb
RH
2068 return NULL;
2069 }
80c7a9eb 2070
40e90eac
JJ
2071 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2072 true_edge->probability);
80c7a9eb 2073 last = get_last_insn ();
2f13f2de 2074 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2075 set_curr_insn_location (false_edge->goto_locus);
a9b77cd1 2076 emit_jump (label_rtx_for_bb (false_edge->dest));
80c7a9eb
RH
2077
2078 BB_END (bb) = last;
2079 if (BARRIER_P (BB_END (bb)))
2080 BB_END (bb) = PREV_INSN (BB_END (bb));
2081 update_bb_for_insn (bb);
2082
2083 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2084 dest = false_edge->dest;
2085 redirect_edge_succ (false_edge, new_bb);
2086 false_edge->flags |= EDGE_FALLTHRU;
2087 new_bb->count = false_edge->count;
2088 new_bb->frequency = EDGE_FREQUENCY (false_edge);
7d776ee2
RG
2089 if (current_loops && bb->loop_father)
2090 add_bb_to_loop (new_bb, bb->loop_father);
80c7a9eb
RH
2091 new_edge = make_edge (new_bb, dest, 0);
2092 new_edge->probability = REG_BR_PROB_BASE;
2093 new_edge->count = new_bb->count;
2094 if (BARRIER_P (BB_END (new_bb)))
2095 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2096 update_bb_for_insn (new_bb);
2097
726a989a 2098 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
c22cacf3 2099
2f13f2de 2100 if (true_edge->goto_locus != UNKNOWN_LOCATION)
7787b4aa 2101 {
5368224f
DC
2102 set_curr_insn_location (true_edge->goto_locus);
2103 true_edge->goto_locus = curr_insn_location ();
7787b4aa 2104 }
7787b4aa 2105
80c7a9eb
RH
2106 return new_bb;
2107}
2108
0a35513e
AH
2109/* Mark all calls that can have a transaction restart. */
2110
2111static void
2112mark_transaction_restart_calls (gimple stmt)
2113{
2114 struct tm_restart_node dummy;
2115 void **slot;
2116
2117 if (!cfun->gimple_df->tm_restart)
2118 return;
2119
2120 dummy.stmt = stmt;
2121 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
2122 if (slot)
2123 {
2124 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
2125 tree list = n->label_or_list;
2126 rtx insn;
2127
2128 for (insn = next_real_insn (get_last_insn ());
2129 !CALL_P (insn);
2130 insn = next_real_insn (insn))
2131 continue;
2132
2133 if (TREE_CODE (list) == LABEL_DECL)
2134 add_reg_note (insn, REG_TM, label_rtx (list));
2135 else
2136 for (; list ; list = TREE_CHAIN (list))
2137 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2138 }
2139}
2140
28ed065e
MM
2141/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2142 statement STMT. */
2143
2144static void
2145expand_call_stmt (gimple stmt)
2146{
25583c4f 2147 tree exp, decl, lhs;
e23817b3 2148 bool builtin_p;
e7925582 2149 size_t i;
28ed065e 2150
25583c4f
RS
2151 if (gimple_call_internal_p (stmt))
2152 {
2153 expand_internal_call (stmt);
2154 return;
2155 }
2156
28ed065e
MM
2157 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2158
2159 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
e23817b3
RG
2160 decl = gimple_call_fndecl (stmt);
2161 builtin_p = decl && DECL_BUILT_IN (decl);
2162
e7925582
EB
2163 /* If this is not a builtin function, the function type through which the
2164 call is made may be different from the type of the function. */
2165 if (!builtin_p)
2166 CALL_EXPR_FN (exp)
b25aa0e8
EB
2167 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2168 CALL_EXPR_FN (exp));
e7925582 2169
28ed065e
MM
2170 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2171 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2172
2173 for (i = 0; i < gimple_call_num_args (stmt); i++)
e23817b3
RG
2174 {
2175 tree arg = gimple_call_arg (stmt, i);
2176 gimple def;
2177 /* TER addresses into arguments of builtin functions so we have a
2178 chance to infer more correct alignment information. See PR39954. */
2179 if (builtin_p
2180 && TREE_CODE (arg) == SSA_NAME
2181 && (def = get_gimple_for_ssa_name (arg))
2182 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2183 arg = gimple_assign_rhs1 (def);
2184 CALL_EXPR_ARG (exp, i) = arg;
2185 }
28ed065e 2186
93f28ca7 2187 if (gimple_has_side_effects (stmt))
28ed065e
MM
2188 TREE_SIDE_EFFECTS (exp) = 1;
2189
93f28ca7 2190 if (gimple_call_nothrow_p (stmt))
28ed065e
MM
2191 TREE_NOTHROW (exp) = 1;
2192
2193 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2194 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
63d2a353
MM
2195 if (decl
2196 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13e49da9
TV
2197 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2198 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
63d2a353
MM
2199 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2200 else
2201 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
28ed065e
MM
2202 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2203 SET_EXPR_LOCATION (exp, gimple_location (stmt));
28ed065e 2204
ddb555ed
JJ
2205 /* Ensure RTL is created for debug args. */
2206 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2207 {
9771b263 2208 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
ddb555ed
JJ
2209 unsigned int ix;
2210 tree dtemp;
2211
2212 if (debug_args)
9771b263 2213 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
ddb555ed
JJ
2214 {
2215 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2216 expand_debug_expr (dtemp);
2217 }
2218 }
2219
25583c4f 2220 lhs = gimple_call_lhs (stmt);
28ed065e
MM
2221 if (lhs)
2222 expand_assignment (lhs, exp, false);
2223 else
2224 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
0a35513e
AH
2225
2226 mark_transaction_restart_calls (stmt);
28ed065e
MM
2227}
2228
862d0b35
DN
2229
2230/* Generate RTL for an asm statement (explicit assembler code).
2231 STRING is a STRING_CST node containing the assembler code text,
2232 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2233 insn is volatile; don't optimize it. */
2234
2235static void
2236expand_asm_loc (tree string, int vol, location_t locus)
2237{
2238 rtx body;
2239
2240 if (TREE_CODE (string) == ADDR_EXPR)
2241 string = TREE_OPERAND (string, 0);
2242
2243 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2244 ggc_strdup (TREE_STRING_POINTER (string)),
2245 locus);
2246
2247 MEM_VOLATILE_P (body) = vol;
2248
2249 emit_insn (body);
2250}
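A hedged example (not from the sources; the mnemonic is target-dependent and only illustrative) of the two asm flavours: a basic asm statement is routed to expand_asm_loc above, while the extended form with operands goes through expand_asm_operands further down.

void
asm_flavours_example (int x)
{
  __asm__ volatile ("nop");            /* basic asm: gimple_asm_input_p, expand_asm_loc */
  __asm__ volatile ("" : "+r" (x));    /* extended asm: expand_asm_operands             */
}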
2251
2252/* Return the number of times character C occurs in string S. */
2253static int
2254n_occurrences (int c, const char *s)
2255{
2256 int n = 0;
2257 while (*s)
2258 n += (*s++ == c);
2259 return n;
2260}
2261
2262/* A subroutine of expand_asm_operands. Check that all operands have
2263 the same number of alternatives. Return true if so. */
2264
2265static bool
2266check_operand_nalternatives (tree outputs, tree inputs)
2267{
2268 if (outputs || inputs)
2269 {
2270 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
2271 int nalternatives
2272 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
2273 tree next = inputs;
2274
2275 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2276 {
2277 error ("too many alternatives in %<asm%>");
2278 return false;
2279 }
2280
2281 tmp = outputs;
2282 while (tmp)
2283 {
2284 const char *constraint
2285 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2286
2287 if (n_occurrences (',', constraint) != nalternatives)
2288 {
2289 error ("operand constraints for %<asm%> differ "
2290 "in number of alternatives");
2291 return false;
2292 }
2293
2294 if (TREE_CHAIN (tmp))
2295 tmp = TREE_CHAIN (tmp);
2296 else
2297 tmp = next, next = 0;
2298 }
2299 }
2300
2301 return true;
2302}
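An illustrative example (made up, not from the sources) of the multi-alternative rule this function enforces:

void
alternatives_example (int *p, int v)
{
  /* Accepted: "=r,m" and "ri,ri" both list two alternatives.  */
  __asm__ ("" : "=r,m" (*p) : "ri,ri" (v));

  /* Rejected with "operand constraints for 'asm' differ in number of
     alternatives": "=r,m" lists two alternatives but "r" only one.  */
  /* __asm__ ("" : "=r,m" (*p) : "r" (v));  */
}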
2303
2304/* Check for overlap between registers marked in CLOBBERED_REGS and
 2305 anything inappropriate in T. Emit an error and return true if a
 2306 conflict is found, false otherwise. */
2307
2308static bool
2309tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2310{
2311 /* Conflicts between asm-declared register variables and the clobber
2312 list are not allowed. */
2313 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2314
2315 if (overlap)
2316 {
2317 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2318 DECL_NAME (overlap));
2319
2320 /* Reset registerness to stop multiple errors emitted for a single
2321 variable. */
2322 DECL_REGISTER (overlap) = 0;
2323 return true;
2324 }
2325
2326 return false;
2327}
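A sketch of the conflict this check diagnoses, assuming an x86 target for the register name; the example is not taken from the sources.

void
clobber_conflict_example (void)
{
  register int counter __asm__ ("ebx") = 0;

  /* Using COUNTER as an operand while also clobbering "ebx" triggers
     "asm-specifier for variable 'counter' conflicts with asm clobber list".  */
  /* __asm__ volatile ("" : "+r" (counter) : : "ebx");  */
  (void) counter;
}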
2328
2329/* Generate RTL for an asm statement with arguments.
2330 STRING is the instruction template.
2331 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2332 Each output or input has an expression in the TREE_VALUE and
2333 a tree list in TREE_PURPOSE which in turn contains a constraint
2334 name in TREE_VALUE (or NULL_TREE) and a constraint string
2335 in TREE_PURPOSE.
2336 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2337 that is clobbered by this insn.
2338
2339 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2340 should be the fallthru basic block of the asm goto.
2341
2342 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2343 Some elements of OUTPUTS may be replaced with trees representing temporary
2344 values. The caller should copy those temporary values to the originally
2345 specified lvalues.
2346
2347 VOL nonzero means the insn is volatile; don't optimize it. */
2348
2349static void
2350expand_asm_operands (tree string, tree outputs, tree inputs,
2351 tree clobbers, tree labels, basic_block fallthru_bb,
2352 int vol, location_t locus)
2353{
2354 rtvec argvec, constraintvec, labelvec;
2355 rtx body;
2356 int ninputs = list_length (inputs);
2357 int noutputs = list_length (outputs);
2358 int nlabels = list_length (labels);
2359 int ninout;
2360 int nclobbers;
2361 HARD_REG_SET clobbered_regs;
2362 int clobber_conflict_found = 0;
2363 tree tail;
2364 tree t;
2365 int i;
2366 /* Vector of RTX's of evaluated output operands. */
2367 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
2368 int *inout_opnum = XALLOCAVEC (int, noutputs);
2369 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
2370 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
2371 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
2372 int old_generating_concat_p = generating_concat_p;
2373 rtx fallthru_label = NULL_RTX;
2374
2375 /* An ASM with no outputs needs to be treated as volatile, for now. */
2376 if (noutputs == 0)
2377 vol = 1;
2378
2379 if (! check_operand_nalternatives (outputs, inputs))
2380 return;
2381
2382 string = resolve_asm_operand_names (string, outputs, inputs, labels);
2383
2384 /* Collect constraints. */
2385 i = 0;
2386 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
2387 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2388 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
2389 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2390
2391 /* Sometimes we wish to automatically clobber registers across an asm.
2392 Case in point is when the i386 backend moved from cc0 to a hard reg --
2393 maintaining source-level compatibility means automatically clobbering
2394 the flags register. */
2395 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
2396
2397 /* Count the number of meaningful clobbered registers, ignoring what
2398 we would ignore later. */
2399 nclobbers = 0;
2400 CLEAR_HARD_REG_SET (clobbered_regs);
2401 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2402 {
2403 const char *regname;
2404 int nregs;
2405
2406 if (TREE_VALUE (tail) == error_mark_node)
2407 return;
2408 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2409
2410 i = decode_reg_name_and_count (regname, &nregs);
2411 if (i == -4)
2412 ++nclobbers;
2413 else if (i == -2)
2414 error ("unknown register name %qs in %<asm%>", regname);
2415
2416 /* Mark clobbered registers. */
2417 if (i >= 0)
2418 {
2419 int reg;
2420
2421 for (reg = i; reg < i + nregs; reg++)
2422 {
2423 ++nclobbers;
2424
2425 /* Clobbering the PIC register is an error. */
2426 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2427 {
2428 error ("PIC register clobbered by %qs in %<asm%>", regname);
2429 return;
2430 }
2431
2432 SET_HARD_REG_BIT (clobbered_regs, reg);
2433 }
2434 }
2435 }
2436
2437 /* First pass over inputs and outputs checks validity and sets
2438 mark_addressable if needed. */
2439
2440 ninout = 0;
2441 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2442 {
2443 tree val = TREE_VALUE (tail);
2444 tree type = TREE_TYPE (val);
2445 const char *constraint;
2446 bool is_inout;
2447 bool allows_reg;
2448 bool allows_mem;
2449
2450 /* If there's an erroneous arg, emit no insn. */
2451 if (type == error_mark_node)
2452 return;
2453
2454 /* Try to parse the output constraint. If that fails, there's
2455 no point in going further. */
2456 constraint = constraints[i];
2457 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2458 &allows_mem, &allows_reg, &is_inout))
2459 return;
2460
2461 if (! allows_reg
2462 && (allows_mem
2463 || is_inout
2464 || (DECL_P (val)
2465 && REG_P (DECL_RTL (val))
2466 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2467 mark_addressable (val);
2468
2469 if (is_inout)
2470 ninout++;
2471 }
2472
2473 ninputs += ninout;
2474 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
2475 {
2476 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2477 return;
2478 }
2479
2480 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
2481 {
2482 bool allows_reg, allows_mem;
2483 const char *constraint;
2484
2485 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
2486 would get VOIDmode and that could cause a crash in reload. */
2487 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
2488 return;
2489
2490 constraint = constraints[i + noutputs];
2491 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2492 constraints, &allows_mem, &allows_reg))
2493 return;
2494
2495 if (! allows_reg && allows_mem)
2496 mark_addressable (TREE_VALUE (tail));
2497 }
2498
2499 /* Second pass evaluates arguments. */
2500
2501 /* Make sure stack is consistent for asm goto. */
2502 if (nlabels > 0)
2503 do_pending_stack_adjust ();
2504
2505 ninout = 0;
2506 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2507 {
2508 tree val = TREE_VALUE (tail);
2509 tree type = TREE_TYPE (val);
2510 bool is_inout;
2511 bool allows_reg;
2512 bool allows_mem;
2513 rtx op;
2514 bool ok;
2515
2516 ok = parse_output_constraint (&constraints[i], i, ninputs,
2517 noutputs, &allows_mem, &allows_reg,
2518 &is_inout);
2519 gcc_assert (ok);
2520
2521 /* If an output operand is not a decl or indirect ref and our constraint
2522 allows a register, make a temporary to act as an intermediate.
2523 Make the asm insn write into that, then our caller will copy it to
2524 the real output operand. Likewise for promoted variables. */
2525
2526 generating_concat_p = 0;
2527
2528 real_output_rtx[i] = NULL_RTX;
2529 if ((TREE_CODE (val) == INDIRECT_REF
2530 && allows_mem)
2531 || (DECL_P (val)
2532 && (allows_mem || REG_P (DECL_RTL (val)))
2533 && ! (REG_P (DECL_RTL (val))
2534 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2535 || ! allows_reg
2536 || is_inout)
2537 {
2538 op = expand_expr (val, NULL_RTX, VOIDmode,
2539 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2540 if (MEM_P (op))
2541 op = validize_mem (op);
2542
2543 if (! allows_reg && !MEM_P (op))
2544 error ("output number %d not directly addressable", i);
2545 if ((! allows_mem && MEM_P (op))
2546 || GET_CODE (op) == CONCAT)
2547 {
2548 real_output_rtx[i] = op;
2549 op = gen_reg_rtx (GET_MODE (op));
2550 if (is_inout)
2551 emit_move_insn (op, real_output_rtx[i]);
2552 }
2553 }
2554 else
2555 {
2556 op = assign_temp (type, 0, 1);
2557 op = validize_mem (op);
2558 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
2559 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
2560 TREE_VALUE (tail) = make_tree (type, op);
2561 }
2562 output_rtx[i] = op;
2563
2564 generating_concat_p = old_generating_concat_p;
2565
2566 if (is_inout)
2567 {
2568 inout_mode[ninout] = TYPE_MODE (type);
2569 inout_opnum[ninout++] = i;
2570 }
2571
2572 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2573 clobber_conflict_found = 1;
2574 }
2575
2576 /* Make vectors for the expression-rtx, constraint strings,
2577 and named operands. */
2578
2579 argvec = rtvec_alloc (ninputs);
2580 constraintvec = rtvec_alloc (ninputs);
2581 labelvec = rtvec_alloc (nlabels);
2582
2583 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
2584 : GET_MODE (output_rtx[0])),
2585 ggc_strdup (TREE_STRING_POINTER (string)),
2586 empty_string, 0, argvec, constraintvec,
2587 labelvec, locus);
2588
2589 MEM_VOLATILE_P (body) = vol;
2590
2591 /* Eval the inputs and put them into ARGVEC.
2592 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
2593
2594 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
2595 {
2596 bool allows_reg, allows_mem;
2597 const char *constraint;
2598 tree val, type;
2599 rtx op;
2600 bool ok;
2601
2602 constraint = constraints[i + noutputs];
2603 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2604 constraints, &allows_mem, &allows_reg);
2605 gcc_assert (ok);
2606
2607 generating_concat_p = 0;
2608
2609 val = TREE_VALUE (tail);
2610 type = TREE_TYPE (val);
2611 /* EXPAND_INITIALIZER will not generate code for valid initializer
2612 constants, but will still generate code for other types of operand.
2613 This is the behavior we want for constant constraints. */
2614 op = expand_expr (val, NULL_RTX, VOIDmode,
2615 allows_reg ? EXPAND_NORMAL
2616 : allows_mem ? EXPAND_MEMORY
2617 : EXPAND_INITIALIZER);
2618
2619 /* Never pass a CONCAT to an ASM. */
2620 if (GET_CODE (op) == CONCAT)
2621 op = force_reg (GET_MODE (op), op);
2622 else if (MEM_P (op))
2623 op = validize_mem (op);
2624
2625 if (asm_operand_ok (op, constraint, NULL) <= 0)
2626 {
2627 if (allows_reg && TYPE_MODE (type) != BLKmode)
2628 op = force_reg (TYPE_MODE (type), op);
2629 else if (!allows_mem)
2630 warning (0, "asm operand %d probably doesn%'t match constraints",
2631 i + noutputs);
2632 else if (MEM_P (op))
2633 {
2634 /* We won't recognize either volatile memory or memory
 2635 with a queued address as a valid memory_operand
2636 at this point. Ignore it: clearly this *is* a memory. */
2637 }
2638 else
2639 gcc_unreachable ();
2640 }
2641
2642 generating_concat_p = old_generating_concat_p;
2643 ASM_OPERANDS_INPUT (body, i) = op;
2644
2645 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
2646 = gen_rtx_ASM_INPUT (TYPE_MODE (type),
2647 ggc_strdup (constraints[i + noutputs]));
2648
2649 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2650 clobber_conflict_found = 1;
2651 }
2652
2653 /* Protect all the operands from the queue now that they have all been
2654 evaluated. */
2655
2656 generating_concat_p = 0;
2657
2658 /* For in-out operands, copy output rtx to input rtx. */
2659 for (i = 0; i < ninout; i++)
2660 {
2661 int j = inout_opnum[i];
2662 char buffer[16];
2663
2664 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
2665 = output_rtx[j];
2666
2667 sprintf (buffer, "%d", j);
2668 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
2669 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
2670 }
2671
2672 /* Copy labels to the vector. */
2673 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
2674 {
2675 rtx r;
2676 /* If asm goto has any labels in the fallthru basic block, use
2677 a label that we emit immediately after the asm goto. Expansion
2678 may insert further instructions into the same basic block after
2679 asm goto and if we don't do this, insertion of instructions on
2680 the fallthru edge might misbehave. See PR58670. */
2681 if (fallthru_bb
2682 && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
2683 {
2684 if (fallthru_label == NULL_RTX)
2685 fallthru_label = gen_label_rtx ();
2686 r = fallthru_label;
2687 }
2688 else
2689 r = label_rtx (TREE_VALUE (tail));
2690 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
2691 }
2692
2693 generating_concat_p = old_generating_concat_p;
2694
2695 /* Now, for each output, construct an rtx
2696 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
2697 ARGVEC CONSTRAINTS OPNAMES))
2698 If there is more than one, put them inside a PARALLEL. */
2699
2700 if (nlabels > 0 && nclobbers == 0)
2701 {
2702 gcc_assert (noutputs == 0);
2703 emit_jump_insn (body);
2704 }
2705 else if (noutputs == 0 && nclobbers == 0)
2706 {
2707 /* No output operands: put in a raw ASM_OPERANDS rtx. */
2708 emit_insn (body);
2709 }
2710 else if (noutputs == 1 && nclobbers == 0)
2711 {
2712 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
2713 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
2714 }
2715 else
2716 {
2717 rtx obody = body;
2718 int num = noutputs;
2719
2720 if (num == 0)
2721 num = 1;
2722
2723 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
2724
2725 /* For each output operand, store a SET. */
2726 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2727 {
2728 XVECEXP (body, 0, i)
2729 = gen_rtx_SET (VOIDmode,
2730 output_rtx[i],
2731 gen_rtx_ASM_OPERANDS
2732 (GET_MODE (output_rtx[i]),
2733 ggc_strdup (TREE_STRING_POINTER (string)),
2734 ggc_strdup (constraints[i]),
2735 i, argvec, constraintvec, labelvec, locus));
2736
2737 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
2738 }
2739
2740 /* If there are no outputs (but there are some clobbers)
2741 store the bare ASM_OPERANDS into the PARALLEL. */
2742
2743 if (i == 0)
2744 XVECEXP (body, 0, i++) = obody;
2745
2746 /* Store (clobber REG) for each clobbered register specified. */
2747
2748 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2749 {
2750 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2751 int reg, nregs;
2752 int j = decode_reg_name_and_count (regname, &nregs);
2753 rtx clobbered_reg;
2754
2755 if (j < 0)
2756 {
2757 if (j == -3) /* `cc', which is not a register */
2758 continue;
2759
2760 if (j == -4) /* `memory', don't cache memory across asm */
2761 {
2762 XVECEXP (body, 0, i++)
2763 = gen_rtx_CLOBBER (VOIDmode,
2764 gen_rtx_MEM
2765 (BLKmode,
2766 gen_rtx_SCRATCH (VOIDmode)));
2767 continue;
2768 }
2769
2770 /* Ignore unknown register, error already signaled. */
2771 continue;
2772 }
2773
2774 for (reg = j; reg < j + nregs; reg++)
2775 {
2776 /* Use QImode since that's guaranteed to clobber just
 2777 one reg. */
2778 clobbered_reg = gen_rtx_REG (QImode, reg);
2779
2780 /* Do sanity check for overlap between clobbers and
2781 respectively input and outputs that hasn't been
2782 handled. Such overlap should have been detected and
2783 reported above. */
2784 if (!clobber_conflict_found)
2785 {
2786 int opno;
2787
2788 /* We test the old body (obody) contents to avoid
2789 tripping over the under-construction body. */
2790 for (opno = 0; opno < noutputs; opno++)
2791 if (reg_overlap_mentioned_p (clobbered_reg,
2792 output_rtx[opno]))
2793 internal_error
2794 ("asm clobber conflict with output operand");
2795
2796 for (opno = 0; opno < ninputs - ninout; opno++)
2797 if (reg_overlap_mentioned_p (clobbered_reg,
2798 ASM_OPERANDS_INPUT (obody,
2799 opno)))
2800 internal_error
2801 ("asm clobber conflict with input operand");
2802 }
2803
2804 XVECEXP (body, 0, i++)
2805 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
2806 }
2807 }
2808
2809 if (nlabels > 0)
2810 emit_jump_insn (body);
2811 else
2812 emit_insn (body);
2813 }
2814
2815 if (fallthru_label)
2816 emit_label (fallthru_label);
2817
2818 /* For any outputs that needed reloading into registers, spill them
2819 back to where they belong. */
2820 for (i = 0; i < noutputs; ++i)
2821 if (real_output_rtx[i])
2822 emit_move_insn (real_output_rtx[i], output_rtx[i]);
2823
2824 crtl->has_asm_statement = 1;
2825 free_temp_slots ();
2826}
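A minimal sketch (x86 mnemonic assumed, not from the sources) of the in-out handling above: a "+r" operand is recorded once as an output and then copied into the input vector with a matching-digit constraint by the inout_opnum/inout_mode loop.

void
inout_example (int x)
{
  /* Treated roughly like:  __asm__ ("incl %0" : "=r" (x) : "0" (x));  */
  __asm__ ("incl %0" : "+r" (x));
  (void) x;
}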
2827
2828
2829static void
2830expand_asm_stmt (gimple stmt)
2831{
2832 int noutputs;
2833 tree outputs, tail, t;
2834 tree *o;
2835 size_t i, n;
2836 const char *s;
2837 tree str, out, in, cl, labels;
2838 location_t locus = gimple_location (stmt);
2839 basic_block fallthru_bb = NULL;
2840
2841 /* Meh... convert the gimple asm operands into real tree lists.
2842 Eventually we should make all routines work on the vectors instead
2843 of relying on TREE_CHAIN. */
2844 out = NULL_TREE;
2845 n = gimple_asm_noutputs (stmt);
2846 if (n > 0)
2847 {
2848 t = out = gimple_asm_output_op (stmt, 0);
2849 for (i = 1; i < n; i++)
2850 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
2851 }
2852
2853 in = NULL_TREE;
2854 n = gimple_asm_ninputs (stmt);
2855 if (n > 0)
2856 {
2857 t = in = gimple_asm_input_op (stmt, 0);
2858 for (i = 1; i < n; i++)
2859 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
2860 }
2861
2862 cl = NULL_TREE;
2863 n = gimple_asm_nclobbers (stmt);
2864 if (n > 0)
2865 {
2866 t = cl = gimple_asm_clobber_op (stmt, 0);
2867 for (i = 1; i < n; i++)
2868 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
2869 }
2870
2871 labels = NULL_TREE;
2872 n = gimple_asm_nlabels (stmt);
2873 if (n > 0)
2874 {
2875 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
2876 if (fallthru)
2877 fallthru_bb = fallthru->dest;
2878 t = labels = gimple_asm_label_op (stmt, 0);
2879 for (i = 1; i < n; i++)
2880 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
2881 }
2882
2883 s = gimple_asm_string (stmt);
2884 str = build_string (strlen (s), s);
2885
2886 if (gimple_asm_input_p (stmt))
2887 {
2888 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
2889 return;
2890 }
2891
2892 outputs = out;
2893 noutputs = gimple_asm_noutputs (stmt);
 2894 /* o[I] is the place where output number I should be written. */
2895 o = (tree *) alloca (noutputs * sizeof (tree));
2896
2897 /* Record the contents of OUTPUTS before it is modified. */
2898 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2899 o[i] = TREE_VALUE (tail);
2900
2901 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
2902 OUTPUTS some trees for where the values were actually stored. */
2903 expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
2904 gimple_asm_volatile_p (stmt), locus);
2905
2906 /* Copy all the intermediate outputs into the specified outputs. */
2907 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2908 {
2909 if (o[i] != TREE_VALUE (tail))
2910 {
2911 expand_assignment (o[i], TREE_VALUE (tail), false);
2912 free_temp_slots ();
2913
2914 /* Restore the original value so that it's correct the next
2915 time we expand this function. */
2916 TREE_VALUE (tail) = o[i];
2917 }
2918 }
2919}
2920
2921/* Emit code to jump to the address
2922 specified by the pointer expression EXP. */
2923
2924static void
2925expand_computed_goto (tree exp)
2926{
2927 rtx x = expand_normal (exp);
2928
2929 x = convert_memory_address (Pmode, x);
2930
2931 do_pending_stack_adjust ();
2932 emit_indirect_jump (x);
2933}
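An illustrative computed goto (GNU C labels-as-values, not from the sources); expand_computed_goto turns the `goto *` into an indirect jump after converting the address to Pmode.

int
dispatch_example (int i)
{
  static void *targets[] = { &&op_add, &&op_done };

  goto *targets[i & 1];     /* expanded by expand_computed_goto */
 op_add:
  i += 1;
 op_done:
  return i;
}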
2934
2935/* Generate RTL code for a `goto' statement with target label LABEL.
2936 LABEL should be a LABEL_DECL tree node that was or will later be
2937 defined with `expand_label'. */
2938
2939static void
2940expand_goto (tree label)
2941{
2942#ifdef ENABLE_CHECKING
2943 /* Check for a nonlocal goto to a containing function. Should have
2944 gotten translated to __builtin_nonlocal_goto. */
2945 tree context = decl_function_context (label);
2946 gcc_assert (!context || context == current_function_decl);
2947#endif
2948
2949 emit_jump (label_rtx (label));
2950}
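A hedged GNU C example (not from the sources) of the case the ENABLE_CHECKING assertion rules out: per the comment above, a goto from a nested function to a label of its containing function should already have been translated to a __builtin_nonlocal_goto call by the time we get here, so expand_goto only ever sees labels of the current function.

void
nonlocal_goto_example (void)
{
  __label__ out;
  void inner (void) { goto out; }   /* becomes a nonlocal goto, not expand_goto */

  inner ();
 out:
  return;
}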
2951
2952/* Output a return with no value. */
2953
2954static void
2955expand_null_return_1 (void)
2956{
2957 clear_pending_stack_adjust ();
2958 do_pending_stack_adjust ();
2959 emit_jump (return_label);
2960}
2961
2962/* Generate RTL to return from the current function, with no value.
2963 (That is, we do not do anything about returning any value.) */
2964
2965void
2966expand_null_return (void)
2967{
2968 /* If this function was declared to return a value, but we
2969 didn't, clobber the return registers so that they are not
2970 propagated live to the rest of the function. */
2971 clobber_return_register ();
2972
2973 expand_null_return_1 ();
2974}
2975
2976/* Generate RTL to return from the current function, with value VAL. */
2977
2978static void
2979expand_value_return (rtx val)
2980{
2981 /* Copy the value to the return location unless it's already there. */
2982
2983 tree decl = DECL_RESULT (current_function_decl);
2984 rtx return_reg = DECL_RTL (decl);
2985 if (return_reg != val)
2986 {
2987 tree funtype = TREE_TYPE (current_function_decl);
2988 tree type = TREE_TYPE (decl);
2989 int unsignedp = TYPE_UNSIGNED (type);
2990 enum machine_mode old_mode = DECL_MODE (decl);
2991 enum machine_mode mode;
2992 if (DECL_BY_REFERENCE (decl))
2993 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
2994 else
2995 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
2996
2997 if (mode != old_mode)
2998 val = convert_modes (mode, old_mode, val, unsignedp);
2999
3000 if (GET_CODE (return_reg) == PARALLEL)
3001 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3002 else
3003 emit_move_insn (return_reg, val);
3004 }
3005
3006 expand_null_return_1 ();
3007}
3008
3009/* Generate RTL to evaluate the expression RETVAL and return it
3010 from the current function. */
3011
3012static void
3013expand_return (tree retval)
3014{
3015 rtx result_rtl;
3016 rtx val = 0;
3017 tree retval_rhs;
3018
3019 /* If function wants no value, give it none. */
3020 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3021 {
3022 expand_normal (retval);
3023 expand_null_return ();
3024 return;
3025 }
3026
3027 if (retval == error_mark_node)
3028 {
3029 /* Treat this like a return of no value from a function that
3030 returns a value. */
3031 expand_null_return ();
3032 return;
3033 }
3034 else if ((TREE_CODE (retval) == MODIFY_EXPR
3035 || TREE_CODE (retval) == INIT_EXPR)
3036 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3037 retval_rhs = TREE_OPERAND (retval, 1);
3038 else
3039 retval_rhs = retval;
3040
3041 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3042
3043 /* If we are returning the RESULT_DECL, then the value has already
3044 been stored into it, so we don't have to do anything special. */
3045 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3046 expand_value_return (result_rtl);
3047
3048 /* If the result is an aggregate that is being returned in one (or more)
3049 registers, load the registers here. */
3050
3051 else if (retval_rhs != 0
3052 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3053 && REG_P (result_rtl))
3054 {
3055 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3056 if (val)
3057 {
3058 /* Use the mode of the result value on the return register. */
3059 PUT_MODE (result_rtl, GET_MODE (val));
3060 expand_value_return (val);
3061 }
3062 else
3063 expand_null_return ();
3064 }
3065 else if (retval_rhs != 0
3066 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3067 && (REG_P (result_rtl)
3068 || (GET_CODE (result_rtl) == PARALLEL)))
3069 {
3070 /* Calculate the return value into a temporary (usually a pseudo
3071 reg). */
3072 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3073 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3074
3075 val = assign_temp (nt, 0, 1);
3076 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3077 val = force_not_mem (val);
3078 /* Return the calculated value. */
3079 expand_value_return (val);
3080 }
3081 else
3082 {
3083 /* No hard reg used; calculate value into hard return reg. */
3084 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3085 expand_value_return (result_rtl);
3086 }
3087}
3088
28ed065e
MM
3089/* A subroutine of expand_gimple_stmt, expanding one gimple statement
3090 STMT that doesn't require special handling for outgoing edges. That
 3091 is, no tailcalls and no GIMPLE_COND. */
3092
3093static void
3094expand_gimple_stmt_1 (gimple stmt)
3095{
3096 tree op0;
c82fee88 3097
5368224f 3098 set_curr_insn_location (gimple_location (stmt));
c82fee88 3099
28ed065e
MM
3100 switch (gimple_code (stmt))
3101 {
3102 case GIMPLE_GOTO:
3103 op0 = gimple_goto_dest (stmt);
3104 if (TREE_CODE (op0) == LABEL_DECL)
3105 expand_goto (op0);
3106 else
3107 expand_computed_goto (op0);
3108 break;
3109 case GIMPLE_LABEL:
3110 expand_label (gimple_label_label (stmt));
3111 break;
3112 case GIMPLE_NOP:
3113 case GIMPLE_PREDICT:
3114 break;
28ed065e
MM
3115 case GIMPLE_SWITCH:
3116 expand_case (stmt);
3117 break;
3118 case GIMPLE_ASM:
3119 expand_asm_stmt (stmt);
3120 break;
3121 case GIMPLE_CALL:
3122 expand_call_stmt (stmt);
3123 break;
3124
3125 case GIMPLE_RETURN:
3126 op0 = gimple_return_retval (stmt);
3127
3128 if (op0 && op0 != error_mark_node)
3129 {
3130 tree result = DECL_RESULT (current_function_decl);
3131
3132 /* If we are not returning the current function's RESULT_DECL,
3133 build an assignment to it. */
3134 if (op0 != result)
3135 {
3136 /* I believe that a function's RESULT_DECL is unique. */
3137 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3138
3139 /* ??? We'd like to use simply expand_assignment here,
3140 but this fails if the value is of BLKmode but the return
3141 decl is a register. expand_return has special handling
3142 for this combination, which eventually should move
3143 to common code. See comments there. Until then, let's
3144 build a modify expression :-/ */
3145 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3146 result, op0);
3147 }
3148 }
3149 if (!op0)
3150 expand_null_return ();
3151 else
3152 expand_return (op0);
3153 break;
3154
3155 case GIMPLE_ASSIGN:
3156 {
3157 tree lhs = gimple_assign_lhs (stmt);
3158
3159 /* Tree expand used to fiddle with |= and &= of two bitfield
3160 COMPONENT_REFs here. This can't happen with gimple, the LHS
3161 of binary assigns must be a gimple reg. */
3162
3163 if (TREE_CODE (lhs) != SSA_NAME
3164 || get_gimple_rhs_class (gimple_expr_code (stmt))
3165 == GIMPLE_SINGLE_RHS)
3166 {
3167 tree rhs = gimple_assign_rhs1 (stmt);
3168 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3169 == GIMPLE_SINGLE_RHS);
3170 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
3171 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
47598145
MM
3172 if (TREE_CLOBBER_P (rhs))
3173 /* This is a clobber to mark the going out of scope for
3174 this LHS. */
3175 ;
3176 else
3177 expand_assignment (lhs, rhs,
3178 gimple_assign_nontemporal_move_p (stmt));
28ed065e
MM
3179 }
3180 else
3181 {
3182 rtx target, temp;
3183 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
3184 struct separate_ops ops;
3185 bool promoted = false;
3186
3187 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3188 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3189 promoted = true;
3190
3191 ops.code = gimple_assign_rhs_code (stmt);
3192 ops.type = TREE_TYPE (lhs);
3193 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
3194 {
0354c0c7
BS
3195 case GIMPLE_TERNARY_RHS:
3196 ops.op2 = gimple_assign_rhs3 (stmt);
3197 /* Fallthru */
28ed065e
MM
3198 case GIMPLE_BINARY_RHS:
3199 ops.op1 = gimple_assign_rhs2 (stmt);
3200 /* Fallthru */
3201 case GIMPLE_UNARY_RHS:
3202 ops.op0 = gimple_assign_rhs1 (stmt);
3203 break;
3204 default:
3205 gcc_unreachable ();
3206 }
3207 ops.location = gimple_location (stmt);
3208
3209 /* If we want to use a nontemporal store, force the value to
3210 register first. If we store into a promoted register,
3211 don't directly expand to target. */
3212 temp = nontemporal || promoted ? NULL_RTX : target;
3213 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3214 EXPAND_NORMAL);
3215
3216 if (temp == target)
3217 ;
3218 else if (promoted)
3219 {
4e18a7d4 3220 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
28ed065e
MM
3221 /* If TEMP is a VOIDmode constant, use convert_modes to make
3222 sure that we properly convert it. */
3223 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3224 {
3225 temp = convert_modes (GET_MODE (target),
3226 TYPE_MODE (ops.type),
4e18a7d4 3227 temp, unsignedp);
28ed065e 3228 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4e18a7d4 3229 GET_MODE (target), temp, unsignedp);
28ed065e
MM
3230 }
3231
4e18a7d4 3232 convert_move (SUBREG_REG (target), temp, unsignedp);
28ed065e
MM
3233 }
3234 else if (nontemporal && emit_storent_insn (target, temp))
3235 ;
3236 else
3237 {
3238 temp = force_operand (temp, target);
3239 if (temp != target)
3240 emit_move_insn (target, temp);
3241 }
3242 }
3243 }
3244 break;
3245
3246 default:
3247 gcc_unreachable ();
3248 }
3249}
3250
3251/* Expand one gimple statement STMT and return the last RTL instruction
3252 before any of the newly generated ones.
3253
3254 In addition to generating the necessary RTL instructions this also
3255 sets REG_EH_REGION notes if necessary and sets the current source
3256 location for diagnostics. */
3257
3258static rtx
3259expand_gimple_stmt (gimple stmt)
3260{
28ed065e 3261 location_t saved_location = input_location;
c82fee88
EB
3262 rtx last = get_last_insn ();
3263 int lp_nr;
28ed065e 3264
28ed065e
MM
3265 gcc_assert (cfun);
3266
c82fee88
EB
3267 /* We need to save and restore the current source location so that errors
3268 discovered during expansion are emitted with the right location. But
3269 it would be better if the diagnostic routines used the source location
3270 embedded in the tree nodes rather than globals. */
28ed065e 3271 if (gimple_has_location (stmt))
c82fee88 3272 input_location = gimple_location (stmt);
28ed065e
MM
3273
3274 expand_gimple_stmt_1 (stmt);
c82fee88 3275
28ed065e
MM
3276 /* Free any temporaries used to evaluate this statement. */
3277 free_temp_slots ();
3278
3279 input_location = saved_location;
3280
3281 /* Mark all insns that may trap. */
1d65f45c
RH
3282 lp_nr = lookup_stmt_eh_lp (stmt);
3283 if (lp_nr)
28ed065e
MM
3284 {
3285 rtx insn;
3286 for (insn = next_real_insn (last); insn;
3287 insn = next_real_insn (insn))
3288 {
3289 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3290 /* If we want exceptions for non-call insns, any
3291 may_trap_p instruction may throw. */
3292 && GET_CODE (PATTERN (insn)) != CLOBBER
3293 && GET_CODE (PATTERN (insn)) != USE
1d65f45c
RH
3294 && insn_could_throw_p (insn))
3295 make_reg_eh_region_note (insn, 0, lp_nr);
28ed065e
MM
3296 }
3297 }
3298
3299 return last;
3300}
3301
726a989a 3302/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
224e770b
RH
3303 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3304 generated a tail call (something that might be denied by the ABI
cea49550
RH
3305 rules governing the call; see calls.c).
3306
3307 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3308 can still reach the rest of BB. The case here is __builtin_sqrt,
3309 where the NaN result goes through the external function (with a
3310 tailcall) and the normal result happens via a sqrt instruction. */
80c7a9eb
RH
3311
3312static basic_block
726a989a 3313expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
80c7a9eb 3314{
b7211528 3315 rtx last2, last;
224e770b 3316 edge e;
628f6a4e 3317 edge_iterator ei;
224e770b
RH
3318 int probability;
3319 gcov_type count;
80c7a9eb 3320
28ed065e 3321 last2 = last = expand_gimple_stmt (stmt);
80c7a9eb
RH
3322
3323 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
3324 if (CALL_P (last) && SIBLING_CALL_P (last))
3325 goto found;
80c7a9eb 3326
726a989a 3327 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 3328
cea49550 3329 *can_fallthru = true;
224e770b 3330 return NULL;
80c7a9eb 3331
224e770b
RH
3332 found:
3333 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3334 Any instructions emitted here are about to be deleted. */
3335 do_pending_stack_adjust ();
3336
3337 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3338 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3339 EH or abnormal edges, we shouldn't have created a tail call in
3340 the first place. So it seems to me we should just be removing
3341 all edges here, or redirecting the existing fallthru edge to
3342 the exit block. */
3343
224e770b
RH
3344 probability = 0;
3345 count = 0;
224e770b 3346
628f6a4e
BE
3347 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3348 {
224e770b
RH
3349 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3350 {
3351 if (e->dest != EXIT_BLOCK_PTR)
80c7a9eb 3352 {
224e770b
RH
3353 e->dest->count -= e->count;
3354 e->dest->frequency -= EDGE_FREQUENCY (e);
3355 if (e->dest->count < 0)
c22cacf3 3356 e->dest->count = 0;
224e770b 3357 if (e->dest->frequency < 0)
c22cacf3 3358 e->dest->frequency = 0;
80c7a9eb 3359 }
224e770b
RH
3360 count += e->count;
3361 probability += e->probability;
3362 remove_edge (e);
80c7a9eb 3363 }
628f6a4e
BE
3364 else
3365 ei_next (&ei);
80c7a9eb
RH
3366 }
3367
224e770b
RH
3368 /* This is somewhat ugly: the call_expr expander often emits instructions
3369 after the sibcall (to perform the function return). These confuse the
12eff7b7 3370 find_many_sub_basic_blocks code, so we need to get rid of these. */
224e770b 3371 last = NEXT_INSN (last);
341c100f 3372 gcc_assert (BARRIER_P (last));
cea49550
RH
3373
3374 *can_fallthru = false;
224e770b
RH
3375 while (NEXT_INSN (last))
3376 {
 3377	 /* For instance, an sqrt builtin expander expands an `if` with a
 3378	    sibcall in the `then` arm and a label for the `else` arm. */
3379 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
3380 {
3381 *can_fallthru = true;
3382 break;
3383 }
224e770b
RH
3384 delete_insn (NEXT_INSN (last));
3385 }
3386
3387 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
3388 e->probability += probability;
3389 e->count += count;
3390 BB_END (bb) = last;
3391 update_bb_for_insn (bb);
3392
3393 if (NEXT_INSN (last))
3394 {
3395 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3396
3397 last = BB_END (bb);
3398 if (BARRIER_P (last))
3399 BB_END (bb) = PREV_INSN (last);
3400 }
3401
726a989a 3402 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 3403
224e770b 3404 return bb;
80c7a9eb
RH
3405}
3406
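/* Editor's note -- a hedged sketch, not part of the original source.  For a
   GIMPLE statement such as "return foo (x);" that was marked as a tail
   call, the scan above looks for a call_insn with SIBLING_CALL_P set,
   followed by a barrier.  The non-EH, non-abnormal successor edges of BB
   are then removed and replaced by a single EDGE_ABNORMAL | EDGE_SIBCALL
   edge to the exit block, and the instructions the call expander emitted
   after the barrier (the normal return sequence) are deleted, unless a
   label is found first -- the conditional-sibcall case, in which
   *CAN_FALLTHRU is set back to true.  */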
b5b8b0ac
AO
3407/* Return the difference between the floor and the truncated result of
3408 a signed division by OP1 with remainder MOD. */
3409static rtx
3410floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3411{
3412 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3413 return gen_rtx_IF_THEN_ELSE
3414 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3415 gen_rtx_IF_THEN_ELSE
3416 (mode, gen_rtx_LT (BImode,
3417 gen_rtx_DIV (mode, op1, mod),
3418 const0_rtx),
3419 constm1_rtx, const0_rtx),
3420 const0_rtx);
3421}
3422
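/* Editor's illustration, not part of the original source: the same
   adjustment computed on host integers, to make the identity
   "floor division = truncating division + adjustment" concrete.  It
   assumes C99 truncating division; the function name is hypothetical.  */

static int
floor_sdiv_adjust_example (int op0, int op1)
{
  int mod = op0 % op1;	/* Remainder of the truncating division.  */
  /* The adjustment is -1 exactly when the remainder is nonzero and OP1
     and MOD have opposite signs, i.e. the exact quotient is negative and
     inexact.  E.g. -7 / 2: trunc = -3, mod = -1, adjustment = -1, and
     indeed floor (-3.5) = -4 = -3 + -1.  */
  return (mod != 0 && op1 / mod < 0) ? -1 : 0;
}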
3423/* Return the difference between the ceil and the truncated result of
3424 a signed division by OP1 with remainder MOD. */
3425static rtx
3426ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3427{
3428 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3429 return gen_rtx_IF_THEN_ELSE
3430 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3431 gen_rtx_IF_THEN_ELSE
3432 (mode, gen_rtx_GT (BImode,
3433 gen_rtx_DIV (mode, op1, mod),
3434 const0_rtx),
3435 const1_rtx, const0_rtx),
3436 const0_rtx);
3437}
3438
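/* Editor's note, not part of the original source: for ceil_sdiv_adjust the
   adjustment is +1 exactly when the remainder is nonzero and OP1 and MOD
   have the same sign.  E.g. 7 / 2: trunc = 3, mod = 1, adjustment = +1,
   matching ceil (3.5) = 4; while -7 / 2: trunc = -3, mod = -1 yields an
   adjustment of 0, matching ceil (-3.5) = -3.  */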
3439/* Return the difference between the ceil and the truncated result of
3440 an unsigned division by OP1 with remainder MOD. */
3441static rtx
3442ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3443{
3444 /* (mod != 0 ? 1 : 0) */
3445 return gen_rtx_IF_THEN_ELSE
3446 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3447 const1_rtx, const0_rtx);
3448}
3449
3450/* Return the difference between the rounded and the truncated result
3451 of a signed division by OP1 with remainder MOD. Halfway cases are
3452 rounded away from zero, rather than to the nearest even number. */
3453static rtx
3454round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3455{
3456 /* (abs (mod) >= abs (op1) - abs (mod)
3457 ? (op1 / mod > 0 ? 1 : -1)
3458 : 0) */
3459 return gen_rtx_IF_THEN_ELSE
3460 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3461 gen_rtx_MINUS (mode,
3462 gen_rtx_ABS (mode, op1),
3463 gen_rtx_ABS (mode, mod))),
3464 gen_rtx_IF_THEN_ELSE
3465 (mode, gen_rtx_GT (BImode,
3466 gen_rtx_DIV (mode, op1, mod),
3467 const0_rtx),
3468 const1_rtx, constm1_rtx),
3469 const0_rtx);
3470}
3471
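/* Editor's note, not part of the original source: round_sdiv_adjust adjusts
   when the remainder is at least half of OP1 in magnitude.  E.g. 7 / 2:
   trunc = 3, mod = 1, and abs (mod) >= abs (op1) - abs (mod) holds
   (1 >= 1), so the adjustment is +1 and the rounded result is 4; for
   7 / 3 the test fails (1 >= 2 is false) and the truncated result 2 is
   kept.  */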
3472/* Return the difference between the rounded and the truncated result
 3473 of an unsigned division by OP1 with remainder MOD. Halfway cases
3474 are rounded away from zero, rather than to the nearest even
3475 number. */
3476static rtx
3477round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3478{
3479 /* (mod >= op1 - mod ? 1 : 0) */
3480 return gen_rtx_IF_THEN_ELSE
3481 (mode, gen_rtx_GE (BImode, mod,
3482 gen_rtx_MINUS (mode, op1, mod)),
3483 const1_rtx, const0_rtx);
3484}
3485
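/* Editor's note, not part of the original source: the unsigned variant is
   the same test without the absolute values.  E.g. 7 / 2: mod = 1 and
   op1 - mod = 1, so 1 >= 1 gives an adjustment of 1 (3.5 rounds up to 4);
   for 7 / 3, mod = 1 and op1 - mod = 2, so there is no adjustment and the
   result stays 2.  */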
dda2da58
AO
3486/* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
3487 any rtl. */
3488
3489static rtx
f61c6f34
JJ
3490convert_debug_memory_address (enum machine_mode mode, rtx x,
3491 addr_space_t as)
dda2da58
AO
3492{
3493 enum machine_mode xmode = GET_MODE (x);
3494
3495#ifndef POINTERS_EXTEND_UNSIGNED
f61c6f34
JJ
3496 gcc_assert (mode == Pmode
3497 || mode == targetm.addr_space.address_mode (as));
dda2da58
AO
3498 gcc_assert (xmode == mode || xmode == VOIDmode);
3499#else
f61c6f34 3500 rtx temp;
f61c6f34 3501
639d4bb8 3502 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
dda2da58
AO
3503
3504 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3505 return x;
3506
69660a70 3507 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
dda2da58
AO
3508 x = simplify_gen_subreg (mode, x, xmode,
3509 subreg_lowpart_offset
3510 (mode, xmode));
3511 else if (POINTERS_EXTEND_UNSIGNED > 0)
3512 x = gen_rtx_ZERO_EXTEND (mode, x);
3513 else if (!POINTERS_EXTEND_UNSIGNED)
3514 x = gen_rtx_SIGN_EXTEND (mode, x);
3515 else
f61c6f34
JJ
3516 {
3517 switch (GET_CODE (x))
3518 {
3519 case SUBREG:
3520 if ((SUBREG_PROMOTED_VAR_P (x)
3521 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3522 || (GET_CODE (SUBREG_REG (x)) == PLUS
3523 && REG_P (XEXP (SUBREG_REG (x), 0))
3524 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3525 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3526 && GET_MODE (SUBREG_REG (x)) == mode)
3527 return SUBREG_REG (x);
3528 break;
3529 case LABEL_REF:
3530 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
3531 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3532 return temp;
3533 case SYMBOL_REF:
3534 temp = shallow_copy_rtx (x);
3535 PUT_MODE (temp, mode);
3536 return temp;
3537 case CONST:
3538 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3539 if (temp)
3540 temp = gen_rtx_CONST (mode, temp);
3541 return temp;
3542 case PLUS:
3543 case MINUS:
3544 if (CONST_INT_P (XEXP (x, 1)))
3545 {
3546 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3547 if (temp)
3548 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3549 }
3550 break;
3551 default:
3552 break;
3553 }
 3554 /* Don't know how to express ptr_extend as an operation in debug info. */
3555 return NULL;
3556 }
dda2da58
AO
3557#endif /* POINTERS_EXTEND_UNSIGNED */
3558
3559 return x;
3560}
3561
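/* Editor's note -- a hedged sketch, not part of the original source.  On a
   target where ptr_mode is SImode but Pmode is DImode and
   POINTERS_EXTEND_UNSIGNED is positive, an SImode address such as
   (reg:SI 100) would come back from the function above as
   (zero_extend:DI (reg:SI 100)); with POINTERS_EXTEND_UNSIGNED defined as
   zero it would be wrapped in sign_extend instead, and on targets where it
   is negative (ptr_extend), a SYMBOL_REF or LABEL_REF is simply re-created
   in the wider mode, since ptr_extend itself cannot be expressed in debug
   info.  */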
12c5ffe5
EB
3562/* Return an RTX equivalent to the value of the parameter DECL. */
3563
3564static rtx
3565expand_debug_parm_decl (tree decl)
3566{
3567 rtx incoming = DECL_INCOMING_RTL (decl);
3568
3569 if (incoming
3570 && GET_MODE (incoming) != BLKmode
3571 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3572 || (MEM_P (incoming)
3573 && REG_P (XEXP (incoming, 0))
3574 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3575 {
3576 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3577
3578#ifdef HAVE_window_save
3579 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3580 If the target machine has an explicit window save instruction, the
3581 actual entry value is the corresponding OUTGOING_REGNO instead. */
3582 if (REG_P (incoming)
3583 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
3584 incoming
3585 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
3586 OUTGOING_REGNO (REGNO (incoming)), 0);
3587 else if (MEM_P (incoming))
3588 {
3589 rtx reg = XEXP (incoming, 0);
3590 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
3591 {
3592 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
3593 incoming = replace_equiv_address_nv (incoming, reg);
3594 }
6cfa417f
JJ
3595 else
3596 incoming = copy_rtx (incoming);
12c5ffe5
EB
3597 }
3598#endif
3599
3600 ENTRY_VALUE_EXP (rtl) = incoming;
3601 return rtl;
3602 }
3603
3604 if (incoming
3605 && GET_MODE (incoming) != BLKmode
3606 && !TREE_ADDRESSABLE (decl)
3607 && MEM_P (incoming)
3608 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3609 || (GET_CODE (XEXP (incoming, 0)) == PLUS
3610 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
3611 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
6cfa417f 3612 return copy_rtx (incoming);
12c5ffe5
EB
3613
3614 return NULL_RTX;
3615}
3616
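/* Editor's note -- a hedged sketch, not part of the original source.  For
   a parameter whose incoming value arrived in a hard register, say
   (reg:DI 5 di), the function above yields an ENTRY_VALUE rtx wrapping
   that register, which lets the debugger recover the value the caller
   passed even if the register is clobbered later; an incoming value that
   lives in a stack slot addressed off virtual_incoming_args_rtx is instead
   returned as a plain copy of that MEM.  */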
3617/* Return an RTX equivalent to the value of the tree expression EXP. */
b5b8b0ac
AO
3618
3619static rtx
3620expand_debug_expr (tree exp)
3621{
3622 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3623 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2ba172e0 3624 enum machine_mode inner_mode = VOIDmode;
b5b8b0ac 3625 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
09e881c9 3626 addr_space_t as;
b5b8b0ac
AO
3627
3628 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3629 {
3630 case tcc_expression:
3631 switch (TREE_CODE (exp))
3632 {
3633 case COND_EXPR:
7ece48b1 3634 case DOT_PROD_EXPR:
0354c0c7
BS
3635 case WIDEN_MULT_PLUS_EXPR:
3636 case WIDEN_MULT_MINUS_EXPR:
0f59b812 3637 case FMA_EXPR:
b5b8b0ac
AO
3638 goto ternary;
3639
3640 case TRUTH_ANDIF_EXPR:
3641 case TRUTH_ORIF_EXPR:
3642 case TRUTH_AND_EXPR:
3643 case TRUTH_OR_EXPR:
3644 case TRUTH_XOR_EXPR:
3645 goto binary;
3646
3647 case TRUTH_NOT_EXPR:
3648 goto unary;
3649
3650 default:
3651 break;
3652 }
3653 break;
3654
3655 ternary:
3656 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
3657 if (!op2)
3658 return NULL_RTX;
3659 /* Fall through. */
3660
3661 binary:
3662 case tcc_binary:
3663 case tcc_comparison:
3664 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3665 if (!op1)
3666 return NULL_RTX;
3667 /* Fall through. */
3668
3669 unary:
3670 case tcc_unary:
2ba172e0 3671 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
3672 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3673 if (!op0)
3674 return NULL_RTX;
3675 break;
3676
3677 case tcc_type:
3678 case tcc_statement:
3679 gcc_unreachable ();
3680
3681 case tcc_constant:
3682 case tcc_exceptional:
3683 case tcc_declaration:
3684 case tcc_reference:
3685 case tcc_vl_exp:
3686 break;
3687 }
3688
3689 switch (TREE_CODE (exp))
3690 {
3691 case STRING_CST:
3692 if (!lookup_constant_def (exp))
3693 {
e1b243a8
JJ
3694 if (strlen (TREE_STRING_POINTER (exp)) + 1
3695 != (size_t) TREE_STRING_LENGTH (exp))
3696 return NULL_RTX;
b5b8b0ac
AO
3697 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
3698 op0 = gen_rtx_MEM (BLKmode, op0);
3699 set_mem_attributes (op0, exp, 0);
3700 return op0;
3701 }
3702 /* Fall through... */
3703
3704 case INTEGER_CST:
3705 case REAL_CST:
3706 case FIXED_CST:
3707 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
3708 return op0;
3709
3710 case COMPLEX_CST:
3711 gcc_assert (COMPLEX_MODE_P (mode));
3712 op0 = expand_debug_expr (TREE_REALPART (exp));
b5b8b0ac 3713 op1 = expand_debug_expr (TREE_IMAGPART (exp));
b5b8b0ac
AO
3714 return gen_rtx_CONCAT (mode, op0, op1);
3715
0ca5af51
AO
3716 case DEBUG_EXPR_DECL:
3717 op0 = DECL_RTL_IF_SET (exp);
3718
3719 if (op0)
3720 return op0;
3721
3722 op0 = gen_rtx_DEBUG_EXPR (mode);
e4fb38bd 3723 DEBUG_EXPR_TREE_DECL (op0) = exp;
0ca5af51
AO
3724 SET_DECL_RTL (exp, op0);
3725
3726 return op0;
3727
b5b8b0ac
AO
3728 case VAR_DECL:
3729 case PARM_DECL:
3730 case FUNCTION_DECL:
3731 case LABEL_DECL:
3732 case CONST_DECL:
3733 case RESULT_DECL:
3734 op0 = DECL_RTL_IF_SET (exp);
3735
3736 /* This decl was probably optimized away. */
3737 if (!op0)
e1b243a8
JJ
3738 {
3739 if (TREE_CODE (exp) != VAR_DECL
3740 || DECL_EXTERNAL (exp)
3741 || !TREE_STATIC (exp)
3742 || !DECL_NAME (exp)
0fba566c 3743 || DECL_HARD_REGISTER (exp)
7d5fc814 3744 || DECL_IN_CONSTANT_POOL (exp)
0fba566c 3745 || mode == VOIDmode)
e1b243a8
JJ
3746 return NULL;
3747
b1aa0655 3748 op0 = make_decl_rtl_for_debug (exp);
e1b243a8
JJ
3749 if (!MEM_P (op0)
3750 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
3751 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
3752 return NULL;
3753 }
3754 else
3755 op0 = copy_rtx (op0);
b5b8b0ac 3756
06796564
JJ
3757 if (GET_MODE (op0) == BLKmode
3758 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
3759 below would ICE. While it is likely a FE bug,
3760 try to be robust here. See PR43166. */
132b4e82
JJ
3761 || mode == BLKmode
3762 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
b5b8b0ac
AO
3763 {
3764 gcc_assert (MEM_P (op0));
3765 op0 = adjust_address_nv (op0, mode, 0);
3766 return op0;
3767 }
3768
3769 /* Fall through. */
3770
3771 adjust_mode:
3772 case PAREN_EXPR:
3773 case NOP_EXPR:
3774 case CONVERT_EXPR:
3775 {
2ba172e0 3776 inner_mode = GET_MODE (op0);
b5b8b0ac
AO
3777
3778 if (mode == inner_mode)
3779 return op0;
3780
3781 if (inner_mode == VOIDmode)
3782 {
2a8e30fb
MM
3783 if (TREE_CODE (exp) == SSA_NAME)
3784 inner_mode = TYPE_MODE (TREE_TYPE (exp));
3785 else
3786 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
3787 if (mode == inner_mode)
3788 return op0;
3789 }
3790
3791 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3792 {
3793 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3794 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3795 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3796 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3797 else
3798 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3799 }
3800 else if (FLOAT_MODE_P (mode))
3801 {
2a8e30fb 3802 gcc_assert (TREE_CODE (exp) != SSA_NAME);
b5b8b0ac
AO
3803 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3804 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
3805 else
3806 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
3807 }
3808 else if (FLOAT_MODE_P (inner_mode))
3809 {
3810 if (unsignedp)
3811 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3812 else
3813 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3814 }
3815 else if (CONSTANT_P (op0)
69660a70 3816 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
b5b8b0ac
AO
3817 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3818 subreg_lowpart_offset (mode,
3819 inner_mode));
1b47fe3f
JJ
3820 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
3821 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
3822 : unsignedp)
2ba172e0 3823 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
b5b8b0ac 3824 else
2ba172e0 3825 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
b5b8b0ac
AO
3826
3827 return op0;
3828 }
3829
70f34814 3830 case MEM_REF:
71f3a3f5
JJ
3831 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
3832 {
3833 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
3834 TREE_OPERAND (exp, 0),
3835 TREE_OPERAND (exp, 1));
3836 if (newexp)
3837 return expand_debug_expr (newexp);
3838 }
3839 /* FALLTHROUGH */
b5b8b0ac 3840 case INDIRECT_REF:
0a81f074 3841 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
3842 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3843 if (!op0)
3844 return NULL;
3845
cb115041
JJ
3846 if (TREE_CODE (exp) == MEM_REF)
3847 {
583ac69c
JJ
3848 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
3849 || (GET_CODE (op0) == PLUS
3850 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
3851 /* (mem (debug_implicit_ptr)) might confuse aliasing.
3852 Instead just use get_inner_reference. */
3853 goto component_ref;
3854
cb115041
JJ
3855 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3856 if (!op1 || !CONST_INT_P (op1))
3857 return NULL;
3858
0a81f074 3859 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
cb115041
JJ
3860 }
3861
09e881c9 3862 if (POINTER_TYPE_P (TREE_TYPE (exp)))
75421dcd 3863 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
09e881c9 3864 else
75421dcd 3865 as = ADDR_SPACE_GENERIC;
b5b8b0ac 3866
f61c6f34
JJ
3867 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
3868 op0, as);
3869 if (op0 == NULL_RTX)
3870 return NULL;
b5b8b0ac 3871
f61c6f34 3872 op0 = gen_rtx_MEM (mode, op0);
b5b8b0ac 3873 set_mem_attributes (op0, exp, 0);
71f3a3f5
JJ
3874 if (TREE_CODE (exp) == MEM_REF
3875 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
3876 set_mem_expr (op0, NULL_TREE);
09e881c9 3877 set_mem_addr_space (op0, as);
b5b8b0ac
AO
3878
3879 return op0;
3880
3881 case TARGET_MEM_REF:
4d948885
RG
3882 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
3883 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
b5b8b0ac
AO
3884 return NULL;
3885
3886 op0 = expand_debug_expr
4e25ca6b 3887 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
b5b8b0ac
AO
3888 if (!op0)
3889 return NULL;
3890
f61c6f34
JJ
3891 if (POINTER_TYPE_P (TREE_TYPE (exp)))
3892 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
3893 else
3894 as = ADDR_SPACE_GENERIC;
3895
3896 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
3897 op0, as);
3898 if (op0 == NULL_RTX)
3899 return NULL;
b5b8b0ac
AO
3900
3901 op0 = gen_rtx_MEM (mode, op0);
3902
3903 set_mem_attributes (op0, exp, 0);
09e881c9 3904 set_mem_addr_space (op0, as);
b5b8b0ac
AO
3905
3906 return op0;
3907
583ac69c 3908 component_ref:
b5b8b0ac
AO
3909 case ARRAY_REF:
3910 case ARRAY_RANGE_REF:
3911 case COMPONENT_REF:
3912 case BIT_FIELD_REF:
3913 case REALPART_EXPR:
3914 case IMAGPART_EXPR:
3915 case VIEW_CONVERT_EXPR:
3916 {
3917 enum machine_mode mode1;
3918 HOST_WIDE_INT bitsize, bitpos;
3919 tree offset;
3920 int volatilep = 0;
3921 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3922 &mode1, &unsignedp, &volatilep, false);
3923 rtx orig_op0;
3924
4f2a9af8
JJ
3925 if (bitsize == 0)
3926 return NULL;
3927
b5b8b0ac
AO
3928 orig_op0 = op0 = expand_debug_expr (tem);
3929
3930 if (!op0)
3931 return NULL;
3932
3933 if (offset)
3934 {
dda2da58
AO
3935 enum machine_mode addrmode, offmode;
3936
aa847cc8
JJ
3937 if (!MEM_P (op0))
3938 return NULL;
b5b8b0ac 3939
dda2da58
AO
3940 op0 = XEXP (op0, 0);
3941 addrmode = GET_MODE (op0);
3942 if (addrmode == VOIDmode)
3943 addrmode = Pmode;
3944
b5b8b0ac
AO
3945 op1 = expand_debug_expr (offset);
3946 if (!op1)
3947 return NULL;
3948
dda2da58
AO
3949 offmode = GET_MODE (op1);
3950 if (offmode == VOIDmode)
3951 offmode = TYPE_MODE (TREE_TYPE (offset));
3952
3953 if (addrmode != offmode)
3954 op1 = simplify_gen_subreg (addrmode, op1, offmode,
3955 subreg_lowpart_offset (addrmode,
3956 offmode));
3957
 3958 /* Don't use offset_address here; we don't need a
3959 recognizable address, and we don't want to generate
3960 code. */
2ba172e0
JJ
3961 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
3962 op0, op1));
b5b8b0ac
AO
3963 }
3964
3965 if (MEM_P (op0))
3966 {
4f2a9af8
JJ
3967 if (mode1 == VOIDmode)
3968 /* Bitfield. */
3969 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
b5b8b0ac
AO
3970 if (bitpos >= BITS_PER_UNIT)
3971 {
3972 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
3973 bitpos %= BITS_PER_UNIT;
3974 }
3975 else if (bitpos < 0)
3976 {
4f2a9af8
JJ
3977 HOST_WIDE_INT units
3978 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
b5b8b0ac
AO
3979 op0 = adjust_address_nv (op0, mode1, units);
3980 bitpos += units * BITS_PER_UNIT;
3981 }
3982 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
3983 op0 = adjust_address_nv (op0, mode, 0);
3984 else if (GET_MODE (op0) != mode1)
3985 op0 = adjust_address_nv (op0, mode1, 0);
3986 else
3987 op0 = copy_rtx (op0);
3988 if (op0 == orig_op0)
3989 op0 = shallow_copy_rtx (op0);
3990 set_mem_attributes (op0, exp, 0);
3991 }
3992
3993 if (bitpos == 0 && mode == GET_MODE (op0))
3994 return op0;
3995
2d3fc6aa
JJ
3996 if (bitpos < 0)
3997 return NULL;
3998
88c04a5d
JJ
3999 if (GET_MODE (op0) == BLKmode)
4000 return NULL;
4001
b5b8b0ac
AO
4002 if ((bitpos % BITS_PER_UNIT) == 0
4003 && bitsize == GET_MODE_BITSIZE (mode1))
4004 {
4005 enum machine_mode opmode = GET_MODE (op0);
4006
b5b8b0ac 4007 if (opmode == VOIDmode)
9712cba0 4008 opmode = TYPE_MODE (TREE_TYPE (tem));
b5b8b0ac
AO
4009
4010 /* This condition may hold if we're expanding the address
4011 right past the end of an array that turned out not to
4012 be addressable (i.e., the address was only computed in
4013 debug stmts). The gen_subreg below would rightfully
4014 crash, and the address doesn't really exist, so just
4015 drop it. */
4016 if (bitpos >= GET_MODE_BITSIZE (opmode))
4017 return NULL;
4018
7d5d39bb
JJ
4019 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4020 return simplify_gen_subreg (mode, op0, opmode,
4021 bitpos / BITS_PER_UNIT);
b5b8b0ac
AO
4022 }
4023
4024 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4025 && TYPE_UNSIGNED (TREE_TYPE (exp))
4026 ? SIGN_EXTRACT
4027 : ZERO_EXTRACT, mode,
4028 GET_MODE (op0) != VOIDmode
9712cba0
JJ
4029 ? GET_MODE (op0)
4030 : TYPE_MODE (TREE_TYPE (tem)),
b5b8b0ac
AO
4031 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4032 }
4033
b5b8b0ac 4034 case ABS_EXPR:
2ba172e0 4035 return simplify_gen_unary (ABS, mode, op0, mode);
b5b8b0ac
AO
4036
4037 case NEGATE_EXPR:
2ba172e0 4038 return simplify_gen_unary (NEG, mode, op0, mode);
b5b8b0ac
AO
4039
4040 case BIT_NOT_EXPR:
2ba172e0 4041 return simplify_gen_unary (NOT, mode, op0, mode);
b5b8b0ac
AO
4042
4043 case FLOAT_EXPR:
2ba172e0
JJ
4044 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4045 0)))
4046 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4047 inner_mode);
b5b8b0ac
AO
4048
4049 case FIX_TRUNC_EXPR:
2ba172e0
JJ
4050 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4051 inner_mode);
b5b8b0ac
AO
4052
4053 case POINTER_PLUS_EXPR:
576319a7
DD
4054 /* For the rare target where pointers are not the same size as
4055 size_t, we need to check for mis-matched modes and correct
4056 the addend. */
4057 if (op0 && op1
4058 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4059 && GET_MODE (op0) != GET_MODE (op1))
4060 {
8369f38a
DD
4061 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4062 /* If OP0 is a partial mode, then we must truncate, even if it has
4063 the same bitsize as OP1 as GCC's representation of partial modes
4064 is opaque. */
4065 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4066 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
2ba172e0
JJ
4067 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4068 GET_MODE (op1));
576319a7
DD
4069 else
4070 /* We always sign-extend, regardless of the signedness of
4071 the operand, because the operand is always unsigned
4072 here even if the original C expression is signed. */
2ba172e0
JJ
4073 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4074 GET_MODE (op1));
576319a7
DD
4075 }
4076 /* Fall through. */
b5b8b0ac 4077 case PLUS_EXPR:
2ba172e0 4078 return simplify_gen_binary (PLUS, mode, op0, op1);
b5b8b0ac
AO
4079
4080 case MINUS_EXPR:
2ba172e0 4081 return simplify_gen_binary (MINUS, mode, op0, op1);
b5b8b0ac
AO
4082
4083 case MULT_EXPR:
2ba172e0 4084 return simplify_gen_binary (MULT, mode, op0, op1);
b5b8b0ac
AO
4085
4086 case RDIV_EXPR:
4087 case TRUNC_DIV_EXPR:
4088 case EXACT_DIV_EXPR:
4089 if (unsignedp)
2ba172e0 4090 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac 4091 else
2ba172e0 4092 return simplify_gen_binary (DIV, mode, op0, op1);
b5b8b0ac
AO
4093
4094 case TRUNC_MOD_EXPR:
2ba172e0 4095 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
b5b8b0ac
AO
4096
4097 case FLOOR_DIV_EXPR:
4098 if (unsignedp)
2ba172e0 4099 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac
AO
4100 else
4101 {
2ba172e0
JJ
4102 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4103 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4104 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0 4105 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4106 }
4107
4108 case FLOOR_MOD_EXPR:
4109 if (unsignedp)
2ba172e0 4110 return simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac
AO
4111 else
4112 {
2ba172e0 4113 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4114 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4115 adj = simplify_gen_unary (NEG, mode,
4116 simplify_gen_binary (MULT, mode, adj, op1),
4117 mode);
4118 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4119 }
4120
4121 case CEIL_DIV_EXPR:
4122 if (unsignedp)
4123 {
2ba172e0
JJ
4124 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4125 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4126 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0 4127 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4128 }
4129 else
4130 {
2ba172e0
JJ
4131 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4132 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4133 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0 4134 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4135 }
4136
4137 case CEIL_MOD_EXPR:
4138 if (unsignedp)
4139 {
2ba172e0 4140 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4141 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
4142 adj = simplify_gen_unary (NEG, mode,
4143 simplify_gen_binary (MULT, mode, adj, op1),
4144 mode);
4145 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4146 }
4147 else
4148 {
2ba172e0 4149 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4150 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4151 adj = simplify_gen_unary (NEG, mode,
4152 simplify_gen_binary (MULT, mode, adj, op1),
4153 mode);
4154 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4155 }
4156
4157 case ROUND_DIV_EXPR:
4158 if (unsignedp)
4159 {
2ba172e0
JJ
4160 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4161 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4162 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0 4163 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4164 }
4165 else
4166 {
2ba172e0
JJ
4167 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4168 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4169 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0 4170 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4171 }
4172
4173 case ROUND_MOD_EXPR:
4174 if (unsignedp)
4175 {
2ba172e0 4176 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4177 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
4178 adj = simplify_gen_unary (NEG, mode,
4179 simplify_gen_binary (MULT, mode, adj, op1),
4180 mode);
4181 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4182 }
4183 else
4184 {
2ba172e0 4185 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4186 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4187 adj = simplify_gen_unary (NEG, mode,
4188 simplify_gen_binary (MULT, mode, adj, op1),
4189 mode);
4190 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4191 }
4192
4193 case LSHIFT_EXPR:
2ba172e0 4194 return simplify_gen_binary (ASHIFT, mode, op0, op1);
b5b8b0ac
AO
4195
4196 case RSHIFT_EXPR:
4197 if (unsignedp)
2ba172e0 4198 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
b5b8b0ac 4199 else
2ba172e0 4200 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
b5b8b0ac
AO
4201
4202 case LROTATE_EXPR:
2ba172e0 4203 return simplify_gen_binary (ROTATE, mode, op0, op1);
b5b8b0ac
AO
4204
4205 case RROTATE_EXPR:
2ba172e0 4206 return simplify_gen_binary (ROTATERT, mode, op0, op1);
b5b8b0ac
AO
4207
4208 case MIN_EXPR:
2ba172e0 4209 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
b5b8b0ac
AO
4210
4211 case MAX_EXPR:
2ba172e0 4212 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
b5b8b0ac
AO
4213
4214 case BIT_AND_EXPR:
4215 case TRUTH_AND_EXPR:
2ba172e0 4216 return simplify_gen_binary (AND, mode, op0, op1);
b5b8b0ac
AO
4217
4218 case BIT_IOR_EXPR:
4219 case TRUTH_OR_EXPR:
2ba172e0 4220 return simplify_gen_binary (IOR, mode, op0, op1);
b5b8b0ac
AO
4221
4222 case BIT_XOR_EXPR:
4223 case TRUTH_XOR_EXPR:
2ba172e0 4224 return simplify_gen_binary (XOR, mode, op0, op1);
b5b8b0ac
AO
4225
4226 case TRUTH_ANDIF_EXPR:
4227 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4228
4229 case TRUTH_ORIF_EXPR:
4230 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4231
4232 case TRUTH_NOT_EXPR:
2ba172e0 4233 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
b5b8b0ac
AO
4234
4235 case LT_EXPR:
2ba172e0
JJ
4236 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4237 op0, op1);
b5b8b0ac
AO
4238
4239 case LE_EXPR:
2ba172e0
JJ
4240 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4241 op0, op1);
b5b8b0ac
AO
4242
4243 case GT_EXPR:
2ba172e0
JJ
4244 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4245 op0, op1);
b5b8b0ac
AO
4246
4247 case GE_EXPR:
2ba172e0
JJ
4248 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4249 op0, op1);
b5b8b0ac
AO
4250
4251 case EQ_EXPR:
2ba172e0 4252 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4253
4254 case NE_EXPR:
2ba172e0 4255 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4256
4257 case UNORDERED_EXPR:
2ba172e0 4258 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4259
4260 case ORDERED_EXPR:
2ba172e0 4261 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4262
4263 case UNLT_EXPR:
2ba172e0 4264 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4265
4266 case UNLE_EXPR:
2ba172e0 4267 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4268
4269 case UNGT_EXPR:
2ba172e0 4270 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4271
4272 case UNGE_EXPR:
2ba172e0 4273 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4274
4275 case UNEQ_EXPR:
2ba172e0 4276 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4277
4278 case LTGT_EXPR:
2ba172e0 4279 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4280
4281 case COND_EXPR:
4282 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4283
4284 case COMPLEX_EXPR:
4285 gcc_assert (COMPLEX_MODE_P (mode));
4286 if (GET_MODE (op0) == VOIDmode)
4287 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4288 if (GET_MODE (op1) == VOIDmode)
4289 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4290 return gen_rtx_CONCAT (mode, op0, op1);
4291
d02a5a4b
JJ
4292 case CONJ_EXPR:
4293 if (GET_CODE (op0) == CONCAT)
4294 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2ba172e0
JJ
4295 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4296 XEXP (op0, 1),
4297 GET_MODE_INNER (mode)));
d02a5a4b
JJ
4298 else
4299 {
4300 enum machine_mode imode = GET_MODE_INNER (mode);
4301 rtx re, im;
4302
4303 if (MEM_P (op0))
4304 {
4305 re = adjust_address_nv (op0, imode, 0);
4306 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4307 }
4308 else
4309 {
4310 enum machine_mode ifmode = int_mode_for_mode (mode);
4311 enum machine_mode ihmode = int_mode_for_mode (imode);
4312 rtx halfsize;
4313 if (ifmode == BLKmode || ihmode == BLKmode)
4314 return NULL;
4315 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4316 re = op0;
4317 if (mode != ifmode)
4318 re = gen_rtx_SUBREG (ifmode, re, 0);
4319 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4320 if (imode != ihmode)
4321 re = gen_rtx_SUBREG (imode, re, 0);
4322 im = copy_rtx (op0);
4323 if (mode != ifmode)
4324 im = gen_rtx_SUBREG (ifmode, im, 0);
4325 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4326 if (imode != ihmode)
4327 im = gen_rtx_SUBREG (imode, im, 0);
4328 }
4329 im = gen_rtx_NEG (imode, im);
4330 return gen_rtx_CONCAT (mode, re, im);
4331 }
4332
b5b8b0ac
AO
4333 case ADDR_EXPR:
4334 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4335 if (!op0 || !MEM_P (op0))
c8a27c40
JJ
4336 {
4337 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4338 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4339 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
f8cca67b
JJ
4340 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4341 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
c8a27c40
JJ
4342 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4343
4344 if (handled_component_p (TREE_OPERAND (exp, 0)))
4345 {
4346 HOST_WIDE_INT bitoffset, bitsize, maxsize;
4347 tree decl
4348 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
4349 &bitoffset, &bitsize, &maxsize);
4350 if ((TREE_CODE (decl) == VAR_DECL
4351 || TREE_CODE (decl) == PARM_DECL
4352 || TREE_CODE (decl) == RESULT_DECL)
f8cca67b
JJ
4353 && (!TREE_ADDRESSABLE (decl)
4354 || target_for_debug_bind (decl))
c8a27c40
JJ
4355 && (bitoffset % BITS_PER_UNIT) == 0
4356 && bitsize > 0
4357 && bitsize == maxsize)
0a81f074
RS
4358 {
4359 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4360 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4361 }
c8a27c40
JJ
4362 }
4363
9430b7ba
JJ
4364 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4365 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4366 == ADDR_EXPR)
4367 {
4368 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4369 0));
4370 if (op0 != NULL
4371 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4372 || (GET_CODE (op0) == PLUS
4373 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4374 && CONST_INT_P (XEXP (op0, 1)))))
4375 {
4376 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4377 1));
4378 if (!op1 || !CONST_INT_P (op1))
4379 return NULL;
4380
4381 return plus_constant (mode, op0, INTVAL (op1));
4382 }
4383 }
4384
c8a27c40
JJ
4385 return NULL;
4386 }
b5b8b0ac 4387
f61c6f34
JJ
4388 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
4389 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
dda2da58
AO
4390
4391 return op0;
b5b8b0ac
AO
4392
4393 case VECTOR_CST:
d2a12ae7
RG
4394 {
4395 unsigned i;
4396
4397 op0 = gen_rtx_CONCATN
4398 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4399
4400 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4401 {
4402 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4403 if (!op1)
4404 return NULL;
4405 XVECEXP (op0, 0, i) = op1;
4406 }
4407
4408 return op0;
4409 }
b5b8b0ac
AO
4410
4411 case CONSTRUCTOR:
47598145
MM
4412 if (TREE_CLOBBER_P (exp))
4413 return NULL;
4414 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
b5b8b0ac
AO
4415 {
4416 unsigned i;
4417 tree val;
4418
4419 op0 = gen_rtx_CONCATN
4420 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4421
4422 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4423 {
4424 op1 = expand_debug_expr (val);
4425 if (!op1)
4426 return NULL;
4427 XVECEXP (op0, 0, i) = op1;
4428 }
4429
4430 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4431 {
4432 op1 = expand_debug_expr
e8160c9a 4433 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
b5b8b0ac
AO
4434
4435 if (!op1)
4436 return NULL;
4437
4438 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4439 XVECEXP (op0, 0, i) = op1;
4440 }
4441
4442 return op0;
4443 }
4444 else
4445 goto flag_unsupported;
4446
4447 case CALL_EXPR:
4448 /* ??? Maybe handle some builtins? */
4449 return NULL;
4450
4451 case SSA_NAME:
4452 {
2a8e30fb
MM
4453 gimple g = get_gimple_for_ssa_name (exp);
4454 if (g)
4455 {
4456 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
4457 if (!op0)
4458 return NULL;
4459 }
4460 else
4461 {
4462 int part = var_to_partition (SA.map, exp);
b5b8b0ac 4463
2a8e30fb 4464 if (part == NO_PARTITION)
a58a8e4b
JJ
4465 {
 4466		 /* If this is a reference to the incoming value of a
 4467		    parameter that is never used in the code, or whose
 4468		    incoming value is never used in the code, use the
 4469		    PARM_DECL's DECL_RTL if set. */
4470 if (SSA_NAME_IS_DEFAULT_DEF (exp)
4471 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
4472 {
12c5ffe5
EB
4473 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4474 if (op0)
4475 goto adjust_mode;
a58a8e4b 4476 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
12c5ffe5
EB
4477 if (op0)
4478 goto adjust_mode;
a58a8e4b
JJ
4479 }
4480 return NULL;
4481 }
b5b8b0ac 4482
2a8e30fb 4483 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
b5b8b0ac 4484
abfea58d 4485 op0 = copy_rtx (SA.partition_to_pseudo[part]);
2a8e30fb 4486 }
b5b8b0ac
AO
4487 goto adjust_mode;
4488 }
4489
4490 case ERROR_MARK:
4491 return NULL;
4492
7ece48b1
JJ
 4493 /* Vector stuff. For most of these tree codes there are no corresponding rtl codes. */
4494 case REALIGN_LOAD_EXPR:
4495 case REDUC_MAX_EXPR:
4496 case REDUC_MIN_EXPR:
4497 case REDUC_PLUS_EXPR:
4498 case VEC_COND_EXPR:
7ece48b1
JJ
4499 case VEC_LSHIFT_EXPR:
4500 case VEC_PACK_FIX_TRUNC_EXPR:
4501 case VEC_PACK_SAT_EXPR:
4502 case VEC_PACK_TRUNC_EXPR:
4503 case VEC_RSHIFT_EXPR:
4504 case VEC_UNPACK_FLOAT_HI_EXPR:
4505 case VEC_UNPACK_FLOAT_LO_EXPR:
4506 case VEC_UNPACK_HI_EXPR:
4507 case VEC_UNPACK_LO_EXPR:
4508 case VEC_WIDEN_MULT_HI_EXPR:
4509 case VEC_WIDEN_MULT_LO_EXPR:
3f30a9a6
RH
4510 case VEC_WIDEN_MULT_EVEN_EXPR:
4511 case VEC_WIDEN_MULT_ODD_EXPR:
36ba4aae
IR
4512 case VEC_WIDEN_LSHIFT_HI_EXPR:
4513 case VEC_WIDEN_LSHIFT_LO_EXPR:
3f3af9df 4514 case VEC_PERM_EXPR:
7ece48b1
JJ
4515 return NULL;
4516
98449720 4517 /* Misc codes. */
7ece48b1
JJ
4518 case ADDR_SPACE_CONVERT_EXPR:
4519 case FIXED_CONVERT_EXPR:
4520 case OBJ_TYPE_REF:
4521 case WITH_SIZE_EXPR:
4522 return NULL;
4523
4524 case DOT_PROD_EXPR:
4525 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4526 && SCALAR_INT_MODE_P (mode))
4527 {
2ba172e0
JJ
4528 op0
4529 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4530 0)))
4531 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4532 inner_mode);
4533 op1
4534 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4535 1)))
4536 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
4537 inner_mode);
4538 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4539 return simplify_gen_binary (PLUS, mode, op0, op2);
7ece48b1
JJ
4540 }
4541 return NULL;
4542
4543 case WIDEN_MULT_EXPR:
0354c0c7
BS
4544 case WIDEN_MULT_PLUS_EXPR:
4545 case WIDEN_MULT_MINUS_EXPR:
7ece48b1
JJ
4546 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4547 && SCALAR_INT_MODE_P (mode))
4548 {
2ba172e0 4549 inner_mode = GET_MODE (op0);
7ece48b1 4550 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5b58b39b 4551 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
7ece48b1 4552 else
5b58b39b 4553 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
7ece48b1 4554 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5b58b39b 4555 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
7ece48b1 4556 else
5b58b39b 4557 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
2ba172e0 4558 op0 = simplify_gen_binary (MULT, mode, op0, op1);
0354c0c7
BS
4559 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
4560 return op0;
4561 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
2ba172e0 4562 return simplify_gen_binary (PLUS, mode, op0, op2);
0354c0c7 4563 else
2ba172e0 4564 return simplify_gen_binary (MINUS, mode, op2, op0);
7ece48b1
JJ
4565 }
4566 return NULL;
4567
98449720
RH
4568 case MULT_HIGHPART_EXPR:
4569 /* ??? Similar to the above. */
4570 return NULL;
4571
7ece48b1 4572 case WIDEN_SUM_EXPR:
3f3af9df 4573 case WIDEN_LSHIFT_EXPR:
7ece48b1
JJ
4574 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4575 && SCALAR_INT_MODE_P (mode))
4576 {
2ba172e0
JJ
4577 op0
4578 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4579 0)))
4580 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4581 inner_mode);
3f3af9df
JJ
4582 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
4583 ? ASHIFT : PLUS, mode, op0, op1);
7ece48b1
JJ
4584 }
4585 return NULL;
4586
0f59b812 4587 case FMA_EXPR:
2ba172e0 4588 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
0f59b812 4589
b5b8b0ac
AO
4590 default:
4591 flag_unsupported:
4592#ifdef ENABLE_CHECKING
4593 debug_tree (exp);
4594 gcc_unreachable ();
4595#else
4596 return NULL;
4597#endif
4598 }
4599}
4600
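/* Editor's note -- a hedged sketch, not part of the original source.
   Unlike expand_expr, the function above never emits instructions; it only
   builds an rtl expression describing the value, or returns NULL when no
   such expression can be formed.  For example, a debug bind of x => a_1 + 7
   where a_1 ended up in pseudo 60 would typically come back as
   (plus:SI (reg:SI 60) (const_int 7)), ready to be stored into a
   DEBUG_INSN's VAR_LOCATION; the pseudo number and mode here are made up
   for illustration.  */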
ddb555ed
JJ
4601/* Return an RTX equivalent to the source bind value of the tree expression
4602 EXP. */
4603
4604static rtx
4605expand_debug_source_expr (tree exp)
4606{
4607 rtx op0 = NULL_RTX;
4608 enum machine_mode mode = VOIDmode, inner_mode;
4609
4610 switch (TREE_CODE (exp))
4611 {
4612 case PARM_DECL:
4613 {
ddb555ed 4614 mode = DECL_MODE (exp);
12c5ffe5
EB
4615 op0 = expand_debug_parm_decl (exp);
4616 if (op0)
4617 break;
ddb555ed
JJ
 4618	 /* Check whether this is an argument that has been completely
 4619	    optimized out. */
4620 if (!DECL_RTL_SET_P (exp)
12c5ffe5 4621 && !DECL_INCOMING_RTL (exp)
ddb555ed
JJ
4622 && DECL_ABSTRACT_ORIGIN (current_function_decl))
4623 {
7b575cfa 4624 tree aexp = DECL_ORIGIN (exp);
ddb555ed
JJ
4625 if (DECL_CONTEXT (aexp)
4626 == DECL_ABSTRACT_ORIGIN (current_function_decl))
4627 {
9771b263 4628 vec<tree, va_gc> **debug_args;
ddb555ed
JJ
4629 unsigned int ix;
4630 tree ddecl;
ddb555ed
JJ
4631 debug_args = decl_debug_args_lookup (current_function_decl);
4632 if (debug_args != NULL)
4633 {
9771b263 4634 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
ddb555ed
JJ
4635 ix += 2)
4636 if (ddecl == aexp)
4637 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
4638 }
4639 }
4640 }
4641 break;
4642 }
4643 default:
4644 break;
4645 }
4646
4647 if (op0 == NULL_RTX)
4648 return NULL_RTX;
4649
4650 inner_mode = GET_MODE (op0);
4651 if (mode == inner_mode)
4652 return op0;
4653
4654 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4655 {
4656 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4657 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4658 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4659 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4660 else
4661 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4662 }
4663 else if (FLOAT_MODE_P (mode))
4664 gcc_unreachable ();
4665 else if (FLOAT_MODE_P (inner_mode))
4666 {
4667 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4668 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4669 else
4670 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4671 }
4672 else if (CONSTANT_P (op0)
4673 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
4674 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4675 subreg_lowpart_offset (mode, inner_mode));
4676 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4677 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4678 else
4679 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4680
4681 return op0;
4682}
4683
6cfa417f
JJ
 4684/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
4685 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
4686 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
4687
4688static void
4689avoid_complex_debug_insns (rtx insn, rtx *exp_p, int depth)
4690{
4691 rtx exp = *exp_p;
4692
4693 if (exp == NULL_RTX)
4694 return;
4695
4696 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
4697 return;
4698
4699 if (depth == 4)
4700 {
4701 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
4702 rtx dval = make_debug_expr_from_rtl (exp);
4703
4704 /* Emit a debug bind insn before INSN. */
4705 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
4706 DEBUG_EXPR_TREE_DECL (dval), exp,
4707 VAR_INIT_STATUS_INITIALIZED);
4708
4709 emit_debug_insn_before (bind, insn);
4710 *exp_p = dval;
4711 return;
4712 }
4713
4714 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
4715 int i, j;
4716 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
4717 switch (*format_ptr++)
4718 {
4719 case 'e':
4720 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
4721 break;
4722
4723 case 'E':
4724 case 'V':
4725 for (j = 0; j < XVECLEN (exp, i); j++)
4726 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
4727 break;
4728
4729 default:
4730 break;
4731 }
4732}
4733
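/* Editor's note -- a hedged example, not part of the original source.  A
   location such as
     (plus (mult (plus (mult (plus (reg) (reg)) (reg)) (reg)) (reg)) (reg))
   nests five operations deep, so the walk above reaches the innermost PLUS
   at depth 4, emits a VAR_LOCATION debug insn binding it to a fresh
   DEBUG_EXPR just before INSN, and substitutes that DEBUG_EXPR into the
   enclosing MULT, so that no remaining location nests more than four
   operations deep.  */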
b5b8b0ac
AO
4734/* Expand the _LOCs in debug insns. We run this after expanding all
4735 regular insns, so that any variables referenced in the function
4736 will have their DECL_RTLs set. */
4737
4738static void
4739expand_debug_locations (void)
4740{
4741 rtx insn;
4742 rtx last = get_last_insn ();
4743 int save_strict_alias = flag_strict_aliasing;
4744
4745 /* New alias sets while setting up memory attributes cause
 4746 -fcompare-debug failures, even though they don't bring about any
4747 codegen changes. */
4748 flag_strict_aliasing = 0;
4749
4750 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4751 if (DEBUG_INSN_P (insn))
4752 {
4753 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
6cfa417f 4754 rtx val, prev_insn, insn2;
b5b8b0ac
AO
4755 enum machine_mode mode;
4756
4757 if (value == NULL_TREE)
4758 val = NULL_RTX;
4759 else
4760 {
ddb555ed
JJ
4761 if (INSN_VAR_LOCATION_STATUS (insn)
4762 == VAR_INIT_STATUS_UNINITIALIZED)
4763 val = expand_debug_source_expr (value);
4764 else
4765 val = expand_debug_expr (value);
b5b8b0ac
AO
4766 gcc_assert (last == get_last_insn ());
4767 }
4768
4769 if (!val)
4770 val = gen_rtx_UNKNOWN_VAR_LOC ();
4771 else
4772 {
4773 mode = GET_MODE (INSN_VAR_LOCATION (insn));
4774
4775 gcc_assert (mode == GET_MODE (val)
4776 || (GET_MODE (val) == VOIDmode
33ffb5c5 4777 && (CONST_SCALAR_INT_P (val)
b5b8b0ac 4778 || GET_CODE (val) == CONST_FIXED
b5b8b0ac
AO
4779 || GET_CODE (val) == LABEL_REF)));
4780 }
4781
4782 INSN_VAR_LOCATION_LOC (insn) = val;
6cfa417f
JJ
4783 prev_insn = PREV_INSN (insn);
4784 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
4785 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
b5b8b0ac
AO
4786 }
4787
4788 flag_strict_aliasing = save_strict_alias;
4789}
4790
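/* Editor's note -- a hedged reading, not part of the original source.  A
   debug bind whose status is VAR_INIT_STATUS_UNINITIALIZED is expanded
   through expand_debug_source_expr (entry values of otherwise optimized-away
   parameters); every other bind goes through expand_debug_expr, and any
   value that cannot be represented is downgraded to UNKNOWN_VAR_LOC so the
   variable is reported as unavailable rather than wrongly described.  */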
242229bb
JH
4791/* Expand basic block BB from GIMPLE trees to RTL. */
4792
4793static basic_block
f3ddd692 4794expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
242229bb 4795{
726a989a
RB
4796 gimple_stmt_iterator gsi;
4797 gimple_seq stmts;
4798 gimple stmt = NULL;
242229bb
JH
4799 rtx note, last;
4800 edge e;
628f6a4e 4801 edge_iterator ei;
8b11009b 4802 void **elt;
242229bb
JH
4803
4804 if (dump_file)
726a989a
RB
4805 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
4806 bb->index);
4807
4808 /* Note that since we are now transitioning from GIMPLE to RTL, we
4809 cannot use the gsi_*_bb() routines because they expect the basic
4810 block to be in GIMPLE, instead of RTL. Therefore, we need to
4811 access the BB sequence directly. */
4812 stmts = bb_seq (bb);
3e8b732e
MM
4813 bb->il.gimple.seq = NULL;
4814 bb->il.gimple.phi_nodes = NULL;
bf08ebeb 4815 rtl_profile_for_bb (bb);
5e2d947c
JH
4816 init_rtl_bb_info (bb);
4817 bb->flags |= BB_RTL;
4818
a9b77cd1
ZD
 4819 /* Remove the RETURN_EXPR if we may fall through to the exit
4820 instead. */
726a989a
RB
4821 gsi = gsi_last (stmts);
4822 if (!gsi_end_p (gsi)
4823 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
a9b77cd1 4824 {
726a989a 4825 gimple ret_stmt = gsi_stmt (gsi);
a9b77cd1
ZD
4826
4827 gcc_assert (single_succ_p (bb));
4828 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
4829
4830 if (bb->next_bb == EXIT_BLOCK_PTR
726a989a 4831 && !gimple_return_retval (ret_stmt))
a9b77cd1 4832 {
726a989a 4833 gsi_remove (&gsi, false);
a9b77cd1
ZD
4834 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
4835 }
4836 }
4837
726a989a
RB
4838 gsi = gsi_start (stmts);
4839 if (!gsi_end_p (gsi))
8b11009b 4840 {
726a989a
RB
4841 stmt = gsi_stmt (gsi);
4842 if (gimple_code (stmt) != GIMPLE_LABEL)
4843 stmt = NULL;
8b11009b 4844 }
242229bb 4845
8b11009b
ZD
4846 elt = pointer_map_contains (lab_rtx_for_bb, bb);
4847
4848 if (stmt || elt)
242229bb
JH
4849 {
4850 last = get_last_insn ();
4851
8b11009b
ZD
4852 if (stmt)
4853 {
28ed065e 4854 expand_gimple_stmt (stmt);
726a989a 4855 gsi_next (&gsi);
8b11009b
ZD
4856 }
4857
4858 if (elt)
ae50c0cb 4859 emit_label ((rtx) *elt);
242229bb 4860
caf93cb0 4861 /* Java emits line number notes at the top of labels.
c22cacf3 4862 ??? Make this go away once line number notes are obsoleted. */
242229bb 4863 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 4864 if (NOTE_P (BB_HEAD (bb)))
242229bb 4865 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
242229bb 4866 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528 4867
726a989a 4868 maybe_dump_rtl_for_gimple_stmt (stmt, last);
242229bb
JH
4869 }
4870 else
4871 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
4872
4873 NOTE_BASIC_BLOCK (note) = bb;
4874
726a989a 4875 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 4876 {
cea49550 4877 basic_block new_bb;
242229bb 4878
b5b8b0ac 4879 stmt = gsi_stmt (gsi);
2a8e30fb
MM
4880
4881 /* If this statement is a non-debug one, and we generate debug
4882 insns, then this one might be the last real use of a TERed
4883 SSA_NAME, but where there are still some debug uses further
4884 down. Expanding the current SSA name in such further debug
4885 uses by their RHS might lead to wrong debug info, as coalescing
4886 might make the operands of such RHS be placed into the same
4887 pseudo as something else. Like so:
4888 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
4889 use(a_1);
4890 a_2 = ...
4891 #DEBUG ... => a_1
4892 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
 4893	 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
4894 the write to a_2 would actually have clobbered the place which
4895 formerly held a_0.
4896
4897 So, instead of that, we recognize the situation, and generate
4898 debug temporaries at the last real use of TERed SSA names:
4899 a_1 = a_0 + 1;
4900 #DEBUG #D1 => a_1
4901 use(a_1);
4902 a_2 = ...
4903 #DEBUG ... => #D1
4904 */
4905 if (MAY_HAVE_DEBUG_INSNS
4906 && SA.values
4907 && !is_gimple_debug (stmt))
4908 {
4909 ssa_op_iter iter;
4910 tree op;
4911 gimple def;
4912
5368224f 4913 location_t sloc = curr_insn_location ();
2a8e30fb
MM
4914
4915 /* Look for SSA names that have their last use here (TERed
4916 names always have only one real use). */
4917 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4918 if ((def = get_gimple_for_ssa_name (op)))
4919 {
4920 imm_use_iterator imm_iter;
4921 use_operand_p use_p;
4922 bool have_debug_uses = false;
4923
4924 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
4925 {
4926 if (gimple_debug_bind_p (USE_STMT (use_p)))
4927 {
4928 have_debug_uses = true;
4929 break;
4930 }
4931 }
4932
4933 if (have_debug_uses)
4934 {
 4935		 /* OP is a TERed SSA name, with DEF its defining
4936 statement, and where OP is used in further debug
4937 instructions. Generate a debug temporary, and
4938 replace all uses of OP in debug insns with that
4939 temporary. */
4940 gimple debugstmt;
4941 tree value = gimple_assign_rhs_to_tree (def);
4942 tree vexpr = make_node (DEBUG_EXPR_DECL);
4943 rtx val;
4944 enum machine_mode mode;
4945
5368224f 4946 set_curr_insn_location (gimple_location (def));
2a8e30fb
MM
4947
4948 DECL_ARTIFICIAL (vexpr) = 1;
4949 TREE_TYPE (vexpr) = TREE_TYPE (value);
4950 if (DECL_P (value))
4951 mode = DECL_MODE (value);
4952 else
4953 mode = TYPE_MODE (TREE_TYPE (value));
4954 DECL_MODE (vexpr) = mode;
4955
4956 val = gen_rtx_VAR_LOCATION
4957 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
4958
e8c6bb74 4959 emit_debug_insn (val);
2a8e30fb
MM
4960
4961 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
4962 {
4963 if (!gimple_debug_bind_p (debugstmt))
4964 continue;
4965
4966 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
4967 SET_USE (use_p, vexpr);
4968
4969 update_stmt (debugstmt);
4970 }
4971 }
4972 }
5368224f 4973 set_curr_insn_location (sloc);
2a8e30fb
MM
4974 }
4975
a5883ba0 4976 currently_expanding_gimple_stmt = stmt;
b5b8b0ac 4977
242229bb
JH
4978 /* Expand this statement, then evaluate the resulting RTL and
4979 fixup the CFG accordingly. */
726a989a 4980 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 4981 {
726a989a 4982 new_bb = expand_gimple_cond (bb, stmt);
cea49550
RH
4983 if (new_bb)
4984 return new_bb;
4985 }
b5b8b0ac
AO
4986 else if (gimple_debug_bind_p (stmt))
4987 {
5368224f 4988 location_t sloc = curr_insn_location ();
b5b8b0ac
AO
4989 gimple_stmt_iterator nsi = gsi;
4990
4991 for (;;)
4992 {
4993 tree var = gimple_debug_bind_get_var (stmt);
4994 tree value;
4995 rtx val;
4996 enum machine_mode mode;
4997
ec8c1492
JJ
4998 if (TREE_CODE (var) != DEBUG_EXPR_DECL
4999 && TREE_CODE (var) != LABEL_DECL
5000 && !target_for_debug_bind (var))
5001 goto delink_debug_stmt;
5002
b5b8b0ac
AO
5003 if (gimple_debug_bind_has_value_p (stmt))
5004 value = gimple_debug_bind_get_value (stmt);
5005 else
5006 value = NULL_TREE;
5007
5008 last = get_last_insn ();
5009
5368224f 5010 set_curr_insn_location (gimple_location (stmt));
b5b8b0ac
AO
5011
5012 if (DECL_P (var))
5013 mode = DECL_MODE (var);
5014 else
5015 mode = TYPE_MODE (TREE_TYPE (var));
5016
5017 val = gen_rtx_VAR_LOCATION
5018 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5019
e16b6fd0 5020 emit_debug_insn (val);
b5b8b0ac
AO
5021
5022 if (dump_file && (dump_flags & TDF_DETAILS))
5023 {
5024 /* We can't dump the insn with a TREE where an RTX
5025 is expected. */
e8c6bb74 5026 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
b5b8b0ac 5027 maybe_dump_rtl_for_gimple_stmt (stmt, last);
e8c6bb74 5028 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
b5b8b0ac
AO
5029 }
5030
ec8c1492 5031 delink_debug_stmt:
2a8e30fb
MM
5032 /* In order not to generate too many debug temporaries,
5033 we delink all uses of debug statements we already expanded.
5034 Therefore debug statements between definition and real
5035 use of TERed SSA names will continue to use the SSA name,
5036 and not be replaced with debug temps. */
5037 delink_stmt_imm_use (stmt);
5038
b5b8b0ac
AO
5039 gsi = nsi;
5040 gsi_next (&nsi);
5041 if (gsi_end_p (nsi))
5042 break;
5043 stmt = gsi_stmt (nsi);
5044 if (!gimple_debug_bind_p (stmt))
5045 break;
5046 }
5047
5368224f 5048 set_curr_insn_location (sloc);
ddb555ed
JJ
5049 }
5050 else if (gimple_debug_source_bind_p (stmt))
5051 {
5368224f 5052 location_t sloc = curr_insn_location ();
ddb555ed
JJ
5053 tree var = gimple_debug_source_bind_get_var (stmt);
5054 tree value = gimple_debug_source_bind_get_value (stmt);
5055 rtx val;
5056 enum machine_mode mode;
5057
5058 last = get_last_insn ();
5059
5368224f 5060 set_curr_insn_location (gimple_location (stmt));
ddb555ed
JJ
5061
5062 mode = DECL_MODE (var);
5063
5064 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5065 VAR_INIT_STATUS_UNINITIALIZED);
5066
5067 emit_debug_insn (val);
5068
5069 if (dump_file && (dump_flags & TDF_DETAILS))
5070 {
5071 /* We can't dump the insn with a TREE where an RTX
5072 is expected. */
5073 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5074 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5075 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5076 }
5077
5368224f 5078 set_curr_insn_location (sloc);
b5b8b0ac 5079 }
80c7a9eb 5080 else
242229bb 5081 {
f3ddd692
JJ
5082 if (is_gimple_call (stmt)
5083 && gimple_call_tail_p (stmt)
5084 && disable_tail_calls)
5085 gimple_call_set_tail (stmt, false);
5086
726a989a 5087 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
cea49550
RH
5088 {
5089 bool can_fallthru;
5090 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
5091 if (new_bb)
5092 {
5093 if (can_fallthru)
5094 bb = new_bb;
5095 else
5096 return new_bb;
5097 }
5098 }
4d7a65ea 5099 else
b7211528 5100 {
4e3825db 5101 def_operand_p def_p;
4e3825db
MM
5102 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5103
5104 if (def_p != NULL)
5105 {
5106 /* Ignore this stmt if it is in the list of
5107 replaceable expressions. */
5108 if (SA.values
b8698a0f 5109 && bitmap_bit_p (SA.values,
e97809c6 5110 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4e3825db
MM
5111 continue;
5112 }
28ed065e 5113 last = expand_gimple_stmt (stmt);
726a989a 5114 maybe_dump_rtl_for_gimple_stmt (stmt, last);
b7211528 5115 }
242229bb
JH
5116 }
5117 }
5118
a5883ba0
MM
5119 currently_expanding_gimple_stmt = NULL;
5120
7241571e 5121 /* Expand implicit goto and convert goto_locus. */
a9b77cd1
ZD
5122 FOR_EACH_EDGE (e, ei, bb->succs)
5123 {
2f13f2de 5124 if (e->goto_locus != UNKNOWN_LOCATION)
5368224f 5125 set_curr_insn_location (e->goto_locus);
7241571e
JJ
5126 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5127 {
5128 emit_jump (label_rtx_for_bb (e->dest));
5129 e->flags &= ~EDGE_FALLTHRU;
5130 }
a9b77cd1
ZD
5131 }
5132
ae761c45
AH
 5133 /* Expanded RTL can create a jump in the last instruction of a block.
 5134 This might later be assumed to be a jump to a successor and break edge insertion.
 5135 We need to insert a dummy move to prevent this. PR41440. */
5136 if (single_succ_p (bb)
5137 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5138 && (last = get_last_insn ())
5139 && JUMP_P (last))
5140 {
5141 rtx dummy = gen_reg_rtx (SImode);
5142 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5143 }
5144
242229bb
JH
5145 do_pending_stack_adjust ();
5146
3f117656 5147 /* Find the block tail. The last insn in the block is the insn
242229bb
JH
5148 before a barrier and/or table jump insn. */
5149 last = get_last_insn ();
4b4bf941 5150 if (BARRIER_P (last))
242229bb
JH
5151 last = PREV_INSN (last);
5152 if (JUMP_TABLE_DATA_P (last))
5153 last = PREV_INSN (PREV_INSN (last));
5154 BB_END (bb) = last;
caf93cb0 5155
242229bb 5156 update_bb_for_insn (bb);
80c7a9eb 5157
242229bb
JH
5158 return bb;
5159}
5160
5161
5162/* Create a basic block for initialization code. */
5163
5164static basic_block
5165construct_init_block (void)
5166{
5167 basic_block init_block, first_block;
fd44f634
JH
5168 edge e = NULL;
5169 int flags;
275a4187 5170
fd44f634
JH
5171 /* Multiple entry points not supported yet. */
5172 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
5e2d947c
JH
5173 init_rtl_bb_info (ENTRY_BLOCK_PTR);
5174 init_rtl_bb_info (EXIT_BLOCK_PTR);
5175 ENTRY_BLOCK_PTR->flags |= BB_RTL;
5176 EXIT_BLOCK_PTR->flags |= BB_RTL;
242229bb 5177
fd44f634 5178 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
275a4187 5179
fd44f634
JH
 5180 /* When the entry edge points to the first basic block, we don't need a jump;
 5181 otherwise we have to jump to the proper target. */
5182 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
5183 {
726a989a 5184 tree label = gimple_block_label (e->dest);
fd44f634
JH
5185
5186 emit_jump (label_rtx (label));
5187 flags = 0;
275a4187 5188 }
fd44f634
JH
5189 else
5190 flags = EDGE_FALLTHRU;
242229bb
JH
5191
5192 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5193 get_last_insn (),
5194 ENTRY_BLOCK_PTR);
5195 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
5196 init_block->count = ENTRY_BLOCK_PTR->count;
7d776ee2
RG
5197 if (current_loops && ENTRY_BLOCK_PTR->loop_father)
5198 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
242229bb
JH
5199 if (e)
5200 {
5201 first_block = e->dest;
5202 redirect_edge_succ (e, init_block);
fd44f634 5203 e = make_edge (init_block, first_block, flags);
242229bb
JH
5204 }
5205 else
5206 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
5207 e->probability = REG_BR_PROB_BASE;
5208 e->count = ENTRY_BLOCK_PTR->count;
5209
5210 update_bb_for_insn (init_block);
5211 return init_block;
5212}
5213
55e092c4
JH
5214/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5215 found in the block tree. */
5216
5217static void
5218set_block_levels (tree block, int level)
5219{
5220 while (block)
5221 {
5222 BLOCK_NUMBER (block) = level;
5223 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5224 block = BLOCK_CHAIN (block);
5225 }
5226}
242229bb
JH
5227
5228/* Create a block containing landing pads and similar stuff. */
5229
5230static void
5231construct_exit_block (void)
5232{
5233 rtx head = get_last_insn ();
5234 rtx end;
5235 basic_block exit_block;
628f6a4e
BE
5236 edge e, e2;
5237 unsigned ix;
5238 edge_iterator ei;
071a42f9 5239 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
242229bb 5240
bf08ebeb
JH
5241 rtl_profile_for_bb (EXIT_BLOCK_PTR);
5242
caf93cb0 5243 /* Make sure the locus is set to the end of the function, so that
242229bb 5244 epilogue line numbers and warnings are set properly. */
2f13f2de 5245 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
242229bb
JH
5246 input_location = cfun->function_end_locus;
5247
242229bb
JH
5248 /* Generate rtl for function exit. */
5249 expand_function_end ();
5250
5251 end = get_last_insn ();
5252 if (head == end)
5253 return;
071a42f9
JH
 5254 /* While emitting the function end we could have moved the end of the last
 5255 basic block. */
5256 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4b4bf941 5257 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 5258 head = NEXT_INSN (head);
80c7a9eb
RH
5259 exit_block = create_basic_block (NEXT_INSN (head), end,
5260 EXIT_BLOCK_PTR->prev_bb);
242229bb
JH
5261 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
5262 exit_block->count = EXIT_BLOCK_PTR->count;
7d776ee2
RG
5263 if (current_loops && EXIT_BLOCK_PTR->loop_father)
5264 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
628f6a4e
BE
5265
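  /* Redirect every non-abnormal predecessor edge of the exit block to the
     newly created exit_block; abnormal edges are left in place.  */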
5266 ix = 0;
5267 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
242229bb 5268 {
8fb790fd 5269 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
242229bb 5270 if (!(e->flags & EDGE_ABNORMAL))
628f6a4e
BE
5271 redirect_edge_succ (e, exit_block);
5272 else
5273 ix++;
242229bb 5274 }
628f6a4e 5275
242229bb
JH
5276 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
5277 e->probability = REG_BR_PROB_BASE;
5278 e->count = EXIT_BLOCK_PTR->count;
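  /* The counts of the predecessors that were not redirected above do not
     flow through the new fallthru edge; subtract them here and clamp the
     results at zero below.  */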
628f6a4e 5279 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
242229bb
JH
5280 if (e2 != e)
5281 {
c22cacf3 5282 e->count -= e2->count;
242229bb
JH
5283 exit_block->count -= e2->count;
5284 exit_block->frequency -= EDGE_FREQUENCY (e2);
5285 }
5286 if (e->count < 0)
5287 e->count = 0;
5288 if (exit_block->count < 0)
5289 exit_block->count = 0;
5290 if (exit_block->frequency < 0)
5291 exit_block->frequency = 0;
5292 update_bb_for_insn (exit_block);
5293}
5294
c22cacf3 5295/* Helper function for discover_nonconstant_array_refs.
a1b23b2f
UW
5296 Look for ARRAY_REF nodes with non-constant indexes and mark them
5297 addressable. */
5298
5299static tree
5300discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5301 void *data ATTRIBUTE_UNUSED)
5302{
5303 tree t = *tp;
5304
5305 if (IS_TYPE_OR_DECL_P (t))
5306 *walk_subtrees = 0;
5307 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5308 {
5309 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5310 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5311 && (!TREE_OPERAND (t, 2)
5312 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5313 || (TREE_CODE (t) == COMPONENT_REF
5314 && (!TREE_OPERAND (t,2)
5315 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5316 || TREE_CODE (t) == BIT_FIELD_REF
5317 || TREE_CODE (t) == REALPART_EXPR
5318 || TREE_CODE (t) == IMAGPART_EXPR
5319 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1043771b 5320 || CONVERT_EXPR_P (t))
a1b23b2f
UW
5321 t = TREE_OPERAND (t, 0);
5322
5323 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5324 {
5325 t = get_base_address (t);
6f11d690
RG
5326 if (t && DECL_P (t)
5327 && DECL_MODE (t) != BLKmode)
a1b23b2f
UW
5328 TREE_ADDRESSABLE (t) = 1;
5329 }
5330
5331 *walk_subtrees = 0;
5332 }
5333
5334 return NULL_TREE;
5335}
5336
5337/* RTL expansion is not able to compile array references with variable
 5338 offsets for arrays stored in a single register. Discover such
5339 expressions and mark variables as addressable to avoid this
5340 scenario. */
5341
5342static void
5343discover_nonconstant_array_refs (void)
5344{
5345 basic_block bb;
726a989a 5346 gimple_stmt_iterator gsi;
a1b23b2f
UW
5347
5348 FOR_EACH_BB (bb)
726a989a
RB
5349 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5350 {
5351 gimple stmt = gsi_stmt (gsi);
aa847cc8
JJ
5352 if (!is_gimple_debug (stmt))
5353 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
726a989a 5354 }
a1b23b2f
UW
5355}
5356
2e3f842f
L
5357/* This function sets crtl->args.internal_arg_pointer to a virtual
5358 register if DRAP is needed. Local register allocator will replace
5359 virtual_incoming_args_rtx with the virtual register. */
5360
5361static void
5362expand_stack_alignment (void)
5363{
5364 rtx drap_rtx;
e939805b 5365 unsigned int preferred_stack_boundary;
2e3f842f
L
5366
5367 if (! SUPPORTS_STACK_ALIGNMENT)
5368 return;
b8698a0f 5369
2e3f842f
L
5370 if (cfun->calls_alloca
5371 || cfun->has_nonlocal_label
5372 || crtl->has_nonlocal_goto)
5373 crtl->need_drap = true;
5374
890b9b96
L
5375 /* Call update_stack_boundary here again to update incoming stack
5376 boundary. It may set incoming stack alignment to a different
5377 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
5378 use the minimum incoming stack alignment to check if it is OK
5379 to perform sibcall optimization since sibcall optimization will
5380 only align the outgoing stack to incoming stack boundary. */
5381 if (targetm.calls.update_stack_boundary)
5382 targetm.calls.update_stack_boundary ();
5383
5384 /* The incoming stack frame has to be aligned at least at
5385 parm_stack_boundary. */
5386 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
2e3f842f 5387
2e3f842f
L
5388 /* Update crtl->stack_alignment_estimated and use it later to align
5389 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
5390 exceptions since callgraph doesn't collect incoming stack alignment
5391 in this case. */
8f4f502f 5392 if (cfun->can_throw_non_call_exceptions
2e3f842f
L
5393 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
5394 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5395 else
5396 preferred_stack_boundary = crtl->preferred_stack_boundary;
5397 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
5398 crtl->stack_alignment_estimated = preferred_stack_boundary;
5399 if (preferred_stack_boundary > crtl->stack_alignment_needed)
5400 crtl->stack_alignment_needed = preferred_stack_boundary;
5401
890b9b96
L
5402 gcc_assert (crtl->stack_alignment_needed
5403 <= crtl->stack_alignment_estimated);
5404
2e3f842f 5405 crtl->stack_realign_needed
e939805b 5406 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
d2d93c32 5407 crtl->stack_realign_tried = crtl->stack_realign_needed;
2e3f842f
L
5408
5409 crtl->stack_realign_processed = true;
5410
5411 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
5412 alignment. */
5413 gcc_assert (targetm.calls.get_drap_rtx != NULL);
b8698a0f 5414 drap_rtx = targetm.calls.get_drap_rtx ();
2e3f842f 5415
d015f7cc
L
5416 /* stack_realign_drap and drap_rtx must match. */
5417 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
5418
2e3f842f
L
5419 /* Do nothing if NULL is returned, which means DRAP is not needed. */
5420 if (NULL != drap_rtx)
5421 {
5422 crtl->args.internal_arg_pointer = drap_rtx;
5423
5424 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
5425 needed. */
5426 fixup_tail_calls ();
5427 }
5428}
862d0b35
DN
5429\f
5430
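/* Emit a call to __main, which on targets without init-section support is
   responsible for running global constructors; the #if below restricts the
   call to configurations that actually need it.  */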
5431static void
5432expand_main_function (void)
5433{
5434#if (defined(INVOKE__main) \
5435 || (!defined(HAS_INIT_SECTION) \
5436 && !defined(INIT_SECTION_ASM_OP) \
5437 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
5438 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
5439#endif
5440}
5441\f
5442
5443/* Expand code to initialize the stack_protect_guard. This is invoked at
5444 the beginning of a function to be protected. */
5445
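/* Fallback stubs for targets without a stack_protect_set insn pattern;
   HAVE_stack_protect_set is then 0 and gen_stack_protect_set is never
   actually reached.  */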
5446#ifndef HAVE_stack_protect_set
5447# define HAVE_stack_protect_set 0
5448# define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
5449#endif
5450
5451static void
5452stack_protect_prologue (void)
5453{
5454 tree guard_decl = targetm.stack_protect_guard ();
5455 rtx x, y;
5456
5457 x = expand_normal (crtl->stack_protect_guard);
5458 y = expand_normal (guard_decl);
5459
5460 /* Allow the target to copy from Y to X without leaking Y into a
5461 register. */
5462 if (HAVE_stack_protect_set)
5463 {
5464 rtx insn = gen_stack_protect_set (x, y);
5465 if (insn)
5466 {
5467 emit_insn (insn);
5468 return;
5469 }
5470 }
5471
5472 /* Otherwise do a straight move. */
5473 emit_move_insn (x, y);
5474}
2e3f842f 5475
242229bb
JH
5476/* Translate the intermediate representation contained in the CFG
5477 from GIMPLE trees to RTL.
5478
5479 We do conversion per basic block and preserve/update the tree CFG.
5480 This implies we have to do some magic as the CFG can simultaneously
5481 consist of basic blocks containing RTL and GIMPLE trees. This can
61ada8ae 5482 confuse the CFG hooks, so be careful to not manipulate CFG during
242229bb
JH
5483 the expansion. */
5484
c2924966 5485static unsigned int
726a989a 5486gimple_expand_cfg (void)
242229bb
JH
5487{
5488 basic_block bb, init_block;
5489 sbitmap blocks;
0ef90296
ZD
5490 edge_iterator ei;
5491 edge e;
f3ddd692 5492 rtx var_seq, var_ret_seq;
4e3825db
MM
5493 unsigned i;
5494
f029db69 5495 timevar_push (TV_OUT_OF_SSA);
4e3825db 5496 rewrite_out_of_ssa (&SA);
f029db69 5497 timevar_pop (TV_OUT_OF_SSA);
c302207e 5498 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
242229bb 5499
be147e84
RG
5500 /* Make sure all values used by the optimization passes have sane
5501 defaults. */
5502 reg_renumber = 0;
5503
4586b4ca
SB
5504 /* Some backends want to know that we are expanding to RTL. */
5505 currently_expanding_to_rtl = 1;
cd7d9fd7
RG
5506 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
5507 free_dominance_info (CDI_DOMINATORS);
4586b4ca 5508
bf08ebeb
JH
5509 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
5510
5368224f 5511 insn_locations_init ();
fe8a7779 5512 if (!DECL_IS_BUILTIN (current_function_decl))
1751ecd6
AH
5513 {
5514 /* Eventually, all FEs should explicitly set function_start_locus. */
2f13f2de 5515 if (LOCATION_LOCUS (cfun->function_start_locus) == UNKNOWN_LOCATION)
5368224f 5516 set_curr_insn_location
1751ecd6
AH
5517 (DECL_SOURCE_LOCATION (current_function_decl));
5518 else
5368224f 5519 set_curr_insn_location (cfun->function_start_locus);
1751ecd6 5520 }
9ff70652 5521 else
5368224f
DC
5522 set_curr_insn_location (UNKNOWN_LOCATION);
5523 prologue_location = curr_insn_location ();
55e092c4 5524
2b21299c
JJ
5525#ifdef INSN_SCHEDULING
5526 init_sched_attrs ();
5527#endif
5528
55e092c4
JH
5529 /* Make sure first insn is a note even if we don't want linenums.
5530 This makes sure the first insn will never be deleted.
5531 Also, final expects a note to appear there. */
5532 emit_note (NOTE_INSN_DELETED);
6429e3be 5533
a1b23b2f
UW
5534 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
5535 discover_nonconstant_array_refs ();
5536
e41b2a33 5537 targetm.expand_to_rtl_hook ();
cb91fab0 5538 crtl->stack_alignment_needed = STACK_BOUNDARY;
2e3f842f 5539 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
890b9b96 5540 crtl->stack_alignment_estimated = 0;
cb91fab0
JH
5541 crtl->preferred_stack_boundary = STACK_BOUNDARY;
5542 cfun->cfg->max_jumptable_ents = 0;
5543
ae9fd6b7
JH
 5544 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
 5545 of the function section at expansion time to predict the distance of calls. */
5546 resolve_unique_section (current_function_decl, 0, flag_function_sections);
5547
727a31fa 5548 /* Expand the variables recorded during gimple lowering. */
f029db69 5549 timevar_push (TV_VAR_EXPAND);
3a42502d
RH
5550 start_sequence ();
5551
f3ddd692 5552 var_ret_seq = expand_used_vars ();
3a42502d
RH
5553
5554 var_seq = get_insns ();
5555 end_sequence ();
f029db69 5556 timevar_pop (TV_VAR_EXPAND);
242229bb 5557
7d69de61
RH
5558 /* Honor stack protection warnings. */
5559 if (warn_stack_protect)
5560 {
e3b5732b 5561 if (cfun->calls_alloca)
b8698a0f 5562 warning (OPT_Wstack_protector,
3b123595
SB
5563 "stack protector not protecting local variables: "
5564 "variable length buffer");
cb91fab0 5565 if (has_short_buffer && !crtl->stack_protect_guard)
b8698a0f 5566 warning (OPT_Wstack_protector,
3b123595
SB
5567 "stack protector not protecting function: "
5568 "all local arrays are less than %d bytes long",
7d69de61
RH
5569 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
5570 }
5571
242229bb 5572 /* Set up parameters and prepare for return, for the function. */
b79c5284 5573 expand_function_start (current_function_decl);
242229bb 5574
3a42502d
RH
5575 /* If we emitted any instructions for setting up the variables,
5576 emit them before the FUNCTION_START note. */
5577 if (var_seq)
5578 {
5579 emit_insn_before (var_seq, parm_birth_insn);
5580
5581 /* In expand_function_end we'll insert the alloca save/restore
 5582 before parm_birth_insn. We've just inserted an alloca call.
5583 Adjust the pointer to match. */
5584 parm_birth_insn = var_seq;
5585 }
5586
4e3825db
MM
5587 /* Now that we also have the parameter RTXs, copy them over to our
5588 partitions. */
5589 for (i = 0; i < SA.map->num_partitions; i++)
5590 {
5591 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
5592
5593 if (TREE_CODE (var) != VAR_DECL
5594 && !SA.partition_to_pseudo[i])
5595 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
5596 gcc_assert (SA.partition_to_pseudo[i]);
eb7adebc
MM
5597
5598 /* If this decl was marked as living in multiple places, reset
5599 this now to NULL. */
5600 if (DECL_RTL_IF_SET (var) == pc_rtx)
5601 SET_DECL_RTL (var, NULL);
5602
4e3825db
MM
5603 /* Some RTL parts really want to look at DECL_RTL(x) when x
5604 was a decl marked in REG_ATTR or MEM_ATTR. We could use
5605 SET_DECL_RTL here making this available, but that would mean
5606 to select one of the potentially many RTLs for one DECL. Instead
5607 of doing that we simply reset the MEM_EXPR of the RTL in question,
5608 then nobody can get at it and hence nobody can call DECL_RTL on it. */
5609 if (!DECL_RTL_SET_P (var))
5610 {
5611 if (MEM_P (SA.partition_to_pseudo[i]))
5612 set_mem_expr (SA.partition_to_pseudo[i], NULL);
5613 }
5614 }
5615
d466b407
MM
5616 /* If we have a class containing differently aligned pointers
5617 we need to merge those into the corresponding RTL pointer
5618 alignment. */
5619 for (i = 1; i < num_ssa_names; i++)
5620 {
5621 tree name = ssa_name (i);
5622 int part;
5623 rtx r;
5624
5625 if (!name
d466b407
MM
5626 /* We might have generated new SSA names in
 5627 update_alias_info_with_stack_vars. They will have NULL
 5628 defining statements and won't be part of the partitioning,
 5629 so ignore those. */
5630 || !SSA_NAME_DEF_STMT (name))
5631 continue;
5632 part = var_to_partition (SA.map, name);
5633 if (part == NO_PARTITION)
5634 continue;
70b5e7dc
RG
5635
5636 /* Adjust all partition members to get the underlying decl of
5637 the representative which we might have created in expand_one_var. */
5638 if (SSA_NAME_VAR (name) == NULL_TREE)
5639 {
5640 tree leader = partition_to_var (SA.map, part);
5641 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
5642 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
5643 }
5644 if (!POINTER_TYPE_P (TREE_TYPE (name)))
5645 continue;
5646
d466b407
MM
5647 r = SA.partition_to_pseudo[part];
5648 if (REG_P (r))
5649 mark_reg_pointer (r, get_pointer_alignment (name));
5650 }
5651
242229bb
JH
5652 /* If this function is `main', emit a call to `__main'
5653 to run global initializers, etc. */
5654 if (DECL_NAME (current_function_decl)
5655 && MAIN_NAME_P (DECL_NAME (current_function_decl))
5656 && DECL_FILE_SCOPE_P (current_function_decl))
5657 expand_main_function ();
5658
7d69de61
RH
5659 /* Initialize the stack_protect_guard field. This must happen after the
5660 call to __main (if any) so that the external decl is initialized. */
cb91fab0 5661 if (crtl->stack_protect_guard)
7d69de61
RH
5662 stack_protect_prologue ();
5663
4e3825db
MM
5664 expand_phi_nodes (&SA);
5665
3fbd86b1 5666 /* Register rtl specific functions for cfg. */
242229bb
JH
5667 rtl_register_cfg_hooks ();
5668
5669 init_block = construct_init_block ();
5670
0ef90296 5671 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4e3825db 5672 remaining edges later. */
0ef90296
ZD
5673 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
5674 e->flags &= ~EDGE_EXECUTABLE;
5675
8b11009b 5676 lab_rtx_for_bb = pointer_map_create ();
242229bb 5677 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
f3ddd692 5678 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
bf08ebeb 5679
b5b8b0ac
AO
5680 if (MAY_HAVE_DEBUG_INSNS)
5681 expand_debug_locations ();
5682
452aa9c5
RG
5683 /* Free stuff we no longer need after GIMPLE optimizations. */
5684 free_dominance_info (CDI_DOMINATORS);
5685 free_dominance_info (CDI_POST_DOMINATORS);
5686 delete_tree_cfg_annotations ();
5687
f029db69 5688 timevar_push (TV_OUT_OF_SSA);
4e3825db 5689 finish_out_of_ssa (&SA);
f029db69 5690 timevar_pop (TV_OUT_OF_SSA);
4e3825db 5691
f029db69 5692 timevar_push (TV_POST_EXPAND);
91753e21
RG
5693 /* We are no longer in SSA form. */
5694 cfun->gimple_df->in_ssa_p = false;
7d776ee2
RG
5695 if (current_loops)
5696 loops_state_clear (LOOP_CLOSED_SSA);
91753e21 5697
bf08ebeb
JH
 5698 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
 5699 conservatively to true until they are all profile aware. */
8b11009b 5700 pointer_map_destroy (lab_rtx_for_bb);
cb91fab0 5701 free_histograms ();
242229bb
JH
5702
5703 construct_exit_block ();
5368224f 5704 insn_locations_finalize ();
242229bb 5705
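  /* If expand_used_vars produced a deferred sequence to run on function
     return (var_ret_seq), emit it right after the return label, skipping
     a BASIC_BLOCK note if one immediately follows it.  */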
f3ddd692
JJ
5706 if (var_ret_seq)
5707 {
5708 rtx after = return_label;
5709 rtx next = NEXT_INSN (after);
5710 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
5711 after = next;
5712 emit_insn_after (var_ret_seq, after);
5713 }
5714
1d65f45c 5715 /* Zap the tree EH table. */
e8a2a782 5716 set_eh_throw_stmt_table (cfun, NULL);
242229bb 5717
42821aff
MM
 5718 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
 5719 to split edges, which edge insertions might do. */
242229bb 5720 rebuild_jump_labels (get_insns ());
242229bb 5721
4e3825db
MM
5722 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
5723 {
5724 edge e;
5725 edge_iterator ei;
5726 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5727 {
5728 if (e->insns.r)
bc470c24 5729 {
42821aff 5730 rebuild_jump_labels_chain (e->insns.r);
e40191f1
TV
5731 /* Put insns after parm birth, but before
 5732 NOTE_INSN_FUNCTION_BEG. */
bc470c24 5733 if (e->src == ENTRY_BLOCK_PTR
e40191f1 5734 && single_succ_p (ENTRY_BLOCK_PTR))
bc470c24
JJ
5735 {
5736 rtx insns = e->insns.r;
5737 e->insns.r = NULL_RTX;
e40191f1
TV
5738 if (NOTE_P (parm_birth_insn)
5739 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
5740 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
5741 else
5742 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
bc470c24
JJ
5743 }
5744 else
5745 commit_one_edge_insertion (e);
5746 }
4e3825db
MM
5747 else
5748 ei_next (&ei);
5749 }
5750 }
5751
5752 /* We're done expanding trees to RTL. */
5753 currently_expanding_to_rtl = 0;
5754
5755 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
5756 {
5757 edge e;
5758 edge_iterator ei;
5759 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5760 {
5761 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
5762 e->flags &= ~EDGE_EXECUTABLE;
5763
5764 /* At the moment not all abnormal edges match the RTL
5765 representation. It is safe to remove them here as
5766 find_many_sub_basic_blocks will rediscover them.
5767 In the future we should get this fixed properly. */
5768 if ((e->flags & EDGE_ABNORMAL)
5769 && !(e->flags & EDGE_SIBCALL))
5770 remove_edge (e);
5771 else
5772 ei_next (&ei);
5773 }
5774 }
5775
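  /* Expansion may have emitted control flow in the middle of what used to be
     single basic blocks; re-split all blocks into minimal basic blocks and
     remove edges that have become dead.  */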
242229bb 5776 blocks = sbitmap_alloc (last_basic_block);
f61e445a 5777 bitmap_ones (blocks);
242229bb 5778 find_many_sub_basic_blocks (blocks);
242229bb 5779 sbitmap_free (blocks);
4e3825db 5780 purge_all_dead_edges ();
242229bb 5781
2e3f842f
L
5782 expand_stack_alignment ();
5783
be147e84
RG
5784 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
5785 function. */
5786 if (crtl->tail_call_emit)
5787 fixup_tail_calls ();
5788
dac1fbf8
RG
5789 /* After initial rtl generation, call back to finish generating
5790 exception support code. We need to do this before cleaning up
5791 the CFG as the code does not expect dead landing pads. */
5792 if (cfun->eh->region_tree != NULL)
5793 finish_eh_generation ();
5794
5795 /* Remove unreachable blocks, otherwise we cannot compute dominators
5796 which are needed for loop state verification. As a side-effect
5797 this also compacts blocks.
5798 ??? We cannot remove trivially dead insns here as for example
5799 the DRAP reg on i?86 is not magically live at this point.
5800 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
5801 cleanup_cfg (CLEANUP_NO_INSN_DEL);
5802
242229bb 5803#ifdef ENABLE_CHECKING
62e5bf5d 5804 verify_flow_info ();
242229bb 5805#endif
9f8628ba 5806
be147e84
RG
5807 /* Initialize pseudos allocated for hard registers. */
5808 emit_initial_value_sets ();
5809
5810 /* And finally unshare all RTL. */
5811 unshare_all_rtl ();
5812
9f8628ba
PB
5813 /* There's no need to defer outputting this function any more; we
5814 know we want to output it. */
5815 DECL_DEFER_OUTPUT (current_function_decl) = 0;
5816
5817 /* Now that we're done expanding trees to RTL, we shouldn't have any
5818 more CONCATs anywhere. */
5819 generating_concat_p = 0;
5820
b7211528
SB
5821 if (dump_file)
5822 {
5823 fprintf (dump_file,
5824 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
5825 /* And the pass manager will dump RTL for us. */
5826 }
ef330312
PB
5827
5828 /* If we're emitting a nested function, make sure its parent gets
5829 emitted as well. Doing otherwise confuses debug info. */
c22cacf3 5830 {
ef330312
PB
5831 tree parent;
5832 for (parent = DECL_CONTEXT (current_function_decl);
c22cacf3
MS
5833 parent != NULL_TREE;
5834 parent = get_containing_scope (parent))
ef330312 5835 if (TREE_CODE (parent) == FUNCTION_DECL)
c22cacf3 5836 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
ef330312 5837 }
c22cacf3 5838
ef330312
PB
5839 /* We are now committed to emitting code for this function. Do any
5840 preparation, such as emitting abstract debug info for the inline
5841 before it gets mangled by optimization. */
5842 if (cgraph_function_possibly_inlined_p (current_function_decl))
5843 (*debug_hooks->outlining_inline_function) (current_function_decl);
5844
5845 TREE_ASM_WRITTEN (current_function_decl) = 1;
4bb1e037
AP
5846
5847 /* After expanding, the return labels are no longer needed. */
5848 return_label = NULL;
5849 naked_return_label = NULL;
0a35513e
AH
5850
5851 /* After expanding, the tm_restart map is no longer needed. */
5852 if (cfun->gimple_df->tm_restart)
5853 {
5854 htab_delete (cfun->gimple_df->tm_restart);
5855 cfun->gimple_df->tm_restart = NULL;
5856 }
5857
55e092c4
JH
5858 /* Tag the blocks with a depth number so that change_scope can find
5859 the common parent easily. */
5860 set_block_levels (DECL_INITIAL (cfun->decl), 0);
bf08ebeb 5861 default_rtl_profile ();
be147e84 5862
f029db69 5863 timevar_pop (TV_POST_EXPAND);
be147e84 5864
c2924966 5865 return 0;
242229bb
JH
5866}
5867
27a4cd48
DM
5868namespace {
5869
5870const pass_data pass_data_expand =
242229bb 5871{
27a4cd48
DM
5872 RTL_PASS, /* type */
5873 "expand", /* name */
5874 OPTGROUP_NONE, /* optinfo_flags */
5875 false, /* has_gate */
5876 true, /* has_execute */
5877 TV_EXPAND, /* tv_id */
5878 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6f37411d 5879 | PROP_gimple_lcx
27a4cd48
DM
5880 | PROP_gimple_lvec ), /* properties_required */
5881 PROP_rtl, /* properties_provided */
5882 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
5883 ( TODO_verify_ssa | TODO_verify_flow
5884 | TODO_verify_stmts ), /* todo_flags_start */
5885 0, /* todo_flags_finish */
242229bb 5886};
27a4cd48
DM
5887
5888class pass_expand : public rtl_opt_pass
5889{
5890public:
c3284718
RS
5891 pass_expand (gcc::context *ctxt)
5892 : rtl_opt_pass (pass_data_expand, ctxt)
27a4cd48
DM
5893 {}
5894
5895 /* opt_pass methods: */
5896 unsigned int execute () { return gimple_expand_cfg (); }
5897
5898}; // class pass_expand
5899
5900} // anon namespace
5901
5902rtl_opt_pass *
5903make_pass_expand (gcc::context *ctxt)
5904{
5905 return new pass_expand (ctxt);
5906}