/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "bitmap.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "sbitmap.h"
#include "cfgloop.h"
#include "regs.h" /* For reg_renumber.  */
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "asan.h"
#include "tree-ssa-address.h"

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}


#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name, this is exactly SET_DECL_RTL; otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static struct pointer_map_t *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}

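/* A rough worked example of the arithmetic above (assuming
   FRAME_GROWS_DOWNWARD, frame_phase == 0 and a current frame_offset of
   -4): alloc_stack_frame_space (12, 8) computes
   new_frame_offset = -4 - 12 = -16, rounds that down to a multiple of 8
   (still -16), and returns -16; the new object occupies bytes [-16, -4)
   of the frame and the next allocation starts below it.  */
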
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = pointer_map_create ();

  v = &stack_vars[stack_vars_num];
  * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;

  v->decl = decl;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v =
        (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}

/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  bool (*visit)(gimple, tree, void *);

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = (size_t *)
                      pointer_map_contains (decl_to_stack_part, lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}

/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we for instance moved an address-taken
     operation upward, without also moving a dereference to it upwards.
     But it's conservatively correct as a variable never can hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB (bb)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block);
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK (rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB (bb)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB (bb)
    BITMAP_FREE (bb->aux);
}

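/* Informally: because liveness starts at the first mention of a name and
   ends at the gimplify-inserted clobber, two arrays declared in disjoint
   lexical scopes are never simultaneously live, no conflict is recorded
   between their partitions, and partition_stack_vars below is then free
   to let them share a single stack slot.  */
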
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}

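/* Informally: the resulting order puts "large"-alignment variables first
   and, within each class, bigger objects before smaller ones (a 64-byte
   buffer ahead of an 8-byte one), so partition_stack_vars below always
   considers a big partition representative before trying to fold smaller
   non-conflicting objects into its partition.  */
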

/* If the points-to solution *PI points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          HOST_WIDE_INT jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if ((flag_sanitize & SANITIZE_ADDRESS) && isize != jsize
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}

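/* A small illustration of the binpacking: given three 16-byte arrays a, b
   and c where only a and b are ever simultaneously live, b cannot join a's
   partition (they conflict) but c can, so the partitions end up as {a, c}
   and {b}; a and c later receive the same frame offset and the frame needs
   32 bytes for them instead of 48.  */
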
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}

struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reversed, highest offset pairs come first.  */
  vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  vec<tree> asan_decl_vec;
};

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          if ((flag_sanitize & SANITIZE_ADDRESS) && pred)
            {
              HOST_WIDE_INT prev_offset = frame_offset;
              tree repr_decl = NULL_TREE;

              offset
                = alloc_stack_frame_space (stack_vars[i].size
                                           + ASAN_RED_ZONE_SIZE,
                                           MAX (alignb, ASAN_RED_ZONE_SIZE));
              data->asan_vec.safe_push (prev_offset);
              data->asan_vec.safe_push (offset + stack_vars[i].size);
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);
            }
          else
            offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}

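/* Informally: when expand_stack_vars runs a predicated pass under
   -fsanitize=address, each "small"-alignment partition is padded with
   ASAN_RED_ZONE_SIZE extra bytes and aligned to at least that size; the
   (prev_offset, offset + size) pairs pushed onto asan_vec record where the
   surrounding red zones end and begin, for use by the sanitizer's later
   stack instrumentation.  */
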
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || (flag_sanitize & SANITIZE_ADDRESS))
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0
      && (tree_low_cst (DECL_SIZE_UNIT (var), 1)
          < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)))
    return false;

  return true;
}

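/* Informally: with -fstack-protector or -fsanitize=address every stack
   variable is deferred into the partitioning machinery above; otherwise a
   toplevel variable below -O2, or at -O0 any variable smaller than
   --param min-size-for-stack-sharing, skips it and gets its own slot
   immediately via expand_one_stack_var.  */
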
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.
*/

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that the in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3
};

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY  1
#define SPCT_HAS_SMALL_CHAR_ARRAY  2
#define SPCT_HAS_ARRAY             4
#define SPCT_HAS_AGGREGATE         8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

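/* For example, with the usual --param ssp-buffer-size=8 a plain
   "char buf[64]" classifies as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY,
   "char tiny[4]" as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, and a
   struct containing an int array as SPCT_HAS_AGGREGATE | SPCT_HAS_ARRAY.  */
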
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

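/* Informally: under plain -fstack-protector (SPCT_FLAG_DEFAULT) only
   declarations containing a large character array get phase 1 and
   everything else phase 0, while -fstack-protector-all and
   -fstack-protector-strong additionally push other arrays into phase 2;
   expand_used_vars then allocates phase 1 before phase 2 before the rest,
   grouping the most vulnerable buffers together in the part of the frame
   the guard is meant to protect.  */
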
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}

/* And helper function that checks for asan phase (with stack protector
   it is phase 3).  This is used as callback for expand_stack_vars.
   Returns true if any of the vars in the partition need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
        return true;
      i = stack_vars[i].next;
    }
  return false;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = pointer_map_create ();

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  pointer_map_destroy (decl_to_stack_part);
  decl_to_stack_part = NULL;
}

/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return size;
}

/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
        tree field_type = TREE_TYPE (f);
        if (RECORD_OR_UNION_TYPE_P (field_type)
            && record_or_union_type_has_array_p (field_type))
          return 1;
        if (TREE_CODE (field_type) == ARRAY_TYPE)
          return 1;
      }
  return 0;
}

/* Expand all variables used in the function.  */

static rtx
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  vec<tree> maybe_local_decls = vNULL;
  rtx var_end_seq = NULL_RTX;
  struct pointer_map_t *ssa_name_decls;
  unsigned i;
  unsigned len;
  bool gen_stack_protect_signal = false;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    TREE_USED (var) = 1;
  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  init_vars_expansion ();

  ssa_name_decls = pointer_map_create ();
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (!virtual_operand_p (var));

      /* Assign decls to each SSA name partition, share decls for partitions
         we could have coalesced (those with the same type).  */
      if (SSA_NAME_VAR (var) == NULL_TREE)
        {
          void **slot = pointer_map_insert (ssa_name_decls, TREE_TYPE (var));
          if (!*slot)
            *slot = (void *) create_tmp_reg (TREE_TYPE (var), NULL);
          replace_ssa_name_symbol (var, (tree) *slot);
        }

      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }
  pointer_map_destroy (ssa_name_decls);

  if (flag_stack_protect == SPCT_FLAG_STRONG)
    FOR_EACH_LOCAL_DECL (cfun, i, var)
      if (!is_global_var (var))
        {
          tree var_type = TREE_TYPE (var);
          /* Examine local referenced variables that have their addresses taken,
             contain an array, or are arrays.  */
          if (TREE_CODE (var) == VAR_DECL
              && (TREE_CODE (var_type) == ARRAY_TYPE
                  || TREE_ADDRESSABLE (var)
                  || (RECORD_OR_UNION_TYPE_P (var_type)
                      && record_or_union_type_has_array_p (var_type))))
            {
              gen_stack_protect_signal = true;
              break;
            }
        }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = vec_safe_length (cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            maybe_local_decls.safe_push (var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!vec_safe_is_empty (cfun->local_decls))
    cfun->local_decls->block_remove (0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  switch (flag_stack_protect)
    {
    case SPCT_FLAG_ALL:
      create_stack_guard ();
      break;

    case SPCT_FLAG_STRONG:
      if (gen_stack_protect_signal
          || cfun->calls_alloca || has_protected_decls)
        create_stack_guard ();
      break;

    case SPCT_FLAG_DEFAULT:
      if (cfun->calls_alloca || has_protected_decls)
        create_stack_guard ();
      break;

    default:
      ;
    }

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      struct stack_vars_data data;

      data.asan_vec = vNULL;
      data.asan_decl_vec = vNULL;

      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1, &data);

          /* Phase 2 contains other kinds of arrays.  */
1764 if (flag_stack_protect == 2)
f3ddd692 1765 expand_stack_vars (stack_protect_decl_phase_2, &data);
7d69de61
RH
1766 }
1767
de5a5fa1 1768 if (flag_sanitize & SANITIZE_ADDRESS)
f3ddd692
JJ
1769 /* Phase 3, any partitions that need asan protection
1770 in addition to phase 1 and 2. */
1771 expand_stack_vars (asan_decl_phase_3, &data);
1772
9771b263 1773 if (!data.asan_vec.is_empty ())
f3ddd692
JJ
1774 {
1775 HOST_WIDE_INT prev_offset = frame_offset;
1776 HOST_WIDE_INT offset
1777 = alloc_stack_frame_space (ASAN_RED_ZONE_SIZE,
1778 ASAN_RED_ZONE_SIZE);
9771b263
DN
1779 data.asan_vec.safe_push (prev_offset);
1780 data.asan_vec.safe_push (offset);
f3ddd692
JJ
1781
1782 var_end_seq
1783 = asan_emit_stack_protection (virtual_stack_vars_rtx,
9771b263 1784 data.asan_vec.address (),
c3284718 1785 data.asan_decl_vec.address (),
9771b263 1786 data.asan_vec.length ());
f3ddd692
JJ
1787 }
1788
1789 expand_stack_vars (NULL, &data);
1790
9771b263
DN
1791 data.asan_vec.release ();
1792 data.asan_decl_vec.release ();
1f6d3a08
RH
1793 }
1794
3f9b14ff
SB
1795 fini_vars_expansion ();
1796
6c6366f6
JJ
1797 /* If there were any artificial non-ignored vars without rtl
1798 found earlier, see if deferred stack allocation hasn't assigned
1799 rtl to them. */
9771b263 1800 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
6c6366f6 1801 {
6c6366f6
JJ
1802 rtx rtl = DECL_RTL_IF_SET (var);
1803
6c6366f6
JJ
1804 /* Keep artificial non-ignored vars in cfun->local_decls
1805 chain until instantiate_decls. */
1806 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 1807 add_local_decl (cfun, var);
6c6366f6 1808 }
9771b263 1809 maybe_local_decls.release ();
6c6366f6 1810
1f6d3a08
RH
1811 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1812 if (STACK_ALIGNMENT_NEEDED)
1813 {
1814 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1815 if (!FRAME_GROWS_DOWNWARD)
1816 frame_offset += align - 1;
1817 frame_offset &= -align;
1818 }
f3ddd692
JJ
1819
1820 return var_end_seq;
727a31fa
RH
1821}
1822
1823
b7211528
SB
1824/* If we need to produce a detailed dump, print the tree representation
1825 for STMT to the dump file. SINCE is the last RTX after which the RTL
1826 generated for STMT should have been appended. */
1827
1828static void
726a989a 1829maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
b7211528
SB
1830{
1831 if (dump_file && (dump_flags & TDF_DETAILS))
1832 {
1833 fprintf (dump_file, "\n;; ");
b5b8b0ac
AO
1834 print_gimple_stmt (dump_file, stmt, 0,
1835 TDF_SLIM | (dump_flags & TDF_LINENO));
b7211528
SB
1836 fprintf (dump_file, "\n");
1837
1838 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1839 }
1840}
1841
8b11009b
ZD
1842/* Maps the blocks that do not contain tree labels to rtx labels. */
1843
1844static struct pointer_map_t *lab_rtx_for_bb;
1845
a9b77cd1
ZD
1846/* Returns the label_rtx expression for a label starting basic block BB. */
1847
1848static rtx
726a989a 1849label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
a9b77cd1 1850{
726a989a
RB
1851 gimple_stmt_iterator gsi;
1852 tree lab;
1853 gimple lab_stmt;
8b11009b 1854 void **elt;
a9b77cd1
ZD
1855
1856 if (bb->flags & BB_RTL)
1857 return block_label (bb);
1858
8b11009b
ZD
1859 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1860 if (elt)
ae50c0cb 1861 return (rtx) *elt;
8b11009b
ZD
1862
1863 /* Find the tree label if it is present. */
b8698a0f 1864
726a989a 1865 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
a9b77cd1 1866 {
726a989a
RB
1867 lab_stmt = gsi_stmt (gsi);
1868 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
a9b77cd1
ZD
1869 break;
1870
726a989a 1871 lab = gimple_label_label (lab_stmt);
a9b77cd1
ZD
1872 if (DECL_NONLOCAL (lab))
1873 break;
1874
1875 return label_rtx (lab);
1876 }
1877
8b11009b
ZD
1878 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1879 *elt = gen_label_rtx ();
ae50c0cb 1880 return (rtx) *elt;
a9b77cd1
ZD
1881}
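/* Usage note (sketch of how this is consumed): the expanders further down,
   e.g. expand_gimple_cond, call label_rtx_for_bb (edge->dest) to obtain a
   jump target for a successor block that may not have been expanded yet;
   for such blocks the label is created here and remembered in lab_rtx_for_bb
   so it is available when that block is expanded later. */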
1882
726a989a 1883
529ff441
MM
1884/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1885 of a basic block where we just expanded the conditional at the end,
315adeda
MM
1886 possibly clean up the CFG and instruction sequence. LAST is the
1887 last instruction before the just emitted jump sequence. */
529ff441
MM
1888
1889static void
315adeda 1890maybe_cleanup_end_of_block (edge e, rtx last)
529ff441
MM
1891{
1892 /* Special case: when jumpif decides that the condition is
1893 trivial it emits an unconditional jump (and the necessary
 1894 barrier). But we still have two edges, and the fallthru one is
1895 wrong. purge_dead_edges would clean this up later. Unfortunately
1896 we have to insert insns (and split edges) before
1897 find_many_sub_basic_blocks and hence before purge_dead_edges.
1898 But splitting edges might create new blocks which depend on the
1899 fact that if there are two edges there's no barrier. So the
1900 barrier would get lost and verify_flow_info would ICE. Instead
1901 of auditing all edge splitters to care for the barrier (which
1902 normally isn't there in a cleaned CFG), fix it here. */
1903 if (BARRIER_P (get_last_insn ()))
1904 {
529ff441
MM
1905 rtx insn;
1906 remove_edge (e);
 1907 /* Now we have a single successor block; if we have insns to
 1908 insert on the remaining edge, we will potentially insert
 1909 them at the end of this block (if the dest block isn't feasible)
1910 in order to avoid splitting the edge. This insertion will take
1911 place in front of the last jump. But we might have emitted
1912 multiple jumps (conditional and one unconditional) to the
1913 same destination. Inserting in front of the last one then
1914 is a problem. See PR 40021. We fix this by deleting all
1915 jumps except the last unconditional one. */
1916 insn = PREV_INSN (get_last_insn ());
1917 /* Make sure we have an unconditional jump. Otherwise we're
1918 confused. */
1919 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
315adeda 1920 for (insn = PREV_INSN (insn); insn != last;)
529ff441
MM
1921 {
1922 insn = PREV_INSN (insn);
1923 if (JUMP_P (NEXT_INSN (insn)))
90eb3e33 1924 {
8a269cb7 1925 if (!any_condjump_p (NEXT_INSN (insn)))
90eb3e33
JJ
1926 {
1927 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1928 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1929 }
1930 delete_insn (NEXT_INSN (insn));
1931 }
529ff441
MM
1932 }
1933 }
1934}
1935
726a989a 1936/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
80c7a9eb
RH
1937 Returns a new basic block if we've terminated the current basic
1938 block and created a new one. */
1939
1940static basic_block
726a989a 1941expand_gimple_cond (basic_block bb, gimple stmt)
80c7a9eb
RH
1942{
1943 basic_block new_bb, dest;
1944 edge new_edge;
1945 edge true_edge;
1946 edge false_edge;
b7211528 1947 rtx last2, last;
28ed065e
MM
1948 enum tree_code code;
1949 tree op0, op1;
1950
1951 code = gimple_cond_code (stmt);
1952 op0 = gimple_cond_lhs (stmt);
1953 op1 = gimple_cond_rhs (stmt);
1954 /* We're sometimes presented with such code:
1955 D.123_1 = x < y;
1956 if (D.123_1 != 0)
1957 ...
1958 This would expand to two comparisons which then later might
1959 be cleaned up by combine. But some pattern matchers like if-conversion
1960 work better when there's only one compare, so make up for this
 1961 here as a special exception if TER would have made the same change. */
31348d52 1962 if (SA.values
28ed065e 1963 && TREE_CODE (op0) == SSA_NAME
31348d52
RB
1964 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
1965 && TREE_CODE (op1) == INTEGER_CST
1966 && ((gimple_cond_code (stmt) == NE_EXPR
1967 && integer_zerop (op1))
1968 || (gimple_cond_code (stmt) == EQ_EXPR
1969 && integer_onep (op1)))
28ed065e
MM
1970 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1971 {
1972 gimple second = SSA_NAME_DEF_STMT (op0);
e83f4b68 1973 if (gimple_code (second) == GIMPLE_ASSIGN)
28ed065e 1974 {
e83f4b68
MM
1975 enum tree_code code2 = gimple_assign_rhs_code (second);
1976 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1977 {
1978 code = code2;
1979 op0 = gimple_assign_rhs1 (second);
1980 op1 = gimple_assign_rhs2 (second);
1981 }
 1982 /* If jumps are cheap, turn some more codes into
1983 jumpy sequences. */
1984 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1985 {
1986 if ((code2 == BIT_AND_EXPR
1987 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1988 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1989 || code2 == TRUTH_AND_EXPR)
1990 {
1991 code = TRUTH_ANDIF_EXPR;
1992 op0 = gimple_assign_rhs1 (second);
1993 op1 = gimple_assign_rhs2 (second);
1994 }
1995 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1996 {
1997 code = TRUTH_ORIF_EXPR;
1998 op0 = gimple_assign_rhs1 (second);
1999 op1 = gimple_assign_rhs2 (second);
2000 }
2001 }
28ed065e
MM
2002 }
2003 }
b7211528
SB
2004
2005 last2 = last = get_last_insn ();
80c7a9eb
RH
2006
2007 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5368224f 2008 set_curr_insn_location (gimple_location (stmt));
80c7a9eb
RH
2009
2010 /* These flags have no purpose in RTL land. */
2011 true_edge->flags &= ~EDGE_TRUE_VALUE;
2012 false_edge->flags &= ~EDGE_FALSE_VALUE;
2013
2014 /* We can either have a pure conditional jump with one fallthru edge or
 2015 a two-way jump that needs to be decomposed into two basic blocks. */
a9b77cd1 2016 if (false_edge->dest == bb->next_bb)
80c7a9eb 2017 {
40e90eac
JJ
2018 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2019 true_edge->probability);
726a989a 2020 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2f13f2de 2021 if (true_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2022 set_curr_insn_location (true_edge->goto_locus);
a9b77cd1 2023 false_edge->flags |= EDGE_FALLTHRU;
315adeda 2024 maybe_cleanup_end_of_block (false_edge, last);
80c7a9eb
RH
2025 return NULL;
2026 }
a9b77cd1 2027 if (true_edge->dest == bb->next_bb)
80c7a9eb 2028 {
40e90eac
JJ
2029 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2030 false_edge->probability);
726a989a 2031 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2f13f2de 2032 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2033 set_curr_insn_location (false_edge->goto_locus);
a9b77cd1 2034 true_edge->flags |= EDGE_FALLTHRU;
315adeda 2035 maybe_cleanup_end_of_block (true_edge, last);
80c7a9eb
RH
2036 return NULL;
2037 }
80c7a9eb 2038
40e90eac
JJ
2039 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2040 true_edge->probability);
80c7a9eb 2041 last = get_last_insn ();
2f13f2de 2042 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2043 set_curr_insn_location (false_edge->goto_locus);
a9b77cd1 2044 emit_jump (label_rtx_for_bb (false_edge->dest));
80c7a9eb
RH
2045
2046 BB_END (bb) = last;
2047 if (BARRIER_P (BB_END (bb)))
2048 BB_END (bb) = PREV_INSN (BB_END (bb));
2049 update_bb_for_insn (bb);
2050
2051 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2052 dest = false_edge->dest;
2053 redirect_edge_succ (false_edge, new_bb);
2054 false_edge->flags |= EDGE_FALLTHRU;
2055 new_bb->count = false_edge->count;
2056 new_bb->frequency = EDGE_FREQUENCY (false_edge);
7d776ee2
RG
2057 if (current_loops && bb->loop_father)
2058 add_bb_to_loop (new_bb, bb->loop_father);
80c7a9eb
RH
2059 new_edge = make_edge (new_bb, dest, 0);
2060 new_edge->probability = REG_BR_PROB_BASE;
2061 new_edge->count = new_bb->count;
2062 if (BARRIER_P (BB_END (new_bb)))
2063 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2064 update_bb_for_insn (new_bb);
2065
726a989a 2066 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
c22cacf3 2067
2f13f2de 2068 if (true_edge->goto_locus != UNKNOWN_LOCATION)
7787b4aa 2069 {
5368224f
DC
2070 set_curr_insn_location (true_edge->goto_locus);
2071 true_edge->goto_locus = curr_insn_location ();
7787b4aa 2072 }
7787b4aa 2073
80c7a9eb
RH
2074 return new_bb;
2075}
2076
0a35513e
AH
2077/* Mark all calls that can have a transaction restart. */
2078
2079static void
2080mark_transaction_restart_calls (gimple stmt)
2081{
2082 struct tm_restart_node dummy;
2083 void **slot;
2084
2085 if (!cfun->gimple_df->tm_restart)
2086 return;
2087
2088 dummy.stmt = stmt;
2089 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
2090 if (slot)
2091 {
2092 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
2093 tree list = n->label_or_list;
2094 rtx insn;
2095
2096 for (insn = next_real_insn (get_last_insn ());
2097 !CALL_P (insn);
2098 insn = next_real_insn (insn))
2099 continue;
2100
2101 if (TREE_CODE (list) == LABEL_DECL)
2102 add_reg_note (insn, REG_TM, label_rtx (list));
2103 else
2104 for (; list ; list = TREE_CHAIN (list))
2105 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2106 }
2107}
2108
28ed065e
MM
2109/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2110 statement STMT. */
2111
2112static void
2113expand_call_stmt (gimple stmt)
2114{
25583c4f 2115 tree exp, decl, lhs;
e23817b3 2116 bool builtin_p;
e7925582 2117 size_t i;
28ed065e 2118
25583c4f
RS
2119 if (gimple_call_internal_p (stmt))
2120 {
2121 expand_internal_call (stmt);
2122 return;
2123 }
2124
28ed065e
MM
2125 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2126
2127 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
e23817b3
RG
2128 decl = gimple_call_fndecl (stmt);
2129 builtin_p = decl && DECL_BUILT_IN (decl);
2130
e7925582
EB
2131 /* If this is not a builtin function, the function type through which the
2132 call is made may be different from the type of the function. */
2133 if (!builtin_p)
2134 CALL_EXPR_FN (exp)
b25aa0e8
EB
2135 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2136 CALL_EXPR_FN (exp));
e7925582 2137
28ed065e
MM
2138 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2139 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2140
2141 for (i = 0; i < gimple_call_num_args (stmt); i++)
e23817b3
RG
2142 {
2143 tree arg = gimple_call_arg (stmt, i);
2144 gimple def;
2145 /* TER addresses into arguments of builtin functions so we have a
2146 chance to infer more correct alignment information. See PR39954. */
2147 if (builtin_p
2148 && TREE_CODE (arg) == SSA_NAME
2149 && (def = get_gimple_for_ssa_name (arg))
2150 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2151 arg = gimple_assign_rhs1 (def);
2152 CALL_EXPR_ARG (exp, i) = arg;
2153 }
28ed065e 2154
93f28ca7 2155 if (gimple_has_side_effects (stmt))
28ed065e
MM
2156 TREE_SIDE_EFFECTS (exp) = 1;
2157
93f28ca7 2158 if (gimple_call_nothrow_p (stmt))
28ed065e
MM
2159 TREE_NOTHROW (exp) = 1;
2160
2161 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2162 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
63d2a353
MM
2163 if (decl
2164 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13e49da9
TV
2165 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2166 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
63d2a353
MM
2167 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2168 else
2169 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
28ed065e
MM
2170 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2171 SET_EXPR_LOCATION (exp, gimple_location (stmt));
28ed065e 2172
ddb555ed
JJ
2173 /* Ensure RTL is created for debug args. */
2174 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2175 {
9771b263 2176 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
ddb555ed
JJ
2177 unsigned int ix;
2178 tree dtemp;
2179
2180 if (debug_args)
9771b263 2181 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
ddb555ed
JJ
2182 {
2183 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2184 expand_debug_expr (dtemp);
2185 }
2186 }
2187
25583c4f 2188 lhs = gimple_call_lhs (stmt);
28ed065e
MM
2189 if (lhs)
2190 expand_assignment (lhs, exp, false);
2191 else
2192 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
0a35513e
AH
2193
2194 mark_transaction_restart_calls (stmt);
28ed065e
MM
2195}
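/* Illustrative sketch, for a hypothetical statement (not from the code
   above): given the GIMPLE call
       x = foo (a_1, b_2);
   the code above rebuilds a GENERIC CALL_EXPR whose CALL_EXPR_FN comes from
   gimple_call_fn, whose TREE_TYPE is gimple_call_return_type, and whose two
   CALL_EXPR_ARGs are the SSA operands, and then expands it with
   expand_assignment (x, exp, false); a call with no LHS instead goes through
   expand_expr_real_1 with const0_rtx as the ignored target. */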
2196
2197/* A subroutine of expand_gimple_stmt, expanding one gimple statement
2198 STMT that doesn't require special handling for outgoing edges. That
 2199 is, no tailcalls and no GIMPLE_COND. */
2200
2201static void
2202expand_gimple_stmt_1 (gimple stmt)
2203{
2204 tree op0;
c82fee88 2205
5368224f 2206 set_curr_insn_location (gimple_location (stmt));
c82fee88 2207
28ed065e
MM
2208 switch (gimple_code (stmt))
2209 {
2210 case GIMPLE_GOTO:
2211 op0 = gimple_goto_dest (stmt);
2212 if (TREE_CODE (op0) == LABEL_DECL)
2213 expand_goto (op0);
2214 else
2215 expand_computed_goto (op0);
2216 break;
2217 case GIMPLE_LABEL:
2218 expand_label (gimple_label_label (stmt));
2219 break;
2220 case GIMPLE_NOP:
2221 case GIMPLE_PREDICT:
2222 break;
28ed065e
MM
2223 case GIMPLE_SWITCH:
2224 expand_case (stmt);
2225 break;
2226 case GIMPLE_ASM:
2227 expand_asm_stmt (stmt);
2228 break;
2229 case GIMPLE_CALL:
2230 expand_call_stmt (stmt);
2231 break;
2232
2233 case GIMPLE_RETURN:
2234 op0 = gimple_return_retval (stmt);
2235
2236 if (op0 && op0 != error_mark_node)
2237 {
2238 tree result = DECL_RESULT (current_function_decl);
2239
2240 /* If we are not returning the current function's RESULT_DECL,
2241 build an assignment to it. */
2242 if (op0 != result)
2243 {
2244 /* I believe that a function's RESULT_DECL is unique. */
2245 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2246
2247 /* ??? We'd like to use simply expand_assignment here,
2248 but this fails if the value is of BLKmode but the return
2249 decl is a register. expand_return has special handling
2250 for this combination, which eventually should move
2251 to common code. See comments there. Until then, let's
2252 build a modify expression :-/ */
2253 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2254 result, op0);
2255 }
2256 }
2257 if (!op0)
2258 expand_null_return ();
2259 else
2260 expand_return (op0);
2261 break;
2262
2263 case GIMPLE_ASSIGN:
2264 {
2265 tree lhs = gimple_assign_lhs (stmt);
2266
2267 /* Tree expand used to fiddle with |= and &= of two bitfield
2268 COMPONENT_REFs here. This can't happen with gimple, the LHS
2269 of binary assigns must be a gimple reg. */
2270
2271 if (TREE_CODE (lhs) != SSA_NAME
2272 || get_gimple_rhs_class (gimple_expr_code (stmt))
2273 == GIMPLE_SINGLE_RHS)
2274 {
2275 tree rhs = gimple_assign_rhs1 (stmt);
2276 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2277 == GIMPLE_SINGLE_RHS);
2278 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2279 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
47598145
MM
2280 if (TREE_CLOBBER_P (rhs))
2281 /* This is a clobber to mark the going out of scope for
2282 this LHS. */
2283 ;
2284 else
2285 expand_assignment (lhs, rhs,
2286 gimple_assign_nontemporal_move_p (stmt));
28ed065e
MM
2287 }
2288 else
2289 {
2290 rtx target, temp;
2291 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2292 struct separate_ops ops;
2293 bool promoted = false;
2294
2295 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2296 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2297 promoted = true;
2298
2299 ops.code = gimple_assign_rhs_code (stmt);
2300 ops.type = TREE_TYPE (lhs);
2301 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2302 {
0354c0c7
BS
2303 case GIMPLE_TERNARY_RHS:
2304 ops.op2 = gimple_assign_rhs3 (stmt);
2305 /* Fallthru */
28ed065e
MM
2306 case GIMPLE_BINARY_RHS:
2307 ops.op1 = gimple_assign_rhs2 (stmt);
2308 /* Fallthru */
2309 case GIMPLE_UNARY_RHS:
2310 ops.op0 = gimple_assign_rhs1 (stmt);
2311 break;
2312 default:
2313 gcc_unreachable ();
2314 }
2315 ops.location = gimple_location (stmt);
2316
2317 /* If we want to use a nontemporal store, force the value to
2318 register first. If we store into a promoted register,
2319 don't directly expand to target. */
2320 temp = nontemporal || promoted ? NULL_RTX : target;
2321 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2322 EXPAND_NORMAL);
2323
2324 if (temp == target)
2325 ;
2326 else if (promoted)
2327 {
4e18a7d4 2328 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
28ed065e
MM
2329 /* If TEMP is a VOIDmode constant, use convert_modes to make
2330 sure that we properly convert it. */
2331 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2332 {
2333 temp = convert_modes (GET_MODE (target),
2334 TYPE_MODE (ops.type),
4e18a7d4 2335 temp, unsignedp);
28ed065e 2336 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4e18a7d4 2337 GET_MODE (target), temp, unsignedp);
28ed065e
MM
2338 }
2339
4e18a7d4 2340 convert_move (SUBREG_REG (target), temp, unsignedp);
28ed065e
MM
2341 }
2342 else if (nontemporal && emit_storent_insn (target, temp))
2343 ;
2344 else
2345 {
2346 temp = force_operand (temp, target);
2347 if (temp != target)
2348 emit_move_insn (target, temp);
2349 }
2350 }
2351 }
2352 break;
2353
2354 default:
2355 gcc_unreachable ();
2356 }
2357}
2358
2359/* Expand one gimple statement STMT and return the last RTL instruction
2360 before any of the newly generated ones.
2361
2362 In addition to generating the necessary RTL instructions this also
2363 sets REG_EH_REGION notes if necessary and sets the current source
2364 location for diagnostics. */
2365
2366static rtx
2367expand_gimple_stmt (gimple stmt)
2368{
28ed065e 2369 location_t saved_location = input_location;
c82fee88
EB
2370 rtx last = get_last_insn ();
2371 int lp_nr;
28ed065e 2372
28ed065e
MM
2373 gcc_assert (cfun);
2374
c82fee88
EB
2375 /* We need to save and restore the current source location so that errors
2376 discovered during expansion are emitted with the right location. But
2377 it would be better if the diagnostic routines used the source location
2378 embedded in the tree nodes rather than globals. */
28ed065e 2379 if (gimple_has_location (stmt))
c82fee88 2380 input_location = gimple_location (stmt);
28ed065e
MM
2381
2382 expand_gimple_stmt_1 (stmt);
c82fee88 2383
28ed065e
MM
2384 /* Free any temporaries used to evaluate this statement. */
2385 free_temp_slots ();
2386
2387 input_location = saved_location;
2388
2389 /* Mark all insns that may trap. */
1d65f45c
RH
2390 lp_nr = lookup_stmt_eh_lp (stmt);
2391 if (lp_nr)
28ed065e
MM
2392 {
2393 rtx insn;
2394 for (insn = next_real_insn (last); insn;
2395 insn = next_real_insn (insn))
2396 {
2397 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2398 /* If we want exceptions for non-call insns, any
2399 may_trap_p instruction may throw. */
2400 && GET_CODE (PATTERN (insn)) != CLOBBER
2401 && GET_CODE (PATTERN (insn)) != USE
1d65f45c
RH
2402 && insn_could_throw_p (insn))
2403 make_reg_eh_region_note (insn, 0, lp_nr);
28ed065e
MM
2404 }
2405 }
2406
2407 return last;
2408}
2409
726a989a 2410/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
224e770b
RH
2411 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2412 generated a tail call (something that might be denied by the ABI
cea49550
RH
2413 rules governing the call; see calls.c).
2414
2415 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2416 can still reach the rest of BB. The case here is __builtin_sqrt,
2417 where the NaN result goes through the external function (with a
2418 tailcall) and the normal result happens via a sqrt instruction. */
80c7a9eb
RH
2419
2420static basic_block
726a989a 2421expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
80c7a9eb 2422{
b7211528 2423 rtx last2, last;
224e770b 2424 edge e;
628f6a4e 2425 edge_iterator ei;
224e770b
RH
2426 int probability;
2427 gcov_type count;
80c7a9eb 2428
28ed065e 2429 last2 = last = expand_gimple_stmt (stmt);
80c7a9eb
RH
2430
2431 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
2432 if (CALL_P (last) && SIBLING_CALL_P (last))
2433 goto found;
80c7a9eb 2434
726a989a 2435 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2436
cea49550 2437 *can_fallthru = true;
224e770b 2438 return NULL;
80c7a9eb 2439
224e770b
RH
2440 found:
2441 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2442 Any instructions emitted here are about to be deleted. */
2443 do_pending_stack_adjust ();
2444
2445 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2446 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2447 EH or abnormal edges, we shouldn't have created a tail call in
2448 the first place. So it seems to me we should just be removing
2449 all edges here, or redirecting the existing fallthru edge to
2450 the exit block. */
2451
224e770b
RH
2452 probability = 0;
2453 count = 0;
224e770b 2454
628f6a4e
BE
2455 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2456 {
224e770b
RH
2457 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2458 {
2459 if (e->dest != EXIT_BLOCK_PTR)
80c7a9eb 2460 {
224e770b
RH
2461 e->dest->count -= e->count;
2462 e->dest->frequency -= EDGE_FREQUENCY (e);
2463 if (e->dest->count < 0)
c22cacf3 2464 e->dest->count = 0;
224e770b 2465 if (e->dest->frequency < 0)
c22cacf3 2466 e->dest->frequency = 0;
80c7a9eb 2467 }
224e770b
RH
2468 count += e->count;
2469 probability += e->probability;
2470 remove_edge (e);
80c7a9eb 2471 }
628f6a4e
BE
2472 else
2473 ei_next (&ei);
80c7a9eb
RH
2474 }
2475
224e770b
RH
2476 /* This is somewhat ugly: the call_expr expander often emits instructions
2477 after the sibcall (to perform the function return). These confuse the
12eff7b7 2478 find_many_sub_basic_blocks code, so we need to get rid of these. */
224e770b 2479 last = NEXT_INSN (last);
341c100f 2480 gcc_assert (BARRIER_P (last));
cea49550
RH
2481
2482 *can_fallthru = false;
224e770b
RH
2483 while (NEXT_INSN (last))
2484 {
2485 /* For instance an sqrt builtin expander expands if with
2486 sibcall in the then and label for `else`. */
2487 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
2488 {
2489 *can_fallthru = true;
2490 break;
2491 }
224e770b
RH
2492 delete_insn (NEXT_INSN (last));
2493 }
2494
2495 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2496 e->probability += probability;
2497 e->count += count;
2498 BB_END (bb) = last;
2499 update_bb_for_insn (bb);
2500
2501 if (NEXT_INSN (last))
2502 {
2503 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2504
2505 last = BB_END (bb);
2506 if (BARRIER_P (last))
2507 BB_END (bb) = PREV_INSN (last);
2508 }
2509
726a989a 2510 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2511
224e770b 2512 return bb;
80c7a9eb
RH
2513}
2514
b5b8b0ac
AO
2515/* Return the difference between the floor and the truncated result of
2516 a signed division by OP1 with remainder MOD. */
2517static rtx
2518floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2519{
2520 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2521 return gen_rtx_IF_THEN_ELSE
2522 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2523 gen_rtx_IF_THEN_ELSE
2524 (mode, gen_rtx_LT (BImode,
2525 gen_rtx_DIV (mode, op1, mod),
2526 const0_rtx),
2527 constm1_rtx, const0_rtx),
2528 const0_rtx);
2529}
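/* Worked example, with hypothetical operands: for -7 / 2 the truncated
   quotient is -3 with remainder MOD = -1, whereas the floor quotient is -4.
   In the expression above MOD != 0 and OP1 / MOD = 2 / -1 = -2 < 0, so the
   adjustment is -1, which turns -3 into the floor result -4. When MOD is 0,
   or OP0 and OP1 have the same sign, the adjustment is 0 and truncation
   already equals floor. */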
2530
2531/* Return the difference between the ceil and the truncated result of
2532 a signed division by OP1 with remainder MOD. */
2533static rtx
2534ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2535{
2536 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2537 return gen_rtx_IF_THEN_ELSE
2538 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2539 gen_rtx_IF_THEN_ELSE
2540 (mode, gen_rtx_GT (BImode,
2541 gen_rtx_DIV (mode, op1, mod),
2542 const0_rtx),
2543 const1_rtx, const0_rtx),
2544 const0_rtx);
2545}
2546
2547/* Return the difference between the ceil and the truncated result of
2548 an unsigned division by OP1 with remainder MOD. */
2549static rtx
2550ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2551{
2552 /* (mod != 0 ? 1 : 0) */
2553 return gen_rtx_IF_THEN_ELSE
2554 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2555 const1_rtx, const0_rtx);
2556}
2557
2558/* Return the difference between the rounded and the truncated result
2559 of a signed division by OP1 with remainder MOD. Halfway cases are
2560 rounded away from zero, rather than to the nearest even number. */
2561static rtx
2562round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2563{
2564 /* (abs (mod) >= abs (op1) - abs (mod)
2565 ? (op1 / mod > 0 ? 1 : -1)
2566 : 0) */
2567 return gen_rtx_IF_THEN_ELSE
2568 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2569 gen_rtx_MINUS (mode,
2570 gen_rtx_ABS (mode, op1),
2571 gen_rtx_ABS (mode, mod))),
2572 gen_rtx_IF_THEN_ELSE
2573 (mode, gen_rtx_GT (BImode,
2574 gen_rtx_DIV (mode, op1, mod),
2575 const0_rtx),
2576 const1_rtx, constm1_rtx),
2577 const0_rtx);
2578}
2579
2580/* Return the difference between the rounded and the truncated result
 2581 of an unsigned division by OP1 with remainder MOD. Halfway cases
2582 are rounded away from zero, rather than to the nearest even
2583 number. */
2584static rtx
2585round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2586{
2587 /* (mod >= op1 - mod ? 1 : 0) */
2588 return gen_rtx_IF_THEN_ELSE
2589 (mode, gen_rtx_GE (BImode, mod,
2590 gen_rtx_MINUS (mode, op1, mod)),
2591 const1_rtx, const0_rtx);
2592}
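/* Worked example, with hypothetical operands: for the unsigned division
   7 / 2 the truncated quotient is 3 and MOD = 1. In round_udiv_adjust,
   MOD >= OP1 - MOD holds (1 >= 1), so the adjustment is 1 and the rounded
   result is 4, i.e. the halfway case is rounded away from zero. For the
   same operands ceil_udiv_adjust also returns 1, since any nonzero
   remainder bumps the truncated quotient up to the ceiling. */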
2593
dda2da58
AO
 2594/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2595 any rtl. */
2596
2597static rtx
f61c6f34
JJ
2598convert_debug_memory_address (enum machine_mode mode, rtx x,
2599 addr_space_t as)
dda2da58
AO
2600{
2601 enum machine_mode xmode = GET_MODE (x);
2602
2603#ifndef POINTERS_EXTEND_UNSIGNED
f61c6f34
JJ
2604 gcc_assert (mode == Pmode
2605 || mode == targetm.addr_space.address_mode (as));
dda2da58
AO
2606 gcc_assert (xmode == mode || xmode == VOIDmode);
2607#else
f61c6f34 2608 rtx temp;
f61c6f34 2609
639d4bb8 2610 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
dda2da58
AO
2611
2612 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2613 return x;
2614
69660a70 2615 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
dda2da58
AO
2616 x = simplify_gen_subreg (mode, x, xmode,
2617 subreg_lowpart_offset
2618 (mode, xmode));
2619 else if (POINTERS_EXTEND_UNSIGNED > 0)
2620 x = gen_rtx_ZERO_EXTEND (mode, x);
2621 else if (!POINTERS_EXTEND_UNSIGNED)
2622 x = gen_rtx_SIGN_EXTEND (mode, x);
2623 else
f61c6f34
JJ
2624 {
2625 switch (GET_CODE (x))
2626 {
2627 case SUBREG:
2628 if ((SUBREG_PROMOTED_VAR_P (x)
2629 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2630 || (GET_CODE (SUBREG_REG (x)) == PLUS
2631 && REG_P (XEXP (SUBREG_REG (x), 0))
2632 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2633 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2634 && GET_MODE (SUBREG_REG (x)) == mode)
2635 return SUBREG_REG (x);
2636 break;
2637 case LABEL_REF:
2638 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2639 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2640 return temp;
2641 case SYMBOL_REF:
2642 temp = shallow_copy_rtx (x);
2643 PUT_MODE (temp, mode);
2644 return temp;
2645 case CONST:
2646 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2647 if (temp)
2648 temp = gen_rtx_CONST (mode, temp);
2649 return temp;
2650 case PLUS:
2651 case MINUS:
2652 if (CONST_INT_P (XEXP (x, 1)))
2653 {
2654 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2655 if (temp)
2656 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2657 }
2658 break;
2659 default:
2660 break;
2661 }
 2662 /* Don't know how to express ptr_extend as an operation in debug info. */
2663 return NULL;
2664 }
dda2da58
AO
2665#endif /* POINTERS_EXTEND_UNSIGNED */
2666
2667 return x;
2668}
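/* Illustrative note, for a hypothetical target: if ptr_mode is SImode,
   Pmode is DImode and POINTERS_EXTEND_UNSIGNED is positive, an SImode
   address X reaching the extension code above is simply wrapped as
   (zero_extend:DI X); no insns are emitted, which is all a debug
   expression needs. */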
2669
12c5ffe5
EB
2670/* Return an RTX equivalent to the value of the parameter DECL. */
2671
2672static rtx
2673expand_debug_parm_decl (tree decl)
2674{
2675 rtx incoming = DECL_INCOMING_RTL (decl);
2676
2677 if (incoming
2678 && GET_MODE (incoming) != BLKmode
2679 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2680 || (MEM_P (incoming)
2681 && REG_P (XEXP (incoming, 0))
2682 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2683 {
2684 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2685
2686#ifdef HAVE_window_save
2687 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2688 If the target machine has an explicit window save instruction, the
2689 actual entry value is the corresponding OUTGOING_REGNO instead. */
2690 if (REG_P (incoming)
2691 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2692 incoming
2693 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2694 OUTGOING_REGNO (REGNO (incoming)), 0);
2695 else if (MEM_P (incoming))
2696 {
2697 rtx reg = XEXP (incoming, 0);
2698 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2699 {
2700 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2701 incoming = replace_equiv_address_nv (incoming, reg);
2702 }
6cfa417f
JJ
2703 else
2704 incoming = copy_rtx (incoming);
12c5ffe5
EB
2705 }
2706#endif
2707
2708 ENTRY_VALUE_EXP (rtl) = incoming;
2709 return rtl;
2710 }
2711
2712 if (incoming
2713 && GET_MODE (incoming) != BLKmode
2714 && !TREE_ADDRESSABLE (decl)
2715 && MEM_P (incoming)
2716 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2717 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2718 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2719 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
6cfa417f 2720 return copy_rtx (incoming);
12c5ffe5
EB
2721
2722 return NULL_RTX;
2723}
2724
2725/* Return an RTX equivalent to the value of the tree expression EXP. */
b5b8b0ac
AO
2726
2727static rtx
2728expand_debug_expr (tree exp)
2729{
2730 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2731 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2ba172e0 2732 enum machine_mode inner_mode = VOIDmode;
b5b8b0ac 2733 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
09e881c9 2734 addr_space_t as;
b5b8b0ac
AO
2735
2736 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2737 {
2738 case tcc_expression:
2739 switch (TREE_CODE (exp))
2740 {
2741 case COND_EXPR:
7ece48b1 2742 case DOT_PROD_EXPR:
0354c0c7
BS
2743 case WIDEN_MULT_PLUS_EXPR:
2744 case WIDEN_MULT_MINUS_EXPR:
0f59b812 2745 case FMA_EXPR:
b5b8b0ac
AO
2746 goto ternary;
2747
2748 case TRUTH_ANDIF_EXPR:
2749 case TRUTH_ORIF_EXPR:
2750 case TRUTH_AND_EXPR:
2751 case TRUTH_OR_EXPR:
2752 case TRUTH_XOR_EXPR:
2753 goto binary;
2754
2755 case TRUTH_NOT_EXPR:
2756 goto unary;
2757
2758 default:
2759 break;
2760 }
2761 break;
2762
2763 ternary:
2764 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2765 if (!op2)
2766 return NULL_RTX;
2767 /* Fall through. */
2768
2769 binary:
2770 case tcc_binary:
2771 case tcc_comparison:
2772 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2773 if (!op1)
2774 return NULL_RTX;
2775 /* Fall through. */
2776
2777 unary:
2778 case tcc_unary:
2ba172e0 2779 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
2780 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2781 if (!op0)
2782 return NULL_RTX;
2783 break;
2784
2785 case tcc_type:
2786 case tcc_statement:
2787 gcc_unreachable ();
2788
2789 case tcc_constant:
2790 case tcc_exceptional:
2791 case tcc_declaration:
2792 case tcc_reference:
2793 case tcc_vl_exp:
2794 break;
2795 }
2796
2797 switch (TREE_CODE (exp))
2798 {
2799 case STRING_CST:
2800 if (!lookup_constant_def (exp))
2801 {
e1b243a8
JJ
2802 if (strlen (TREE_STRING_POINTER (exp)) + 1
2803 != (size_t) TREE_STRING_LENGTH (exp))
2804 return NULL_RTX;
b5b8b0ac
AO
2805 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2806 op0 = gen_rtx_MEM (BLKmode, op0);
2807 set_mem_attributes (op0, exp, 0);
2808 return op0;
2809 }
2810 /* Fall through... */
2811
2812 case INTEGER_CST:
2813 case REAL_CST:
2814 case FIXED_CST:
2815 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2816 return op0;
2817
2818 case COMPLEX_CST:
2819 gcc_assert (COMPLEX_MODE_P (mode));
2820 op0 = expand_debug_expr (TREE_REALPART (exp));
b5b8b0ac 2821 op1 = expand_debug_expr (TREE_IMAGPART (exp));
b5b8b0ac
AO
2822 return gen_rtx_CONCAT (mode, op0, op1);
2823
0ca5af51
AO
2824 case DEBUG_EXPR_DECL:
2825 op0 = DECL_RTL_IF_SET (exp);
2826
2827 if (op0)
2828 return op0;
2829
2830 op0 = gen_rtx_DEBUG_EXPR (mode);
e4fb38bd 2831 DEBUG_EXPR_TREE_DECL (op0) = exp;
0ca5af51
AO
2832 SET_DECL_RTL (exp, op0);
2833
2834 return op0;
2835
b5b8b0ac
AO
2836 case VAR_DECL:
2837 case PARM_DECL:
2838 case FUNCTION_DECL:
2839 case LABEL_DECL:
2840 case CONST_DECL:
2841 case RESULT_DECL:
2842 op0 = DECL_RTL_IF_SET (exp);
2843
2844 /* This decl was probably optimized away. */
2845 if (!op0)
e1b243a8
JJ
2846 {
2847 if (TREE_CODE (exp) != VAR_DECL
2848 || DECL_EXTERNAL (exp)
2849 || !TREE_STATIC (exp)
2850 || !DECL_NAME (exp)
0fba566c 2851 || DECL_HARD_REGISTER (exp)
7d5fc814 2852 || DECL_IN_CONSTANT_POOL (exp)
0fba566c 2853 || mode == VOIDmode)
e1b243a8
JJ
2854 return NULL;
2855
b1aa0655 2856 op0 = make_decl_rtl_for_debug (exp);
e1b243a8
JJ
2857 if (!MEM_P (op0)
2858 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2859 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2860 return NULL;
2861 }
2862 else
2863 op0 = copy_rtx (op0);
b5b8b0ac 2864
06796564
JJ
2865 if (GET_MODE (op0) == BLKmode
2866 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2867 below would ICE. While it is likely a FE bug,
2868 try to be robust here. See PR43166. */
132b4e82
JJ
2869 || mode == BLKmode
2870 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
b5b8b0ac
AO
2871 {
2872 gcc_assert (MEM_P (op0));
2873 op0 = adjust_address_nv (op0, mode, 0);
2874 return op0;
2875 }
2876
2877 /* Fall through. */
2878
2879 adjust_mode:
2880 case PAREN_EXPR:
2881 case NOP_EXPR:
2882 case CONVERT_EXPR:
2883 {
2ba172e0 2884 inner_mode = GET_MODE (op0);
b5b8b0ac
AO
2885
2886 if (mode == inner_mode)
2887 return op0;
2888
2889 if (inner_mode == VOIDmode)
2890 {
2a8e30fb
MM
2891 if (TREE_CODE (exp) == SSA_NAME)
2892 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2893 else
2894 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
2895 if (mode == inner_mode)
2896 return op0;
2897 }
2898
2899 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2900 {
2901 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2902 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2903 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2904 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2905 else
2906 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2907 }
2908 else if (FLOAT_MODE_P (mode))
2909 {
2a8e30fb 2910 gcc_assert (TREE_CODE (exp) != SSA_NAME);
b5b8b0ac
AO
2911 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2912 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2913 else
2914 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2915 }
2916 else if (FLOAT_MODE_P (inner_mode))
2917 {
2918 if (unsignedp)
2919 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2920 else
2921 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2922 }
2923 else if (CONSTANT_P (op0)
69660a70 2924 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
b5b8b0ac
AO
2925 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2926 subreg_lowpart_offset (mode,
2927 inner_mode));
1b47fe3f
JJ
2928 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2929 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2930 : unsignedp)
2ba172e0 2931 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
b5b8b0ac 2932 else
2ba172e0 2933 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
b5b8b0ac
AO
2934
2935 return op0;
2936 }
2937
70f34814 2938 case MEM_REF:
71f3a3f5
JJ
2939 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2940 {
2941 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2942 TREE_OPERAND (exp, 0),
2943 TREE_OPERAND (exp, 1));
2944 if (newexp)
2945 return expand_debug_expr (newexp);
2946 }
2947 /* FALLTHROUGH */
b5b8b0ac 2948 case INDIRECT_REF:
0a81f074 2949 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
2950 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2951 if (!op0)
2952 return NULL;
2953
cb115041
JJ
2954 if (TREE_CODE (exp) == MEM_REF)
2955 {
583ac69c
JJ
2956 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2957 || (GET_CODE (op0) == PLUS
2958 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2959 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2960 Instead just use get_inner_reference. */
2961 goto component_ref;
2962
cb115041
JJ
2963 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2964 if (!op1 || !CONST_INT_P (op1))
2965 return NULL;
2966
0a81f074 2967 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
cb115041
JJ
2968 }
2969
09e881c9 2970 if (POINTER_TYPE_P (TREE_TYPE (exp)))
75421dcd 2971 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
09e881c9 2972 else
75421dcd 2973 as = ADDR_SPACE_GENERIC;
b5b8b0ac 2974
f61c6f34
JJ
2975 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2976 op0, as);
2977 if (op0 == NULL_RTX)
2978 return NULL;
b5b8b0ac 2979
f61c6f34 2980 op0 = gen_rtx_MEM (mode, op0);
b5b8b0ac 2981 set_mem_attributes (op0, exp, 0);
71f3a3f5
JJ
2982 if (TREE_CODE (exp) == MEM_REF
2983 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2984 set_mem_expr (op0, NULL_TREE);
09e881c9 2985 set_mem_addr_space (op0, as);
b5b8b0ac
AO
2986
2987 return op0;
2988
2989 case TARGET_MEM_REF:
4d948885
RG
2990 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2991 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
b5b8b0ac
AO
2992 return NULL;
2993
2994 op0 = expand_debug_expr
4e25ca6b 2995 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
b5b8b0ac
AO
2996 if (!op0)
2997 return NULL;
2998
f61c6f34
JJ
2999 if (POINTER_TYPE_P (TREE_TYPE (exp)))
3000 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
3001 else
3002 as = ADDR_SPACE_GENERIC;
3003
3004 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
3005 op0, as);
3006 if (op0 == NULL_RTX)
3007 return NULL;
b5b8b0ac
AO
3008
3009 op0 = gen_rtx_MEM (mode, op0);
3010
3011 set_mem_attributes (op0, exp, 0);
09e881c9 3012 set_mem_addr_space (op0, as);
b5b8b0ac
AO
3013
3014 return op0;
3015
583ac69c 3016 component_ref:
b5b8b0ac
AO
3017 case ARRAY_REF:
3018 case ARRAY_RANGE_REF:
3019 case COMPONENT_REF:
3020 case BIT_FIELD_REF:
3021 case REALPART_EXPR:
3022 case IMAGPART_EXPR:
3023 case VIEW_CONVERT_EXPR:
3024 {
3025 enum machine_mode mode1;
3026 HOST_WIDE_INT bitsize, bitpos;
3027 tree offset;
3028 int volatilep = 0;
3029 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3030 &mode1, &unsignedp, &volatilep, false);
3031 rtx orig_op0;
3032
4f2a9af8
JJ
3033 if (bitsize == 0)
3034 return NULL;
3035
b5b8b0ac
AO
3036 orig_op0 = op0 = expand_debug_expr (tem);
3037
3038 if (!op0)
3039 return NULL;
3040
3041 if (offset)
3042 {
dda2da58
AO
3043 enum machine_mode addrmode, offmode;
3044
aa847cc8
JJ
3045 if (!MEM_P (op0))
3046 return NULL;
b5b8b0ac 3047
dda2da58
AO
3048 op0 = XEXP (op0, 0);
3049 addrmode = GET_MODE (op0);
3050 if (addrmode == VOIDmode)
3051 addrmode = Pmode;
3052
b5b8b0ac
AO
3053 op1 = expand_debug_expr (offset);
3054 if (!op1)
3055 return NULL;
3056
dda2da58
AO
3057 offmode = GET_MODE (op1);
3058 if (offmode == VOIDmode)
3059 offmode = TYPE_MODE (TREE_TYPE (offset));
3060
3061 if (addrmode != offmode)
3062 op1 = simplify_gen_subreg (addrmode, op1, offmode,
3063 subreg_lowpart_offset (addrmode,
3064 offmode));
3065
 3066 /* Don't use offset_address here; we don't need a
3067 recognizable address, and we don't want to generate
3068 code. */
2ba172e0
JJ
3069 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
3070 op0, op1));
b5b8b0ac
AO
3071 }
3072
3073 if (MEM_P (op0))
3074 {
4f2a9af8
JJ
3075 if (mode1 == VOIDmode)
3076 /* Bitfield. */
3077 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
b5b8b0ac
AO
3078 if (bitpos >= BITS_PER_UNIT)
3079 {
3080 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
3081 bitpos %= BITS_PER_UNIT;
3082 }
3083 else if (bitpos < 0)
3084 {
4f2a9af8
JJ
3085 HOST_WIDE_INT units
3086 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
b5b8b0ac
AO
3087 op0 = adjust_address_nv (op0, mode1, units);
3088 bitpos += units * BITS_PER_UNIT;
3089 }
3090 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
3091 op0 = adjust_address_nv (op0, mode, 0);
3092 else if (GET_MODE (op0) != mode1)
3093 op0 = adjust_address_nv (op0, mode1, 0);
3094 else
3095 op0 = copy_rtx (op0);
3096 if (op0 == orig_op0)
3097 op0 = shallow_copy_rtx (op0);
3098 set_mem_attributes (op0, exp, 0);
3099 }
3100
3101 if (bitpos == 0 && mode == GET_MODE (op0))
3102 return op0;
3103
2d3fc6aa
JJ
3104 if (bitpos < 0)
3105 return NULL;
3106
88c04a5d
JJ
3107 if (GET_MODE (op0) == BLKmode)
3108 return NULL;
3109
b5b8b0ac
AO
3110 if ((bitpos % BITS_PER_UNIT) == 0
3111 && bitsize == GET_MODE_BITSIZE (mode1))
3112 {
3113 enum machine_mode opmode = GET_MODE (op0);
3114
b5b8b0ac 3115 if (opmode == VOIDmode)
9712cba0 3116 opmode = TYPE_MODE (TREE_TYPE (tem));
b5b8b0ac
AO
3117
3118 /* This condition may hold if we're expanding the address
3119 right past the end of an array that turned out not to
3120 be addressable (i.e., the address was only computed in
3121 debug stmts). The gen_subreg below would rightfully
3122 crash, and the address doesn't really exist, so just
3123 drop it. */
3124 if (bitpos >= GET_MODE_BITSIZE (opmode))
3125 return NULL;
3126
7d5d39bb
JJ
3127 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3128 return simplify_gen_subreg (mode, op0, opmode,
3129 bitpos / BITS_PER_UNIT);
b5b8b0ac
AO
3130 }
3131
3132 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3133 && TYPE_UNSIGNED (TREE_TYPE (exp))
3134 ? SIGN_EXTRACT
3135 : ZERO_EXTRACT, mode,
3136 GET_MODE (op0) != VOIDmode
9712cba0
JJ
3137 ? GET_MODE (op0)
3138 : TYPE_MODE (TREE_TYPE (tem)),
b5b8b0ac
AO
3139 op0, GEN_INT (bitsize), GEN_INT (bitpos));
3140 }
3141
b5b8b0ac 3142 case ABS_EXPR:
2ba172e0 3143 return simplify_gen_unary (ABS, mode, op0, mode);
b5b8b0ac
AO
3144
3145 case NEGATE_EXPR:
2ba172e0 3146 return simplify_gen_unary (NEG, mode, op0, mode);
b5b8b0ac
AO
3147
3148 case BIT_NOT_EXPR:
2ba172e0 3149 return simplify_gen_unary (NOT, mode, op0, mode);
b5b8b0ac
AO
3150
3151 case FLOAT_EXPR:
2ba172e0
JJ
3152 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3153 0)))
3154 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3155 inner_mode);
b5b8b0ac
AO
3156
3157 case FIX_TRUNC_EXPR:
2ba172e0
JJ
3158 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3159 inner_mode);
b5b8b0ac
AO
3160
3161 case POINTER_PLUS_EXPR:
576319a7
DD
3162 /* For the rare target where pointers are not the same size as
3163 size_t, we need to check for mis-matched modes and correct
3164 the addend. */
3165 if (op0 && op1
3166 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3167 && GET_MODE (op0) != GET_MODE (op1))
3168 {
8369f38a
DD
3169 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
 3170 /* If OP0 has a partial integer mode, then we must truncate, even if it has
 3171 the same bitsize as OP1, because GCC's representation of partial modes
 3172 is opaque. */
3173 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
3174 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
2ba172e0
JJ
3175 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3176 GET_MODE (op1));
576319a7
DD
3177 else
3178 /* We always sign-extend, regardless of the signedness of
3179 the operand, because the operand is always unsigned
3180 here even if the original C expression is signed. */
2ba172e0
JJ
3181 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3182 GET_MODE (op1));
576319a7
DD
3183 }
3184 /* Fall through. */
b5b8b0ac 3185 case PLUS_EXPR:
2ba172e0 3186 return simplify_gen_binary (PLUS, mode, op0, op1);
b5b8b0ac
AO
3187
3188 case MINUS_EXPR:
2ba172e0 3189 return simplify_gen_binary (MINUS, mode, op0, op1);
b5b8b0ac
AO
3190
3191 case MULT_EXPR:
2ba172e0 3192 return simplify_gen_binary (MULT, mode, op0, op1);
b5b8b0ac
AO
3193
3194 case RDIV_EXPR:
3195 case TRUNC_DIV_EXPR:
3196 case EXACT_DIV_EXPR:
3197 if (unsignedp)
2ba172e0 3198 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac 3199 else
2ba172e0 3200 return simplify_gen_binary (DIV, mode, op0, op1);
b5b8b0ac
AO
3201
3202 case TRUNC_MOD_EXPR:
2ba172e0 3203 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
b5b8b0ac
AO
3204
3205 case FLOOR_DIV_EXPR:
3206 if (unsignedp)
2ba172e0 3207 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac
AO
3208 else
3209 {
2ba172e0
JJ
3210 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3211 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3212 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0 3213 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3214 }
3215
3216 case FLOOR_MOD_EXPR:
3217 if (unsignedp)
2ba172e0 3218 return simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac
AO
3219 else
3220 {
2ba172e0 3221 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3222 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
3223 adj = simplify_gen_unary (NEG, mode,
3224 simplify_gen_binary (MULT, mode, adj, op1),
3225 mode);
3226 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3227 }
3228
3229 case CEIL_DIV_EXPR:
3230 if (unsignedp)
3231 {
2ba172e0
JJ
3232 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3233 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 3234 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0 3235 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3236 }
3237 else
3238 {
2ba172e0
JJ
3239 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3240 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3241 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0 3242 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3243 }
3244
3245 case CEIL_MOD_EXPR:
3246 if (unsignedp)
3247 {
2ba172e0 3248 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 3249 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
3250 adj = simplify_gen_unary (NEG, mode,
3251 simplify_gen_binary (MULT, mode, adj, op1),
3252 mode);
3253 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3254 }
3255 else
3256 {
2ba172e0 3257 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3258 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
3259 adj = simplify_gen_unary (NEG, mode,
3260 simplify_gen_binary (MULT, mode, adj, op1),
3261 mode);
3262 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3263 }
3264
3265 case ROUND_DIV_EXPR:
3266 if (unsignedp)
3267 {
2ba172e0
JJ
3268 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3269 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 3270 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0 3271 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3272 }
3273 else
3274 {
2ba172e0
JJ
3275 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3276 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3277 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0 3278 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3279 }
3280
3281 case ROUND_MOD_EXPR:
3282 if (unsignedp)
3283 {
2ba172e0 3284 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 3285 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
3286 adj = simplify_gen_unary (NEG, mode,
3287 simplify_gen_binary (MULT, mode, adj, op1),
3288 mode);
3289 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3290 }
3291 else
3292 {
2ba172e0 3293 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3294 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
3295 adj = simplify_gen_unary (NEG, mode,
3296 simplify_gen_binary (MULT, mode, adj, op1),
3297 mode);
3298 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3299 }
3300
3301 case LSHIFT_EXPR:
2ba172e0 3302 return simplify_gen_binary (ASHIFT, mode, op0, op1);
b5b8b0ac
AO
3303
3304 case RSHIFT_EXPR:
3305 if (unsignedp)
2ba172e0 3306 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
b5b8b0ac 3307 else
2ba172e0 3308 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
b5b8b0ac
AO
3309
3310 case LROTATE_EXPR:
2ba172e0 3311 return simplify_gen_binary (ROTATE, mode, op0, op1);
b5b8b0ac
AO
3312
3313 case RROTATE_EXPR:
2ba172e0 3314 return simplify_gen_binary (ROTATERT, mode, op0, op1);
b5b8b0ac
AO
3315
3316 case MIN_EXPR:
2ba172e0 3317 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
b5b8b0ac
AO
3318
3319 case MAX_EXPR:
2ba172e0 3320 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
b5b8b0ac
AO
3321
3322 case BIT_AND_EXPR:
3323 case TRUTH_AND_EXPR:
2ba172e0 3324 return simplify_gen_binary (AND, mode, op0, op1);
b5b8b0ac
AO
3325
3326 case BIT_IOR_EXPR:
3327 case TRUTH_OR_EXPR:
2ba172e0 3328 return simplify_gen_binary (IOR, mode, op0, op1);
b5b8b0ac
AO
3329
3330 case BIT_XOR_EXPR:
3331 case TRUTH_XOR_EXPR:
2ba172e0 3332 return simplify_gen_binary (XOR, mode, op0, op1);
b5b8b0ac
AO
3333
3334 case TRUTH_ANDIF_EXPR:
3335 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3336
3337 case TRUTH_ORIF_EXPR:
3338 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3339
3340 case TRUTH_NOT_EXPR:
2ba172e0 3341 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
b5b8b0ac
AO
3342
3343 case LT_EXPR:
2ba172e0
JJ
3344 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3345 op0, op1);
b5b8b0ac
AO
3346
3347 case LE_EXPR:
2ba172e0
JJ
3348 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3349 op0, op1);
b5b8b0ac
AO
3350
3351 case GT_EXPR:
2ba172e0
JJ
3352 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3353 op0, op1);
b5b8b0ac
AO
3354
3355 case GE_EXPR:
2ba172e0
JJ
3356 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3357 op0, op1);
b5b8b0ac
AO
3358
3359 case EQ_EXPR:
2ba172e0 3360 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3361
3362 case NE_EXPR:
2ba172e0 3363 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3364
3365 case UNORDERED_EXPR:
2ba172e0 3366 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3367
3368 case ORDERED_EXPR:
2ba172e0 3369 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3370
3371 case UNLT_EXPR:
2ba172e0 3372 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3373
3374 case UNLE_EXPR:
2ba172e0 3375 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3376
3377 case UNGT_EXPR:
2ba172e0 3378 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3379
3380 case UNGE_EXPR:
2ba172e0 3381 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3382
3383 case UNEQ_EXPR:
2ba172e0 3384 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3385
3386 case LTGT_EXPR:
2ba172e0 3387 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3388
3389 case COND_EXPR:
3390 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3391
3392 case COMPLEX_EXPR:
3393 gcc_assert (COMPLEX_MODE_P (mode));
3394 if (GET_MODE (op0) == VOIDmode)
3395 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3396 if (GET_MODE (op1) == VOIDmode)
3397 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3398 return gen_rtx_CONCAT (mode, op0, op1);
3399
d02a5a4b
JJ
3400 case CONJ_EXPR:
3401 if (GET_CODE (op0) == CONCAT)
3402 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2ba172e0
JJ
3403 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3404 XEXP (op0, 1),
3405 GET_MODE_INNER (mode)));
d02a5a4b
JJ
3406 else
3407 {
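/* OP0 is not a CONCAT: extract the real and imaginary halves, either by
   offsetting the MEM or by zero-extracting them from an integer view of
   the value, then negate the imaginary half.  */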
3408 enum machine_mode imode = GET_MODE_INNER (mode);
3409 rtx re, im;
3410
3411 if (MEM_P (op0))
3412 {
3413 re = adjust_address_nv (op0, imode, 0);
3414 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3415 }
3416 else
3417 {
3418 enum machine_mode ifmode = int_mode_for_mode (mode);
3419 enum machine_mode ihmode = int_mode_for_mode (imode);
3420 rtx halfsize;
3421 if (ifmode == BLKmode || ihmode == BLKmode)
3422 return NULL;
3423 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3424 re = op0;
3425 if (mode != ifmode)
3426 re = gen_rtx_SUBREG (ifmode, re, 0);
3427 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3428 if (imode != ihmode)
3429 re = gen_rtx_SUBREG (imode, re, 0);
3430 im = copy_rtx (op0);
3431 if (mode != ifmode)
3432 im = gen_rtx_SUBREG (ifmode, im, 0);
3433 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3434 if (imode != ihmode)
3435 im = gen_rtx_SUBREG (imode, im, 0);
3436 }
3437 im = gen_rtx_NEG (imode, im);
3438 return gen_rtx_CONCAT (mode, re, im);
3439 }
3440
b5b8b0ac
AO
3441 case ADDR_EXPR:
3442 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3443 if (!op0 || !MEM_P (op0))
c8a27c40
JJ
3444 {
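/* The operand has no usable memory address; try to fall back to a
   DEBUG_IMPLICIT_PTR (possibly plus a constant offset) that the debug
   info machinery can resolve later.  */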
3445 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3446 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3447 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
f8cca67b
JJ
3448 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3449 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
c8a27c40
JJ
3450 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3451
3452 if (handled_component_p (TREE_OPERAND (exp, 0)))
3453 {
3454 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3455 tree decl
3456 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3457 &bitoffset, &bitsize, &maxsize);
3458 if ((TREE_CODE (decl) == VAR_DECL
3459 || TREE_CODE (decl) == PARM_DECL
3460 || TREE_CODE (decl) == RESULT_DECL)
f8cca67b
JJ
3461 && (!TREE_ADDRESSABLE (decl)
3462 || target_for_debug_bind (decl))
c8a27c40
JJ
3463 && (bitoffset % BITS_PER_UNIT) == 0
3464 && bitsize > 0
3465 && bitsize == maxsize)
0a81f074
RS
3466 {
3467 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
3468 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
3469 }
c8a27c40
JJ
3470 }
3471
9430b7ba
JJ
3472 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
3473 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
3474 == ADDR_EXPR)
3475 {
3476 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
3477 0));
3478 if (op0 != NULL
3479 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
3480 || (GET_CODE (op0) == PLUS
3481 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
3482 && CONST_INT_P (XEXP (op0, 1)))))
3483 {
3484 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
3485 1));
3486 if (!op1 || !CONST_INT_P (op1))
3487 return NULL;
3488
3489 return plus_constant (mode, op0, INTVAL (op1));
3490 }
3491 }
3492
c8a27c40
JJ
3493 return NULL;
3494 }
b5b8b0ac 3495
f61c6f34
JJ
3496 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3497 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
dda2da58
AO
3498
3499 return op0;
b5b8b0ac
AO
3500
3501 case VECTOR_CST:
d2a12ae7
RG
3502 {
3503 unsigned i;
3504
3505 op0 = gen_rtx_CONCATN
3506 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3507
3508 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
3509 {
3510 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
3511 if (!op1)
3512 return NULL;
3513 XVECEXP (op0, 0, i) = op1;
3514 }
3515
3516 return op0;
3517 }
b5b8b0ac
AO
3518
3519 case CONSTRUCTOR:
47598145
MM
3520 if (TREE_CLOBBER_P (exp))
3521 return NULL;
3522 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
b5b8b0ac
AO
3523 {
3524 unsigned i;
3525 tree val;
3526
3527 op0 = gen_rtx_CONCATN
3528 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3529
3530 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3531 {
3532 op1 = expand_debug_expr (val);
3533 if (!op1)
3534 return NULL;
3535 XVECEXP (op0, 0, i) = op1;
3536 }
3537
3538 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3539 {
3540 op1 = expand_debug_expr
e8160c9a 3541 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
b5b8b0ac
AO
3542
3543 if (!op1)
3544 return NULL;
3545
3546 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3547 XVECEXP (op0, 0, i) = op1;
3548 }
3549
3550 return op0;
3551 }
3552 else
3553 goto flag_unsupported;
3554
3555 case CALL_EXPR:
3556 /* ??? Maybe handle some builtins? */
3557 return NULL;
3558
3559 case SSA_NAME:
3560 {
2a8e30fb
MM
3561 gimple g = get_gimple_for_ssa_name (exp);
3562 if (g)
3563 {
3564 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3565 if (!op0)
3566 return NULL;
3567 }
3568 else
3569 {
3570 int part = var_to_partition (SA.map, exp);
b5b8b0ac 3571
2a8e30fb 3572 if (part == NO_PARTITION)
a58a8e4b
JJ
3573 {
3574 /* If this is a reference to the incoming value of a parameter
3575 that is never used in the code, or whose incoming
3576 value is never used in the code, use the PARM_DECL's
3577 DECL_RTL if set. */
3578 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3579 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3580 {
12c5ffe5
EB
3581 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3582 if (op0)
3583 goto adjust_mode;
a58a8e4b 3584 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
12c5ffe5
EB
3585 if (op0)
3586 goto adjust_mode;
a58a8e4b
JJ
3587 }
3588 return NULL;
3589 }
b5b8b0ac 3590
2a8e30fb 3591 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
b5b8b0ac 3592
abfea58d 3593 op0 = copy_rtx (SA.partition_to_pseudo[part]);
2a8e30fb 3594 }
b5b8b0ac
AO
3595 goto adjust_mode;
3596 }
3597
3598 case ERROR_MARK:
3599 return NULL;
3600
7ece48b1
JJ
3601 /* Vector stuff. For most of these tree codes there is no RTL equivalent. */
3602 case REALIGN_LOAD_EXPR:
3603 case REDUC_MAX_EXPR:
3604 case REDUC_MIN_EXPR:
3605 case REDUC_PLUS_EXPR:
3606 case VEC_COND_EXPR:
7ece48b1
JJ
3607 case VEC_LSHIFT_EXPR:
3608 case VEC_PACK_FIX_TRUNC_EXPR:
3609 case VEC_PACK_SAT_EXPR:
3610 case VEC_PACK_TRUNC_EXPR:
3611 case VEC_RSHIFT_EXPR:
3612 case VEC_UNPACK_FLOAT_HI_EXPR:
3613 case VEC_UNPACK_FLOAT_LO_EXPR:
3614 case VEC_UNPACK_HI_EXPR:
3615 case VEC_UNPACK_LO_EXPR:
3616 case VEC_WIDEN_MULT_HI_EXPR:
3617 case VEC_WIDEN_MULT_LO_EXPR:
3f30a9a6
RH
3618 case VEC_WIDEN_MULT_EVEN_EXPR:
3619 case VEC_WIDEN_MULT_ODD_EXPR:
36ba4aae
IR
3620 case VEC_WIDEN_LSHIFT_HI_EXPR:
3621 case VEC_WIDEN_LSHIFT_LO_EXPR:
3f3af9df 3622 case VEC_PERM_EXPR:
7ece48b1
JJ
3623 return NULL;
3624
98449720 3625 /* Misc codes. */
7ece48b1
JJ
3626 case ADDR_SPACE_CONVERT_EXPR:
3627 case FIXED_CONVERT_EXPR:
3628 case OBJ_TYPE_REF:
3629 case WITH_SIZE_EXPR:
3630 return NULL;
3631
3632 case DOT_PROD_EXPR:
3633 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3634 && SCALAR_INT_MODE_P (mode))
3635 {
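/* Widen both multiplicands according to their signedness, multiply
   them in MODE and accumulate the product into OP2.  */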
2ba172e0
JJ
3636 op0
3637 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3638 0)))
3639 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3640 inner_mode);
3641 op1
3642 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3643 1)))
3644 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3645 inner_mode);
3646 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3647 return simplify_gen_binary (PLUS, mode, op0, op2);
7ece48b1
JJ
3648 }
3649 return NULL;
3650
3651 case WIDEN_MULT_EXPR:
0354c0c7
BS
3652 case WIDEN_MULT_PLUS_EXPR:
3653 case WIDEN_MULT_MINUS_EXPR:
7ece48b1
JJ
3654 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3655 && SCALAR_INT_MODE_P (mode))
3656 {
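/* Widening multiply: extend both operands from the narrower inner mode,
   multiply in MODE, and for the PLUS/MINUS variants add the product to,
   or subtract it from, the accumulator OP2.  */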
2ba172e0 3657 inner_mode = GET_MODE (op0);
7ece48b1 3658 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5b58b39b 3659 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
7ece48b1 3660 else
5b58b39b 3661 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
7ece48b1 3662 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5b58b39b 3663 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
7ece48b1 3664 else
5b58b39b 3665 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
2ba172e0 3666 op0 = simplify_gen_binary (MULT, mode, op0, op1);
0354c0c7
BS
3667 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3668 return op0;
3669 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
2ba172e0 3670 return simplify_gen_binary (PLUS, mode, op0, op2);
0354c0c7 3671 else
2ba172e0 3672 return simplify_gen_binary (MINUS, mode, op2, op0);
7ece48b1
JJ
3673 }
3674 return NULL;
3675
98449720
RH
3676 case MULT_HIGHPART_EXPR:
3677 /* ??? Similar to the above. */
3678 return NULL;
3679
7ece48b1 3680 case WIDEN_SUM_EXPR:
3f3af9df 3681 case WIDEN_LSHIFT_EXPR:
7ece48b1
JJ
3682 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3683 && SCALAR_INT_MODE_P (mode))
3684 {
2ba172e0
JJ
3685 op0
3686 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3687 0)))
3688 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3689 inner_mode);
3f3af9df
JJ
3690 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3691 ? ASHIFT : PLUS, mode, op0, op1);
7ece48b1
JJ
3692 }
3693 return NULL;
3694
0f59b812 3695 case FMA_EXPR:
2ba172e0 3696 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
0f59b812 3697
b5b8b0ac
AO
3698 default:
3699 flag_unsupported:
3700#ifdef ENABLE_CHECKING
3701 debug_tree (exp);
3702 gcc_unreachable ();
3703#else
3704 return NULL;
3705#endif
3706 }
3707}
3708
ddb555ed
JJ
3709/* Return an RTX equivalent to the source bind value of the tree expression
3710 EXP. */
3711
3712static rtx
3713expand_debug_source_expr (tree exp)
3714{
3715 rtx op0 = NULL_RTX;
3716 enum machine_mode mode = VOIDmode, inner_mode;
3717
3718 switch (TREE_CODE (exp))
3719 {
3720 case PARM_DECL:
3721 {
ddb555ed 3722 mode = DECL_MODE (exp);
12c5ffe5
EB
3723 op0 = expand_debug_parm_decl (exp);
3724 if (op0)
3725 break;
ddb555ed
JJ
3726 /* See whether this is an argument that has been completely
3727 optimized out. */
3728 if (!DECL_RTL_SET_P (exp)
12c5ffe5 3729 && !DECL_INCOMING_RTL (exp)
ddb555ed
JJ
3730 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3731 {
7b575cfa 3732 tree aexp = DECL_ORIGIN (exp);
ddb555ed
JJ
3733 if (DECL_CONTEXT (aexp)
3734 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3735 {
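/* The argument itself was optimized away; look its abstract origin up
   in the debug_args vector of the current function and refer to it
   with a DEBUG_PARAMETER_REF.  */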
9771b263 3736 vec<tree, va_gc> **debug_args;
ddb555ed
JJ
3737 unsigned int ix;
3738 tree ddecl;
ddb555ed
JJ
3739 debug_args = decl_debug_args_lookup (current_function_decl);
3740 if (debug_args != NULL)
3741 {
9771b263 3742 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
ddb555ed
JJ
3743 ix += 2)
3744 if (ddecl == aexp)
3745 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3746 }
3747 }
3748 }
3749 break;
3750 }
3751 default:
3752 break;
3753 }
3754
3755 if (op0 == NULL_RTX)
3756 return NULL_RTX;
3757
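/* Convert OP0 from its own mode to the mode of the source bind, using a
   subreg, float truncation/extension, or sign/zero extension as
   appropriate.  */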
3758 inner_mode = GET_MODE (op0);
3759 if (mode == inner_mode)
3760 return op0;
3761
3762 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3763 {
3764 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3765 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3766 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3767 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3768 else
3769 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3770 }
3771 else if (FLOAT_MODE_P (mode))
3772 gcc_unreachable ();
3773 else if (FLOAT_MODE_P (inner_mode))
3774 {
3775 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3776 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3777 else
3778 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3779 }
3780 else if (CONSTANT_P (op0)
3781 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3782 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3783 subreg_lowpart_offset (mode, inner_mode));
3784 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3785 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3786 else
3787 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3788
3789 return op0;
3790}
3791
6cfa417f
JJ
3792/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
3793 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
3794 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
3795
3796static void
3797avoid_complex_debug_insns (rtx insn, rtx *exp_p, int depth)
3798{
3799 rtx exp = *exp_p;
3800
3801 if (exp == NULL_RTX)
3802 return;
3803
3804 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
3805 return;
3806
3807 if (depth == 4)
3808 {
3809 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
3810 rtx dval = make_debug_expr_from_rtl (exp);
3811
3812 /* Emit a debug bind insn before INSN. */
3813 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
3814 DEBUG_EXPR_TREE_DECL (dval), exp,
3815 VAR_INIT_STATUS_INITIALIZED);
3816
3817 emit_debug_insn_before (bind, insn);
3818 *exp_p = dval;
3819 return;
3820 }
3821
3822 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
3823 int i, j;
3824 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
3825 switch (*format_ptr++)
3826 {
3827 case 'e':
3828 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
3829 break;
3830
3831 case 'E':
3832 case 'V':
3833 for (j = 0; j < XVECLEN (exp, i); j++)
3834 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
3835 break;
3836
3837 default:
3838 break;
3839 }
3840}
3841
b5b8b0ac
AO
3842/* Expand the _LOCs in debug insns. We run this after expanding all
3843 regular insns, so that any variables referenced in the function
3844 will have their DECL_RTLs set. */
3845
3846static void
3847expand_debug_locations (void)
3848{
3849 rtx insn;
3850 rtx last = get_last_insn ();
3851 int save_strict_alias = flag_strict_aliasing;
3852
3853 /* New alias sets while setting up memory attributes cause
3854 -fcompare-debug failures, even though they don't bring about any
3855 codegen changes. */
3856 flag_strict_aliasing = 0;
3857
3858 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3859 if (DEBUG_INSN_P (insn))
3860 {
3861 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
6cfa417f 3862 rtx val, prev_insn, insn2;
b5b8b0ac
AO
3863 enum machine_mode mode;
3864
3865 if (value == NULL_TREE)
3866 val = NULL_RTX;
3867 else
3868 {
ddb555ed
JJ
3869 if (INSN_VAR_LOCATION_STATUS (insn)
3870 == VAR_INIT_STATUS_UNINITIALIZED)
3871 val = expand_debug_source_expr (value);
3872 else
3873 val = expand_debug_expr (value);
b5b8b0ac
AO
3874 gcc_assert (last == get_last_insn ());
3875 }
3876
3877 if (!val)
3878 val = gen_rtx_UNKNOWN_VAR_LOC ();
3879 else
3880 {
3881 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3882
3883 gcc_assert (mode == GET_MODE (val)
3884 || (GET_MODE (val) == VOIDmode
33ffb5c5 3885 && (CONST_SCALAR_INT_P (val)
b5b8b0ac 3886 || GET_CODE (val) == CONST_FIXED
b5b8b0ac
AO
3887 || GET_CODE (val) == LABEL_REF)));
3888 }
3889
3890 INSN_VAR_LOCATION_LOC (insn) = val;
6cfa417f
JJ
3891 prev_insn = PREV_INSN (insn);
3892 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
3893 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
b5b8b0ac
AO
3894 }
3895
3896 flag_strict_aliasing = save_strict_alias;
3897}
3898
242229bb
JH
3899/* Expand basic block BB from GIMPLE trees to RTL. */
3900
3901static basic_block
f3ddd692 3902expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
242229bb 3903{
726a989a
RB
3904 gimple_stmt_iterator gsi;
3905 gimple_seq stmts;
3906 gimple stmt = NULL;
242229bb
JH
3907 rtx note, last;
3908 edge e;
628f6a4e 3909 edge_iterator ei;
8b11009b 3910 void **elt;
242229bb
JH
3911
3912 if (dump_file)
726a989a
RB
3913 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3914 bb->index);
3915
3916 /* Note that since we are now transitioning from GIMPLE to RTL, we
3917 cannot use the gsi_*_bb() routines because they expect the basic
3918 block to be in GIMPLE, instead of RTL. Therefore, we need to
3919 access the BB sequence directly. */
3920 stmts = bb_seq (bb);
3e8b732e
MM
3921 bb->il.gimple.seq = NULL;
3922 bb->il.gimple.phi_nodes = NULL;
bf08ebeb 3923 rtl_profile_for_bb (bb);
5e2d947c
JH
3924 init_rtl_bb_info (bb);
3925 bb->flags |= BB_RTL;
3926
a9b77cd1
ZD
3927 /* Remove the RETURN_EXPR if we may fall through to the exit
3928 instead. */
726a989a
RB
3929 gsi = gsi_last (stmts);
3930 if (!gsi_end_p (gsi)
3931 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
a9b77cd1 3932 {
726a989a 3933 gimple ret_stmt = gsi_stmt (gsi);
a9b77cd1
ZD
3934
3935 gcc_assert (single_succ_p (bb));
3936 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3937
3938 if (bb->next_bb == EXIT_BLOCK_PTR
726a989a 3939 && !gimple_return_retval (ret_stmt))
a9b77cd1 3940 {
726a989a 3941 gsi_remove (&gsi, false);
a9b77cd1
ZD
3942 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3943 }
3944 }
3945
726a989a
RB
3946 gsi = gsi_start (stmts);
3947 if (!gsi_end_p (gsi))
8b11009b 3948 {
726a989a
RB
3949 stmt = gsi_stmt (gsi);
3950 if (gimple_code (stmt) != GIMPLE_LABEL)
3951 stmt = NULL;
8b11009b 3952 }
242229bb 3953
8b11009b
ZD
3954 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3955
3956 if (stmt || elt)
242229bb
JH
3957 {
3958 last = get_last_insn ();
3959
8b11009b
ZD
3960 if (stmt)
3961 {
28ed065e 3962 expand_gimple_stmt (stmt);
726a989a 3963 gsi_next (&gsi);
8b11009b
ZD
3964 }
3965
3966 if (elt)
ae50c0cb 3967 emit_label ((rtx) *elt);
242229bb 3968
caf93cb0 3969 /* Java emits line number notes at the top of labels.
c22cacf3 3970 ??? Make this go away once line number notes are obsoleted. */
242229bb 3971 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 3972 if (NOTE_P (BB_HEAD (bb)))
242229bb 3973 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
242229bb 3974 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528 3975
726a989a 3976 maybe_dump_rtl_for_gimple_stmt (stmt, last);
242229bb
JH
3977 }
3978 else
3979 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3980
3981 NOTE_BASIC_BLOCK (note) = bb;
3982
726a989a 3983 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 3984 {
cea49550 3985 basic_block new_bb;
242229bb 3986
b5b8b0ac 3987 stmt = gsi_stmt (gsi);
2a8e30fb
MM
3988
3989 /* If this statement is a non-debug one, and we generate debug
3990 insns, then this one might be the last real use of a TERed
3991 SSA_NAME, but where there are still some debug uses further
3992 down. Expanding the current SSA name in such further debug
3993 uses by their RHS might lead to wrong debug info, as coalescing
3994 might make the operands of such RHS be placed into the same
3995 pseudo as something else. Like so:
3996 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3997 use(a_1);
3998 a_2 = ...
3999 #DEBUG ... => a_1
4000 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
4002 If we would now expand a_1 by its RHS (a_0 + 1) in the debug use,
4002 the write to a_2 would actually have clobbered the place which
4003 formerly held a_0.
4004
4005 So, instead of that, we recognize the situation, and generate
4006 debug temporaries at the last real use of TERed SSA names:
4007 a_1 = a_0 + 1;
4008 #DEBUG #D1 => a_1
4009 use(a_1);
4010 a_2 = ...
4011 #DEBUG ... => #D1
4012 */
4013 if (MAY_HAVE_DEBUG_INSNS
4014 && SA.values
4015 && !is_gimple_debug (stmt))
4016 {
4017 ssa_op_iter iter;
4018 tree op;
4019 gimple def;
4020
5368224f 4021 location_t sloc = curr_insn_location ();
2a8e30fb
MM
4022
4023 /* Look for SSA names that have their last use here (TERed
4024 names always have only one real use). */
4025 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4026 if ((def = get_gimple_for_ssa_name (op)))
4027 {
4028 imm_use_iterator imm_iter;
4029 use_operand_p use_p;
4030 bool have_debug_uses = false;
4031
4032 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
4033 {
4034 if (gimple_debug_bind_p (USE_STMT (use_p)))
4035 {
4036 have_debug_uses = true;
4037 break;
4038 }
4039 }
4040
4041 if (have_debug_uses)
4042 {
4043 /* OP is a TERed SSA name, with DEF its defining
4044 statement, and where OP is used in further debug
4045 instructions. Generate a debug temporary, and
4046 replace all uses of OP in debug insns with that
4047 temporary. */
4048 gimple debugstmt;
4049 tree value = gimple_assign_rhs_to_tree (def);
4050 tree vexpr = make_node (DEBUG_EXPR_DECL);
4051 rtx val;
4052 enum machine_mode mode;
4053
5368224f 4054 set_curr_insn_location (gimple_location (def));
2a8e30fb
MM
4055
4056 DECL_ARTIFICIAL (vexpr) = 1;
4057 TREE_TYPE (vexpr) = TREE_TYPE (value);
4058 if (DECL_P (value))
4059 mode = DECL_MODE (value);
4060 else
4061 mode = TYPE_MODE (TREE_TYPE (value));
4062 DECL_MODE (vexpr) = mode;
4063
4064 val = gen_rtx_VAR_LOCATION
4065 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
4066
e8c6bb74 4067 emit_debug_insn (val);
2a8e30fb
MM
4068
4069 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
4070 {
4071 if (!gimple_debug_bind_p (debugstmt))
4072 continue;
4073
4074 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
4075 SET_USE (use_p, vexpr);
4076
4077 update_stmt (debugstmt);
4078 }
4079 }
4080 }
5368224f 4081 set_curr_insn_location (sloc);
2a8e30fb
MM
4082 }
4083
a5883ba0 4084 currently_expanding_gimple_stmt = stmt;
b5b8b0ac 4085
242229bb
JH
4086 /* Expand this statement, then evaluate the resulting RTL and
4087 fixup the CFG accordingly. */
726a989a 4088 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 4089 {
726a989a 4090 new_bb = expand_gimple_cond (bb, stmt);
cea49550
RH
4091 if (new_bb)
4092 return new_bb;
4093 }
b5b8b0ac
AO
4094 else if (gimple_debug_bind_p (stmt))
4095 {
5368224f 4096 location_t sloc = curr_insn_location ();
b5b8b0ac
AO
4097 gimple_stmt_iterator nsi = gsi;
4098
4099 for (;;)
4100 {
4101 tree var = gimple_debug_bind_get_var (stmt);
4102 tree value;
4103 rtx val;
4104 enum machine_mode mode;
4105
ec8c1492
JJ
4106 if (TREE_CODE (var) != DEBUG_EXPR_DECL
4107 && TREE_CODE (var) != LABEL_DECL
4108 && !target_for_debug_bind (var))
4109 goto delink_debug_stmt;
4110
b5b8b0ac
AO
4111 if (gimple_debug_bind_has_value_p (stmt))
4112 value = gimple_debug_bind_get_value (stmt);
4113 else
4114 value = NULL_TREE;
4115
4116 last = get_last_insn ();
4117
5368224f 4118 set_curr_insn_location (gimple_location (stmt));
b5b8b0ac
AO
4119
4120 if (DECL_P (var))
4121 mode = DECL_MODE (var);
4122 else
4123 mode = TYPE_MODE (TREE_TYPE (var));
4124
4125 val = gen_rtx_VAR_LOCATION
4126 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
4127
e16b6fd0 4128 emit_debug_insn (val);
b5b8b0ac
AO
4129
4130 if (dump_file && (dump_flags & TDF_DETAILS))
4131 {
4132 /* We can't dump the insn with a TREE where an RTX
4133 is expected. */
e8c6bb74 4134 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
b5b8b0ac 4135 maybe_dump_rtl_for_gimple_stmt (stmt, last);
e8c6bb74 4136 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
b5b8b0ac
AO
4137 }
4138
ec8c1492 4139 delink_debug_stmt:
2a8e30fb
MM
4140 /* In order not to generate too many debug temporaries,
4141 we delink all uses of debug statements we already expanded.
4142 Therefore debug statements between definition and real
4143 use of TERed SSA names will continue to use the SSA name,
4144 and not be replaced with debug temps. */
4145 delink_stmt_imm_use (stmt);
4146
b5b8b0ac
AO
4147 gsi = nsi;
4148 gsi_next (&nsi);
4149 if (gsi_end_p (nsi))
4150 break;
4151 stmt = gsi_stmt (nsi);
4152 if (!gimple_debug_bind_p (stmt))
4153 break;
4154 }
4155
5368224f 4156 set_curr_insn_location (sloc);
ddb555ed
JJ
4157 }
4158 else if (gimple_debug_source_bind_p (stmt))
4159 {
5368224f 4160 location_t sloc = curr_insn_location ();
ddb555ed
JJ
4161 tree var = gimple_debug_source_bind_get_var (stmt);
4162 tree value = gimple_debug_source_bind_get_value (stmt);
4163 rtx val;
4164 enum machine_mode mode;
4165
4166 last = get_last_insn ();
4167
5368224f 4168 set_curr_insn_location (gimple_location (stmt));
ddb555ed
JJ
4169
4170 mode = DECL_MODE (var);
4171
4172 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
4173 VAR_INIT_STATUS_UNINITIALIZED);
4174
4175 emit_debug_insn (val);
4176
4177 if (dump_file && (dump_flags & TDF_DETAILS))
4178 {
4179 /* We can't dump the insn with a TREE where an RTX
4180 is expected. */
4181 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
4182 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4183 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
4184 }
4185
5368224f 4186 set_curr_insn_location (sloc);
b5b8b0ac 4187 }
80c7a9eb 4188 else
242229bb 4189 {
f3ddd692
JJ
4190 if (is_gimple_call (stmt)
4191 && gimple_call_tail_p (stmt)
4192 && disable_tail_calls)
4193 gimple_call_set_tail (stmt, false);
4194
726a989a 4195 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
cea49550
RH
4196 {
4197 bool can_fallthru;
4198 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
4199 if (new_bb)
4200 {
4201 if (can_fallthru)
4202 bb = new_bb;
4203 else
4204 return new_bb;
4205 }
4206 }
4d7a65ea 4207 else
b7211528 4208 {
4e3825db 4209 def_operand_p def_p;
4e3825db
MM
4210 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4211
4212 if (def_p != NULL)
4213 {
4214 /* Ignore this stmt if it is in the list of
4215 replaceable expressions. */
4216 if (SA.values
b8698a0f 4217 && bitmap_bit_p (SA.values,
e97809c6 4218 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4e3825db
MM
4219 continue;
4220 }
28ed065e 4221 last = expand_gimple_stmt (stmt);
726a989a 4222 maybe_dump_rtl_for_gimple_stmt (stmt, last);
b7211528 4223 }
242229bb
JH
4224 }
4225 }
4226
a5883ba0
MM
4227 currently_expanding_gimple_stmt = NULL;
4228
7241571e 4229 /* Expand implicit goto and convert goto_locus. */
a9b77cd1
ZD
4230 FOR_EACH_EDGE (e, ei, bb->succs)
4231 {
2f13f2de 4232 if (e->goto_locus != UNKNOWN_LOCATION)
5368224f 4233 set_curr_insn_location (e->goto_locus);
7241571e
JJ
4234 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4235 {
4236 emit_jump (label_rtx_for_bb (e->dest));
4237 e->flags &= ~EDGE_FALLTHRU;
4238 }
a9b77cd1
ZD
4239 }
4240
ae761c45
AH
4241 /* Expanded RTL can create a jump in the last instruction of the block.
4242 This might later be assumed to be a jump to the successor and break edge insertion.
4243 We need to insert a dummy move to prevent this. PR41440. */
4244 if (single_succ_p (bb)
4245 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4246 && (last = get_last_insn ())
4247 && JUMP_P (last))
4248 {
4249 rtx dummy = gen_reg_rtx (SImode);
4250 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
4251 }
4252
242229bb
JH
4253 do_pending_stack_adjust ();
4254
3f117656 4255 /* Find the block tail. The last insn in the block is the insn
242229bb
JH
4256 before a barrier and/or table jump insn. */
4257 last = get_last_insn ();
4b4bf941 4258 if (BARRIER_P (last))
242229bb
JH
4259 last = PREV_INSN (last);
4260 if (JUMP_TABLE_DATA_P (last))
4261 last = PREV_INSN (PREV_INSN (last));
4262 BB_END (bb) = last;
caf93cb0 4263
242229bb 4264 update_bb_for_insn (bb);
80c7a9eb 4265
242229bb
JH
4266 return bb;
4267}
4268
4269
4270/* Create a basic block for initialization code. */
4271
4272static basic_block
4273construct_init_block (void)
4274{
4275 basic_block init_block, first_block;
fd44f634
JH
4276 edge e = NULL;
4277 int flags;
275a4187 4278
fd44f634
JH
4279 /* Multiple entry points not supported yet. */
4280 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
5e2d947c
JH
4281 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4282 init_rtl_bb_info (EXIT_BLOCK_PTR);
4283 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4284 EXIT_BLOCK_PTR->flags |= BB_RTL;
242229bb 4285
fd44f634 4286 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
275a4187 4287
fd44f634
JH
4288 /* When the entry edge points to the first basic block, we don't need a jump;
4289 otherwise we have to jump to the proper target. */
4290 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4291 {
726a989a 4292 tree label = gimple_block_label (e->dest);
fd44f634
JH
4293
4294 emit_jump (label_rtx (label));
4295 flags = 0;
275a4187 4296 }
fd44f634
JH
4297 else
4298 flags = EDGE_FALLTHRU;
242229bb
JH
4299
4300 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4301 get_last_insn (),
4302 ENTRY_BLOCK_PTR);
4303 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4304 init_block->count = ENTRY_BLOCK_PTR->count;
7d776ee2
RG
4305 if (current_loops && ENTRY_BLOCK_PTR->loop_father)
4306 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
242229bb
JH
4307 if (e)
4308 {
4309 first_block = e->dest;
4310 redirect_edge_succ (e, init_block);
fd44f634 4311 e = make_edge (init_block, first_block, flags);
242229bb
JH
4312 }
4313 else
4314 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4315 e->probability = REG_BR_PROB_BASE;
4316 e->count = ENTRY_BLOCK_PTR->count;
4317
4318 update_bb_for_insn (init_block);
4319 return init_block;
4320}
4321
55e092c4
JH
4322/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4323 found in the block tree. */
4324
4325static void
4326set_block_levels (tree block, int level)
4327{
4328 while (block)
4329 {
4330 BLOCK_NUMBER (block) = level;
4331 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4332 block = BLOCK_CHAIN (block);
4333 }
4334}
242229bb
JH
4335
4336/* Create a block containing landing pads and similar stuff. */
4337
4338static void
4339construct_exit_block (void)
4340{
4341 rtx head = get_last_insn ();
4342 rtx end;
4343 basic_block exit_block;
628f6a4e
BE
4344 edge e, e2;
4345 unsigned ix;
4346 edge_iterator ei;
071a42f9 4347 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
242229bb 4348
bf08ebeb
JH
4349 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4350
caf93cb0 4351 /* Make sure the locus is set to the end of the function, so that
242229bb 4352 epilogue line numbers and warnings are set properly. */
2f13f2de 4353 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
242229bb
JH
4354 input_location = cfun->function_end_locus;
4355
242229bb
JH
4356 /* Generate rtl for function exit. */
4357 expand_function_end ();
4358
4359 end = get_last_insn ();
4360 if (head == end)
4361 return;
071a42f9
JH
4362 /* While emitting the function end we could move the end of the last basic block.
4363 */
4364 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4b4bf941 4365 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 4366 head = NEXT_INSN (head);
80c7a9eb
RH
4367 exit_block = create_basic_block (NEXT_INSN (head), end,
4368 EXIT_BLOCK_PTR->prev_bb);
242229bb
JH
4369 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4370 exit_block->count = EXIT_BLOCK_PTR->count;
7d776ee2
RG
4371 if (current_loops && EXIT_BLOCK_PTR->loop_father)
4372 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
628f6a4e
BE
4373
4374 ix = 0;
4375 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
242229bb 4376 {
8fb790fd 4377 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
242229bb 4378 if (!(e->flags & EDGE_ABNORMAL))
628f6a4e
BE
4379 redirect_edge_succ (e, exit_block);
4380 else
4381 ix++;
242229bb 4382 }
628f6a4e 4383
242229bb
JH
4384 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4385 e->probability = REG_BR_PROB_BASE;
4386 e->count = EXIT_BLOCK_PTR->count;
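/* The new fallthru edge initially carries the whole exit count; the
   predecessor edges that were not redirected above keep their counts,
   so subtract their share from the edge and the new exit block.  */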
628f6a4e 4387 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
242229bb
JH
4388 if (e2 != e)
4389 {
c22cacf3 4390 e->count -= e2->count;
242229bb
JH
4391 exit_block->count -= e2->count;
4392 exit_block->frequency -= EDGE_FREQUENCY (e2);
4393 }
4394 if (e->count < 0)
4395 e->count = 0;
4396 if (exit_block->count < 0)
4397 exit_block->count = 0;
4398 if (exit_block->frequency < 0)
4399 exit_block->frequency = 0;
4400 update_bb_for_insn (exit_block);
4401}
4402
c22cacf3 4403/* Helper function for discover_nonconstant_array_refs.
a1b23b2f
UW
4404 Look for ARRAY_REF nodes with non-constant indexes and mark them
4405 addressable. */
4406
4407static tree
4408discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4409 void *data ATTRIBUTE_UNUSED)
4410{
4411 tree t = *tp;
4412
4413 if (IS_TYPE_OR_DECL_P (t))
4414 *walk_subtrees = 0;
4415 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4416 {
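/* Strip reference components whose offsets are compile-time invariant;
   if an ARRAY_REF is still left, its index is variable, so mark the
   underlying decl addressable.  */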
4417 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4418 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4419 && (!TREE_OPERAND (t, 2)
4420 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4421 || (TREE_CODE (t) == COMPONENT_REF
4422 && (!TREE_OPERAND (t,2)
4423 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4424 || TREE_CODE (t) == BIT_FIELD_REF
4425 || TREE_CODE (t) == REALPART_EXPR
4426 || TREE_CODE (t) == IMAGPART_EXPR
4427 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1043771b 4428 || CONVERT_EXPR_P (t))
a1b23b2f
UW
4429 t = TREE_OPERAND (t, 0);
4430
4431 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4432 {
4433 t = get_base_address (t);
6f11d690
RG
4434 if (t && DECL_P (t)
4435 && DECL_MODE (t) != BLKmode)
a1b23b2f
UW
4436 TREE_ADDRESSABLE (t) = 1;
4437 }
4438
4439 *walk_subtrees = 0;
4440 }
4441
4442 return NULL_TREE;
4443}
4444
4445/* RTL expansion is not able to compile array references with variable
4446 offsets for arrays stored in a single register. Discover such
4447 expressions and mark variables as addressable to avoid this
4448 scenario. */
4449
4450static void
4451discover_nonconstant_array_refs (void)
4452{
4453 basic_block bb;
726a989a 4454 gimple_stmt_iterator gsi;
a1b23b2f
UW
4455
4456 FOR_EACH_BB (bb)
726a989a
RB
4457 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4458 {
4459 gimple stmt = gsi_stmt (gsi);
aa847cc8
JJ
4460 if (!is_gimple_debug (stmt))
4461 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
726a989a 4462 }
a1b23b2f
UW
4463}
4464
2e3f842f
L
4465/* This function sets crtl->args.internal_arg_pointer to a virtual
4466 register if DRAP is needed. Local register allocator will replace
4467 virtual_incoming_args_rtx with the virtual register. */
4468
4469static void
4470expand_stack_alignment (void)
4471{
4472 rtx drap_rtx;
e939805b 4473 unsigned int preferred_stack_boundary;
2e3f842f
L
4474
4475 if (! SUPPORTS_STACK_ALIGNMENT)
4476 return;
b8698a0f 4477
2e3f842f
L
4478 if (cfun->calls_alloca
4479 || cfun->has_nonlocal_label
4480 || crtl->has_nonlocal_goto)
4481 crtl->need_drap = true;
4482
890b9b96
L
4483 /* Call update_stack_boundary here again to update incoming stack
4484 boundary. It may set incoming stack alignment to a different
4485 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4486 use the minimum incoming stack alignment to check if it is OK
4487 to perform sibcall optimization since sibcall optimization will
4488 only align the outgoing stack to incoming stack boundary. */
4489 if (targetm.calls.update_stack_boundary)
4490 targetm.calls.update_stack_boundary ();
4491
4492 /* The incoming stack frame has to be aligned at least at
4493 parm_stack_boundary. */
4494 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
2e3f842f 4495
2e3f842f
L
4496 /* Update crtl->stack_alignment_estimated and use it later to align
4497 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4498 exceptions since callgraph doesn't collect incoming stack alignment
4499 in this case. */
8f4f502f 4500 if (cfun->can_throw_non_call_exceptions
2e3f842f
L
4501 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4502 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4503 else
4504 preferred_stack_boundary = crtl->preferred_stack_boundary;
4505 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4506 crtl->stack_alignment_estimated = preferred_stack_boundary;
4507 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4508 crtl->stack_alignment_needed = preferred_stack_boundary;
4509
890b9b96
L
4510 gcc_assert (crtl->stack_alignment_needed
4511 <= crtl->stack_alignment_estimated);
4512
2e3f842f 4513 crtl->stack_realign_needed
e939805b 4514 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
d2d93c32 4515 crtl->stack_realign_tried = crtl->stack_realign_needed;
2e3f842f
L
4516
4517 crtl->stack_realign_processed = true;
4518
4519 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4520 alignment. */
4521 gcc_assert (targetm.calls.get_drap_rtx != NULL);
b8698a0f 4522 drap_rtx = targetm.calls.get_drap_rtx ();
2e3f842f 4523
d015f7cc
L
4524 /* stack_realign_drap and drap_rtx must match. */
4525 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4526
2e3f842f
L
4527 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4528 if (NULL != drap_rtx)
4529 {
4530 crtl->args.internal_arg_pointer = drap_rtx;
4531
4532 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4533 needed. */
4534 fixup_tail_calls ();
4535 }
4536}
4537
242229bb
JH
4538/* Translate the intermediate representation contained in the CFG
4539 from GIMPLE trees to RTL.
4540
4541 We do conversion per basic block and preserve/update the tree CFG.
4542 This implies we have to do some magic as the CFG can simultaneously
4543 consist of basic blocks containing RTL and GIMPLE trees. This can
61ada8ae 4544 confuse the CFG hooks, so be careful to not manipulate CFG during
242229bb
JH
4545 the expansion. */
4546
c2924966 4547static unsigned int
726a989a 4548gimple_expand_cfg (void)
242229bb
JH
4549{
4550 basic_block bb, init_block;
4551 sbitmap blocks;
0ef90296
ZD
4552 edge_iterator ei;
4553 edge e;
f3ddd692 4554 rtx var_seq, var_ret_seq;
4e3825db
MM
4555 unsigned i;
4556
f029db69 4557 timevar_push (TV_OUT_OF_SSA);
4e3825db 4558 rewrite_out_of_ssa (&SA);
f029db69 4559 timevar_pop (TV_OUT_OF_SSA);
c302207e 4560 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
242229bb 4561
be147e84
RG
4562 /* Make sure all values used by the optimization passes have sane
4563 defaults. */
4564 reg_renumber = 0;
4565
4586b4ca
SB
4566 /* Some backends want to know that we are expanding to RTL. */
4567 currently_expanding_to_rtl = 1;
cd7d9fd7
RG
4568 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
4569 free_dominance_info (CDI_DOMINATORS);
4586b4ca 4570
bf08ebeb
JH
4571 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4572
5368224f 4573 insn_locations_init ();
fe8a7779 4574 if (!DECL_IS_BUILTIN (current_function_decl))
1751ecd6
AH
4575 {
4576 /* Eventually, all FEs should explicitly set function_start_locus. */
2f13f2de 4577 if (LOCATION_LOCUS (cfun->function_start_locus) == UNKNOWN_LOCATION)
5368224f 4578 set_curr_insn_location
1751ecd6
AH
4579 (DECL_SOURCE_LOCATION (current_function_decl));
4580 else
5368224f 4581 set_curr_insn_location (cfun->function_start_locus);
1751ecd6 4582 }
9ff70652 4583 else
5368224f
DC
4584 set_curr_insn_location (UNKNOWN_LOCATION);
4585 prologue_location = curr_insn_location ();
55e092c4 4586
2b21299c
JJ
4587#ifdef INSN_SCHEDULING
4588 init_sched_attrs ();
4589#endif
4590
55e092c4
JH
4591 /* Make sure first insn is a note even if we don't want linenums.
4592 This makes sure the first insn will never be deleted.
4593 Also, final expects a note to appear there. */
4594 emit_note (NOTE_INSN_DELETED);
6429e3be 4595
a1b23b2f
UW
4596 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4597 discover_nonconstant_array_refs ();
4598
e41b2a33 4599 targetm.expand_to_rtl_hook ();
cb91fab0 4600 crtl->stack_alignment_needed = STACK_BOUNDARY;
2e3f842f 4601 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
890b9b96 4602 crtl->stack_alignment_estimated = 0;
cb91fab0
JH
4603 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4604 cfun->cfg->max_jumptable_ents = 0;
4605
ae9fd6b7
JH
4606 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
4607 of the function section at expansion time to predict the distance of calls. */
4608 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4609
727a31fa 4610 /* Expand the variables recorded during gimple lowering. */
f029db69 4611 timevar_push (TV_VAR_EXPAND);
3a42502d
RH
4612 start_sequence ();
4613
f3ddd692 4614 var_ret_seq = expand_used_vars ();
3a42502d
RH
4615
4616 var_seq = get_insns ();
4617 end_sequence ();
f029db69 4618 timevar_pop (TV_VAR_EXPAND);
242229bb 4619
7d69de61
RH
4620 /* Honor stack protection warnings. */
4621 if (warn_stack_protect)
4622 {
e3b5732b 4623 if (cfun->calls_alloca)
b8698a0f 4624 warning (OPT_Wstack_protector,
3b123595
SB
4625 "stack protector not protecting local variables: "
4626 "variable length buffer");
cb91fab0 4627 if (has_short_buffer && !crtl->stack_protect_guard)
b8698a0f 4628 warning (OPT_Wstack_protector,
3b123595
SB
4629 "stack protector not protecting function: "
4630 "all local arrays are less than %d bytes long",
7d69de61
RH
4631 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4632 }
4633
242229bb 4634 /* Set up parameters and prepare for return, for the function. */
b79c5284 4635 expand_function_start (current_function_decl);
242229bb 4636
3a42502d
RH
4637 /* If we emitted any instructions for setting up the variables,
4638 emit them before the FUNCTION_START note. */
4639 if (var_seq)
4640 {
4641 emit_insn_before (var_seq, parm_birth_insn);
4642
4643 /* In expand_function_end we'll insert the alloca save/restore
4644 before parm_birth_insn. We've just inserted an alloca call.
4645 Adjust the pointer to match. */
4646 parm_birth_insn = var_seq;
4647 }
4648
4e3825db
MM
4649 /* Now that we also have the parameter RTXs, copy them over to our
4650 partitions. */
4651 for (i = 0; i < SA.map->num_partitions; i++)
4652 {
4653 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4654
4655 if (TREE_CODE (var) != VAR_DECL
4656 && !SA.partition_to_pseudo[i])
4657 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4658 gcc_assert (SA.partition_to_pseudo[i]);
eb7adebc
MM
4659
4660 /* If this decl was marked as living in multiple places, reset
4661 this now to NULL. */
4662 if (DECL_RTL_IF_SET (var) == pc_rtx)
4663 SET_DECL_RTL (var, NULL);
4664
4e3825db
MM
4665 /* Some RTL parts really want to look at DECL_RTL(x) when x
4666 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4667 SET_DECL_RTL here to make this available, but that would mean
4668 selecting one of the potentially many RTLs for one DECL. Instead
4669 of doing that we simply reset the MEM_EXPR of the RTL in question,
4670 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4671 if (!DECL_RTL_SET_P (var))
4672 {
4673 if (MEM_P (SA.partition_to_pseudo[i]))
4674 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4675 }
4676 }
4677
d466b407
MM
4678 /* If we have a class containing differently aligned pointers
4679 we need to merge those into the corresponding RTL pointer
4680 alignment. */
4681 for (i = 1; i < num_ssa_names; i++)
4682 {
4683 tree name = ssa_name (i);
4684 int part;
4685 rtx r;
4686
4687 if (!name
d466b407
MM
4688 /* We might have generated new SSA names in
4689 update_alias_info_with_stack_vars. They will have a NULL
4690 defining statement, and won't be part of the partitioning,
4691 so ignore those. */
4692 || !SSA_NAME_DEF_STMT (name))
4693 continue;
4694 part = var_to_partition (SA.map, name);
4695 if (part == NO_PARTITION)
4696 continue;
70b5e7dc
RG
4697
4698 /* Adjust all partition members to get the underlying decl of
4699 the representative which we might have created in expand_one_var. */
4700 if (SSA_NAME_VAR (name) == NULL_TREE)
4701 {
4702 tree leader = partition_to_var (SA.map, part);
4703 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
4704 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
4705 }
4706 if (!POINTER_TYPE_P (TREE_TYPE (name)))
4707 continue;
4708
d466b407
MM
4709 r = SA.partition_to_pseudo[part];
4710 if (REG_P (r))
4711 mark_reg_pointer (r, get_pointer_alignment (name));
4712 }
4713
242229bb
JH
4714 /* If this function is `main', emit a call to `__main'
4715 to run global initializers, etc. */
4716 if (DECL_NAME (current_function_decl)
4717 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4718 && DECL_FILE_SCOPE_P (current_function_decl))
4719 expand_main_function ();
4720
7d69de61
RH
4721 /* Initialize the stack_protect_guard field. This must happen after the
4722 call to __main (if any) so that the external decl is initialized. */
cb91fab0 4723 if (crtl->stack_protect_guard)
7d69de61
RH
4724 stack_protect_prologue ();
4725
4e3825db
MM
4726 expand_phi_nodes (&SA);
4727
3fbd86b1 4728 /* Register rtl specific functions for cfg. */
242229bb
JH
4729 rtl_register_cfg_hooks ();
4730
4731 init_block = construct_init_block ();
4732
0ef90296 4733 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4e3825db 4734 remaining edges later. */
0ef90296
ZD
4735 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4736 e->flags &= ~EDGE_EXECUTABLE;
4737
8b11009b 4738 lab_rtx_for_bb = pointer_map_create ();
242229bb 4739 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
f3ddd692 4740 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
bf08ebeb 4741
b5b8b0ac
AO
4742 if (MAY_HAVE_DEBUG_INSNS)
4743 expand_debug_locations ();
4744
452aa9c5
RG
4745 /* Free stuff we no longer need after GIMPLE optimizations. */
4746 free_dominance_info (CDI_DOMINATORS);
4747 free_dominance_info (CDI_POST_DOMINATORS);
4748 delete_tree_cfg_annotations ();
4749
f029db69 4750 timevar_push (TV_OUT_OF_SSA);
4e3825db 4751 finish_out_of_ssa (&SA);
f029db69 4752 timevar_pop (TV_OUT_OF_SSA);
4e3825db 4753
f029db69 4754 timevar_push (TV_POST_EXPAND);
91753e21
RG
4755 /* We are no longer in SSA form. */
4756 cfun->gimple_df->in_ssa_p = false;
7d776ee2
RG
4757 if (current_loops)
4758 loops_state_clear (LOOP_CLOSED_SSA);
91753e21 4759
bf08ebeb
JH
4760 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4761 conservatively to true until they are all profile aware. */
8b11009b 4762 pointer_map_destroy (lab_rtx_for_bb);
cb91fab0 4763 free_histograms ();
242229bb
JH
4764
4765 construct_exit_block ();
5368224f 4766 insn_locations_finalize ();
242229bb 4767
f3ddd692
JJ
4768 if (var_ret_seq)
4769 {
4770 rtx after = return_label;
4771 rtx next = NEXT_INSN (after);
4772 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
4773 after = next;
4774 emit_insn_after (var_ret_seq, after);
4775 }
4776
1d65f45c 4777 /* Zap the tree EH table. */
e8a2a782 4778 set_eh_throw_stmt_table (cfun, NULL);
242229bb 4779
42821aff
MM
4780 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4781 to split edges, which edge insertions might do. */
242229bb 4782 rebuild_jump_labels (get_insns ());
242229bb 4783
4e3825db
MM
4784 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4785 {
4786 edge e;
4787 edge_iterator ei;
4788 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4789 {
4790 if (e->insns.r)
bc470c24 4791 {
42821aff 4792 rebuild_jump_labels_chain (e->insns.r);
e40191f1
TV
4793 /* Put insns after parm birth, but before
4794 NOTE_INSNS_FUNCTION_BEG. */
bc470c24 4795 if (e->src == ENTRY_BLOCK_PTR
e40191f1 4796 && single_succ_p (ENTRY_BLOCK_PTR))
bc470c24
JJ
4797 {
4798 rtx insns = e->insns.r;
4799 e->insns.r = NULL_RTX;
e40191f1
TV
4800 if (NOTE_P (parm_birth_insn)
4801 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
4802 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
4803 else
4804 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
bc470c24
JJ
4805 }
4806 else
4807 commit_one_edge_insertion (e);
4808 }
4e3825db
MM
4809 else
4810 ei_next (&ei);
4811 }
4812 }
4813
4814 /* We're done expanding trees to RTL. */
4815 currently_expanding_to_rtl = 0;
4816
4817 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4818 {
4819 edge e;
4820 edge_iterator ei;
4821 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4822 {
4823 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4824 e->flags &= ~EDGE_EXECUTABLE;
4825
4826 /* At the moment not all abnormal edges match the RTL
4827 representation. It is safe to remove them here as
4828 find_many_sub_basic_blocks will rediscover them.
4829 In the future we should get this fixed properly. */
4830 if ((e->flags & EDGE_ABNORMAL)
4831 && !(e->flags & EDGE_SIBCALL))
4832 remove_edge (e);
4833 else
4834 ei_next (&ei);
4835 }
4836 }
4837
242229bb 4838 blocks = sbitmap_alloc (last_basic_block);
f61e445a 4839 bitmap_ones (blocks);
242229bb 4840 find_many_sub_basic_blocks (blocks);
242229bb 4841 sbitmap_free (blocks);
4e3825db 4842 purge_all_dead_edges ();
242229bb 4843
2e3f842f
L
4844 expand_stack_alignment ();
4845
be147e84
RG
4846 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
4847 function. */
4848 if (crtl->tail_call_emit)
4849 fixup_tail_calls ();
4850
dac1fbf8
RG
4851 /* After initial rtl generation, call back to finish generating
4852 exception support code. We need to do this before cleaning up
4853 the CFG as the code does not expect dead landing pads. */
4854 if (cfun->eh->region_tree != NULL)
4855 finish_eh_generation ();
4856
4857 /* Remove unreachable blocks; otherwise we cannot compute dominators
4858 which are needed for loop state verification. As a side-effect
4859 this also compacts blocks.
4860 ??? We cannot remove trivially dead insns here as for example
4861 the DRAP reg on i?86 is not magically live at this point.
4862 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
4863 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4864
242229bb 4865#ifdef ENABLE_CHECKING
62e5bf5d 4866 verify_flow_info ();
242229bb 4867#endif
9f8628ba 4868
be147e84
RG
4869 /* Initialize pseudos allocated for hard registers. */
4870 emit_initial_value_sets ();
4871
4872 /* And finally unshare all RTL. */
4873 unshare_all_rtl ();
4874
9f8628ba
PB
4875 /* There's no need to defer outputting this function any more; we
4876 know we want to output it. */
4877 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4878
4879 /* Now that we're done expanding trees to RTL, we shouldn't have any
4880 more CONCATs anywhere. */
4881 generating_concat_p = 0;
4882
b7211528
SB
4883 if (dump_file)
4884 {
4885 fprintf (dump_file,
4886 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4887 /* And the pass manager will dump RTL for us. */
4888 }
ef330312
PB
4889
4890 /* If we're emitting a nested function, make sure its parent gets
4891 emitted as well. Doing otherwise confuses debug info. */
c22cacf3 4892 {
ef330312
PB
4893 tree parent;
4894 for (parent = DECL_CONTEXT (current_function_decl);
c22cacf3
MS
4895 parent != NULL_TREE;
4896 parent = get_containing_scope (parent))
ef330312 4897 if (TREE_CODE (parent) == FUNCTION_DECL)
c22cacf3 4898 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
ef330312 4899 }
c22cacf3 4900
ef330312
PB
4901 /* We are now committed to emitting code for this function. Do any
4902 preparation, such as emitting abstract debug info for the inline
4903 before it gets mangled by optimization. */
4904 if (cgraph_function_possibly_inlined_p (current_function_decl))
4905 (*debug_hooks->outlining_inline_function) (current_function_decl);
4906
4907 TREE_ASM_WRITTEN (current_function_decl) = 1;
4bb1e037
AP
4908
4909 /* After expanding, the return labels are no longer needed. */
4910 return_label = NULL;
4911 naked_return_label = NULL;
0a35513e
AH
4912
4913 /* After expanding, the tm_restart map is no longer needed. */
4914 if (cfun->gimple_df->tm_restart)
4915 {
4916 htab_delete (cfun->gimple_df->tm_restart);
4917 cfun->gimple_df->tm_restart = NULL;
4918 }
4919
55e092c4
JH
4920 /* Tag the blocks with a depth number so that change_scope can find
4921 the common parent easily. */
4922 set_block_levels (DECL_INITIAL (cfun->decl), 0);
bf08ebeb 4923 default_rtl_profile ();
be147e84 4924
f029db69 4925 timevar_pop (TV_POST_EXPAND);
be147e84 4926
c2924966 4927 return 0;
242229bb
JH
4928}
4929
27a4cd48
DM
4930namespace {
4931
4932const pass_data pass_data_expand =
242229bb 4933{
27a4cd48
DM
4934 RTL_PASS, /* type */
4935 "expand", /* name */
4936 OPTGROUP_NONE, /* optinfo_flags */
4937 false, /* has_gate */
4938 true, /* has_execute */
4939 TV_EXPAND, /* tv_id */
4940 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6f37411d 4941 | PROP_gimple_lcx
27a4cd48
DM
4942 | PROP_gimple_lvec ), /* properties_required */
4943 PROP_rtl, /* properties_provided */
4944 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
4945 ( TODO_verify_ssa | TODO_verify_flow
4946 | TODO_verify_stmts ), /* todo_flags_start */
4947 0, /* todo_flags_finish */
242229bb 4948};
27a4cd48
DM
4949
4950class pass_expand : public rtl_opt_pass
4951{
4952public:
c3284718
RS
4953 pass_expand (gcc::context *ctxt)
4954 : rtl_opt_pass (pass_data_expand, ctxt)
27a4cd48
DM
4955 {}
4956
4957 /* opt_pass methods: */
4958 unsigned int execute () { return gimple_expand_cfg (); }
4959
4960}; // class pass_expand
4961
4962} // anon namespace
4963
4964rtl_opt_pass *
4965make_pass_expand (gcc::context *ctxt)
4966{
4967 return new pass_expand (ctxt);
4968}