242229bb 1/* A pass for lowering trees to RTL.
4c0c3228 2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
66647d44 3 Free Software Foundation, Inc.
242229bb
JH
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9dcd6f09 9the Free Software Foundation; either version 3, or (at your option)
242229bb
JH
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
9dcd6f09
NC
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
242229bb
JH
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
26#include "rtl.h"
27#include "tm_p.h"
28#include "basic-block.h"
29#include "function.h"
30#include "expr.h"
31#include "langhooks.h"
32#include "tree-flow.h"
33#include "timevar.h"
34#include "tree-dump.h"
35#include "tree-pass.h"
36#include "except.h"
37#include "flags.h"
1f6d3a08 38#include "diagnostic.h"
cf835838
JM
39#include "tree-pretty-print.h"
40#include "gimple-pretty-print.h"
1f6d3a08 41#include "toplev.h"
ef330312 42#include "debug.h"
7d69de61 43#include "params.h"
ff28a94d 44#include "tree-inline.h"
6946b3f7 45#include "value-prof.h"
e41b2a33 46#include "target.h"
4e3825db 47#include "ssaexpand.h"
7a8cba34
SB
48#include "bitmap.h"
49#include "sbitmap.h"
7d776ee2 50#include "cfgloop.h"
be147e84
RG
51#include "regs.h" /* For reg_renumber. */
52#include "integrate.h" /* For emit_initial_value_sets. */
2b21299c 53#include "insn-attr.h" /* For INSN_SCHEDULING. */
726a989a 54
4e3825db
MM
55/* This variable holds information helping the rewriting of SSA trees
56 into RTL. */
57struct ssaexpand SA;
58
a5883ba0
MM
59/* This variable holds the currently expanded gimple statement for purposes
 60 of communicating the profile info to the builtin expanders. */
61gimple currently_expanding_gimple_stmt;
62
ddb555ed
JJ
63static rtx expand_debug_expr (tree);
64
726a989a
RB
65/* Return an expression tree corresponding to the RHS of GIMPLE
66 statement STMT. */
67
68tree
69gimple_assign_rhs_to_tree (gimple stmt)
70{
71 tree t;
82d6e6fc 72 enum gimple_rhs_class grhs_class;
b8698a0f 73
82d6e6fc 74 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
726a989a 75
0354c0c7
BS
76 if (grhs_class == GIMPLE_TERNARY_RHS)
77 t = build3 (gimple_assign_rhs_code (stmt),
78 TREE_TYPE (gimple_assign_lhs (stmt)),
79 gimple_assign_rhs1 (stmt),
80 gimple_assign_rhs2 (stmt),
81 gimple_assign_rhs3 (stmt));
82 else if (grhs_class == GIMPLE_BINARY_RHS)
726a989a
RB
83 t = build2 (gimple_assign_rhs_code (stmt),
84 TREE_TYPE (gimple_assign_lhs (stmt)),
85 gimple_assign_rhs1 (stmt),
86 gimple_assign_rhs2 (stmt));
82d6e6fc 87 else if (grhs_class == GIMPLE_UNARY_RHS)
726a989a
RB
88 t = build1 (gimple_assign_rhs_code (stmt),
89 TREE_TYPE (gimple_assign_lhs (stmt)),
90 gimple_assign_rhs1 (stmt));
82d6e6fc 91 else if (grhs_class == GIMPLE_SINGLE_RHS)
b5b8b0ac
AO
92 {
93 t = gimple_assign_rhs1 (stmt);
94 /* Avoid modifying this tree in place below. */
d0ed412a
JJ
95 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
96 && gimple_location (stmt) != EXPR_LOCATION (t))
97 || (gimple_block (stmt)
98 && currently_expanding_to_rtl
99 && EXPR_P (t)
100 && gimple_block (stmt) != TREE_BLOCK (t)))
b5b8b0ac
AO
101 t = copy_node (t);
102 }
726a989a
RB
103 else
104 gcc_unreachable ();
105
f5045c96
AM
106 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
107 SET_EXPR_LOCATION (t, gimple_location (stmt));
d0ed412a
JJ
108 if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
109 TREE_BLOCK (t) = gimple_block (stmt);
f5045c96 110
726a989a
RB
111 return t;
112}
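/* Example (illustrative): for the GIMPLE_BINARY_RHS statement

     a_1 = b_2 + c_3;

   this returns build2 (PLUS_EXPR, TREE_TYPE (a_1), b_2, c_3), with the
   statement's location (and, during RTL expansion, its block) copied
   onto the new tree.  */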
113
726a989a 114
1f6d3a08
RH
115#ifndef STACK_ALIGNMENT_NEEDED
116#define STACK_ALIGNMENT_NEEDED 1
117#endif
118
4e3825db
MM
119#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
120
 121/* Associate declaration T with storage space X. If T is not an
122 SSA name this is exactly SET_DECL_RTL, otherwise make the
123 partition of T associated with X. */
124static inline void
125set_rtl (tree t, rtx x)
126{
127 if (TREE_CODE (t) == SSA_NAME)
128 {
129 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
130 if (x && !MEM_P (x))
131 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
eb7adebc
MM
132 /* For the benefit of debug information at -O0 (where vartracking
133 doesn't run) record the place also in the base DECL if it's
134 a normal variable (not a parameter). */
135 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
136 {
137 tree var = SSA_NAME_VAR (t);
138 /* If we don't yet have something recorded, just record it now. */
139 if (!DECL_RTL_SET_P (var))
140 SET_DECL_RTL (var, x);
47598145 141 /* If we have it set already to "multiple places" don't
eb7adebc
MM
142 change this. */
143 else if (DECL_RTL (var) == pc_rtx)
144 ;
145 /* If we have something recorded and it's not the same place
146 as we want to record now, we have multiple partitions for the
147 same base variable, with different places. We can't just
 148 randomly choose one, hence we have to say that we don't know.
 149 This only happens with optimization, and in that case var-tracking
150 will figure out the right thing. */
151 else if (DECL_RTL (var) != x)
152 SET_DECL_RTL (var, pc_rtx);
153 }
4e3825db
MM
154 }
155 else
156 SET_DECL_RTL (t, x);
157}
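/* Example (illustrative): if SSA name i_3 belongs to a partition that was
   assigned the pseudo (reg:SI 60), set_rtl (i_3, x) records the pseudo in
   SA.partition_to_pseudo for that partition and, because the base variable
   i is a VAR_DECL with no DECL_RTL yet, also sets DECL_RTL (i) = x so that
   -O0 debug info can find it.  A second partition of i landing somewhere
   else would then demote DECL_RTL (i) to pc_rtx ("multiple places").  */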
1f6d3a08
RH
158
159/* This structure holds data relevant to one variable that will be
160 placed in a stack slot. */
161struct stack_var
162{
163 /* The Variable. */
164 tree decl;
165
1f6d3a08
RH
166 /* Initially, the size of the variable. Later, the size of the partition,
 167 if this variable becomes its partition's representative. */
168 HOST_WIDE_INT size;
169
 170 /* The *byte* alignment required for this variable.  Or, as with the
171 size, the alignment for this partition. */
172 unsigned int alignb;
173
174 /* The partition representative. */
175 size_t representative;
176
177 /* The next stack variable in the partition, or EOC. */
178 size_t next;
2bdbbe94
MM
179
180 /* The numbers of conflicting stack variables. */
181 bitmap conflicts;
1f6d3a08
RH
182};
183
184#define EOC ((size_t)-1)
185
186/* We have an array of such objects while deciding allocation. */
187static struct stack_var *stack_vars;
188static size_t stack_vars_alloc;
189static size_t stack_vars_num;
47598145 190static struct pointer_map_t *decl_to_stack_part;
1f6d3a08 191
fa10beec 192/* An array of indices into stack_vars, sorted by stack_var_cmp:
1f6d3a08
RH
 193 large alignment first, then decreasing size. */
194static size_t *stack_vars_sorted;
195
1f6d3a08
RH
196/* The phase of the stack frame. This is the known misalignment of
197 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
198 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
199static int frame_phase;
200
7d69de61
RH
201/* Used during expand_used_vars to remember if we saw any decls for
202 which we'd like to enable stack smashing protection. */
203static bool has_protected_decls;
204
 205/* Used during expand_used_vars. Remember if we saw a character buffer
206 smaller than our cutoff threshold. Used for -Wstack-protector. */
207static bool has_short_buffer;
1f6d3a08 208
6f197850 209/* Compute the byte alignment to use for DECL. Ignore alignment
765c3e8f
L
 210 we cannot satisfy given the expected alignment of the stack boundary. */
211
212static unsigned int
6f197850 213align_local_variable (tree decl)
765c3e8f 214{
3a42502d 215 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
6f197850 216 DECL_ALIGN (decl) = align;
1f6d3a08
RH
217 return align / BITS_PER_UNIT;
218}
219
220/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
221 Return the frame offset. */
222
223static HOST_WIDE_INT
3a42502d 224alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
1f6d3a08
RH
225{
226 HOST_WIDE_INT offset, new_frame_offset;
227
228 new_frame_offset = frame_offset;
229 if (FRAME_GROWS_DOWNWARD)
230 {
231 new_frame_offset -= size + frame_phase;
232 new_frame_offset &= -align;
233 new_frame_offset += frame_phase;
234 offset = new_frame_offset;
235 }
236 else
237 {
238 new_frame_offset -= frame_phase;
239 new_frame_offset += align - 1;
240 new_frame_offset &= -align;
241 new_frame_offset += frame_phase;
242 offset = new_frame_offset;
243 new_frame_offset += size;
244 }
245 frame_offset = new_frame_offset;
246
9fb798d7
EB
247 if (frame_offset_overflow (frame_offset, cfun->decl))
248 frame_offset = offset = 0;
249
1f6d3a08
RH
250 return offset;
251}
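/* Worked example (illustrative), FRAME_GROWS_DOWNWARD with frame_phase 0:
   starting from frame_offset == -12, a request for size 8 at align 16 gives
   new_frame_offset = -12 - 8 = -20, then -20 & -16 == -32, so the slot is
   placed at offset -32 and frame_offset becomes -32.  The "& -align" step
   relies on align being a power of two.  */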
252
253/* Accumulate DECL into STACK_VARS. */
254
255static void
256add_stack_var (tree decl)
257{
533f611a
RH
258 struct stack_var *v;
259
1f6d3a08
RH
260 if (stack_vars_num >= stack_vars_alloc)
261 {
262 if (stack_vars_alloc)
263 stack_vars_alloc = stack_vars_alloc * 3 / 2;
264 else
265 stack_vars_alloc = 32;
266 stack_vars
267 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
268 }
47598145
MM
269 if (!decl_to_stack_part)
270 decl_to_stack_part = pointer_map_create ();
271
533f611a 272 v = &stack_vars[stack_vars_num];
47598145 273 * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
533f611a
RH
274
275 v->decl = decl;
533f611a
RH
276 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
277 /* Ensure that all variables have size, so that &a != &b for any two
278 variables that are simultaneously live. */
279 if (v->size == 0)
280 v->size = 1;
6f197850 281 v->alignb = align_local_variable (SSAVAR (decl));
13868f40
EB
282 /* An alignment of zero can mightily confuse us later. */
283 gcc_assert (v->alignb != 0);
1f6d3a08
RH
284
285 /* All variables are initially in their own partition. */
533f611a
RH
286 v->representative = stack_vars_num;
287 v->next = EOC;
1f6d3a08 288
2bdbbe94 289 /* All variables initially conflict with no other. */
533f611a 290 v->conflicts = NULL;
2bdbbe94 291
1f6d3a08 292 /* Ensure that this decl doesn't get put onto the list twice. */
4e3825db 293 set_rtl (decl, pc_rtx);
1f6d3a08
RH
294
295 stack_vars_num++;
296}
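/* Note (illustrative): pc_rtx serves as a sentinel meaning "this decl is on
   the stack_vars list but has no real RTL yet".  expand_stack_vars and the
   scope-conflict walkers (visit_op, visit_conflict) later test the recorded
   RTL against pc_rtx to find the entries that still need a frame slot.  */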
297
1f6d3a08
RH
 298/* Make the decls associated with LUIDs X and Y conflict. */
299
300static void
301add_stack_var_conflict (size_t x, size_t y)
302{
2bdbbe94
MM
303 struct stack_var *a = &stack_vars[x];
304 struct stack_var *b = &stack_vars[y];
305 if (!a->conflicts)
306 a->conflicts = BITMAP_ALLOC (NULL);
307 if (!b->conflicts)
308 b->conflicts = BITMAP_ALLOC (NULL);
309 bitmap_set_bit (a->conflicts, y);
310 bitmap_set_bit (b->conflicts, x);
1f6d3a08
RH
311}
312
 313/* Check whether the decls associated with LUIDs X and Y conflict. */
314
315static bool
316stack_var_conflict_p (size_t x, size_t y)
317{
2bdbbe94
MM
318 struct stack_var *a = &stack_vars[x];
319 struct stack_var *b = &stack_vars[y];
47598145
MM
320 if (x == y)
321 return false;
322 /* Partitions containing an SSA name result from gimple registers
323 with things like unsupported modes. They are top-level and
324 hence conflict with everything else. */
325 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
326 return true;
327
2bdbbe94
MM
328 if (!a->conflicts || !b->conflicts)
329 return false;
330 return bitmap_bit_p (a->conflicts, y);
1f6d3a08 331}
b8698a0f 332
d239ed56
SB
333/* Returns true if TYPE is or contains a union type. */
334
335static bool
336aggregate_contains_union_type (tree type)
337{
338 tree field;
339
340 if (TREE_CODE (type) == UNION_TYPE
341 || TREE_CODE (type) == QUAL_UNION_TYPE)
342 return true;
343 if (TREE_CODE (type) == ARRAY_TYPE)
344 return aggregate_contains_union_type (TREE_TYPE (type));
345 if (TREE_CODE (type) != RECORD_TYPE)
346 return false;
347
910ad8de 348 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
d239ed56
SB
349 if (TREE_CODE (field) == FIELD_DECL)
350 if (aggregate_contains_union_type (TREE_TYPE (field)))
351 return true;
352
353 return false;
354}
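/* Example (illustrative):

     struct { union { short s; int i; } u; }    -> true  (nested union)
     array of union { short s; int i; }         -> true  (element is a union)
     struct { int a; struct { int b; } c; }     -> false

   Only UNION_TYPE/QUAL_UNION_TYPE, arrays thereof, and the fields of
   RECORD_TYPEs are walked; all other types return false.  */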
355
1f6d3a08
RH
356/* A subroutine of expand_used_vars. If two variables X and Y have alias
 357 sets that do not conflict, then explicitly add a conflict for these variables
d239ed56
SB
358 in the interference graph. We also need to make sure to add conflicts
 359 for union-containing structures.  Otherwise RTL alias analysis comes along
 360 and, due to type-based aliasing rules, decides that for two overlapping
 361 union temporaries { short s; int i; } accesses to the same memory through
 362 different types may not alias, and happily reorders stores across the
55356334 363 lifetime boundaries of the temporaries (see PR25654). */
1f6d3a08
RH
364
365static void
366add_alias_set_conflicts (void)
367{
368 size_t i, j, n = stack_vars_num;
369
370 for (i = 0; i < n; ++i)
371 {
a4d25453
RH
372 tree type_i = TREE_TYPE (stack_vars[i].decl);
373 bool aggr_i = AGGREGATE_TYPE_P (type_i);
d239ed56 374 bool contains_union;
1f6d3a08 375
d239ed56 376 contains_union = aggregate_contains_union_type (type_i);
1f6d3a08
RH
377 for (j = 0; j < i; ++j)
378 {
a4d25453
RH
379 tree type_j = TREE_TYPE (stack_vars[j].decl);
380 bool aggr_j = AGGREGATE_TYPE_P (type_j);
d239ed56
SB
381 if (aggr_i != aggr_j
382 /* Either the objects conflict by means of type based
383 aliasing rules, or we need to add a conflict. */
384 || !objects_must_conflict_p (type_i, type_j)
385 /* In case the types do not conflict ensure that access
386 to elements will conflict. In case of unions we have
387 to be careful as type based aliasing rules may say
388 access to the same memory does not conflict. So play
4a25752b
ER
389 safe and add a conflict in this case when
390 -fstrict-aliasing is used. */
391 || (contains_union && flag_strict_aliasing))
1f6d3a08
RH
392 add_stack_var_conflict (i, j);
393 }
394 }
395}
396
47598145
MM
397/* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
398 enter its partition number into bitmap DATA. */
399
400static bool
401visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
402{
403 bitmap active = (bitmap)data;
404 op = get_base_address (op);
405 if (op
406 && DECL_P (op)
407 && DECL_RTL_IF_SET (op) == pc_rtx)
408 {
409 size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
410 if (v)
411 bitmap_set_bit (active, *v);
412 }
413 return false;
414}
415
416/* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
417 record conflicts between it and all currently active other partitions
418 from bitmap DATA. */
419
420static bool
421visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
422{
423 bitmap active = (bitmap)data;
424 op = get_base_address (op);
425 if (op
426 && DECL_P (op)
427 && DECL_RTL_IF_SET (op) == pc_rtx)
428 {
429 size_t *v =
430 (size_t *) pointer_map_contains (decl_to_stack_part, op);
431 if (v && bitmap_set_bit (active, *v))
432 {
433 size_t num = *v;
434 bitmap_iterator bi;
435 unsigned i;
436 gcc_assert (num < stack_vars_num);
437 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
438 add_stack_var_conflict (num, i);
439 }
440 }
441 return false;
442}
443
444/* Helper routine for add_scope_conflicts, calculating the active partitions
445 at the end of BB, leaving the result in WORK. We're called to generate
81bfd197
MM
446 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
447 liveness. */
47598145
MM
448
449static void
81bfd197 450add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
47598145
MM
451{
452 edge e;
453 edge_iterator ei;
454 gimple_stmt_iterator gsi;
455 bool (*visit)(gimple, tree, void *);
456
457 bitmap_clear (work);
458 FOR_EACH_EDGE (e, ei, bb->preds)
459 bitmap_ior_into (work, (bitmap)e->src->aux);
460
ea85edfe 461 visit = visit_op;
47598145
MM
462
463 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
464 {
465 gimple stmt = gsi_stmt (gsi);
ea85edfe 466 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
47598145 467 }
ea85edfe 468 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
47598145
MM
469 {
470 gimple stmt = gsi_stmt (gsi);
471
472 if (gimple_clobber_p (stmt))
473 {
474 tree lhs = gimple_assign_lhs (stmt);
475 size_t *v;
476 /* Nested function lowering might introduce LHSs
477 that are COMPONENT_REFs. */
478 if (TREE_CODE (lhs) != VAR_DECL)
479 continue;
480 if (DECL_RTL_IF_SET (lhs) == pc_rtx
481 && (v = (size_t *)
482 pointer_map_contains (decl_to_stack_part, lhs)))
483 bitmap_clear_bit (work, *v);
484 }
485 else if (!is_gimple_debug (stmt))
ea85edfe 486 {
81bfd197 487 if (for_conflict
ea85edfe
JJ
488 && visit == visit_op)
489 {
490 /* If this is the first real instruction in this BB we need
88d599dc
MM
491 to add conflicts for everything live at this point now.
492 Unlike classical liveness for named objects we can't
ea85edfe
JJ
493 rely on seeing a def/use of the names we're interested in.
494 There might merely be indirect loads/stores. We'd not add any
81bfd197 495 conflicts for such partitions. */
ea85edfe
JJ
496 bitmap_iterator bi;
497 unsigned i;
81bfd197 498 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
ea85edfe
JJ
499 {
500 unsigned j;
501 bitmap_iterator bj;
81bfd197 502 EXECUTE_IF_SET_IN_BITMAP (work, i + 1, j, bj)
ea85edfe
JJ
503 add_stack_var_conflict (i, j);
504 }
505 visit = visit_conflict;
506 }
507 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
508 }
47598145
MM
509 }
510}
511
512/* Generate stack partition conflicts between all partitions that are
513 simultaneously live. */
514
515static void
516add_scope_conflicts (void)
517{
518 basic_block bb;
519 bool changed;
520 bitmap work = BITMAP_ALLOC (NULL);
521
88d599dc 522 /* We approximate the live range of a stack variable by taking the first
47598145
MM
523 mention of its name as starting point(s), and by the end-of-scope
524 death clobber added by gimplify as ending point(s) of the range.
 525 This over-approximates in the case where we, for instance, moved an
 526 address-taken operation upward without also moving a dereference to it
 527 upward.  But it is conservatively correct, as a variable can never hold
 528 values before its name is mentioned at least once.
529
88d599dc 530 We then do a mostly classical bitmap liveness algorithm. */
47598145
MM
531
532 FOR_ALL_BB (bb)
533 bb->aux = BITMAP_ALLOC (NULL);
534
535 changed = true;
536 while (changed)
537 {
538 changed = false;
539 FOR_EACH_BB (bb)
540 {
541 bitmap active = (bitmap)bb->aux;
81bfd197 542 add_scope_conflicts_1 (bb, work, false);
47598145
MM
543 if (bitmap_ior_into (active, work))
544 changed = true;
545 }
546 }
547
548 FOR_EACH_BB (bb)
81bfd197 549 add_scope_conflicts_1 (bb, work, true);
47598145
MM
550
551 BITMAP_FREE (work);
552 FOR_ALL_BB (bb)
553 BITMAP_FREE (bb->aux);
554}
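/* Note (illustrative): this is a forward dataflow fixpoint.  Each bb->aux
   bitmap holds the partitions live out of that block; add_scope_conflicts_1
   re-simulates a block starting from the union of its predecessors' live-out
   sets, turning partitions on at mentions and off at clobbers.  Only once the
   sets have stabilized is the conflict-recording pass (for_conflict == true)
   run over each block.  */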
555
1f6d3a08 556/* A subroutine of partition_stack_vars. A comparison function for qsort,
3a42502d 557 sorting an array of indices by the properties of the object. */
1f6d3a08
RH
558
559static int
3a42502d 560stack_var_cmp (const void *a, const void *b)
1f6d3a08 561{
3a42502d
RH
562 size_t ia = *(const size_t *)a;
563 size_t ib = *(const size_t *)b;
564 unsigned int aligna = stack_vars[ia].alignb;
565 unsigned int alignb = stack_vars[ib].alignb;
566 HOST_WIDE_INT sizea = stack_vars[ia].size;
567 HOST_WIDE_INT sizeb = stack_vars[ib].size;
568 tree decla = stack_vars[ia].decl;
569 tree declb = stack_vars[ib].decl;
570 bool largea, largeb;
4e3825db 571 unsigned int uida, uidb;
1f6d3a08 572
3a42502d
RH
573 /* Primary compare on "large" alignment. Large comes first. */
574 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
575 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
576 if (largea != largeb)
577 return (int)largeb - (int)largea;
578
579 /* Secondary compare on size, decreasing */
3a42502d 580 if (sizea > sizeb)
6ddfda8a
ER
581 return -1;
582 if (sizea < sizeb)
1f6d3a08 583 return 1;
3a42502d
RH
584
585 /* Tertiary compare on true alignment, decreasing. */
586 if (aligna < alignb)
587 return -1;
588 if (aligna > alignb)
589 return 1;
590
 591 /* Final compare on ID for sort stability, decreasing.
592 Two SSA names are compared by their version, SSA names come before
593 non-SSA names, and two normal decls are compared by their DECL_UID. */
4e3825db
MM
594 if (TREE_CODE (decla) == SSA_NAME)
595 {
596 if (TREE_CODE (declb) == SSA_NAME)
597 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
598 else
599 return -1;
600 }
601 else if (TREE_CODE (declb) == SSA_NAME)
602 return 1;
603 else
604 uida = DECL_UID (decla), uidb = DECL_UID (declb);
79f802f5 605 if (uida < uidb)
79f802f5 606 return 1;
3a42502d
RH
607 if (uida > uidb)
608 return -1;
1f6d3a08
RH
609 return 0;
610}
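/* Example (illustrative): given three stack vars
     A: align 4,  size 64
     B: align 8,  size 16
     C: align 4,  size 16
   and no "large" alignments, qsort with this comparator yields A, B, C:
   sizes descend first, then alignment breaks the tie between B and C.  */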
611
55b34b5f
RG
612
613/* If the points-to solution *PI points to variables that are in a partition
614 together with other variables add all partition members to the pointed-to
615 variables bitmap. */
616
617static void
618add_partitioned_vars_to_ptset (struct pt_solution *pt,
619 struct pointer_map_t *decls_to_partitions,
620 struct pointer_set_t *visited, bitmap temp)
621{
622 bitmap_iterator bi;
623 unsigned i;
624 bitmap *part;
625
626 if (pt->anything
627 || pt->vars == NULL
628 /* The pointed-to vars bitmap is shared, it is enough to
629 visit it once. */
630 || pointer_set_insert(visited, pt->vars))
631 return;
632
633 bitmap_clear (temp);
634
635 /* By using a temporary bitmap to store all members of the partitions
636 we have to add we make sure to visit each of the partitions only
637 once. */
638 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
639 if ((!temp
640 || !bitmap_bit_p (temp, i))
641 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
642 (void *)(size_t) i)))
643 bitmap_ior_into (temp, *part);
644 if (!bitmap_empty_p (temp))
645 bitmap_ior_into (pt->vars, temp);
646}
647
648/* Update points-to sets based on partition info, so we can use them on RTL.
649 The bitmaps representing stack partitions will be saved until expand,
650 where partitioned decls used as bases in memory expressions will be
651 rewritten. */
652
653static void
654update_alias_info_with_stack_vars (void)
655{
656 struct pointer_map_t *decls_to_partitions = NULL;
657 size_t i, j;
658 tree var = NULL_TREE;
659
660 for (i = 0; i < stack_vars_num; i++)
661 {
662 bitmap part = NULL;
663 tree name;
664 struct ptr_info_def *pi;
665
666 /* Not interested in partitions with single variable. */
667 if (stack_vars[i].representative != i
668 || stack_vars[i].next == EOC)
669 continue;
670
671 if (!decls_to_partitions)
672 {
673 decls_to_partitions = pointer_map_create ();
674 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
675 }
676
677 /* Create an SSA_NAME that points to the partition for use
678 as base during alias-oracle queries on RTL for bases that
679 have been partitioned. */
680 if (var == NULL_TREE)
681 var = create_tmp_var (ptr_type_node, NULL);
682 name = make_ssa_name (var, NULL);
683
684 /* Create bitmaps representing partitions. They will be used for
685 points-to sets later, so use GGC alloc. */
686 part = BITMAP_GGC_ALLOC ();
687 for (j = i; j != EOC; j = stack_vars[j].next)
688 {
689 tree decl = stack_vars[j].decl;
25a6a873 690 unsigned int uid = DECL_PT_UID (decl);
55b34b5f
RG
691 /* We should never end up partitioning SSA names (though they
692 may end up on the stack). Neither should we allocate stack
9b999dc5
JJ
693 space to something that is unused and thus unreferenced, except
694 for -O0 where we are preserving even unreferenced variables. */
55b34b5f 695 gcc_assert (DECL_P (decl)
9b999dc5 696 && (!optimize
27c6b086 697 || referenced_var_lookup (cfun, DECL_UID (decl))));
55b34b5f
RG
698 bitmap_set_bit (part, uid);
699 *((bitmap *) pointer_map_insert (decls_to_partitions,
700 (void *)(size_t) uid)) = part;
701 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
702 decl)) = name;
703 }
704
705 /* Make the SSA name point to all partition members. */
706 pi = get_ptr_info (name);
d3553615 707 pt_solution_set (&pi->pt, part, false);
55b34b5f
RG
708 }
709
710 /* Make all points-to sets that contain one member of a partition
711 contain all members of the partition. */
712 if (decls_to_partitions)
713 {
714 unsigned i;
715 struct pointer_set_t *visited = pointer_set_create ();
716 bitmap temp = BITMAP_ALLOC (NULL);
717
718 for (i = 1; i < num_ssa_names; i++)
719 {
720 tree name = ssa_name (i);
721 struct ptr_info_def *pi;
722
723 if (name
724 && POINTER_TYPE_P (TREE_TYPE (name))
725 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
726 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
727 visited, temp);
728 }
729
730 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
731 decls_to_partitions, visited, temp);
55b34b5f
RG
732
733 pointer_set_destroy (visited);
734 pointer_map_destroy (decls_to_partitions);
735 BITMAP_FREE (temp);
736 }
737}
738
1f6d3a08
RH
739/* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
740 partitioning algorithm. Partitions A and B are known to be non-conflicting.
6ddfda8a 741 Merge them into a single partition A. */
1f6d3a08
RH
742
743static void
6ddfda8a 744union_stack_vars (size_t a, size_t b)
1f6d3a08 745{
2bdbbe94
MM
746 struct stack_var *vb = &stack_vars[b];
747 bitmap_iterator bi;
748 unsigned u;
1f6d3a08 749
6ddfda8a
ER
750 gcc_assert (stack_vars[b].next == EOC);
751 /* Add B to A's partition. */
752 stack_vars[b].next = stack_vars[a].next;
753 stack_vars[b].representative = a;
1f6d3a08
RH
754 stack_vars[a].next = b;
755
756 /* Update the required alignment of partition A to account for B. */
757 if (stack_vars[a].alignb < stack_vars[b].alignb)
758 stack_vars[a].alignb = stack_vars[b].alignb;
759
760 /* Update the interference graph and merge the conflicts. */
2bdbbe94
MM
761 if (vb->conflicts)
762 {
763 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
764 add_stack_var_conflict (a, stack_vars[u].representative);
765 BITMAP_FREE (vb->conflicts);
766 }
1f6d3a08
RH
767}
768
769/* A subroutine of expand_used_vars. Binpack the variables into
770 partitions constrained by the interference graph. The overall
771 algorithm used is as follows:
772
6ddfda8a 773 Sort the objects by size in descending order.
1f6d3a08
RH
774 For each object A {
775 S = size(A)
776 O = 0
777 loop {
778 Look for the largest non-conflicting object B with size <= S.
779 UNION (A, B)
1f6d3a08
RH
780 }
781 }
782*/
783
784static void
785partition_stack_vars (void)
786{
787 size_t si, sj, n = stack_vars_num;
788
789 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
790 for (si = 0; si < n; ++si)
791 stack_vars_sorted[si] = si;
792
793 if (n == 1)
794 return;
795
3a42502d 796 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
1f6d3a08 797
1f6d3a08
RH
798 for (si = 0; si < n; ++si)
799 {
800 size_t i = stack_vars_sorted[si];
3a42502d 801 unsigned int ialign = stack_vars[i].alignb;
1f6d3a08 802
6ddfda8a
ER
803 /* Ignore objects that aren't partition representatives. If we
804 see a var that is not a partition representative, it must
805 have been merged earlier. */
806 if (stack_vars[i].representative != i)
807 continue;
808
809 for (sj = si + 1; sj < n; ++sj)
1f6d3a08
RH
810 {
811 size_t j = stack_vars_sorted[sj];
1f6d3a08
RH
812 unsigned int jalign = stack_vars[j].alignb;
813
814 /* Ignore objects that aren't partition representatives. */
815 if (stack_vars[j].representative != j)
816 continue;
817
1f6d3a08
RH
818 /* Ignore conflicting objects. */
819 if (stack_var_conflict_p (i, j))
820 continue;
821
3a42502d
RH
822 /* Do not mix objects of "small" (supported) alignment
823 and "large" (unsupported) alignment. */
824 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
825 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
826 continue;
827
1f6d3a08 828 /* UNION the objects, placing J at OFFSET. */
6ddfda8a 829 union_stack_vars (i, j);
1f6d3a08
RH
830 }
831 }
55b34b5f 832
9b999dc5 833 update_alias_info_with_stack_vars ();
1f6d3a08
RH
834}
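/* Example (illustrative): with a 32-byte buffer A and a non-conflicting
   8-byte buffer B (both of "small" alignment), the sorted order visits A
   first and union_stack_vars (A, B) puts B into A's partition.  Both later
   receive the same frame offset, so B simply overlays the first 8 bytes of
   A's slot; the partition keeps the size of its representative A, and B's
   alignment can only raise, never lower, the partition's alignment.  */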
835
836/* A debugging aid for expand_used_vars. Dump the generated partitions. */
837
838static void
839dump_stack_var_partition (void)
840{
841 size_t si, i, j, n = stack_vars_num;
842
843 for (si = 0; si < n; ++si)
844 {
845 i = stack_vars_sorted[si];
846
847 /* Skip variables that aren't partition representatives, for now. */
848 if (stack_vars[i].representative != i)
849 continue;
850
851 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
852 " align %u\n", (unsigned long) i, stack_vars[i].size,
853 stack_vars[i].alignb);
854
855 for (j = i; j != EOC; j = stack_vars[j].next)
856 {
857 fputc ('\t', dump_file);
858 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
1f6d3a08 859 }
6ddfda8a 860 fputc ('\n', dump_file);
1f6d3a08
RH
861 }
862}
863
3a42502d 864/* Assign rtl to DECL at BASE + OFFSET. */
1f6d3a08
RH
865
866static void
3a42502d
RH
867expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
868 HOST_WIDE_INT offset)
1f6d3a08 869{
3a42502d 870 unsigned align;
1f6d3a08 871 rtx x;
c22cacf3 872
1f6d3a08
RH
873 /* If this fails, we've overflowed the stack frame. Error nicely? */
874 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
875
3a42502d 876 x = plus_constant (base, offset);
4e3825db 877 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
1f6d3a08 878
4e3825db
MM
879 if (TREE_CODE (decl) != SSA_NAME)
880 {
 881 /* Set the alignment we actually gave this decl if it isn't an SSA name.
 882 If it is, we generate stack slots only accidentally, so it isn't as
 883 important; we'll simply use the alignment that is already set. */
3a42502d
RH
884 if (base == virtual_stack_vars_rtx)
885 offset -= frame_phase;
4e3825db
MM
886 align = offset & -offset;
887 align *= BITS_PER_UNIT;
3a42502d
RH
888 if (align == 0 || align > base_align)
889 align = base_align;
890
891 /* One would think that we could assert that we're not decreasing
892 alignment here, but (at least) the i386 port does exactly this
893 via the MINIMUM_ALIGNMENT hook. */
4e3825db
MM
894
895 DECL_ALIGN (decl) = align;
896 DECL_USER_ALIGN (decl) = 0;
897 }
898
899 set_mem_attributes (x, SSAVAR (decl), true);
900 set_rtl (decl, x);
1f6d3a08
RH
901}
902
903/* A subroutine of expand_used_vars. Give each partition representative
904 a unique location within the stack frame. Update each partition member
905 with that location. */
906
907static void
7d69de61 908expand_stack_vars (bool (*pred) (tree))
1f6d3a08
RH
909{
910 size_t si, i, j, n = stack_vars_num;
3a42502d
RH
911 HOST_WIDE_INT large_size = 0, large_alloc = 0;
912 rtx large_base = NULL;
913 unsigned large_align = 0;
914 tree decl;
915
916 /* Determine if there are any variables requiring "large" alignment.
917 Since these are dynamically allocated, we only process these if
 918 no predicate is involved. */
919 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
920 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
921 {
922 /* Find the total size of these variables. */
923 for (si = 0; si < n; ++si)
924 {
925 unsigned alignb;
926
927 i = stack_vars_sorted[si];
928 alignb = stack_vars[i].alignb;
929
930 /* Stop when we get to the first decl with "small" alignment. */
931 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
932 break;
933
934 /* Skip variables that aren't partition representatives. */
935 if (stack_vars[i].representative != i)
936 continue;
937
938 /* Skip variables that have already had rtl assigned. See also
939 add_stack_var where we perpetrate this pc_rtx hack. */
940 decl = stack_vars[i].decl;
941 if ((TREE_CODE (decl) == SSA_NAME
942 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
943 : DECL_RTL (decl)) != pc_rtx)
944 continue;
945
946 large_size += alignb - 1;
947 large_size &= -(HOST_WIDE_INT)alignb;
948 large_size += stack_vars[i].size;
949 }
950
951 /* If there were any, allocate space. */
952 if (large_size > 0)
953 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
954 large_align, true);
955 }
1f6d3a08
RH
956
957 for (si = 0; si < n; ++si)
958 {
3a42502d
RH
959 rtx base;
960 unsigned base_align, alignb;
1f6d3a08
RH
961 HOST_WIDE_INT offset;
962
963 i = stack_vars_sorted[si];
964
965 /* Skip variables that aren't partition representatives, for now. */
966 if (stack_vars[i].representative != i)
967 continue;
968
7d69de61
RH
969 /* Skip variables that have already had rtl assigned. See also
970 add_stack_var where we perpetrate this pc_rtx hack. */
3a42502d
RH
971 decl = stack_vars[i].decl;
972 if ((TREE_CODE (decl) == SSA_NAME
973 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
974 : DECL_RTL (decl)) != pc_rtx)
7d69de61
RH
975 continue;
976
c22cacf3 977 /* Check the predicate to see whether this variable should be
7d69de61 978 allocated in this pass. */
3a42502d 979 if (pred && !pred (decl))
7d69de61
RH
980 continue;
981
3a42502d
RH
982 alignb = stack_vars[i].alignb;
983 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
984 {
985 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
986 base = virtual_stack_vars_rtx;
987 base_align = crtl->max_used_stack_slot_alignment;
988 }
989 else
990 {
991 /* Large alignment is only processed in the last pass. */
992 if (pred)
993 continue;
533f611a 994 gcc_assert (large_base != NULL);
3a42502d
RH
995
996 large_alloc += alignb - 1;
997 large_alloc &= -(HOST_WIDE_INT)alignb;
998 offset = large_alloc;
999 large_alloc += stack_vars[i].size;
1000
1001 base = large_base;
1002 base_align = large_align;
1003 }
1f6d3a08
RH
1004
1005 /* Create rtl for each variable based on their location within the
1006 partition. */
1007 for (j = i; j != EOC; j = stack_vars[j].next)
f8da8190 1008 {
f8da8190 1009 expand_one_stack_var_at (stack_vars[j].decl,
3a42502d 1010 base, base_align,
6ddfda8a 1011 offset);
f8da8190 1012 }
1f6d3a08 1013 }
3a42502d
RH
1014
1015 gcc_assert (large_alloc == large_size);
1f6d3a08
RH
1016}
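/* Note (illustrative): variables whose alignment exceeds
   MAX_SUPPORTED_STACK_ALIGNMENT are not given static frame slots at all.
   Their combined, alignment-padded size is obtained once via
   allocate_dynamic_stack_space, and each such variable is then carved out
   of that block at an offset rounded up to its own alignment; this is why
   they are only handled in the final, predicate-less pass.  */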
1017
ff28a94d
JH
1018/* Take into account all sizes of partitions and reset DECL_RTLs. */
1019static HOST_WIDE_INT
1020account_stack_vars (void)
1021{
1022 size_t si, j, i, n = stack_vars_num;
1023 HOST_WIDE_INT size = 0;
1024
1025 for (si = 0; si < n; ++si)
1026 {
1027 i = stack_vars_sorted[si];
1028
1029 /* Skip variables that aren't partition representatives, for now. */
1030 if (stack_vars[i].representative != i)
1031 continue;
1032
1033 size += stack_vars[i].size;
1034 for (j = i; j != EOC; j = stack_vars[j].next)
4e3825db 1035 set_rtl (stack_vars[j].decl, NULL);
ff28a94d
JH
1036 }
1037 return size;
1038}
1039
1f6d3a08
RH
1040/* A subroutine of expand_one_var. Called to immediately assign rtl
1041 to a variable to be allocated in the stack frame. */
1042
1043static void
1044expand_one_stack_var (tree var)
1045{
3a42502d
RH
1046 HOST_WIDE_INT size, offset;
1047 unsigned byte_align;
1f6d3a08 1048
4e3825db 1049 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
6f197850 1050 byte_align = align_local_variable (SSAVAR (var));
3a42502d
RH
1051
1052 /* We handle highly aligned variables in expand_stack_vars. */
1053 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1f6d3a08 1054
3a42502d
RH
1055 offset = alloc_stack_frame_space (size, byte_align);
1056
1057 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1058 crtl->max_used_stack_slot_alignment, offset);
1f6d3a08
RH
1059}
1060
1f6d3a08
RH
1061/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1062 that will reside in a hard register. */
1063
1064static void
1065expand_one_hard_reg_var (tree var)
1066{
1067 rest_of_decl_compilation (var, 0, 0);
1068}
1069
1070/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1071 that will reside in a pseudo register. */
1072
1073static void
1074expand_one_register_var (tree var)
1075{
4e3825db
MM
1076 tree decl = SSAVAR (var);
1077 tree type = TREE_TYPE (decl);
cde0f3fd 1078 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1f6d3a08
RH
1079 rtx x = gen_reg_rtx (reg_mode);
1080
4e3825db 1081 set_rtl (var, x);
1f6d3a08
RH
1082
1083 /* Note if the object is a user variable. */
4e3825db
MM
1084 if (!DECL_ARTIFICIAL (decl))
1085 mark_user_reg (x);
1f6d3a08 1086
61021c2c 1087 if (POINTER_TYPE_P (type))
d466b407 1088 mark_reg_pointer (x, get_pointer_alignment (var));
1f6d3a08
RH
1089}
1090
1091/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
128a79fb 1092 has some associated error, e.g. its type is error-mark. We just need
1f6d3a08
RH
1093 to pick something that won't crash the rest of the compiler. */
1094
1095static void
1096expand_one_error_var (tree var)
1097{
1098 enum machine_mode mode = DECL_MODE (var);
1099 rtx x;
1100
1101 if (mode == BLKmode)
1102 x = gen_rtx_MEM (BLKmode, const0_rtx);
1103 else if (mode == VOIDmode)
1104 x = const0_rtx;
1105 else
1106 x = gen_reg_rtx (mode);
1107
1108 SET_DECL_RTL (var, x);
1109}
1110
c22cacf3 1111/* A subroutine of expand_one_var. VAR is a variable that will be
1f6d3a08
RH
1112 allocated to the local stack frame. Return true if we wish to
1113 add VAR to STACK_VARS so that it will be coalesced with other
1114 variables. Return false to allocate VAR immediately.
1115
1116 This function is used to reduce the number of variables considered
1117 for coalescing, which reduces the size of the quadratic problem. */
1118
1119static bool
1120defer_stack_allocation (tree var, bool toplevel)
1121{
7d69de61
RH
1122 /* If stack protection is enabled, *all* stack variables must be deferred,
1123 so that we can re-order the strings to the top of the frame. */
1124 if (flag_stack_protect)
1125 return true;
1126
3a42502d
RH
1127 /* We handle "large" alignment via dynamic allocation. We want to handle
1128 this extra complication in only one place, so defer them. */
1129 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1130 return true;
1131
1f6d3a08
RH
1132 /* Variables in the outermost scope automatically conflict with
1133 every other variable. The only reason to want to defer them
1134 at all is that, after sorting, we can more efficiently pack
1135 small variables in the stack frame. Continue to defer at -O2. */
1136 if (toplevel && optimize < 2)
1137 return false;
1138
1139 /* Without optimization, *most* variables are allocated from the
1140 stack, which makes the quadratic problem large exactly when we
c22cacf3 1141 want compilation to proceed as quickly as possible. On the
1f6d3a08
RH
1142 other hand, we don't want the function's stack frame size to
1143 get completely out of hand. So we avoid adding scalars and
1144 "small" aggregates to the list at all. */
1145 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1146 return false;
1147
1148 return true;
1149}
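/* Example (illustrative), with -fstack-protector off and no "large"
   alignment: at -O0 a function-scope (toplevel) variable is always expanded
   immediately, a 16-byte block-scope scalar is also expanded immediately
   (size < 32), but a 64-byte block-scope array is deferred so it can be
   packed with other stack slots.  At -O2 everything reaching this point is
   deferred.  */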
1150
1151/* A subroutine of expand_used_vars. Expand one variable according to
2a7e31df 1152 its flavor. Variables to be placed on the stack are not actually
b8698a0f 1153 expanded yet, merely recorded.
ff28a94d
JH
1154 When REALLY_EXPAND is false, only add stack values to be allocated.
 1155 Return the stack usage this variable is supposed to take.
1156*/
1f6d3a08 1157
ff28a94d
JH
1158static HOST_WIDE_INT
1159expand_one_var (tree var, bool toplevel, bool really_expand)
1f6d3a08 1160{
3a42502d 1161 unsigned int align = BITS_PER_UNIT;
4e3825db 1162 tree origvar = var;
3a42502d 1163
4e3825db
MM
1164 var = SSAVAR (var);
1165
3a42502d 1166 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
2e3f842f 1167 {
2e3f842f
L
1168 /* Because we don't know if VAR will end up in a register or on the
 1169 stack, we conservatively assume it will be on the stack even if VAR
 1170 is eventually put into a register after the RA pass. For non-automatic
1171 variables, which won't be on stack, we collect alignment of
1172 type and ignore user specified alignment. */
1173 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
ae58e548
JJ
1174 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1175 TYPE_MODE (TREE_TYPE (var)),
1176 TYPE_ALIGN (TREE_TYPE (var)));
f3184b4c
JJ
1177 else if (DECL_HAS_VALUE_EXPR_P (var)
1178 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1179 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1180 or variables which were assigned a stack slot already by
1181 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1182 changed from the offset chosen to it. */
1183 align = crtl->stack_alignment_estimated;
2e3f842f 1184 else
ae58e548 1185 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
2e3f842f 1186
3a42502d
RH
 1187 /* If the variable alignment is very large we'll dynamically allocate
1188 it, which means that in-frame portion is just a pointer. */
1189 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1190 align = POINTER_SIZE;
1191 }
1192
1193 if (SUPPORTS_STACK_ALIGNMENT
1194 && crtl->stack_alignment_estimated < align)
1195 {
1196 /* stack_alignment_estimated shouldn't change after stack
 1197 realign decision has been made. */
1198 gcc_assert(!crtl->stack_realign_processed);
1199 crtl->stack_alignment_estimated = align;
2e3f842f
L
1200 }
1201
3a42502d
RH
1202 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1203 So here we only make sure stack_alignment_needed >= align. */
1204 if (crtl->stack_alignment_needed < align)
1205 crtl->stack_alignment_needed = align;
1206 if (crtl->max_used_stack_slot_alignment < align)
1207 crtl->max_used_stack_slot_alignment = align;
1208
4e3825db
MM
1209 if (TREE_CODE (origvar) == SSA_NAME)
1210 {
1211 gcc_assert (TREE_CODE (var) != VAR_DECL
1212 || (!DECL_EXTERNAL (var)
1213 && !DECL_HAS_VALUE_EXPR_P (var)
1214 && !TREE_STATIC (var)
4e3825db
MM
1215 && TREE_TYPE (var) != error_mark_node
1216 && !DECL_HARD_REGISTER (var)
1217 && really_expand));
1218 }
1219 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
4846b435 1220 ;
1f6d3a08
RH
1221 else if (DECL_EXTERNAL (var))
1222 ;
833b3afe 1223 else if (DECL_HAS_VALUE_EXPR_P (var))
1f6d3a08
RH
1224 ;
1225 else if (TREE_STATIC (var))
7e8b322a 1226 ;
eb7adebc 1227 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1f6d3a08
RH
1228 ;
1229 else if (TREE_TYPE (var) == error_mark_node)
ff28a94d
JH
1230 {
1231 if (really_expand)
1232 expand_one_error_var (var);
1233 }
4e3825db 1234 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
ff28a94d
JH
1235 {
1236 if (really_expand)
1237 expand_one_hard_reg_var (var);
1238 }
1f6d3a08 1239 else if (use_register_for_decl (var))
ff28a94d
JH
1240 {
1241 if (really_expand)
4e3825db 1242 expand_one_register_var (origvar);
ff28a94d 1243 }
7604eb4e
JJ
1244 else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
1245 {
1246 if (really_expand)
1247 {
1248 error ("size of variable %q+D is too large", var);
1249 expand_one_error_var (var);
1250 }
1251 }
1f6d3a08 1252 else if (defer_stack_allocation (var, toplevel))
4e3825db 1253 add_stack_var (origvar);
1f6d3a08 1254 else
ff28a94d 1255 {
bd9f1b4b 1256 if (really_expand)
4e3825db 1257 expand_one_stack_var (origvar);
ff28a94d
JH
1258 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1259 }
1260 return 0;
1f6d3a08
RH
1261}
1262
1263/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1264 expanding variables. Those variables that can be put into registers
1265 are allocated pseudos; those that can't are put on the stack.
1266
1267 TOPLEVEL is true if this is the outermost BLOCK. */
1268
1269static void
1270expand_used_vars_for_block (tree block, bool toplevel)
1271{
1f6d3a08
RH
1272 tree t;
1273
1f6d3a08 1274 /* Expand all variables at this level. */
910ad8de 1275 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1ace6185
JJ
1276 if (TREE_USED (t)
1277 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1278 || !DECL_NONSHAREABLE (t)))
ff28a94d 1279 expand_one_var (t, toplevel, true);
1f6d3a08 1280
1f6d3a08
RH
1281 /* Expand all variables at containing levels. */
1282 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1283 expand_used_vars_for_block (t, false);
1f6d3a08
RH
1284}
1285
1286/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1287 and clear TREE_USED on all local variables. */
1288
1289static void
1290clear_tree_used (tree block)
1291{
1292 tree t;
1293
910ad8de 1294 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1f6d3a08 1295 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1ace6185
JJ
1296 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1297 || !DECL_NONSHAREABLE (t))
1f6d3a08
RH
1298 TREE_USED (t) = 0;
1299
1300 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1301 clear_tree_used (t);
1302}
1303
7d69de61
RH
1304/* Examine TYPE and determine a bit mask of the following features. */
1305
1306#define SPCT_HAS_LARGE_CHAR_ARRAY 1
1307#define SPCT_HAS_SMALL_CHAR_ARRAY 2
1308#define SPCT_HAS_ARRAY 4
1309#define SPCT_HAS_AGGREGATE 8
1310
1311static unsigned int
1312stack_protect_classify_type (tree type)
1313{
1314 unsigned int ret = 0;
1315 tree t;
1316
1317 switch (TREE_CODE (type))
1318 {
1319 case ARRAY_TYPE:
1320 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1321 if (t == char_type_node
1322 || t == signed_char_type_node
1323 || t == unsigned_char_type_node)
1324 {
15362b89
JJ
1325 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1326 unsigned HOST_WIDE_INT len;
7d69de61 1327
15362b89
JJ
1328 if (!TYPE_SIZE_UNIT (type)
1329 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1330 len = max;
7d69de61 1331 else
15362b89 1332 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
7d69de61
RH
1333
1334 if (len < max)
1335 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1336 else
1337 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1338 }
1339 else
1340 ret = SPCT_HAS_ARRAY;
1341 break;
1342
1343 case UNION_TYPE:
1344 case QUAL_UNION_TYPE:
1345 case RECORD_TYPE:
1346 ret = SPCT_HAS_AGGREGATE;
1347 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1348 if (TREE_CODE (t) == FIELD_DECL)
1349 ret |= stack_protect_classify_type (TREE_TYPE (t));
1350 break;
1351
1352 default:
1353 break;
1354 }
1355
1356 return ret;
1357}
1358
a4d05547
KH
1359/* Return nonzero if DECL should be segregated into the "vulnerable" upper
1360 part of the local stack frame. Remember if we ever return nonzero for
7d69de61
RH
1361 any variable in this function. The return value is the phase number in
1362 which the variable should be allocated. */
1363
1364static int
1365stack_protect_decl_phase (tree decl)
1366{
1367 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1368 int ret = 0;
1369
1370 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1371 has_short_buffer = true;
1372
1373 if (flag_stack_protect == 2)
1374 {
1375 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1376 && !(bits & SPCT_HAS_AGGREGATE))
1377 ret = 1;
1378 else if (bits & SPCT_HAS_ARRAY)
1379 ret = 2;
1380 }
1381 else
1382 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1383
1384 if (ret)
1385 has_protected_decls = true;
1386
1387 return ret;
1388}
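/* Example (illustrative), assuming the default --param ssp-buffer-size of 8:
   with -fstack-protector, "char buf[64]" classifies as a large char array,
   gets phase 1, and is segregated into the "vulnerable" upper part of the
   frame, while "char buf[4]" and "int v[16]" stay in phase 0.  Only under
   -fstack-protector-all (flag_stack_protect == 2) do other arrays move to
   phase 2.  */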
1389
1390/* Two helper routines that check for phase 1 and phase 2. These are used
1391 as callbacks for expand_stack_vars. */
1392
1393static bool
1394stack_protect_decl_phase_1 (tree decl)
1395{
1396 return stack_protect_decl_phase (decl) == 1;
1397}
1398
1399static bool
1400stack_protect_decl_phase_2 (tree decl)
1401{
1402 return stack_protect_decl_phase (decl) == 2;
1403}
1404
1405/* Ensure that variables in different stack protection phases conflict
1406 so that they are not merged and share the same stack slot. */
1407
1408static void
1409add_stack_protection_conflicts (void)
1410{
1411 size_t i, j, n = stack_vars_num;
1412 unsigned char *phase;
1413
1414 phase = XNEWVEC (unsigned char, n);
1415 for (i = 0; i < n; ++i)
1416 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1417
1418 for (i = 0; i < n; ++i)
1419 {
1420 unsigned char ph_i = phase[i];
1421 for (j = 0; j < i; ++j)
1422 if (ph_i != phase[j])
1423 add_stack_var_conflict (i, j);
1424 }
1425
1426 XDELETEVEC (phase);
1427}
1428
1429/* Create a decl for the guard at the top of the stack frame. */
1430
1431static void
1432create_stack_guard (void)
1433{
c2255bc4
AH
1434 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1435 VAR_DECL, NULL, ptr_type_node);
7d69de61
RH
1436 TREE_THIS_VOLATILE (guard) = 1;
1437 TREE_USED (guard) = 1;
1438 expand_one_stack_var (guard);
cb91fab0 1439 crtl->stack_protect_guard = guard;
7d69de61
RH
1440}
1441
ff28a94d 1442/* Prepare for expanding variables. */
b8698a0f 1443static void
ff28a94d
JH
1444init_vars_expansion (void)
1445{
1446 tree t;
c021f10b 1447 unsigned ix;
cb91fab0 1448 /* Set TREE_USED on all variables in the local_decls. */
c021f10b
NF
1449 FOR_EACH_LOCAL_DECL (cfun, ix, t)
1450 TREE_USED (t) = 1;
ff28a94d
JH
1451
1452 /* Clear TREE_USED on all variables associated with a block scope. */
1453 clear_tree_used (DECL_INITIAL (current_function_decl));
1454
1455 /* Initialize local stack smashing state. */
1456 has_protected_decls = false;
1457 has_short_buffer = false;
1458}
1459
1460/* Free up stack variable graph data. */
1461static void
1462fini_vars_expansion (void)
1463{
2bdbbe94
MM
1464 size_t i, n = stack_vars_num;
1465 for (i = 0; i < n; i++)
1466 BITMAP_FREE (stack_vars[i].conflicts);
ff28a94d
JH
1467 XDELETEVEC (stack_vars);
1468 XDELETEVEC (stack_vars_sorted);
ff28a94d
JH
1469 stack_vars = NULL;
1470 stack_vars_alloc = stack_vars_num = 0;
47598145
MM
1471 pointer_map_destroy (decl_to_stack_part);
1472 decl_to_stack_part = NULL;
ff28a94d
JH
1473}
1474
30925d94
AO
1475/* Make a fair guess for the size of the stack frame of the function
1476 in NODE. This doesn't have to be exact, the result is only used in
1477 the inline heuristics. So we don't want to run the full stack var
1478 packing algorithm (which is quadratic in the number of stack vars).
1479 Instead, we calculate the total size of all stack vars. This turns
1480 out to be a pretty fair estimate -- packing of stack vars doesn't
1481 happen very often. */
b5a430f3 1482
ff28a94d 1483HOST_WIDE_INT
30925d94 1484estimated_stack_frame_size (struct cgraph_node *node)
ff28a94d
JH
1485{
1486 HOST_WIDE_INT size = 0;
b5a430f3 1487 size_t i;
bb7e6d55 1488 tree var;
2e1ec94f 1489 tree old_cur_fun_decl = current_function_decl;
bb7e6d55
AO
1490 referenced_var_iterator rvi;
1491 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
30925d94
AO
1492
1493 current_function_decl = node->decl;
bb7e6d55 1494 push_cfun (fn);
ff28a94d 1495
bb7e6d55
AO
1496 gcc_checking_assert (gimple_referenced_vars (fn));
1497 FOR_EACH_REFERENCED_VAR (fn, var, rvi)
1498 size += expand_one_var (var, true, false);
b5a430f3 1499
ff28a94d
JH
1500 if (stack_vars_num > 0)
1501 {
b5a430f3
SB
1502 /* Fake sorting the stack vars for account_stack_vars (). */
1503 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1504 for (i = 0; i < stack_vars_num; ++i)
1505 stack_vars_sorted[i] = i;
ff28a94d
JH
1506 size += account_stack_vars ();
1507 fini_vars_expansion ();
1508 }
2e1ec94f
RR
1509 pop_cfun ();
1510 current_function_decl = old_cur_fun_decl;
ff28a94d
JH
1511 return size;
1512}
1513
1f6d3a08 1514/* Expand all variables used in the function. */
727a31fa
RH
1515
1516static void
1517expand_used_vars (void)
1518{
c021f10b
NF
1519 tree var, outer_block = DECL_INITIAL (current_function_decl);
1520 VEC(tree,heap) *maybe_local_decls = NULL;
4e3825db 1521 unsigned i;
c021f10b 1522 unsigned len;
727a31fa 1523
1f6d3a08
RH
1524 /* Compute the phase of the stack frame for this function. */
1525 {
1526 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1527 int off = STARTING_FRAME_OFFSET % align;
1528 frame_phase = off ? align - off : 0;
1529 }
727a31fa 1530
ff28a94d 1531 init_vars_expansion ();
7d69de61 1532
4e3825db
MM
1533 for (i = 0; i < SA.map->num_partitions; i++)
1534 {
1535 tree var = partition_to_var (SA.map, i);
1536
1537 gcc_assert (is_gimple_reg (var));
1538 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1539 expand_one_var (var, true, true);
1540 else
1541 {
1542 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1543 contain the default def (representing the parm or result itself)
1544 we don't do anything here. But those which don't contain the
1545 default def (representing a temporary based on the parm/result)
1546 we need to allocate space just like for normal VAR_DECLs. */
1547 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1548 {
1549 expand_one_var (var, true, true);
1550 gcc_assert (SA.partition_to_pseudo[i]);
1551 }
1552 }
1553 }
1554
cb91fab0 1555 /* At this point all variables on the local_decls with TREE_USED
1f6d3a08 1556 set are not associated with any block scope. Lay them out. */
c021f10b
NF
1557
1558 len = VEC_length (tree, cfun->local_decls);
1559 FOR_EACH_LOCAL_DECL (cfun, i, var)
1f6d3a08 1560 {
1f6d3a08
RH
1561 bool expand_now = false;
1562
4e3825db
MM
1563 /* Expanded above already. */
1564 if (is_gimple_reg (var))
eb7adebc
MM
1565 {
1566 TREE_USED (var) = 0;
3adcf52c 1567 goto next;
eb7adebc 1568 }
1f6d3a08
RH
1569 /* We didn't set a block for static or extern because it's hard
1570 to tell the difference between a global variable (re)declared
1571 in a local scope, and one that's really declared there to
1572 begin with. And it doesn't really matter much, since we're
1573 not giving them stack space. Expand them now. */
4e3825db 1574 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1f6d3a08
RH
1575 expand_now = true;
1576
1577 /* If the variable is not associated with any block, then it
1578 was created by the optimizers, and could be live anywhere
1579 in the function. */
1580 else if (TREE_USED (var))
1581 expand_now = true;
1582
1583 /* Finally, mark all variables on the list as used. We'll use
1584 this in a moment when we expand those associated with scopes. */
1585 TREE_USED (var) = 1;
1586
1587 if (expand_now)
3adcf52c
JM
1588 expand_one_var (var, true, true);
1589
1590 next:
1591 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
802e9f8e 1592 {
3adcf52c
JM
1593 rtx rtl = DECL_RTL_IF_SET (var);
1594
1595 /* Keep artificial non-ignored vars in cfun->local_decls
1596 chain until instantiate_decls. */
1597 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 1598 add_local_decl (cfun, var);
6c6366f6 1599 else if (rtl == NULL_RTX)
c021f10b
NF
1600 /* If rtl isn't set yet, which can happen e.g. with
1601 -fstack-protector, retry before returning from this
1602 function. */
1603 VEC_safe_push (tree, heap, maybe_local_decls, var);
802e9f8e 1604 }
1f6d3a08 1605 }
1f6d3a08 1606
c021f10b
NF
1607 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1608
1609 +-----------------+-----------------+
1610 | ...processed... | ...duplicates...|
1611 +-----------------+-----------------+
1612 ^
1613 +-- LEN points here.
1614
1615 We just want the duplicates, as those are the artificial
1616 non-ignored vars that we want to keep until instantiate_decls.
1617 Move them down and truncate the array. */
1618 if (!VEC_empty (tree, cfun->local_decls))
1619 VEC_block_remove (tree, cfun->local_decls, 0, len);
1620
1f6d3a08
RH
1621 /* At this point, all variables within the block tree with TREE_USED
1622 set are actually used by the optimized function. Lay them out. */
1623 expand_used_vars_for_block (outer_block, true);
1624
1625 if (stack_vars_num > 0)
1626 {
47598145 1627 add_scope_conflicts ();
1f6d3a08 1628 /* Due to the way alias sets work, no variables with non-conflicting
c22cacf3 1629 alias sets may be assigned the same address. Add conflicts to
1f6d3a08
RH
1630 reflect this. */
1631 add_alias_set_conflicts ();
1632
c22cacf3 1633 /* If stack protection is enabled, we don't share space between
7d69de61
RH
1634 vulnerable data and non-vulnerable data. */
1635 if (flag_stack_protect)
1636 add_stack_protection_conflicts ();
1637
c22cacf3 1638 /* Now that we have collected all stack variables, and have computed a
1f6d3a08
RH
1639 minimal interference graph, attempt to save some stack space. */
1640 partition_stack_vars ();
1641 if (dump_file)
1642 dump_stack_var_partition ();
7d69de61
RH
1643 }
1644
1645 /* There are several conditions under which we should create a
1646 stack guard: protect-all, alloca used, protected decls present. */
1647 if (flag_stack_protect == 2
1648 || (flag_stack_protect
e3b5732b 1649 && (cfun->calls_alloca || has_protected_decls)))
7d69de61 1650 create_stack_guard ();
1f6d3a08 1651
7d69de61
RH
1652 /* Assign rtl to each variable based on these partitions. */
1653 if (stack_vars_num > 0)
1654 {
1655 /* Reorder decls to be protected by iterating over the variables
1656 array multiple times, and allocating out of each phase in turn. */
c22cacf3 1657 /* ??? We could probably integrate this into the qsort we did
7d69de61
RH
1658 earlier, such that we naturally see these variables first,
1659 and thus naturally allocate things in the right order. */
1660 if (has_protected_decls)
1661 {
1662 /* Phase 1 contains only character arrays. */
1663 expand_stack_vars (stack_protect_decl_phase_1);
1664
1665 /* Phase 2 contains other kinds of arrays. */
1666 if (flag_stack_protect == 2)
1667 expand_stack_vars (stack_protect_decl_phase_2);
1668 }
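 /* Editorial illustration, not part of the pass; the decl names below are
    hypothetical.  Under -fstack-protector-all (flag_stack_protect == 2):

        char buf[64];     -- matches stack_protect_decl_phase_1 (char array)
        int  counts[16];  -- a non-character array, only expanded in phase 2
        int  x;           -- left for the final expand_stack_vars (NULL) call

    so the character arrays, the decls most prone to overflow, are laid out
    in the first phase.  */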
1669
1670 expand_stack_vars (NULL);
1f6d3a08 1671
ff28a94d 1672 fini_vars_expansion ();
1f6d3a08
RH
1673 }
1674
6c6366f6
JJ
1675 /* If there were any artificial non-ignored vars without rtl
1676 found earlier, see if deferred stack allocation hasn't assigned
1677 rtl to them. */
c021f10b 1678 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
6c6366f6 1679 {
6c6366f6
JJ
1680 rtx rtl = DECL_RTL_IF_SET (var);
1681
6c6366f6
JJ
1682 /* Keep artificial non-ignored vars in cfun->local_decls
1683 chain until instantiate_decls. */
1684 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 1685 add_local_decl (cfun, var);
6c6366f6 1686 }
c021f10b 1687 VEC_free (tree, heap, maybe_local_decls);
6c6366f6 1688
1f6d3a08
RH
1689 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1690 if (STACK_ALIGNMENT_NEEDED)
1691 {
1692 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1693 if (!FRAME_GROWS_DOWNWARD)
1694 frame_offset += align - 1;
1695 frame_offset &= -align;
1696 }
727a31fa
RH
1697}
1698
1699
b7211528
SB
1700/* If we need to produce a detailed dump, print the tree representation
1701 for STMT to the dump file. SINCE is the last RTX after which the RTL
1702 generated for STMT should have been appended. */
1703
1704static void
726a989a 1705maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
b7211528
SB
1706{
1707 if (dump_file && (dump_flags & TDF_DETAILS))
1708 {
1709 fprintf (dump_file, "\n;; ");
b5b8b0ac
AO
1710 print_gimple_stmt (dump_file, stmt, 0,
1711 TDF_SLIM | (dump_flags & TDF_LINENO));
b7211528
SB
1712 fprintf (dump_file, "\n");
1713
1714 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1715 }
1716}
1717
8b11009b
ZD
1718/* Maps the blocks that do not contain tree labels to rtx labels. */
1719
1720static struct pointer_map_t *lab_rtx_for_bb;
1721
a9b77cd1
ZD
1722/* Returns the label_rtx expression for a label starting basic block BB. */
1723
1724static rtx
726a989a 1725label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
a9b77cd1 1726{
726a989a
RB
1727 gimple_stmt_iterator gsi;
1728 tree lab;
1729 gimple lab_stmt;
8b11009b 1730 void **elt;
a9b77cd1
ZD
1731
1732 if (bb->flags & BB_RTL)
1733 return block_label (bb);
1734
8b11009b
ZD
1735 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1736 if (elt)
ae50c0cb 1737 return (rtx) *elt;
8b11009b
ZD
1738
1739 /* Find the tree label if it is present. */
b8698a0f 1740
726a989a 1741 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
a9b77cd1 1742 {
726a989a
RB
1743 lab_stmt = gsi_stmt (gsi);
1744 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
a9b77cd1
ZD
1745 break;
1746
726a989a 1747 lab = gimple_label_label (lab_stmt);
a9b77cd1
ZD
1748 if (DECL_NONLOCAL (lab))
1749 break;
1750
1751 return label_rtx (lab);
1752 }
1753
8b11009b
ZD
1754 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1755 *elt = gen_label_rtx ();
ae50c0cb 1756 return (rtx) *elt;
a9b77cd1
ZD
1757}
1758
726a989a 1759
529ff441
MM
1760/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1761 of a basic block where we just expanded the conditional at the end,
315adeda
MM
1762 possibly clean up the CFG and instruction sequence. LAST is the
1763 last instruction before the just emitted jump sequence. */
529ff441
MM
1764
1765static void
315adeda 1766maybe_cleanup_end_of_block (edge e, rtx last)
529ff441
MM
1767{
1768 /* Special case: when jumpif decides that the condition is
1769 trivial it emits an unconditional jump (and the necessary
1770 barrier). But we still have two edges, the fallthru one is
1771 wrong. purge_dead_edges would clean this up later. Unfortunately
1772 we have to insert insns (and split edges) before
1773 find_many_sub_basic_blocks and hence before purge_dead_edges.
1774 But splitting edges might create new blocks which depend on the
1775 fact that if there are two edges there's no barrier. So the
1776 barrier would get lost and verify_flow_info would ICE. Instead
1777 of auditing all edge splitters to care for the barrier (which
1778 normally isn't there in a cleaned CFG), fix it here. */
1779 if (BARRIER_P (get_last_insn ()))
1780 {
529ff441
MM
1781 rtx insn;
1782 remove_edge (e);
 1783 /* Now we have a single successor block; if we have insns to
1784 insert on the remaining edge we potentially will insert
1785 it at the end of this block (if the dest block isn't feasible)
1786 in order to avoid splitting the edge. This insertion will take
1787 place in front of the last jump. But we might have emitted
1788 multiple jumps (conditional and one unconditional) to the
1789 same destination. Inserting in front of the last one then
1790 is a problem. See PR 40021. We fix this by deleting all
1791 jumps except the last unconditional one. */
1792 insn = PREV_INSN (get_last_insn ());
1793 /* Make sure we have an unconditional jump. Otherwise we're
1794 confused. */
1795 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
315adeda 1796 for (insn = PREV_INSN (insn); insn != last;)
529ff441
MM
1797 {
1798 insn = PREV_INSN (insn);
1799 if (JUMP_P (NEXT_INSN (insn)))
90eb3e33 1800 {
8a269cb7 1801 if (!any_condjump_p (NEXT_INSN (insn)))
90eb3e33
JJ
1802 {
1803 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1804 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1805 }
1806 delete_insn (NEXT_INSN (insn));
1807 }
529ff441
MM
1808 }
1809 }
1810}
1811
726a989a 1812/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
80c7a9eb
RH
1813 Returns a new basic block if we've terminated the current basic
1814 block and created a new one. */
1815
1816static basic_block
726a989a 1817expand_gimple_cond (basic_block bb, gimple stmt)
80c7a9eb
RH
1818{
1819 basic_block new_bb, dest;
1820 edge new_edge;
1821 edge true_edge;
1822 edge false_edge;
b7211528 1823 rtx last2, last;
28ed065e
MM
1824 enum tree_code code;
1825 tree op0, op1;
1826
1827 code = gimple_cond_code (stmt);
1828 op0 = gimple_cond_lhs (stmt);
1829 op1 = gimple_cond_rhs (stmt);
1830 /* We're sometimes presented with such code:
1831 D.123_1 = x < y;
1832 if (D.123_1 != 0)
1833 ...
1834 This would expand to two comparisons which then later might
1835 be cleaned up by combine. But some pattern matchers like if-conversion
1836 work better when there's only one compare, so make up for this
 1837 here as a special exception if TER would have made the same change. */
1838 if (gimple_cond_single_var_p (stmt)
1839 && SA.values
1840 && TREE_CODE (op0) == SSA_NAME
1841 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1842 {
1843 gimple second = SSA_NAME_DEF_STMT (op0);
e83f4b68 1844 if (gimple_code (second) == GIMPLE_ASSIGN)
28ed065e 1845 {
e83f4b68
MM
1846 enum tree_code code2 = gimple_assign_rhs_code (second);
1847 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1848 {
1849 code = code2;
1850 op0 = gimple_assign_rhs1 (second);
1851 op1 = gimple_assign_rhs2 (second);
1852 }
1853 /* If jumps are cheap turn some more codes into
1854 jumpy sequences. */
1855 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1856 {
1857 if ((code2 == BIT_AND_EXPR
1858 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1859 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1860 || code2 == TRUTH_AND_EXPR)
1861 {
1862 code = TRUTH_ANDIF_EXPR;
1863 op0 = gimple_assign_rhs1 (second);
1864 op1 = gimple_assign_rhs2 (second);
1865 }
1866 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1867 {
1868 code = TRUTH_ORIF_EXPR;
1869 op0 = gimple_assign_rhs1 (second);
1870 op1 = gimple_assign_rhs2 (second);
1871 }
1872 }
28ed065e
MM
1873 }
1874 }
b7211528
SB
1875
1876 last2 = last = get_last_insn ();
80c7a9eb
RH
1877
1878 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
c82fee88
EB
1879 set_curr_insn_source_location (gimple_location (stmt));
1880 set_curr_insn_block (gimple_block (stmt));
80c7a9eb
RH
1881
1882 /* These flags have no purpose in RTL land. */
1883 true_edge->flags &= ~EDGE_TRUE_VALUE;
1884 false_edge->flags &= ~EDGE_FALSE_VALUE;
1885
1886 /* We can either have a pure conditional jump with one fallthru edge or
 1887 a two-way jump that needs to be decomposed into two basic blocks. */
a9b77cd1 1888 if (false_edge->dest == bb->next_bb)
80c7a9eb 1889 {
40e90eac
JJ
1890 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1891 true_edge->probability);
726a989a 1892 maybe_dump_rtl_for_gimple_stmt (stmt, last);
a9b77cd1 1893 if (true_edge->goto_locus)
7241571e
JJ
1894 {
1895 set_curr_insn_source_location (true_edge->goto_locus);
1896 set_curr_insn_block (true_edge->goto_block);
1897 true_edge->goto_locus = curr_insn_locator ();
1898 }
1899 true_edge->goto_block = NULL;
a9b77cd1 1900 false_edge->flags |= EDGE_FALLTHRU;
315adeda 1901 maybe_cleanup_end_of_block (false_edge, last);
80c7a9eb
RH
1902 return NULL;
1903 }
a9b77cd1 1904 if (true_edge->dest == bb->next_bb)
80c7a9eb 1905 {
40e90eac
JJ
1906 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1907 false_edge->probability);
726a989a 1908 maybe_dump_rtl_for_gimple_stmt (stmt, last);
a9b77cd1 1909 if (false_edge->goto_locus)
7241571e
JJ
1910 {
1911 set_curr_insn_source_location (false_edge->goto_locus);
1912 set_curr_insn_block (false_edge->goto_block);
1913 false_edge->goto_locus = curr_insn_locator ();
1914 }
1915 false_edge->goto_block = NULL;
a9b77cd1 1916 true_edge->flags |= EDGE_FALLTHRU;
315adeda 1917 maybe_cleanup_end_of_block (true_edge, last);
80c7a9eb
RH
1918 return NULL;
1919 }
80c7a9eb 1920
40e90eac
JJ
1921 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1922 true_edge->probability);
80c7a9eb 1923 last = get_last_insn ();
7241571e
JJ
1924 if (false_edge->goto_locus)
1925 {
1926 set_curr_insn_source_location (false_edge->goto_locus);
1927 set_curr_insn_block (false_edge->goto_block);
1928 false_edge->goto_locus = curr_insn_locator ();
1929 }
1930 false_edge->goto_block = NULL;
a9b77cd1 1931 emit_jump (label_rtx_for_bb (false_edge->dest));
80c7a9eb
RH
1932
1933 BB_END (bb) = last;
1934 if (BARRIER_P (BB_END (bb)))
1935 BB_END (bb) = PREV_INSN (BB_END (bb));
1936 update_bb_for_insn (bb);
1937
1938 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1939 dest = false_edge->dest;
1940 redirect_edge_succ (false_edge, new_bb);
1941 false_edge->flags |= EDGE_FALLTHRU;
1942 new_bb->count = false_edge->count;
1943 new_bb->frequency = EDGE_FREQUENCY (false_edge);
7d776ee2
RG
1944 if (current_loops && bb->loop_father)
1945 add_bb_to_loop (new_bb, bb->loop_father);
80c7a9eb
RH
1946 new_edge = make_edge (new_bb, dest, 0);
1947 new_edge->probability = REG_BR_PROB_BASE;
1948 new_edge->count = new_bb->count;
1949 if (BARRIER_P (BB_END (new_bb)))
1950 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1951 update_bb_for_insn (new_bb);
1952
726a989a 1953 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
c22cacf3 1954
7787b4aa
JJ
1955 if (true_edge->goto_locus)
1956 {
1957 set_curr_insn_source_location (true_edge->goto_locus);
1958 set_curr_insn_block (true_edge->goto_block);
1959 true_edge->goto_locus = curr_insn_locator ();
1960 }
1961 true_edge->goto_block = NULL;
1962
80c7a9eb
RH
1963 return new_bb;
1964}
1965
0a35513e
AH
1966/* Mark all calls that can have a transaction restart. */
1967
1968static void
1969mark_transaction_restart_calls (gimple stmt)
1970{
1971 struct tm_restart_node dummy;
1972 void **slot;
1973
1974 if (!cfun->gimple_df->tm_restart)
1975 return;
1976
1977 dummy.stmt = stmt;
1978 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
1979 if (slot)
1980 {
1981 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
1982 tree list = n->label_or_list;
1983 rtx insn;
1984
1985 for (insn = next_real_insn (get_last_insn ());
1986 !CALL_P (insn);
1987 insn = next_real_insn (insn))
1988 continue;
1989
1990 if (TREE_CODE (list) == LABEL_DECL)
1991 add_reg_note (insn, REG_TM, label_rtx (list));
1992 else
1993 for (; list ; list = TREE_CHAIN (list))
1994 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
1995 }
1996}
1997
28ed065e
MM
1998/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1999 statement STMT. */
2000
2001static void
2002expand_call_stmt (gimple stmt)
2003{
25583c4f 2004 tree exp, decl, lhs;
e23817b3 2005 bool builtin_p;
e7925582 2006 size_t i;
28ed065e 2007
25583c4f
RS
2008 if (gimple_call_internal_p (stmt))
2009 {
2010 expand_internal_call (stmt);
2011 return;
2012 }
2013
28ed065e
MM
2014 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2015
2016 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
e23817b3
RG
2017 decl = gimple_call_fndecl (stmt);
2018 builtin_p = decl && DECL_BUILT_IN (decl);
2019
e7925582
EB
2020 /* If this is not a builtin function, the function type through which the
2021 call is made may be different from the type of the function. */
2022 if (!builtin_p)
2023 CALL_EXPR_FN (exp)
b25aa0e8
EB
2024 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2025 CALL_EXPR_FN (exp));
e7925582 2026
28ed065e
MM
2027 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2028 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2029
2030 for (i = 0; i < gimple_call_num_args (stmt); i++)
e23817b3
RG
2031 {
2032 tree arg = gimple_call_arg (stmt, i);
2033 gimple def;
2034 /* TER addresses into arguments of builtin functions so we have a
2035 chance to infer more correct alignment information. See PR39954. */
2036 if (builtin_p
2037 && TREE_CODE (arg) == SSA_NAME
2038 && (def = get_gimple_for_ssa_name (arg))
2039 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2040 arg = gimple_assign_rhs1 (def);
2041 CALL_EXPR_ARG (exp, i) = arg;
2042 }
28ed065e 2043
93f28ca7 2044 if (gimple_has_side_effects (stmt))
28ed065e
MM
2045 TREE_SIDE_EFFECTS (exp) = 1;
2046
93f28ca7 2047 if (gimple_call_nothrow_p (stmt))
28ed065e
MM
2048 TREE_NOTHROW (exp) = 1;
2049
2050 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2051 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
63d2a353
MM
2052 if (decl
2053 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13e49da9
TV
2054 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2055 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
63d2a353
MM
2056 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2057 else
2058 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
28ed065e
MM
2059 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2060 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2061 TREE_BLOCK (exp) = gimple_block (stmt);
2062
ddb555ed
JJ
2063 /* Ensure RTL is created for debug args. */
2064 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2065 {
2066 VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
2067 unsigned int ix;
2068 tree dtemp;
2069
2070 if (debug_args)
2071 for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
2072 {
2073 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2074 expand_debug_expr (dtemp);
2075 }
2076 }
2077
25583c4f 2078 lhs = gimple_call_lhs (stmt);
28ed065e
MM
2079 if (lhs)
2080 expand_assignment (lhs, exp, false);
2081 else
2082 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
0a35513e
AH
2083
2084 mark_transaction_restart_calls (stmt);
28ed065e
MM
2085}
2086
2087/* A subroutine of expand_gimple_stmt, expanding one gimple statement
2088 STMT that doesn't require special handling for outgoing edges. That
2089 is no tailcalls and no GIMPLE_COND. */
2090
2091static void
2092expand_gimple_stmt_1 (gimple stmt)
2093{
2094 tree op0;
c82fee88
EB
2095
2096 set_curr_insn_source_location (gimple_location (stmt));
2097 set_curr_insn_block (gimple_block (stmt));
2098
28ed065e
MM
2099 switch (gimple_code (stmt))
2100 {
2101 case GIMPLE_GOTO:
2102 op0 = gimple_goto_dest (stmt);
2103 if (TREE_CODE (op0) == LABEL_DECL)
2104 expand_goto (op0);
2105 else
2106 expand_computed_goto (op0);
2107 break;
2108 case GIMPLE_LABEL:
2109 expand_label (gimple_label_label (stmt));
2110 break;
2111 case GIMPLE_NOP:
2112 case GIMPLE_PREDICT:
2113 break;
28ed065e
MM
2114 case GIMPLE_SWITCH:
2115 expand_case (stmt);
2116 break;
2117 case GIMPLE_ASM:
2118 expand_asm_stmt (stmt);
2119 break;
2120 case GIMPLE_CALL:
2121 expand_call_stmt (stmt);
2122 break;
2123
2124 case GIMPLE_RETURN:
2125 op0 = gimple_return_retval (stmt);
2126
2127 if (op0 && op0 != error_mark_node)
2128 {
2129 tree result = DECL_RESULT (current_function_decl);
2130
2131 /* If we are not returning the current function's RESULT_DECL,
2132 build an assignment to it. */
2133 if (op0 != result)
2134 {
2135 /* I believe that a function's RESULT_DECL is unique. */
2136 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2137
 2138 /* ??? We'd like to simply use expand_assignment here,
2139 but this fails if the value is of BLKmode but the return
2140 decl is a register. expand_return has special handling
2141 for this combination, which eventually should move
2142 to common code. See comments there. Until then, let's
2143 build a modify expression :-/ */
2144 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2145 result, op0);
2146 }
2147 }
2148 if (!op0)
2149 expand_null_return ();
2150 else
2151 expand_return (op0);
2152 break;
2153
2154 case GIMPLE_ASSIGN:
2155 {
2156 tree lhs = gimple_assign_lhs (stmt);
2157
2158 /* Tree expand used to fiddle with |= and &= of two bitfield
2159 COMPONENT_REFs here. This can't happen with gimple, the LHS
2160 of binary assigns must be a gimple reg. */
2161
2162 if (TREE_CODE (lhs) != SSA_NAME
2163 || get_gimple_rhs_class (gimple_expr_code (stmt))
2164 == GIMPLE_SINGLE_RHS)
2165 {
2166 tree rhs = gimple_assign_rhs1 (stmt);
2167 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2168 == GIMPLE_SINGLE_RHS);
2169 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2170 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
47598145
MM
2171 if (TREE_CLOBBER_P (rhs))
2172 /* This is a clobber to mark the going out of scope for
2173 this LHS. */
2174 ;
2175 else
2176 expand_assignment (lhs, rhs,
2177 gimple_assign_nontemporal_move_p (stmt));
28ed065e
MM
2178 }
2179 else
2180 {
2181 rtx target, temp;
2182 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2183 struct separate_ops ops;
2184 bool promoted = false;
2185
2186 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2187 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2188 promoted = true;
2189
2190 ops.code = gimple_assign_rhs_code (stmt);
2191 ops.type = TREE_TYPE (lhs);
2192 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2193 {
0354c0c7
BS
2194 case GIMPLE_TERNARY_RHS:
2195 ops.op2 = gimple_assign_rhs3 (stmt);
2196 /* Fallthru */
28ed065e
MM
2197 case GIMPLE_BINARY_RHS:
2198 ops.op1 = gimple_assign_rhs2 (stmt);
2199 /* Fallthru */
2200 case GIMPLE_UNARY_RHS:
2201 ops.op0 = gimple_assign_rhs1 (stmt);
2202 break;
2203 default:
2204 gcc_unreachable ();
2205 }
2206 ops.location = gimple_location (stmt);
2207
2208 /* If we want to use a nontemporal store, force the value to
2209 register first. If we store into a promoted register,
2210 don't directly expand to target. */
2211 temp = nontemporal || promoted ? NULL_RTX : target;
2212 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2213 EXPAND_NORMAL);
2214
2215 if (temp == target)
2216 ;
2217 else if (promoted)
2218 {
4e18a7d4 2219 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
28ed065e
MM
2220 /* If TEMP is a VOIDmode constant, use convert_modes to make
2221 sure that we properly convert it. */
2222 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2223 {
2224 temp = convert_modes (GET_MODE (target),
2225 TYPE_MODE (ops.type),
4e18a7d4 2226 temp, unsignedp);
28ed065e 2227 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4e18a7d4 2228 GET_MODE (target), temp, unsignedp);
28ed065e
MM
2229 }
2230
4e18a7d4 2231 convert_move (SUBREG_REG (target), temp, unsignedp);
28ed065e
MM
2232 }
2233 else if (nontemporal && emit_storent_insn (target, temp))
2234 ;
2235 else
2236 {
2237 temp = force_operand (temp, target);
2238 if (temp != target)
2239 emit_move_insn (target, temp);
2240 }
2241 }
2242 }
2243 break;
2244
2245 default:
2246 gcc_unreachable ();
2247 }
2248}
2249
2250/* Expand one gimple statement STMT and return the last RTL instruction
2251 before any of the newly generated ones.
2252
2253 In addition to generating the necessary RTL instructions this also
2254 sets REG_EH_REGION notes if necessary and sets the current source
2255 location for diagnostics. */
2256
2257static rtx
2258expand_gimple_stmt (gimple stmt)
2259{
28ed065e 2260 location_t saved_location = input_location;
c82fee88
EB
2261 rtx last = get_last_insn ();
2262 int lp_nr;
28ed065e 2263
28ed065e
MM
2264 gcc_assert (cfun);
2265
c82fee88
EB
2266 /* We need to save and restore the current source location so that errors
2267 discovered during expansion are emitted with the right location. But
2268 it would be better if the diagnostic routines used the source location
2269 embedded in the tree nodes rather than globals. */
28ed065e 2270 if (gimple_has_location (stmt))
c82fee88 2271 input_location = gimple_location (stmt);
28ed065e
MM
2272
2273 expand_gimple_stmt_1 (stmt);
c82fee88 2274
28ed065e
MM
2275 /* Free any temporaries used to evaluate this statement. */
2276 free_temp_slots ();
2277
2278 input_location = saved_location;
2279
2280 /* Mark all insns that may trap. */
1d65f45c
RH
2281 lp_nr = lookup_stmt_eh_lp (stmt);
2282 if (lp_nr)
28ed065e
MM
2283 {
2284 rtx insn;
2285 for (insn = next_real_insn (last); insn;
2286 insn = next_real_insn (insn))
2287 {
2288 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2289 /* If we want exceptions for non-call insns, any
2290 may_trap_p instruction may throw. */
2291 && GET_CODE (PATTERN (insn)) != CLOBBER
2292 && GET_CODE (PATTERN (insn)) != USE
1d65f45c
RH
2293 && insn_could_throw_p (insn))
2294 make_reg_eh_region_note (insn, 0, lp_nr);
28ed065e
MM
2295 }
2296 }
2297
2298 return last;
2299}
2300
726a989a 2301/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
224e770b
RH
2302 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2303 generated a tail call (something that might be denied by the ABI
cea49550
RH
2304 rules governing the call; see calls.c).
2305
2306 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2307 can still reach the rest of BB. The case here is __builtin_sqrt,
2308 where the NaN result goes through the external function (with a
2309 tailcall) and the normal result happens via a sqrt instruction. */
80c7a9eb
RH
2310
2311static basic_block
726a989a 2312expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
80c7a9eb 2313{
b7211528 2314 rtx last2, last;
224e770b 2315 edge e;
628f6a4e 2316 edge_iterator ei;
224e770b
RH
2317 int probability;
2318 gcov_type count;
80c7a9eb 2319
28ed065e 2320 last2 = last = expand_gimple_stmt (stmt);
80c7a9eb
RH
2321
2322 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
2323 if (CALL_P (last) && SIBLING_CALL_P (last))
2324 goto found;
80c7a9eb 2325
726a989a 2326 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2327
cea49550 2328 *can_fallthru = true;
224e770b 2329 return NULL;
80c7a9eb 2330
224e770b
RH
2331 found:
2332 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2333 Any instructions emitted here are about to be deleted. */
2334 do_pending_stack_adjust ();
2335
2336 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2337 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2338 EH or abnormal edges, we shouldn't have created a tail call in
2339 the first place. So it seems to me we should just be removing
2340 all edges here, or redirecting the existing fallthru edge to
2341 the exit block. */
2342
224e770b
RH
2343 probability = 0;
2344 count = 0;
224e770b 2345
628f6a4e
BE
2346 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2347 {
224e770b
RH
2348 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2349 {
2350 if (e->dest != EXIT_BLOCK_PTR)
80c7a9eb 2351 {
224e770b
RH
2352 e->dest->count -= e->count;
2353 e->dest->frequency -= EDGE_FREQUENCY (e);
2354 if (e->dest->count < 0)
c22cacf3 2355 e->dest->count = 0;
224e770b 2356 if (e->dest->frequency < 0)
c22cacf3 2357 e->dest->frequency = 0;
80c7a9eb 2358 }
224e770b
RH
2359 count += e->count;
2360 probability += e->probability;
2361 remove_edge (e);
80c7a9eb 2362 }
628f6a4e
BE
2363 else
2364 ei_next (&ei);
80c7a9eb
RH
2365 }
2366
224e770b
RH
2367 /* This is somewhat ugly: the call_expr expander often emits instructions
2368 after the sibcall (to perform the function return). These confuse the
12eff7b7 2369 find_many_sub_basic_blocks code, so we need to get rid of these. */
224e770b 2370 last = NEXT_INSN (last);
341c100f 2371 gcc_assert (BARRIER_P (last));
cea49550
RH
2372
2373 *can_fallthru = false;
224e770b
RH
2374 while (NEXT_INSN (last))
2375 {
 2376 /* For instance an sqrt builtin expander expands an if with a
 2377 sibcall in the then branch and a label for the else branch. */
2378 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
2379 {
2380 *can_fallthru = true;
2381 break;
2382 }
224e770b
RH
2383 delete_insn (NEXT_INSN (last));
2384 }
2385
2386 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2387 e->probability += probability;
2388 e->count += count;
2389 BB_END (bb) = last;
2390 update_bb_for_insn (bb);
2391
2392 if (NEXT_INSN (last))
2393 {
2394 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2395
2396 last = BB_END (bb);
2397 if (BARRIER_P (last))
2398 BB_END (bb) = PREV_INSN (last);
2399 }
2400
726a989a 2401 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2402
224e770b 2403 return bb;
80c7a9eb
RH
2404}
2405
b5b8b0ac
AO
2406/* Return the difference between the floor and the truncated result of
2407 a signed division by OP1 with remainder MOD. */
2408static rtx
2409floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2410{
2411 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2412 return gen_rtx_IF_THEN_ELSE
2413 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2414 gen_rtx_IF_THEN_ELSE
2415 (mode, gen_rtx_LT (BImode,
2416 gen_rtx_DIV (mode, op1, mod),
2417 const0_rtx),
2418 constm1_rtx, const0_rtx),
2419 const0_rtx);
2420}
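/* Worked example (editorial) of the floor adjustment, using C truncating
   division: -7 / 2 == -3 with remainder -1, while floor(-7 / 2) == -4.
   Here MOD != 0 and OP1 / MOD == 2 / -1 == -2 < 0, so the expression above
   yields -1, and -3 + -1 == -4 as required.  For 7 / 2 the quotient of
   OP1 / MOD is positive, so the adjustment is 0.  */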
2421
2422/* Return the difference between the ceil and the truncated result of
2423 a signed division by OP1 with remainder MOD. */
2424static rtx
2425ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2426{
2427 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2428 return gen_rtx_IF_THEN_ELSE
2429 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2430 gen_rtx_IF_THEN_ELSE
2431 (mode, gen_rtx_GT (BImode,
2432 gen_rtx_DIV (mode, op1, mod),
2433 const0_rtx),
2434 const1_rtx, const0_rtx),
2435 const0_rtx);
2436}
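/* Worked example (editorial) of the ceiling adjustment: 7 / 2 truncates to
   3 with remainder 1, while ceil(7 / 2) == 4.  MOD != 0 and
   OP1 / MOD == 2 > 0, so the adjustment is +1 and 3 + 1 == 4.  For -7 / 2
   the truncated result -3 already equals the ceiling, and
   OP1 / MOD == 2 / -1 < 0 gives 0.  */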
2437
2438/* Return the difference between the ceil and the truncated result of
2439 an unsigned division by OP1 with remainder MOD. */
2440static rtx
2441ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2442{
2443 /* (mod != 0 ? 1 : 0) */
2444 return gen_rtx_IF_THEN_ELSE
2445 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2446 const1_rtx, const0_rtx);
2447}
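/* Worked example (editorial): 7u / 3u truncates to 2 with remainder 1,
   while the ceiling is 3; the remainder is nonzero, so the adjustment is
   +1.  When the division is exact the remainder is 0 and no adjustment is
   made.  */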
2448
2449/* Return the difference between the rounded and the truncated result
2450 of a signed division by OP1 with remainder MOD. Halfway cases are
2451 rounded away from zero, rather than to the nearest even number. */
2452static rtx
2453round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2454{
2455 /* (abs (mod) >= abs (op1) - abs (mod)
2456 ? (op1 / mod > 0 ? 1 : -1)
2457 : 0) */
2458 return gen_rtx_IF_THEN_ELSE
2459 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2460 gen_rtx_MINUS (mode,
2461 gen_rtx_ABS (mode, op1),
2462 gen_rtx_ABS (mode, mod))),
2463 gen_rtx_IF_THEN_ELSE
2464 (mode, gen_rtx_GT (BImode,
2465 gen_rtx_DIV (mode, op1, mod),
2466 const0_rtx),
2467 const1_rtx, constm1_rtx),
2468 const0_rtx);
2469}
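/* Worked example (editorial) of rounding halfway cases away from zero:
   7 / 2 truncates to 3 with remainder 1; abs(mod) == 1 >=
   abs(op1) - abs(mod) == 1 and OP1 / MOD == 2 > 0, so the adjustment is +1
   and the result is 4.  For -7 / 2 the same test holds but
   OP1 / MOD == -2 < 0, giving -1 and thus -3 + -1 == -4.  For 7 / 3 the
   remainder 1 is below 3 - 1 == 2, so the adjustment is 0.  */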
2470
2471/* Return the difference between the rounded and the truncated result
 2472 of an unsigned division by OP1 with remainder MOD. Halfway cases
2473 are rounded away from zero, rather than to the nearest even
2474 number. */
2475static rtx
2476round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2477{
2478 /* (mod >= op1 - mod ? 1 : 0) */
2479 return gen_rtx_IF_THEN_ELSE
2480 (mode, gen_rtx_GE (BImode, mod,
2481 gen_rtx_MINUS (mode, op1, mod)),
2482 const1_rtx, const0_rtx);
2483}
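/* Worked example (editorial): 7u / 2u truncates to 3 with remainder 1;
   mod >= op1 - mod (1 >= 1), so the adjustment is +1 and the rounded
   result is 4.  For 7u / 3u the remainder 1 is less than 3 - 1 == 2, so
   the truncated quotient 2 is already the rounded result.  */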
2484
dda2da58
AO
 2485/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2486 any rtl. */
2487
2488static rtx
f61c6f34
JJ
2489convert_debug_memory_address (enum machine_mode mode, rtx x,
2490 addr_space_t as)
dda2da58
AO
2491{
2492 enum machine_mode xmode = GET_MODE (x);
2493
2494#ifndef POINTERS_EXTEND_UNSIGNED
f61c6f34
JJ
2495 gcc_assert (mode == Pmode
2496 || mode == targetm.addr_space.address_mode (as));
dda2da58
AO
2497 gcc_assert (xmode == mode || xmode == VOIDmode);
2498#else
f61c6f34 2499 rtx temp;
f61c6f34 2500
639d4bb8 2501 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
dda2da58
AO
2502
2503 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2504 return x;
2505
69660a70 2506 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
dda2da58
AO
2507 x = simplify_gen_subreg (mode, x, xmode,
2508 subreg_lowpart_offset
2509 (mode, xmode));
2510 else if (POINTERS_EXTEND_UNSIGNED > 0)
2511 x = gen_rtx_ZERO_EXTEND (mode, x);
2512 else if (!POINTERS_EXTEND_UNSIGNED)
2513 x = gen_rtx_SIGN_EXTEND (mode, x);
2514 else
f61c6f34
JJ
2515 {
2516 switch (GET_CODE (x))
2517 {
2518 case SUBREG:
2519 if ((SUBREG_PROMOTED_VAR_P (x)
2520 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2521 || (GET_CODE (SUBREG_REG (x)) == PLUS
2522 && REG_P (XEXP (SUBREG_REG (x), 0))
2523 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2524 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2525 && GET_MODE (SUBREG_REG (x)) == mode)
2526 return SUBREG_REG (x);
2527 break;
2528 case LABEL_REF:
2529 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2530 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2531 return temp;
2532 case SYMBOL_REF:
2533 temp = shallow_copy_rtx (x);
2534 PUT_MODE (temp, mode);
2535 return temp;
2536 case CONST:
2537 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2538 if (temp)
2539 temp = gen_rtx_CONST (mode, temp);
2540 return temp;
2541 case PLUS:
2542 case MINUS:
2543 if (CONST_INT_P (XEXP (x, 1)))
2544 {
2545 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2546 if (temp)
2547 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2548 }
2549 break;
2550 default:
2551 break;
2552 }
2553 /* Don't know how to express ptr_extend as operation in debug info. */
2554 return NULL;
2555 }
dda2da58
AO
2556#endif /* POINTERS_EXTEND_UNSIGNED */
2557
2558 return x;
2559}
2560
12c5ffe5
EB
2561/* Return an RTX equivalent to the value of the parameter DECL. */
2562
2563static rtx
2564expand_debug_parm_decl (tree decl)
2565{
2566 rtx incoming = DECL_INCOMING_RTL (decl);
2567
2568 if (incoming
2569 && GET_MODE (incoming) != BLKmode
2570 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2571 || (MEM_P (incoming)
2572 && REG_P (XEXP (incoming, 0))
2573 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2574 {
2575 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2576
2577#ifdef HAVE_window_save
2578 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2579 If the target machine has an explicit window save instruction, the
2580 actual entry value is the corresponding OUTGOING_REGNO instead. */
2581 if (REG_P (incoming)
2582 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2583 incoming
2584 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2585 OUTGOING_REGNO (REGNO (incoming)), 0);
2586 else if (MEM_P (incoming))
2587 {
2588 rtx reg = XEXP (incoming, 0);
2589 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2590 {
2591 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2592 incoming = replace_equiv_address_nv (incoming, reg);
2593 }
2594 }
2595#endif
2596
2597 ENTRY_VALUE_EXP (rtl) = incoming;
2598 return rtl;
2599 }
2600
2601 if (incoming
2602 && GET_MODE (incoming) != BLKmode
2603 && !TREE_ADDRESSABLE (decl)
2604 && MEM_P (incoming)
2605 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2606 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2607 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2608 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2609 return incoming;
2610
2611 return NULL_RTX;
2612}
2613
2614/* Return an RTX equivalent to the value of the tree expression EXP. */
b5b8b0ac
AO
2615
2616static rtx
2617expand_debug_expr (tree exp)
2618{
2619 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2620 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2ba172e0 2621 enum machine_mode inner_mode = VOIDmode;
b5b8b0ac 2622 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
09e881c9 2623 addr_space_t as;
b5b8b0ac
AO
2624
2625 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2626 {
2627 case tcc_expression:
2628 switch (TREE_CODE (exp))
2629 {
2630 case COND_EXPR:
7ece48b1 2631 case DOT_PROD_EXPR:
0354c0c7
BS
2632 case WIDEN_MULT_PLUS_EXPR:
2633 case WIDEN_MULT_MINUS_EXPR:
0f59b812 2634 case FMA_EXPR:
b5b8b0ac
AO
2635 goto ternary;
2636
2637 case TRUTH_ANDIF_EXPR:
2638 case TRUTH_ORIF_EXPR:
2639 case TRUTH_AND_EXPR:
2640 case TRUTH_OR_EXPR:
2641 case TRUTH_XOR_EXPR:
2642 goto binary;
2643
2644 case TRUTH_NOT_EXPR:
2645 goto unary;
2646
2647 default:
2648 break;
2649 }
2650 break;
2651
2652 ternary:
2653 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2654 if (!op2)
2655 return NULL_RTX;
2656 /* Fall through. */
2657
2658 binary:
2659 case tcc_binary:
2660 case tcc_comparison:
2661 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2662 if (!op1)
2663 return NULL_RTX;
2664 /* Fall through. */
2665
2666 unary:
2667 case tcc_unary:
2ba172e0 2668 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
2669 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2670 if (!op0)
2671 return NULL_RTX;
2672 break;
2673
2674 case tcc_type:
2675 case tcc_statement:
2676 gcc_unreachable ();
2677
2678 case tcc_constant:
2679 case tcc_exceptional:
2680 case tcc_declaration:
2681 case tcc_reference:
2682 case tcc_vl_exp:
2683 break;
2684 }
2685
2686 switch (TREE_CODE (exp))
2687 {
2688 case STRING_CST:
2689 if (!lookup_constant_def (exp))
2690 {
e1b243a8
JJ
2691 if (strlen (TREE_STRING_POINTER (exp)) + 1
2692 != (size_t) TREE_STRING_LENGTH (exp))
2693 return NULL_RTX;
b5b8b0ac
AO
2694 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2695 op0 = gen_rtx_MEM (BLKmode, op0);
2696 set_mem_attributes (op0, exp, 0);
2697 return op0;
2698 }
2699 /* Fall through... */
2700
2701 case INTEGER_CST:
2702 case REAL_CST:
2703 case FIXED_CST:
2704 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2705 return op0;
2706
2707 case COMPLEX_CST:
2708 gcc_assert (COMPLEX_MODE_P (mode));
2709 op0 = expand_debug_expr (TREE_REALPART (exp));
b5b8b0ac 2710 op1 = expand_debug_expr (TREE_IMAGPART (exp));
b5b8b0ac
AO
2711 return gen_rtx_CONCAT (mode, op0, op1);
2712
0ca5af51
AO
2713 case DEBUG_EXPR_DECL:
2714 op0 = DECL_RTL_IF_SET (exp);
2715
2716 if (op0)
2717 return op0;
2718
2719 op0 = gen_rtx_DEBUG_EXPR (mode);
e4fb38bd 2720 DEBUG_EXPR_TREE_DECL (op0) = exp;
0ca5af51
AO
2721 SET_DECL_RTL (exp, op0);
2722
2723 return op0;
2724
b5b8b0ac
AO
2725 case VAR_DECL:
2726 case PARM_DECL:
2727 case FUNCTION_DECL:
2728 case LABEL_DECL:
2729 case CONST_DECL:
2730 case RESULT_DECL:
2731 op0 = DECL_RTL_IF_SET (exp);
2732
2733 /* This decl was probably optimized away. */
2734 if (!op0)
e1b243a8
JJ
2735 {
2736 if (TREE_CODE (exp) != VAR_DECL
2737 || DECL_EXTERNAL (exp)
2738 || !TREE_STATIC (exp)
2739 || !DECL_NAME (exp)
0fba566c 2740 || DECL_HARD_REGISTER (exp)
7d5fc814 2741 || DECL_IN_CONSTANT_POOL (exp)
0fba566c 2742 || mode == VOIDmode)
e1b243a8
JJ
2743 return NULL;
2744
b1aa0655 2745 op0 = make_decl_rtl_for_debug (exp);
e1b243a8
JJ
2746 if (!MEM_P (op0)
2747 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2748 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2749 return NULL;
2750 }
2751 else
2752 op0 = copy_rtx (op0);
b5b8b0ac 2753
06796564
JJ
2754 if (GET_MODE (op0) == BLKmode
2755 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2756 below would ICE. While it is likely a FE bug,
2757 try to be robust here. See PR43166. */
132b4e82
JJ
2758 || mode == BLKmode
2759 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
b5b8b0ac
AO
2760 {
2761 gcc_assert (MEM_P (op0));
2762 op0 = adjust_address_nv (op0, mode, 0);
2763 return op0;
2764 }
2765
2766 /* Fall through. */
2767
2768 adjust_mode:
2769 case PAREN_EXPR:
2770 case NOP_EXPR:
2771 case CONVERT_EXPR:
2772 {
2ba172e0 2773 inner_mode = GET_MODE (op0);
b5b8b0ac
AO
2774
2775 if (mode == inner_mode)
2776 return op0;
2777
2778 if (inner_mode == VOIDmode)
2779 {
2a8e30fb
MM
2780 if (TREE_CODE (exp) == SSA_NAME)
2781 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2782 else
2783 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
2784 if (mode == inner_mode)
2785 return op0;
2786 }
2787
2788 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2789 {
2790 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2791 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2792 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2793 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2794 else
2795 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2796 }
2797 else if (FLOAT_MODE_P (mode))
2798 {
2a8e30fb 2799 gcc_assert (TREE_CODE (exp) != SSA_NAME);
b5b8b0ac
AO
2800 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2801 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2802 else
2803 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2804 }
2805 else if (FLOAT_MODE_P (inner_mode))
2806 {
2807 if (unsignedp)
2808 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2809 else
2810 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2811 }
2812 else if (CONSTANT_P (op0)
69660a70 2813 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
b5b8b0ac
AO
2814 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2815 subreg_lowpart_offset (mode,
2816 inner_mode));
1b47fe3f
JJ
2817 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2818 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2819 : unsignedp)
2ba172e0 2820 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
b5b8b0ac 2821 else
2ba172e0 2822 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
b5b8b0ac
AO
2823
2824 return op0;
2825 }
2826
70f34814 2827 case MEM_REF:
71f3a3f5
JJ
2828 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2829 {
2830 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2831 TREE_OPERAND (exp, 0),
2832 TREE_OPERAND (exp, 1));
2833 if (newexp)
2834 return expand_debug_expr (newexp);
2835 }
2836 /* FALLTHROUGH */
b5b8b0ac 2837 case INDIRECT_REF:
b5b8b0ac
AO
2838 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2839 if (!op0)
2840 return NULL;
2841
cb115041
JJ
2842 if (TREE_CODE (exp) == MEM_REF)
2843 {
583ac69c
JJ
2844 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2845 || (GET_CODE (op0) == PLUS
2846 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2847 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2848 Instead just use get_inner_reference. */
2849 goto component_ref;
2850
cb115041
JJ
2851 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2852 if (!op1 || !CONST_INT_P (op1))
2853 return NULL;
2854
2855 op0 = plus_constant (op0, INTVAL (op1));
2856 }
2857
09e881c9 2858 if (POINTER_TYPE_P (TREE_TYPE (exp)))
75421dcd 2859 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
09e881c9 2860 else
75421dcd 2861 as = ADDR_SPACE_GENERIC;
b5b8b0ac 2862
f61c6f34
JJ
2863 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2864 op0, as);
2865 if (op0 == NULL_RTX)
2866 return NULL;
b5b8b0ac 2867
f61c6f34 2868 op0 = gen_rtx_MEM (mode, op0);
b5b8b0ac 2869 set_mem_attributes (op0, exp, 0);
71f3a3f5
JJ
2870 if (TREE_CODE (exp) == MEM_REF
2871 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2872 set_mem_expr (op0, NULL_TREE);
09e881c9 2873 set_mem_addr_space (op0, as);
b5b8b0ac
AO
2874
2875 return op0;
2876
2877 case TARGET_MEM_REF:
4d948885
RG
2878 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2879 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
b5b8b0ac
AO
2880 return NULL;
2881
2882 op0 = expand_debug_expr
4e25ca6b 2883 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
b5b8b0ac
AO
2884 if (!op0)
2885 return NULL;
2886
f61c6f34
JJ
2887 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2888 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2889 else
2890 as = ADDR_SPACE_GENERIC;
2891
2892 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2893 op0, as);
2894 if (op0 == NULL_RTX)
2895 return NULL;
b5b8b0ac
AO
2896
2897 op0 = gen_rtx_MEM (mode, op0);
2898
2899 set_mem_attributes (op0, exp, 0);
09e881c9 2900 set_mem_addr_space (op0, as);
b5b8b0ac
AO
2901
2902 return op0;
2903
583ac69c 2904 component_ref:
b5b8b0ac
AO
2905 case ARRAY_REF:
2906 case ARRAY_RANGE_REF:
2907 case COMPONENT_REF:
2908 case BIT_FIELD_REF:
2909 case REALPART_EXPR:
2910 case IMAGPART_EXPR:
2911 case VIEW_CONVERT_EXPR:
2912 {
2913 enum machine_mode mode1;
2914 HOST_WIDE_INT bitsize, bitpos;
2915 tree offset;
2916 int volatilep = 0;
2917 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2918 &mode1, &unsignedp, &volatilep, false);
2919 rtx orig_op0;
2920
4f2a9af8
JJ
2921 if (bitsize == 0)
2922 return NULL;
2923
b5b8b0ac
AO
2924 orig_op0 = op0 = expand_debug_expr (tem);
2925
2926 if (!op0)
2927 return NULL;
2928
2929 if (offset)
2930 {
dda2da58
AO
2931 enum machine_mode addrmode, offmode;
2932
aa847cc8
JJ
2933 if (!MEM_P (op0))
2934 return NULL;
b5b8b0ac 2935
dda2da58
AO
2936 op0 = XEXP (op0, 0);
2937 addrmode = GET_MODE (op0);
2938 if (addrmode == VOIDmode)
2939 addrmode = Pmode;
2940
b5b8b0ac
AO
2941 op1 = expand_debug_expr (offset);
2942 if (!op1)
2943 return NULL;
2944
dda2da58
AO
2945 offmode = GET_MODE (op1);
2946 if (offmode == VOIDmode)
2947 offmode = TYPE_MODE (TREE_TYPE (offset));
2948
2949 if (addrmode != offmode)
2950 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2951 subreg_lowpart_offset (addrmode,
2952 offmode));
2953
2954 /* Don't use offset_address here, we don't need a
2955 recognizable address, and we don't want to generate
2956 code. */
2ba172e0
JJ
2957 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2958 op0, op1));
b5b8b0ac
AO
2959 }
2960
2961 if (MEM_P (op0))
2962 {
4f2a9af8
JJ
2963 if (mode1 == VOIDmode)
2964 /* Bitfield. */
2965 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
b5b8b0ac
AO
2966 if (bitpos >= BITS_PER_UNIT)
2967 {
2968 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2969 bitpos %= BITS_PER_UNIT;
2970 }
2971 else if (bitpos < 0)
2972 {
4f2a9af8
JJ
2973 HOST_WIDE_INT units
2974 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
b5b8b0ac
AO
2975 op0 = adjust_address_nv (op0, mode1, units);
2976 bitpos += units * BITS_PER_UNIT;
2977 }
2978 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2979 op0 = adjust_address_nv (op0, mode, 0);
2980 else if (GET_MODE (op0) != mode1)
2981 op0 = adjust_address_nv (op0, mode1, 0);
2982 else
2983 op0 = copy_rtx (op0);
2984 if (op0 == orig_op0)
2985 op0 = shallow_copy_rtx (op0);
2986 set_mem_attributes (op0, exp, 0);
2987 }
2988
2989 if (bitpos == 0 && mode == GET_MODE (op0))
2990 return op0;
2991
2d3fc6aa
JJ
2992 if (bitpos < 0)
2993 return NULL;
2994
88c04a5d
JJ
2995 if (GET_MODE (op0) == BLKmode)
2996 return NULL;
2997
b5b8b0ac
AO
2998 if ((bitpos % BITS_PER_UNIT) == 0
2999 && bitsize == GET_MODE_BITSIZE (mode1))
3000 {
3001 enum machine_mode opmode = GET_MODE (op0);
3002
b5b8b0ac 3003 if (opmode == VOIDmode)
9712cba0 3004 opmode = TYPE_MODE (TREE_TYPE (tem));
b5b8b0ac
AO
3005
3006 /* This condition may hold if we're expanding the address
3007 right past the end of an array that turned out not to
3008 be addressable (i.e., the address was only computed in
3009 debug stmts). The gen_subreg below would rightfully
3010 crash, and the address doesn't really exist, so just
3011 drop it. */
3012 if (bitpos >= GET_MODE_BITSIZE (opmode))
3013 return NULL;
3014
7d5d39bb
JJ
3015 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3016 return simplify_gen_subreg (mode, op0, opmode,
3017 bitpos / BITS_PER_UNIT);
b5b8b0ac
AO
3018 }
3019
3020 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3021 && TYPE_UNSIGNED (TREE_TYPE (exp))
3022 ? SIGN_EXTRACT
3023 : ZERO_EXTRACT, mode,
3024 GET_MODE (op0) != VOIDmode
9712cba0
JJ
3025 ? GET_MODE (op0)
3026 : TYPE_MODE (TREE_TYPE (tem)),
b5b8b0ac
AO
3027 op0, GEN_INT (bitsize), GEN_INT (bitpos));
3028 }
3029
b5b8b0ac 3030 case ABS_EXPR:
2ba172e0 3031 return simplify_gen_unary (ABS, mode, op0, mode);
b5b8b0ac
AO
3032
3033 case NEGATE_EXPR:
2ba172e0 3034 return simplify_gen_unary (NEG, mode, op0, mode);
b5b8b0ac
AO
3035
3036 case BIT_NOT_EXPR:
2ba172e0 3037 return simplify_gen_unary (NOT, mode, op0, mode);
b5b8b0ac
AO
3038
3039 case FLOAT_EXPR:
2ba172e0
JJ
3040 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3041 0)))
3042 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3043 inner_mode);
b5b8b0ac
AO
3044
3045 case FIX_TRUNC_EXPR:
2ba172e0
JJ
3046 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3047 inner_mode);
b5b8b0ac
AO
3048
3049 case POINTER_PLUS_EXPR:
576319a7
DD
3050 /* For the rare target where pointers are not the same size as
3051 size_t, we need to check for mis-matched modes and correct
3052 the addend. */
3053 if (op0 && op1
3054 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3055 && GET_MODE (op0) != GET_MODE (op1))
3056 {
3057 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2ba172e0
JJ
3058 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3059 GET_MODE (op1));
576319a7
DD
3060 else
3061 /* We always sign-extend, regardless of the signedness of
3062 the operand, because the operand is always unsigned
3063 here even if the original C expression is signed. */
2ba172e0
JJ
3064 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3065 GET_MODE (op1));
576319a7
DD
3066 }
3067 /* Fall through. */
b5b8b0ac 3068 case PLUS_EXPR:
2ba172e0 3069 return simplify_gen_binary (PLUS, mode, op0, op1);
b5b8b0ac
AO
3070
3071 case MINUS_EXPR:
2ba172e0 3072 return simplify_gen_binary (MINUS, mode, op0, op1);
b5b8b0ac
AO
3073
3074 case MULT_EXPR:
2ba172e0 3075 return simplify_gen_binary (MULT, mode, op0, op1);
b5b8b0ac
AO
3076
3077 case RDIV_EXPR:
3078 case TRUNC_DIV_EXPR:
3079 case EXACT_DIV_EXPR:
3080 if (unsignedp)
2ba172e0 3081 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac 3082 else
2ba172e0 3083 return simplify_gen_binary (DIV, mode, op0, op1);
b5b8b0ac
AO
3084
3085 case TRUNC_MOD_EXPR:
2ba172e0 3086 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
b5b8b0ac
AO
3087
3088 case FLOOR_DIV_EXPR:
3089 if (unsignedp)
2ba172e0 3090 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac
AO
3091 else
3092 {
2ba172e0
JJ
3093 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3094 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3095 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0 3096 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3097 }
3098
3099 case FLOOR_MOD_EXPR:
3100 if (unsignedp)
2ba172e0 3101 return simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac
AO
3102 else
3103 {
2ba172e0 3104 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3105 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
3106 adj = simplify_gen_unary (NEG, mode,
3107 simplify_gen_binary (MULT, mode, adj, op1),
3108 mode);
3109 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3110 }
3111
3112 case CEIL_DIV_EXPR:
3113 if (unsignedp)
3114 {
2ba172e0
JJ
3115 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3116 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 3117 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0 3118 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3119 }
3120 else
3121 {
2ba172e0
JJ
3122 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3123 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3124 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0 3125 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3126 }
3127
3128 case CEIL_MOD_EXPR:
3129 if (unsignedp)
3130 {
2ba172e0 3131 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 3132 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
3133 adj = simplify_gen_unary (NEG, mode,
3134 simplify_gen_binary (MULT, mode, adj, op1),
3135 mode);
3136 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3137 }
3138 else
3139 {
2ba172e0 3140 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3141 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
3142 adj = simplify_gen_unary (NEG, mode,
3143 simplify_gen_binary (MULT, mode, adj, op1),
3144 mode);
3145 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3146 }
3147
3148 case ROUND_DIV_EXPR:
3149 if (unsignedp)
3150 {
2ba172e0
JJ
3151 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3152 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 3153 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0 3154 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3155 }
3156 else
3157 {
2ba172e0
JJ
3158 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3159 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3160 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0 3161 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3162 }
3163
3164 case ROUND_MOD_EXPR:
3165 if (unsignedp)
3166 {
2ba172e0 3167 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 3168 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
3169 adj = simplify_gen_unary (NEG, mode,
3170 simplify_gen_binary (MULT, mode, adj, op1),
3171 mode);
3172 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3173 }
3174 else
3175 {
2ba172e0 3176 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3177 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
3178 adj = simplify_gen_unary (NEG, mode,
3179 simplify_gen_binary (MULT, mode, adj, op1),
3180 mode);
3181 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3182 }
3183
3184 case LSHIFT_EXPR:
2ba172e0 3185 return simplify_gen_binary (ASHIFT, mode, op0, op1);
b5b8b0ac
AO
3186
3187 case RSHIFT_EXPR:
3188 if (unsignedp)
2ba172e0 3189 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
b5b8b0ac 3190 else
2ba172e0 3191 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
b5b8b0ac
AO
3192
3193 case LROTATE_EXPR:
2ba172e0 3194 return simplify_gen_binary (ROTATE, mode, op0, op1);
b5b8b0ac
AO
3195
3196 case RROTATE_EXPR:
2ba172e0 3197 return simplify_gen_binary (ROTATERT, mode, op0, op1);
b5b8b0ac
AO
3198
3199 case MIN_EXPR:
2ba172e0 3200 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
b5b8b0ac
AO
3201
3202 case MAX_EXPR:
2ba172e0 3203 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
b5b8b0ac
AO
3204
3205 case BIT_AND_EXPR:
3206 case TRUTH_AND_EXPR:
2ba172e0 3207 return simplify_gen_binary (AND, mode, op0, op1);
b5b8b0ac
AO
3208
3209 case BIT_IOR_EXPR:
3210 case TRUTH_OR_EXPR:
2ba172e0 3211 return simplify_gen_binary (IOR, mode, op0, op1);
b5b8b0ac
AO
3212
3213 case BIT_XOR_EXPR:
3214 case TRUTH_XOR_EXPR:
2ba172e0 3215 return simplify_gen_binary (XOR, mode, op0, op1);
b5b8b0ac
AO
3216
3217 case TRUTH_ANDIF_EXPR:
3218 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3219
3220 case TRUTH_ORIF_EXPR:
3221 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3222
3223 case TRUTH_NOT_EXPR:
2ba172e0 3224 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
b5b8b0ac
AO
3225
3226 case LT_EXPR:
2ba172e0
JJ
3227 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3228 op0, op1);
b5b8b0ac
AO
3229
3230 case LE_EXPR:
2ba172e0
JJ
3231 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3232 op0, op1);
b5b8b0ac
AO
3233
3234 case GT_EXPR:
2ba172e0
JJ
3235 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3236 op0, op1);
b5b8b0ac
AO
3237
3238 case GE_EXPR:
2ba172e0
JJ
3239 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3240 op0, op1);
b5b8b0ac
AO
3241
3242 case EQ_EXPR:
2ba172e0 3243 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3244
3245 case NE_EXPR:
2ba172e0 3246 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3247
3248 case UNORDERED_EXPR:
2ba172e0 3249 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3250
3251 case ORDERED_EXPR:
2ba172e0 3252 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3253
3254 case UNLT_EXPR:
2ba172e0 3255 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3256
3257 case UNLE_EXPR:
2ba172e0 3258 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3259
3260 case UNGT_EXPR:
2ba172e0 3261 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3262
3263 case UNGE_EXPR:
2ba172e0 3264 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3265
3266 case UNEQ_EXPR:
2ba172e0 3267 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3268
3269 case LTGT_EXPR:
2ba172e0 3270 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3271
3272 case COND_EXPR:
3273 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3274
3275 case COMPLEX_EXPR:
3276 gcc_assert (COMPLEX_MODE_P (mode));
3277 if (GET_MODE (op0) == VOIDmode)
3278 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3279 if (GET_MODE (op1) == VOIDmode)
3280 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3281 return gen_rtx_CONCAT (mode, op0, op1);
3282
d02a5a4b
JJ
3283 case CONJ_EXPR:
3284 if (GET_CODE (op0) == CONCAT)
3285 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2ba172e0
JJ
3286 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3287 XEXP (op0, 1),
3288 GET_MODE_INNER (mode)));
d02a5a4b
JJ
3289 else
3290 {
3291 enum machine_mode imode = GET_MODE_INNER (mode);
3292 rtx re, im;
3293
3294 if (MEM_P (op0))
3295 {
3296 re = adjust_address_nv (op0, imode, 0);
3297 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3298 }
3299 else
3300 {
3301 enum machine_mode ifmode = int_mode_for_mode (mode);
3302 enum machine_mode ihmode = int_mode_for_mode (imode);
3303 rtx halfsize;
3304 if (ifmode == BLKmode || ihmode == BLKmode)
3305 return NULL;
3306 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3307 re = op0;
3308 if (mode != ifmode)
3309 re = gen_rtx_SUBREG (ifmode, re, 0);
3310 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3311 if (imode != ihmode)
3312 re = gen_rtx_SUBREG (imode, re, 0);
3313 im = copy_rtx (op0);
3314 if (mode != ifmode)
3315 im = gen_rtx_SUBREG (ifmode, im, 0);
3316 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3317 if (imode != ihmode)
3318 im = gen_rtx_SUBREG (imode, im, 0);
3319 }
3320 im = gen_rtx_NEG (imode, im);
3321 return gen_rtx_CONCAT (mode, re, im);
3322 }
3323
b5b8b0ac
AO
3324 case ADDR_EXPR:
3325 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3326 if (!op0 || !MEM_P (op0))
c8a27c40
JJ
3327 {
3328 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3329 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3330 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
f8cca67b
JJ
3331 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3332 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
c8a27c40
JJ
3333 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3334
3335 if (handled_component_p (TREE_OPERAND (exp, 0)))
3336 {
3337 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3338 tree decl
3339 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3340 &bitoffset, &bitsize, &maxsize);
3341 if ((TREE_CODE (decl) == VAR_DECL
3342 || TREE_CODE (decl) == PARM_DECL
3343 || TREE_CODE (decl) == RESULT_DECL)
f8cca67b
JJ
3344 && (!TREE_ADDRESSABLE (decl)
3345 || target_for_debug_bind (decl))
c8a27c40
JJ
3346 && (bitoffset % BITS_PER_UNIT) == 0
3347 && bitsize > 0
3348 && bitsize == maxsize)
3349 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3350 bitoffset / BITS_PER_UNIT);
3351 }
3352
3353 return NULL;
3354 }
b5b8b0ac 3355
f61c6f34
JJ
3356 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3357 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
dda2da58
AO
3358
3359 return op0;
b5b8b0ac
AO
3360
3361 case VECTOR_CST:
d2a12ae7
RG
3362 {
3363 unsigned i;
3364
3365 op0 = gen_rtx_CONCATN
3366 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3367
3368 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
3369 {
3370 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
3371 if (!op1)
3372 return NULL;
3373 XVECEXP (op0, 0, i) = op1;
3374 }
3375
3376 return op0;
3377 }
b5b8b0ac
AO
3378
3379 case CONSTRUCTOR:
47598145
MM
3380 if (TREE_CLOBBER_P (exp))
3381 return NULL;
3382 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
b5b8b0ac
AO
3383 {
3384 unsigned i;
3385 tree val;
3386
3387 op0 = gen_rtx_CONCATN
3388 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3389
3390 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3391 {
3392 op1 = expand_debug_expr (val);
3393 if (!op1)
3394 return NULL;
3395 XVECEXP (op0, 0, i) = op1;
3396 }
3397
3398 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3399 {
3400 op1 = expand_debug_expr
e8160c9a 3401 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
b5b8b0ac
AO
3402
3403 if (!op1)
3404 return NULL;
3405
3406 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3407 XVECEXP (op0, 0, i) = op1;
3408 }
3409
3410 return op0;
3411 }
3412 else
3413 goto flag_unsupported;
3414
3415 case CALL_EXPR:
3416 /* ??? Maybe handle some builtins? */
3417 return NULL;
3418
3419 case SSA_NAME:
3420 {
2a8e30fb
MM
3421 gimple g = get_gimple_for_ssa_name (exp);
3422 if (g)
3423 {
3424 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3425 if (!op0)
3426 return NULL;
3427 }
3428 else
3429 {
3430 int part = var_to_partition (SA.map, exp);
b5b8b0ac 3431
2a8e30fb 3432 if (part == NO_PARTITION)
a58a8e4b
JJ
3433 {
 3434 /* If this is a reference to the incoming value of a
 3435 parameter that is never used in the code, or whose
 3436 incoming value is never used, use the PARM_DECL's
 3437 DECL_RTL if set. */
3438 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3439 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3440 {
12c5ffe5
EB
3441 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3442 if (op0)
3443 goto adjust_mode;
a58a8e4b 3444 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
12c5ffe5
EB
3445 if (op0)
3446 goto adjust_mode;
a58a8e4b
JJ
3447 }
3448 return NULL;
3449 }
b5b8b0ac 3450
2a8e30fb 3451 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
b5b8b0ac 3452
abfea58d 3453 op0 = copy_rtx (SA.partition_to_pseudo[part]);
2a8e30fb 3454 }
b5b8b0ac
AO
3455 goto adjust_mode;
3456 }
3457
3458 case ERROR_MARK:
3459 return NULL;
3460
7ece48b1
JJ
 3461 /* Vector stuff. For most of these tree codes we have no corresponding RTL codes. */
3462 case REALIGN_LOAD_EXPR:
3463 case REDUC_MAX_EXPR:
3464 case REDUC_MIN_EXPR:
3465 case REDUC_PLUS_EXPR:
3466 case VEC_COND_EXPR:
7ece48b1
JJ
3467 case VEC_LSHIFT_EXPR:
3468 case VEC_PACK_FIX_TRUNC_EXPR:
3469 case VEC_PACK_SAT_EXPR:
3470 case VEC_PACK_TRUNC_EXPR:
3471 case VEC_RSHIFT_EXPR:
3472 case VEC_UNPACK_FLOAT_HI_EXPR:
3473 case VEC_UNPACK_FLOAT_LO_EXPR:
3474 case VEC_UNPACK_HI_EXPR:
3475 case VEC_UNPACK_LO_EXPR:
3476 case VEC_WIDEN_MULT_HI_EXPR:
3477 case VEC_WIDEN_MULT_LO_EXPR:
36ba4aae
IR
3478 case VEC_WIDEN_LSHIFT_HI_EXPR:
3479 case VEC_WIDEN_LSHIFT_LO_EXPR:
3f3af9df 3480 case VEC_PERM_EXPR:
7ece48b1
JJ
3481 return NULL;
3482
3483 /* Misc codes. */
3484 case ADDR_SPACE_CONVERT_EXPR:
3485 case FIXED_CONVERT_EXPR:
3486 case OBJ_TYPE_REF:
3487 case WITH_SIZE_EXPR:
3488 return NULL;
3489
3490 case DOT_PROD_EXPR:
3491 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3492 && SCALAR_INT_MODE_P (mode))
3493 {
2ba172e0
JJ
3494 op0
3495 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3496 0)))
3497 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3498 inner_mode);
3499 op1
3500 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3501 1)))
3502 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3503 inner_mode);
3504 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3505 return simplify_gen_binary (PLUS, mode, op0, op2);
7ece48b1
JJ
3506 }
3507 return NULL;
3508
3509 case WIDEN_MULT_EXPR:
0354c0c7
BS
3510 case WIDEN_MULT_PLUS_EXPR:
3511 case WIDEN_MULT_MINUS_EXPR:
7ece48b1
JJ
3512 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3513 && SCALAR_INT_MODE_P (mode))
3514 {
2ba172e0 3515 inner_mode = GET_MODE (op0);
7ece48b1 3516 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5b58b39b 3517 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
7ece48b1 3518 else
5b58b39b 3519 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
7ece48b1 3520 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5b58b39b 3521 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
7ece48b1 3522 else
5b58b39b 3523 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
2ba172e0 3524 op0 = simplify_gen_binary (MULT, mode, op0, op1);
0354c0c7
BS
3525 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3526 return op0;
3527 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
2ba172e0 3528 return simplify_gen_binary (PLUS, mode, op0, op2);
0354c0c7 3529 else
2ba172e0 3530 return simplify_gen_binary (MINUS, mode, op2, op0);
7ece48b1
JJ
3531 }
3532 return NULL;
3533
3534 case WIDEN_SUM_EXPR:
3f3af9df 3535 case WIDEN_LSHIFT_EXPR:
7ece48b1
JJ
3536 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3537 && SCALAR_INT_MODE_P (mode))
3538 {
2ba172e0
JJ
3539 op0
3540 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3541 0)))
3542 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3543 inner_mode);
3f3af9df
JJ
3544 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3545 ? ASHIFT : PLUS, mode, op0, op1);
7ece48b1
JJ
3546 }
3547 return NULL;
3548
0f59b812 3549 case FMA_EXPR:
2ba172e0 3550 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
0f59b812 3551
b5b8b0ac
AO
3552 default:
3553 flag_unsupported:
3554#ifdef ENABLE_CHECKING
3555 debug_tree (exp);
3556 gcc_unreachable ();
3557#else
3558 return NULL;
3559#endif
3560 }
3561}
3562
ddb555ed
JJ
3563/* Return an RTX equivalent to the source bind value of the tree expression
3564 EXP. */
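/* Illustrative sketch (an assumption, not taken from the sources): for a
   parameter "int x" whose incoming value is never otherwise used, the
   debug source bind of x is expanded here to the PARM_DECL's incoming
   location (e.g. an argument register or stack slot), adjusted below to
   the declared mode when the two modes differ.  */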
3565
3566static rtx
3567expand_debug_source_expr (tree exp)
3568{
3569 rtx op0 = NULL_RTX;
3570 enum machine_mode mode = VOIDmode, inner_mode;
3571
3572 switch (TREE_CODE (exp))
3573 {
3574 case PARM_DECL:
3575 {
ddb555ed 3576 mode = DECL_MODE (exp);
12c5ffe5
EB
3577 op0 = expand_debug_parm_decl (exp);
3578 if (op0)
3579 break;
ddb555ed
JJ
3580 /* See if this isn't an argument that has been completely
3581 optimized out. */
3582 if (!DECL_RTL_SET_P (exp)
12c5ffe5 3583 && !DECL_INCOMING_RTL (exp)
ddb555ed
JJ
3584 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3585 {
3586 tree aexp = exp;
3587 if (DECL_ABSTRACT_ORIGIN (exp))
3588 aexp = DECL_ABSTRACT_ORIGIN (exp);
3589 if (DECL_CONTEXT (aexp)
3590 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3591 {
3592 VEC(tree, gc) **debug_args;
3593 unsigned int ix;
3594 tree ddecl;
3595#ifdef ENABLE_CHECKING
3596 tree parm;
3597 for (parm = DECL_ARGUMENTS (current_function_decl);
3598 parm; parm = DECL_CHAIN (parm))
3599 gcc_assert (parm != exp
3600 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3601#endif
3602 debug_args = decl_debug_args_lookup (current_function_decl);
3603 if (debug_args != NULL)
3604 {
3605 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3606 ix += 2)
3607 if (ddecl == aexp)
3608 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3609 }
3610 }
3611 }
3612 break;
3613 }
3614 default:
3615 break;
3616 }
3617
3618 if (op0 == NULL_RTX)
3619 return NULL_RTX;
3620
3621 inner_mode = GET_MODE (op0);
3622 if (mode == inner_mode)
3623 return op0;
3624
3625 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3626 {
3627 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3628 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3629 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3630 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3631 else
3632 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3633 }
3634 else if (FLOAT_MODE_P (mode))
3635 gcc_unreachable ();
3636 else if (FLOAT_MODE_P (inner_mode))
3637 {
3638 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3639 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3640 else
3641 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3642 }
3643 else if (CONSTANT_P (op0)
3644 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3645 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3646 subreg_lowpart_offset (mode, inner_mode));
3647 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3648 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3649 else
3650 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3651
3652 return op0;
3653}
3654
b5b8b0ac
AO
3655/* Expand the _LOCs in debug insns. We run this after expanding all
3656 regular insns, so that any variables referenced in the function
3657 will have their DECL_RTLs set. */
3658
3659static void
3660expand_debug_locations (void)
3661{
3662 rtx insn;
3663 rtx last = get_last_insn ();
3664 int save_strict_alias = flag_strict_aliasing;
3665
3666 /* New alias sets while setting up memory attributes cause
 3667 -fcompare-debug failures, even though they don't bring about any
3668 codegen changes. */
3669 flag_strict_aliasing = 0;
3670
3671 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3672 if (DEBUG_INSN_P (insn))
3673 {
3674 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3675 rtx val;
3676 enum machine_mode mode;
3677
3678 if (value == NULL_TREE)
3679 val = NULL_RTX;
3680 else
3681 {
ddb555ed
JJ
3682 if (INSN_VAR_LOCATION_STATUS (insn)
3683 == VAR_INIT_STATUS_UNINITIALIZED)
3684 val = expand_debug_source_expr (value);
3685 else
3686 val = expand_debug_expr (value);
b5b8b0ac
AO
3687 gcc_assert (last == get_last_insn ());
3688 }
3689
3690 if (!val)
3691 val = gen_rtx_UNKNOWN_VAR_LOC ();
3692 else
3693 {
3694 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3695
3696 gcc_assert (mode == GET_MODE (val)
3697 || (GET_MODE (val) == VOIDmode
3698 && (CONST_INT_P (val)
3699 || GET_CODE (val) == CONST_FIXED
3700 || GET_CODE (val) == CONST_DOUBLE
3701 || GET_CODE (val) == LABEL_REF)));
3702 }
3703
3704 INSN_VAR_LOCATION_LOC (insn) = val;
3705 }
3706
3707 flag_strict_aliasing = save_strict_alias;
3708}
3709
242229bb
JH
3710/* Expand basic block BB from GIMPLE trees to RTL. */
3711
3712static basic_block
10d22567 3713expand_gimple_basic_block (basic_block bb)
242229bb 3714{
726a989a
RB
3715 gimple_stmt_iterator gsi;
3716 gimple_seq stmts;
3717 gimple stmt = NULL;
242229bb
JH
3718 rtx note, last;
3719 edge e;
628f6a4e 3720 edge_iterator ei;
8b11009b 3721 void **elt;
242229bb
JH
3722
3723 if (dump_file)
726a989a
RB
3724 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3725 bb->index);
3726
3727 /* Note that since we are now transitioning from GIMPLE to RTL, we
3728 cannot use the gsi_*_bb() routines because they expect the basic
3729 block to be in GIMPLE, instead of RTL. Therefore, we need to
3730 access the BB sequence directly. */
3731 stmts = bb_seq (bb);
3732 bb->il.gimple = NULL;
bf08ebeb 3733 rtl_profile_for_bb (bb);
5e2d947c
JH
3734 init_rtl_bb_info (bb);
3735 bb->flags |= BB_RTL;
3736
a9b77cd1
ZD
 3737 /* Remove the RETURN_EXPR if we may fall through to the exit
3738 instead. */
726a989a
RB
3739 gsi = gsi_last (stmts);
3740 if (!gsi_end_p (gsi)
3741 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
a9b77cd1 3742 {
726a989a 3743 gimple ret_stmt = gsi_stmt (gsi);
a9b77cd1
ZD
3744
3745 gcc_assert (single_succ_p (bb));
3746 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3747
3748 if (bb->next_bb == EXIT_BLOCK_PTR
726a989a 3749 && !gimple_return_retval (ret_stmt))
a9b77cd1 3750 {
726a989a 3751 gsi_remove (&gsi, false);
a9b77cd1
ZD
3752 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3753 }
3754 }
3755
726a989a
RB
3756 gsi = gsi_start (stmts);
3757 if (!gsi_end_p (gsi))
8b11009b 3758 {
726a989a
RB
3759 stmt = gsi_stmt (gsi);
3760 if (gimple_code (stmt) != GIMPLE_LABEL)
3761 stmt = NULL;
8b11009b 3762 }
242229bb 3763
8b11009b
ZD
3764 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3765
3766 if (stmt || elt)
242229bb
JH
3767 {
3768 last = get_last_insn ();
3769
8b11009b
ZD
3770 if (stmt)
3771 {
28ed065e 3772 expand_gimple_stmt (stmt);
726a989a 3773 gsi_next (&gsi);
8b11009b
ZD
3774 }
3775
3776 if (elt)
ae50c0cb 3777 emit_label ((rtx) *elt);
242229bb 3778
caf93cb0 3779 /* Java emits line number notes at the top of labels.
c22cacf3 3780 ??? Make this go away once line number notes are obsoleted. */
242229bb 3781 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 3782 if (NOTE_P (BB_HEAD (bb)))
242229bb 3783 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
242229bb 3784 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528 3785
726a989a 3786 maybe_dump_rtl_for_gimple_stmt (stmt, last);
242229bb
JH
3787 }
3788 else
3789 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3790
3791 NOTE_BASIC_BLOCK (note) = bb;
3792
726a989a 3793 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 3794 {
cea49550 3795 basic_block new_bb;
242229bb 3796
b5b8b0ac 3797 stmt = gsi_stmt (gsi);
2a8e30fb
MM
3798
3799 /* If this statement is a non-debug one, and we generate debug
3800 insns, then this one might be the last real use of a TERed
 3801 SSA_NAME, while there are still some debug uses further
3802 down. Expanding the current SSA name in such further debug
3803 uses by their RHS might lead to wrong debug info, as coalescing
3804 might make the operands of such RHS be placed into the same
3805 pseudo as something else. Like so:
3806 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3807 use(a_1);
3808 a_2 = ...
3809 #DEBUG ... => a_1
3810 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
 3811 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3812 the write to a_2 would actually have clobbered the place which
3813 formerly held a_0.
3814
3815 So, instead of that, we recognize the situation, and generate
3816 debug temporaries at the last real use of TERed SSA names:
3817 a_1 = a_0 + 1;
3818 #DEBUG #D1 => a_1
3819 use(a_1);
3820 a_2 = ...
3821 #DEBUG ... => #D1
3822 */
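	 /* A minimal sketch of what that means at the RTL level (illustrative
	    only): the debug temporary #D1 above is realized as a fresh
	    DEBUG_EXPR_DECL, bound by emitting a VAR_LOCATION debug insn whose
	    location is the expanded RHS (a_0 + 1); the later #DEBUG uses of
	    a_1 are then rewritten to refer to that DEBUG_EXPR_DECL instead.  */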
3823 if (MAY_HAVE_DEBUG_INSNS
3824 && SA.values
3825 && !is_gimple_debug (stmt))
3826 {
3827 ssa_op_iter iter;
3828 tree op;
3829 gimple def;
3830
3831 location_t sloc = get_curr_insn_source_location ();
3832 tree sblock = get_curr_insn_block ();
3833
3834 /* Look for SSA names that have their last use here (TERed
3835 names always have only one real use). */
3836 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3837 if ((def = get_gimple_for_ssa_name (op)))
3838 {
3839 imm_use_iterator imm_iter;
3840 use_operand_p use_p;
3841 bool have_debug_uses = false;
3842
3843 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3844 {
3845 if (gimple_debug_bind_p (USE_STMT (use_p)))
3846 {
3847 have_debug_uses = true;
3848 break;
3849 }
3850 }
3851
3852 if (have_debug_uses)
3853 {
 3854 /* OP is a TERed SSA name, with DEF its defining
 3855 statement, and OP is used in further debug
3856 instructions. Generate a debug temporary, and
3857 replace all uses of OP in debug insns with that
3858 temporary. */
3859 gimple debugstmt;
3860 tree value = gimple_assign_rhs_to_tree (def);
3861 tree vexpr = make_node (DEBUG_EXPR_DECL);
3862 rtx val;
3863 enum machine_mode mode;
3864
3865 set_curr_insn_source_location (gimple_location (def));
3866 set_curr_insn_block (gimple_block (def));
3867
3868 DECL_ARTIFICIAL (vexpr) = 1;
3869 TREE_TYPE (vexpr) = TREE_TYPE (value);
3870 if (DECL_P (value))
3871 mode = DECL_MODE (value);
3872 else
3873 mode = TYPE_MODE (TREE_TYPE (value));
3874 DECL_MODE (vexpr) = mode;
3875
3876 val = gen_rtx_VAR_LOCATION
3877 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3878
e8c6bb74 3879 emit_debug_insn (val);
2a8e30fb
MM
3880
3881 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3882 {
3883 if (!gimple_debug_bind_p (debugstmt))
3884 continue;
3885
3886 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3887 SET_USE (use_p, vexpr);
3888
3889 update_stmt (debugstmt);
3890 }
3891 }
3892 }
3893 set_curr_insn_source_location (sloc);
3894 set_curr_insn_block (sblock);
3895 }
3896
a5883ba0 3897 currently_expanding_gimple_stmt = stmt;
b5b8b0ac 3898
242229bb
JH
3899 /* Expand this statement, then evaluate the resulting RTL and
3900 fixup the CFG accordingly. */
726a989a 3901 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 3902 {
726a989a 3903 new_bb = expand_gimple_cond (bb, stmt);
cea49550
RH
3904 if (new_bb)
3905 return new_bb;
3906 }
b5b8b0ac
AO
3907 else if (gimple_debug_bind_p (stmt))
3908 {
3909 location_t sloc = get_curr_insn_source_location ();
3910 tree sblock = get_curr_insn_block ();
3911 gimple_stmt_iterator nsi = gsi;
3912
3913 for (;;)
3914 {
3915 tree var = gimple_debug_bind_get_var (stmt);
3916 tree value;
3917 rtx val;
3918 enum machine_mode mode;
3919
ec8c1492
JJ
3920 if (TREE_CODE (var) != DEBUG_EXPR_DECL
3921 && TREE_CODE (var) != LABEL_DECL
3922 && !target_for_debug_bind (var))
3923 goto delink_debug_stmt;
3924
b5b8b0ac
AO
3925 if (gimple_debug_bind_has_value_p (stmt))
3926 value = gimple_debug_bind_get_value (stmt);
3927 else
3928 value = NULL_TREE;
3929
3930 last = get_last_insn ();
3931
3932 set_curr_insn_source_location (gimple_location (stmt));
3933 set_curr_insn_block (gimple_block (stmt));
3934
3935 if (DECL_P (var))
3936 mode = DECL_MODE (var);
3937 else
3938 mode = TYPE_MODE (TREE_TYPE (var));
3939
3940 val = gen_rtx_VAR_LOCATION
3941 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3942
e16b6fd0 3943 emit_debug_insn (val);
b5b8b0ac
AO
3944
3945 if (dump_file && (dump_flags & TDF_DETAILS))
3946 {
3947 /* We can't dump the insn with a TREE where an RTX
3948 is expected. */
e8c6bb74 3949 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
b5b8b0ac 3950 maybe_dump_rtl_for_gimple_stmt (stmt, last);
e8c6bb74 3951 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
b5b8b0ac
AO
3952 }
3953
ec8c1492 3954 delink_debug_stmt:
2a8e30fb
MM
3955 /* In order not to generate too many debug temporaries,
3956 we delink all uses of debug statements we already expanded.
3957 Therefore debug statements between definition and real
3958 use of TERed SSA names will continue to use the SSA name,
3959 and not be replaced with debug temps. */
3960 delink_stmt_imm_use (stmt);
3961
b5b8b0ac
AO
3962 gsi = nsi;
3963 gsi_next (&nsi);
3964 if (gsi_end_p (nsi))
3965 break;
3966 stmt = gsi_stmt (nsi);
3967 if (!gimple_debug_bind_p (stmt))
3968 break;
3969 }
3970
ddb555ed
JJ
3971 set_curr_insn_source_location (sloc);
3972 set_curr_insn_block (sblock);
3973 }
3974 else if (gimple_debug_source_bind_p (stmt))
3975 {
3976 location_t sloc = get_curr_insn_source_location ();
3977 tree sblock = get_curr_insn_block ();
3978 tree var = gimple_debug_source_bind_get_var (stmt);
3979 tree value = gimple_debug_source_bind_get_value (stmt);
3980 rtx val;
3981 enum machine_mode mode;
3982
3983 last = get_last_insn ();
3984
3985 set_curr_insn_source_location (gimple_location (stmt));
3986 set_curr_insn_block (gimple_block (stmt));
3987
3988 mode = DECL_MODE (var);
3989
3990 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3991 VAR_INIT_STATUS_UNINITIALIZED);
3992
3993 emit_debug_insn (val);
3994
3995 if (dump_file && (dump_flags & TDF_DETAILS))
3996 {
3997 /* We can't dump the insn with a TREE where an RTX
3998 is expected. */
3999 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
4000 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4001 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
4002 }
4003
b5b8b0ac
AO
4004 set_curr_insn_source_location (sloc);
4005 set_curr_insn_block (sblock);
4006 }
80c7a9eb 4007 else
242229bb 4008 {
726a989a 4009 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
cea49550
RH
4010 {
4011 bool can_fallthru;
4012 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
4013 if (new_bb)
4014 {
4015 if (can_fallthru)
4016 bb = new_bb;
4017 else
4018 return new_bb;
4019 }
4020 }
4d7a65ea 4021 else
b7211528 4022 {
4e3825db 4023 def_operand_p def_p;
4e3825db
MM
4024 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4025
4026 if (def_p != NULL)
4027 {
4028 /* Ignore this stmt if it is in the list of
4029 replaceable expressions. */
4030 if (SA.values
b8698a0f 4031 && bitmap_bit_p (SA.values,
e97809c6 4032 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4e3825db
MM
4033 continue;
4034 }
28ed065e 4035 last = expand_gimple_stmt (stmt);
726a989a 4036 maybe_dump_rtl_for_gimple_stmt (stmt, last);
b7211528 4037 }
242229bb
JH
4038 }
4039 }
4040
a5883ba0
MM
4041 currently_expanding_gimple_stmt = NULL;
4042
7241571e 4043 /* Expand implicit goto and convert goto_locus. */
a9b77cd1
ZD
4044 FOR_EACH_EDGE (e, ei, bb->succs)
4045 {
7241571e
JJ
4046 if (e->goto_locus && e->goto_block)
4047 {
4048 set_curr_insn_source_location (e->goto_locus);
4049 set_curr_insn_block (e->goto_block);
4050 e->goto_locus = curr_insn_locator ();
4051 }
4052 e->goto_block = NULL;
4053 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4054 {
4055 emit_jump (label_rtx_for_bb (e->dest));
4056 e->flags &= ~EDGE_FALLTHRU;
4057 }
a9b77cd1
ZD
4058 }
4059
ae761c45
AH
 4060 /* Expanded RTL can create a jump in the last instruction of the block.
 4061 This might later be assumed to be a jump to the successor and break edge insertion.
 4062 We need to insert a dummy move to prevent this. PR41440. */
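	 /* A rough sketch of the workaround (illustrative): the dummy move
	    below is a self-copy of a fresh SImode pseudo, i.e. roughly
	    (set (reg:SI N) (reg:SI N)), so that the jump is no longer the
	    last insn of the block and edge insertion stays well-formed.  */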
4063 if (single_succ_p (bb)
4064 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4065 && (last = get_last_insn ())
4066 && JUMP_P (last))
4067 {
4068 rtx dummy = gen_reg_rtx (SImode);
4069 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
4070 }
4071
242229bb
JH
4072 do_pending_stack_adjust ();
4073
3f117656 4074 /* Find the block tail. The last insn in the block is the insn
242229bb
JH
4075 before a barrier and/or table jump insn. */
4076 last = get_last_insn ();
4b4bf941 4077 if (BARRIER_P (last))
242229bb
JH
4078 last = PREV_INSN (last);
4079 if (JUMP_TABLE_DATA_P (last))
4080 last = PREV_INSN (PREV_INSN (last));
4081 BB_END (bb) = last;
caf93cb0 4082
242229bb 4083 update_bb_for_insn (bb);
80c7a9eb 4084
242229bb
JH
4085 return bb;
4086}
4087
4088
4089/* Create a basic block for initialization code. */
4090
4091static basic_block
4092construct_init_block (void)
4093{
4094 basic_block init_block, first_block;
fd44f634
JH
4095 edge e = NULL;
4096 int flags;
275a4187 4097
fd44f634
JH
4098 /* Multiple entry points not supported yet. */
4099 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
5e2d947c
JH
4100 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4101 init_rtl_bb_info (EXIT_BLOCK_PTR);
4102 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4103 EXIT_BLOCK_PTR->flags |= BB_RTL;
242229bb 4104
fd44f634 4105 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
275a4187 4106
fd44f634
JH
 4107 /* When the entry edge points to the first basic block, we don't need a jump;
 4108 otherwise we have to jump to the proper target. */
4109 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4110 {
726a989a 4111 tree label = gimple_block_label (e->dest);
fd44f634
JH
4112
4113 emit_jump (label_rtx (label));
4114 flags = 0;
275a4187 4115 }
fd44f634
JH
4116 else
4117 flags = EDGE_FALLTHRU;
242229bb
JH
4118
4119 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4120 get_last_insn (),
4121 ENTRY_BLOCK_PTR);
4122 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4123 init_block->count = ENTRY_BLOCK_PTR->count;
7d776ee2
RG
4124 if (current_loops && ENTRY_BLOCK_PTR->loop_father)
4125 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
242229bb
JH
4126 if (e)
4127 {
4128 first_block = e->dest;
4129 redirect_edge_succ (e, init_block);
fd44f634 4130 e = make_edge (init_block, first_block, flags);
242229bb
JH
4131 }
4132 else
4133 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4134 e->probability = REG_BR_PROB_BASE;
4135 e->count = ENTRY_BLOCK_PTR->count;
4136
4137 update_bb_for_insn (init_block);
4138 return init_block;
4139}
4140
55e092c4
JH
4141/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4142 found in the block tree. */
4143
4144static void
4145set_block_levels (tree block, int level)
4146{
4147 while (block)
4148 {
4149 BLOCK_NUMBER (block) = level;
4150 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4151 block = BLOCK_CHAIN (block);
4152 }
4153}
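/* For instance (a hypothetical block tree): if DECL_INITIAL contains
   sibling blocks A and C with block B nested inside A, the recursion
   above assigns BLOCK_NUMBER 0 to the outermost block, 1 to A and C,
   and 2 to B.  */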
242229bb
JH
4154
4155/* Create a block containing landing pads and similar stuff. */
4156
4157static void
4158construct_exit_block (void)
4159{
4160 rtx head = get_last_insn ();
4161 rtx end;
4162 basic_block exit_block;
628f6a4e
BE
4163 edge e, e2;
4164 unsigned ix;
4165 edge_iterator ei;
071a42f9 4166 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
242229bb 4167
bf08ebeb
JH
4168 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4169
caf93cb0 4170 /* Make sure the locus is set to the end of the function, so that
242229bb 4171 epilogue line numbers and warnings are set properly. */
6773e15f 4172 if (cfun->function_end_locus != UNKNOWN_LOCATION)
242229bb
JH
4173 input_location = cfun->function_end_locus;
4174
4175 /* The following insns belong to the top scope. */
55e092c4 4176 set_curr_insn_block (DECL_INITIAL (current_function_decl));
242229bb 4177
242229bb
JH
4178 /* Generate rtl for function exit. */
4179 expand_function_end ();
4180
4181 end = get_last_insn ();
4182 if (head == end)
4183 return;
071a42f9
JH
 4184 /* While emitting the function end we could have moved the end of the last
 4185 basic block. */
4186 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4b4bf941 4187 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 4188 head = NEXT_INSN (head);
80c7a9eb
RH
4189 exit_block = create_basic_block (NEXT_INSN (head), end,
4190 EXIT_BLOCK_PTR->prev_bb);
242229bb
JH
4191 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4192 exit_block->count = EXIT_BLOCK_PTR->count;
7d776ee2
RG
4193 if (current_loops && EXIT_BLOCK_PTR->loop_father)
4194 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
628f6a4e
BE
4195
4196 ix = 0;
4197 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
242229bb 4198 {
8fb790fd 4199 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
242229bb 4200 if (!(e->flags & EDGE_ABNORMAL))
628f6a4e
BE
4201 redirect_edge_succ (e, exit_block);
4202 else
4203 ix++;
242229bb 4204 }
628f6a4e 4205
242229bb
JH
4206 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4207 e->probability = REG_BR_PROB_BASE;
4208 e->count = EXIT_BLOCK_PTR->count;
628f6a4e 4209 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
242229bb
JH
4210 if (e2 != e)
4211 {
c22cacf3 4212 e->count -= e2->count;
242229bb
JH
4213 exit_block->count -= e2->count;
4214 exit_block->frequency -= EDGE_FREQUENCY (e2);
4215 }
4216 if (e->count < 0)
4217 e->count = 0;
4218 if (exit_block->count < 0)
4219 exit_block->count = 0;
4220 if (exit_block->frequency < 0)
4221 exit_block->frequency = 0;
4222 update_bb_for_insn (exit_block);
4223}
4224
c22cacf3 4225/* Helper function for discover_nonconstant_array_refs.
a1b23b2f
UW
4226 Look for ARRAY_REF nodes with non-constant indexes and mark them
4227 addressable. */
4228
4229static tree
4230discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4231 void *data ATTRIBUTE_UNUSED)
4232{
4233 tree t = *tp;
4234
4235 if (IS_TYPE_OR_DECL_P (t))
4236 *walk_subtrees = 0;
4237 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4238 {
4239 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4240 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4241 && (!TREE_OPERAND (t, 2)
4242 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4243 || (TREE_CODE (t) == COMPONENT_REF
4244 && (!TREE_OPERAND (t,2)
4245 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4246 || TREE_CODE (t) == BIT_FIELD_REF
4247 || TREE_CODE (t) == REALPART_EXPR
4248 || TREE_CODE (t) == IMAGPART_EXPR
4249 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1043771b 4250 || CONVERT_EXPR_P (t))
a1b23b2f
UW
4251 t = TREE_OPERAND (t, 0);
4252
4253 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4254 {
4255 t = get_base_address (t);
6f11d690
RG
4256 if (t && DECL_P (t)
4257 && DECL_MODE (t) != BLKmode)
a1b23b2f
UW
4258 TREE_ADDRESSABLE (t) = 1;
4259 }
4260
4261 *walk_subtrees = 0;
4262 }
4263
4264 return NULL_TREE;
4265}
4266
4267/* RTL expansion is not able to compile array references with variable
 4268 offsets for arrays stored in a single register. Discover such
4269 expressions and mark variables as addressable to avoid this
4270 scenario. */
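/* For example (illustrative): a small local array such as "char buf[4]"
   may have a single-register DECL_MODE (say SImode); a reference like
   "buf[i]" with a non-constant index then forces TREE_ADDRESSABLE on
   "buf", so it is given a memory slot that the variable-offset access
   can address.  */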
4271
4272static void
4273discover_nonconstant_array_refs (void)
4274{
4275 basic_block bb;
726a989a 4276 gimple_stmt_iterator gsi;
a1b23b2f
UW
4277
4278 FOR_EACH_BB (bb)
726a989a
RB
4279 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4280 {
4281 gimple stmt = gsi_stmt (gsi);
aa847cc8
JJ
4282 if (!is_gimple_debug (stmt))
4283 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
726a989a 4284 }
a1b23b2f
UW
4285}
4286
2e3f842f
L
4287/* This function sets crtl->args.internal_arg_pointer to a virtual
 4288 register if DRAP is needed. The local register allocator will replace
4289 virtual_incoming_args_rtx with the virtual register. */
4290
4291static void
4292expand_stack_alignment (void)
4293{
4294 rtx drap_rtx;
e939805b 4295 unsigned int preferred_stack_boundary;
2e3f842f
L
4296
4297 if (! SUPPORTS_STACK_ALIGNMENT)
4298 return;
b8698a0f 4299
2e3f842f
L
4300 if (cfun->calls_alloca
4301 || cfun->has_nonlocal_label
4302 || crtl->has_nonlocal_goto)
4303 crtl->need_drap = true;
4304
890b9b96
L
4305 /* Call update_stack_boundary here again to update incoming stack
4306 boundary. It may set incoming stack alignment to a different
4307 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4308 use the minimum incoming stack alignment to check if it is OK
4309 to perform sibcall optimization since sibcall optimization will
4310 only align the outgoing stack to incoming stack boundary. */
4311 if (targetm.calls.update_stack_boundary)
4312 targetm.calls.update_stack_boundary ();
4313
4314 /* The incoming stack frame has to be aligned at least at
4315 parm_stack_boundary. */
4316 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
2e3f842f 4317
2e3f842f
L
4318 /* Update crtl->stack_alignment_estimated and use it later to align
4319 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4320 exceptions since callgraph doesn't collect incoming stack alignment
4321 in this case. */
8f4f502f 4322 if (cfun->can_throw_non_call_exceptions
2e3f842f
L
4323 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4324 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4325 else
4326 preferred_stack_boundary = crtl->preferred_stack_boundary;
4327 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4328 crtl->stack_alignment_estimated = preferred_stack_boundary;
4329 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4330 crtl->stack_alignment_needed = preferred_stack_boundary;
4331
890b9b96
L
4332 gcc_assert (crtl->stack_alignment_needed
4333 <= crtl->stack_alignment_estimated);
4334
2e3f842f 4335 crtl->stack_realign_needed
e939805b 4336 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
d2d93c32 4337 crtl->stack_realign_tried = crtl->stack_realign_needed;
2e3f842f
L
4338
4339 crtl->stack_realign_processed = true;
4340
 4341 /* The target has to redefine TARGET_GET_DRAP_RTX to support stack
4342 alignment. */
4343 gcc_assert (targetm.calls.get_drap_rtx != NULL);
b8698a0f 4344 drap_rtx = targetm.calls.get_drap_rtx ();
2e3f842f 4345
d015f7cc
L
4346 /* stack_realign_drap and drap_rtx must match. */
4347 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4348
2e3f842f
L
4349 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4350 if (NULL != drap_rtx)
4351 {
4352 crtl->args.internal_arg_pointer = drap_rtx;
4353
4354 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4355 needed. */
4356 fixup_tail_calls ();
4357 }
4358}
4359
242229bb
JH
4360/* Translate the intermediate representation contained in the CFG
4361 from GIMPLE trees to RTL.
4362
4363 We do conversion per basic block and preserve/update the tree CFG.
4364 This implies we have to do some magic as the CFG can simultaneously
4365 consist of basic blocks containing RTL and GIMPLE trees. This can
61ada8ae 4366 confuse the CFG hooks, so be careful not to manipulate the CFG during
242229bb
JH
4367 the expansion. */
4368
c2924966 4369static unsigned int
726a989a 4370gimple_expand_cfg (void)
242229bb
JH
4371{
4372 basic_block bb, init_block;
4373 sbitmap blocks;
0ef90296
ZD
4374 edge_iterator ei;
4375 edge e;
3a42502d 4376 rtx var_seq;
4e3825db
MM
4377 unsigned i;
4378
f029db69 4379 timevar_push (TV_OUT_OF_SSA);
4e3825db 4380 rewrite_out_of_ssa (&SA);
f029db69 4381 timevar_pop (TV_OUT_OF_SSA);
4e3825db
MM
4382 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4383 sizeof (rtx));
242229bb 4384
be147e84
RG
4385 /* Make sure all values used by the optimization passes have sane
4386 defaults. */
4387 reg_renumber = 0;
4388
4586b4ca
SB
4389 /* Some backends want to know that we are expanding to RTL. */
4390 currently_expanding_to_rtl = 1;
cd7d9fd7
RG
4391 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
4392 free_dominance_info (CDI_DOMINATORS);
4586b4ca 4393
bf08ebeb
JH
4394 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4395
55e092c4 4396 insn_locators_alloc ();
fe8a7779 4397 if (!DECL_IS_BUILTIN (current_function_decl))
1751ecd6
AH
4398 {
4399 /* Eventually, all FEs should explicitly set function_start_locus. */
4400 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4401 set_curr_insn_source_location
4402 (DECL_SOURCE_LOCATION (current_function_decl));
4403 else
4404 set_curr_insn_source_location (cfun->function_start_locus);
4405 }
9ff70652
JJ
4406 else
4407 set_curr_insn_source_location (UNKNOWN_LOCATION);
55e092c4
JH
4408 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4409 prologue_locator = curr_insn_locator ();
4410
2b21299c
JJ
4411#ifdef INSN_SCHEDULING
4412 init_sched_attrs ();
4413#endif
4414
55e092c4
JH
4415 /* Make sure first insn is a note even if we don't want linenums.
4416 This makes sure the first insn will never be deleted.
4417 Also, final expects a note to appear there. */
4418 emit_note (NOTE_INSN_DELETED);
6429e3be 4419
a1b23b2f
UW
4420 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4421 discover_nonconstant_array_refs ();
4422
e41b2a33 4423 targetm.expand_to_rtl_hook ();
cb91fab0 4424 crtl->stack_alignment_needed = STACK_BOUNDARY;
2e3f842f 4425 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
890b9b96 4426 crtl->stack_alignment_estimated = 0;
cb91fab0
JH
4427 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4428 cfun->cfg->max_jumptable_ents = 0;
4429
ae9fd6b7
JH
 4430 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
 4431 of the function section at expansion time to predict the distance of calls. */
4432 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4433
727a31fa 4434 /* Expand the variables recorded during gimple lowering. */
f029db69 4435 timevar_push (TV_VAR_EXPAND);
3a42502d
RH
4436 start_sequence ();
4437
242229bb 4438 expand_used_vars ();
3a42502d
RH
4439
4440 var_seq = get_insns ();
4441 end_sequence ();
f029db69 4442 timevar_pop (TV_VAR_EXPAND);
242229bb 4443
7d69de61
RH
4444 /* Honor stack protection warnings. */
4445 if (warn_stack_protect)
4446 {
e3b5732b 4447 if (cfun->calls_alloca)
b8698a0f 4448 warning (OPT_Wstack_protector,
3b123595
SB
4449 "stack protector not protecting local variables: "
4450 "variable length buffer");
cb91fab0 4451 if (has_short_buffer && !crtl->stack_protect_guard)
b8698a0f 4452 warning (OPT_Wstack_protector,
3b123595
SB
4453 "stack protector not protecting function: "
4454 "all local arrays are less than %d bytes long",
7d69de61
RH
4455 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4456 }
4457
242229bb 4458 /* Set up parameters and prepare for return, for the function. */
b79c5284 4459 expand_function_start (current_function_decl);
242229bb 4460
3a42502d
RH
4461 /* If we emitted any instructions for setting up the variables,
4462 emit them before the FUNCTION_START note. */
4463 if (var_seq)
4464 {
4465 emit_insn_before (var_seq, parm_birth_insn);
4466
4467 /* In expand_function_end we'll insert the alloca save/restore
 4468 before parm_birth_insn. We've just inserted an alloca call.
4469 Adjust the pointer to match. */
4470 parm_birth_insn = var_seq;
4471 }
4472
4e3825db
MM
4473 /* Now that we also have the parameter RTXs, copy them over to our
4474 partitions. */
4475 for (i = 0; i < SA.map->num_partitions; i++)
4476 {
4477 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4478
4479 if (TREE_CODE (var) != VAR_DECL
4480 && !SA.partition_to_pseudo[i])
4481 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4482 gcc_assert (SA.partition_to_pseudo[i]);
eb7adebc
MM
4483
4484 /* If this decl was marked as living in multiple places, reset
4485 this now to NULL. */
4486 if (DECL_RTL_IF_SET (var) == pc_rtx)
4487 SET_DECL_RTL (var, NULL);
4488
4e3825db
MM
4489 /* Some RTL parts really want to look at DECL_RTL(x) when x
4490 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4491 SET_DECL_RTL here making this available, but that would mean
 4492 selecting one of the potentially many RTLs for one DECL. Instead
4493 of doing that we simply reset the MEM_EXPR of the RTL in question,
4494 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4495 if (!DECL_RTL_SET_P (var))
4496 {
4497 if (MEM_P (SA.partition_to_pseudo[i]))
4498 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4499 }
4500 }
4501
d466b407
MM
4502 /* If we have a class containing differently aligned pointers
4503 we need to merge those into the corresponding RTL pointer
4504 alignment. */
4505 for (i = 1; i < num_ssa_names; i++)
4506 {
4507 tree name = ssa_name (i);
4508 int part;
4509 rtx r;
4510
4511 if (!name
4512 || !POINTER_TYPE_P (TREE_TYPE (name))
4513 /* We might have generated new SSA names in
4514 update_alias_info_with_stack_vars. They will have a NULL
 4515 defining statement, and won't be part of the partitioning,
4516 so ignore those. */
4517 || !SSA_NAME_DEF_STMT (name))
4518 continue;
4519 part = var_to_partition (SA.map, name);
4520 if (part == NO_PARTITION)
4521 continue;
4522 r = SA.partition_to_pseudo[part];
4523 if (REG_P (r))
4524 mark_reg_pointer (r, get_pointer_alignment (name));
4525 }
4526
242229bb
JH
4527 /* If this function is `main', emit a call to `__main'
4528 to run global initializers, etc. */
4529 if (DECL_NAME (current_function_decl)
4530 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4531 && DECL_FILE_SCOPE_P (current_function_decl))
4532 expand_main_function ();
4533
7d69de61
RH
4534 /* Initialize the stack_protect_guard field. This must happen after the
4535 call to __main (if any) so that the external decl is initialized. */
cb91fab0 4536 if (crtl->stack_protect_guard)
7d69de61
RH
4537 stack_protect_prologue ();
4538
4e3825db
MM
4539 expand_phi_nodes (&SA);
4540
3fbd86b1 4541 /* Register rtl specific functions for cfg. */
242229bb
JH
4542 rtl_register_cfg_hooks ();
4543
4544 init_block = construct_init_block ();
4545
0ef90296 4546 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4e3825db 4547 remaining edges later. */
0ef90296
ZD
4548 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4549 e->flags &= ~EDGE_EXECUTABLE;
4550
8b11009b 4551 lab_rtx_for_bb = pointer_map_create ();
242229bb 4552 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
10d22567 4553 bb = expand_gimple_basic_block (bb);
bf08ebeb 4554
b5b8b0ac
AO
4555 if (MAY_HAVE_DEBUG_INSNS)
4556 expand_debug_locations ();
4557
4e3825db 4558 execute_free_datastructures ();
f029db69 4559 timevar_push (TV_OUT_OF_SSA);
4e3825db 4560 finish_out_of_ssa (&SA);
f029db69 4561 timevar_pop (TV_OUT_OF_SSA);
4e3825db 4562
f029db69 4563 timevar_push (TV_POST_EXPAND);
91753e21
RG
4564 /* We are no longer in SSA form. */
4565 cfun->gimple_df->in_ssa_p = false;
7d776ee2
RG
4566 if (current_loops)
4567 loops_state_clear (LOOP_CLOSED_SSA);
91753e21 4568
bf08ebeb
JH
 4569 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4570 conservatively to true until they are all profile aware. */
8b11009b 4571 pointer_map_destroy (lab_rtx_for_bb);
cb91fab0 4572 free_histograms ();
242229bb
JH
4573
4574 construct_exit_block ();
55e092c4
JH
4575 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4576 insn_locators_finalize ();
242229bb 4577
1d65f45c 4578 /* Zap the tree EH table. */
e8a2a782 4579 set_eh_throw_stmt_table (cfun, NULL);
242229bb 4580
42821aff
MM
 4581 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence to
 4582 split edges, which edge insertions might do. */
242229bb 4583 rebuild_jump_labels (get_insns ());
242229bb 4584
4e3825db
MM
4585 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4586 {
4587 edge e;
4588 edge_iterator ei;
4589 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4590 {
4591 if (e->insns.r)
bc470c24 4592 {
42821aff 4593 rebuild_jump_labels_chain (e->insns.r);
bc470c24
JJ
4594 /* Avoid putting insns before parm_birth_insn. */
4595 if (e->src == ENTRY_BLOCK_PTR
4596 && single_succ_p (ENTRY_BLOCK_PTR)
4597 && parm_birth_insn)
4598 {
4599 rtx insns = e->insns.r;
4600 e->insns.r = NULL_RTX;
4601 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4602 }
4603 else
4604 commit_one_edge_insertion (e);
4605 }
4e3825db
MM
4606 else
4607 ei_next (&ei);
4608 }
4609 }
4610
4611 /* We're done expanding trees to RTL. */
4612 currently_expanding_to_rtl = 0;
4613
4614 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4615 {
4616 edge e;
4617 edge_iterator ei;
4618 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4619 {
4620 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4621 e->flags &= ~EDGE_EXECUTABLE;
4622
4623 /* At the moment not all abnormal edges match the RTL
4624 representation. It is safe to remove them here as
4625 find_many_sub_basic_blocks will rediscover them.
4626 In the future we should get this fixed properly. */
4627 if ((e->flags & EDGE_ABNORMAL)
4628 && !(e->flags & EDGE_SIBCALL))
4629 remove_edge (e);
4630 else
4631 ei_next (&ei);
4632 }
4633 }
4634
242229bb
JH
4635 blocks = sbitmap_alloc (last_basic_block);
4636 sbitmap_ones (blocks);
4637 find_many_sub_basic_blocks (blocks);
242229bb 4638 sbitmap_free (blocks);
4e3825db 4639 purge_all_dead_edges ();
242229bb 4640
2e3f842f
L
4641 expand_stack_alignment ();
4642
be147e84
RG
4643 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
4644 function. */
4645 if (crtl->tail_call_emit)
4646 fixup_tail_calls ();
4647
dac1fbf8
RG
4648 /* After initial rtl generation, call back to finish generating
4649 exception support code. We need to do this before cleaning up
4650 the CFG as the code does not expect dead landing pads. */
4651 if (cfun->eh->region_tree != NULL)
4652 finish_eh_generation ();
4653
4654 /* Remove unreachable blocks, otherwise we cannot compute dominators
4655 which are needed for loop state verification. As a side-effect
4656 this also compacts blocks.
4657 ??? We cannot remove trivially dead insns here as for example
4658 the DRAP reg on i?86 is not magically live at this point.
4659 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
4660 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4661
242229bb 4662#ifdef ENABLE_CHECKING
62e5bf5d 4663 verify_flow_info ();
242229bb 4664#endif
9f8628ba 4665
be147e84
RG
4666 /* Initialize pseudos allocated for hard registers. */
4667 emit_initial_value_sets ();
4668
4669 /* And finally unshare all RTL. */
4670 unshare_all_rtl ();
4671
9f8628ba
PB
4672 /* There's no need to defer outputting this function any more; we
4673 know we want to output it. */
4674 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4675
4676 /* Now that we're done expanding trees to RTL, we shouldn't have any
4677 more CONCATs anywhere. */
4678 generating_concat_p = 0;
4679
b7211528
SB
4680 if (dump_file)
4681 {
4682 fprintf (dump_file,
4683 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4684 /* And the pass manager will dump RTL for us. */
4685 }
ef330312
PB
4686
4687 /* If we're emitting a nested function, make sure its parent gets
4688 emitted as well. Doing otherwise confuses debug info. */
c22cacf3 4689 {
ef330312
PB
4690 tree parent;
4691 for (parent = DECL_CONTEXT (current_function_decl);
c22cacf3
MS
4692 parent != NULL_TREE;
4693 parent = get_containing_scope (parent))
ef330312 4694 if (TREE_CODE (parent) == FUNCTION_DECL)
c22cacf3 4695 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
ef330312 4696 }
c22cacf3 4697
ef330312
PB
4698 /* We are now committed to emitting code for this function. Do any
4699 preparation, such as emitting abstract debug info for the inline
4700 before it gets mangled by optimization. */
4701 if (cgraph_function_possibly_inlined_p (current_function_decl))
4702 (*debug_hooks->outlining_inline_function) (current_function_decl);
4703
4704 TREE_ASM_WRITTEN (current_function_decl) = 1;
4bb1e037
AP
4705
4706 /* After expanding, the return labels are no longer needed. */
4707 return_label = NULL;
4708 naked_return_label = NULL;
0a35513e
AH
4709
4710 /* After expanding, the tm_restart map is no longer needed. */
4711 if (cfun->gimple_df->tm_restart)
4712 {
4713 htab_delete (cfun->gimple_df->tm_restart);
4714 cfun->gimple_df->tm_restart = NULL;
4715 }
4716
55e092c4
JH
4717 /* Tag the blocks with a depth number so that change_scope can find
4718 the common parent easily. */
4719 set_block_levels (DECL_INITIAL (cfun->decl), 0);
bf08ebeb 4720 default_rtl_profile ();
be147e84 4721
f029db69 4722 timevar_pop (TV_POST_EXPAND);
be147e84 4723
c2924966 4724 return 0;
242229bb
JH
4725}
4726
e3b5732b 4727struct rtl_opt_pass pass_expand =
242229bb 4728{
8ddbbcae 4729 {
e3b5732b 4730 RTL_PASS,
c22cacf3 4731 "expand", /* name */
242229bb 4732 NULL, /* gate */
726a989a 4733 gimple_expand_cfg, /* execute */
242229bb
JH
4734 NULL, /* sub */
4735 NULL, /* next */
4736 0, /* static_pass_number */
c22cacf3 4737 TV_EXPAND, /* tv_id */
688a482d
RG
4738 PROP_ssa | PROP_gimple_leh | PROP_cfg
4739 | PROP_gimple_lcx, /* properties_required */
242229bb 4740 PROP_rtl, /* properties_provided */
4e3825db
MM
4741 PROP_ssa | PROP_trees, /* properties_destroyed */
4742 TODO_verify_ssa | TODO_verify_flow
4743 | TODO_verify_stmts, /* todo_flags_start */
22c5fa5f 4744 TODO_ggc_collect /* todo_flags_finish */
8ddbbcae 4745 }
242229bb 4746};