/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "regs.h" /* For reg_renumber.  */
#include "integrate.h" /* For emit_initial_value_sets.  */
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */
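/* For illustration only (hypothetical SSA names): given a gimple assignment
   a_1 = b_2 + c_3, which has a GIMPLE_BINARY_RHS, this returns the tree
   PLUS_EXPR <b_2, c_3> built with the type of a_1.  */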

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static struct pointer_map_t *decl_to_stack_part;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */
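/* For illustration (assuming FRAME_GROWS_DOWNWARD and frame_phase == 0):
   a request for SIZE 12 at ALIGN 8 while frame_offset is -16 first moves
   new_frame_offset to -28, rounds it down to -32, and returns -32 as the
   slot's offset; frame_offset is left at -32.  */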

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = pointer_map_create ();

  v = &stack_vars[stack_vars_num];
  * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;

  v->decl = decl;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
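  /* Note: pc_rtx serves as a sentinel here; code such as expand_stack_vars
     treats a DECL_RTL of pc_rtx as "deferred to the stack-slot machinery,
     no location assigned yet".  */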
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (See PR25654).  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case when
                 -fstrict-aliasing is used.  */
              || (contains_union && flag_strict_aliasing))
            add_stack_var_conflict (i, j);
        }
    }
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v =
        (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}

/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  bool (*visit)(gimple, tree, void *);

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = (size_t *)
                  pointer_map_contains (decl_to_stack_part, lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  unsigned j;
                  bitmap_iterator bj;
                  EXECUTE_IF_SET_IN_BITMAP (work, i + 1, j, bj)
                    add_stack_var_conflict (i, j);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}

/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */
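/* For illustration: for two disjoint source-level scopes
     { char buf1[64]; use (&buf1); }  { char buf2[64]; use (&buf2); }
   (use being a hypothetical function) gimplification emits buf1 = {CLOBBER}
   at the end of the first scope, so buf1 is dead before buf2 is first
   mentioned; no conflict is recorded and the two buffers may share one
   stack slot.  */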

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we for instance moved an address-taken
     operation upward, without also moving a dereference to it upwards.
     But it's conservatively correct as a variable never can hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB (bb)
    bb->aux = BITMAP_ALLOC (NULL);

  changed = true;
  while (changed)
    {
      changed = false;
      FOR_EACH_BB (bb)
        {
          bitmap active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB (bb)
    add_scope_conflicts_1 (bb, work, true);

  BITMAP_FREE (work);
  FOR_ALL_BB (bb)
    BITMAP_FREE (bb->aux);
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}


/* If the points-to solution *PI points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert(visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced, except
             for -O0 where we are preserving even unreferenced variables.  */
          gcc_assert (DECL_P (decl)
                      && (!optimize
                          || referenced_var_lookup (cfun, DECL_UID (decl))));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

   Sort the objects by size in descending order.
   For each object A {
     S = size(A)
     O = 0
     loop {
       Look for the largest non-conflicting object B with size <= S.
       UNION (A, B)
     }
   }
*/
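/* For illustration: an 8-byte buffer whose lifetime does not overlap that of
   a non-conflicting 32-byte buffer ends up in the 32-byte buffer's partition
   and shares its frame slot, so the pair costs 32 bytes of frame space
   rather than 40.  */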

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
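      /* offset & -offset isolates the lowest set bit of OFFSET, i.e. the
         largest power of two that divides it, which is the alignment the
         slot is actually guaranteed to have relative to the base.  */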
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */
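/* Note: partitions whose required alignment does not exceed
   MAX_SUPPORTED_STACK_ALIGNMENT get offsets within the normal frame (base
   virtual_stack_vars_rtx); over-aligned partitions are instead carved out
   of one block obtained via allocate_dynamic_stack_space, so only a
   pointer-sized portion lives in the frame itself.  */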

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (decl))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */
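/* For illustration: at -O0 a block-scope variable smaller than 32 bytes gets
   its frame slot immediately, while larger ones are still deferred for
   packing; with -fstack-protector or "large" alignment everything is
   deferred.  */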

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.
*/

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made */
      gcc_assert(!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */
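/* For illustration: with the default ssp-buffer-size of 8, a char buf[64]
   classifies as SPCT_HAS_LARGE_CHAR_ARRAY and is allocated in phase 1; an
   int arr[4] only has SPCT_HAS_ARRAY set, so it is phase 2 under
   -fstack-protector-all and phase 0 (not segregated) otherwise.  */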

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  unsigned ix;
  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
    TREE_USED (t) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  pointer_map_destroy (decl_to_stack_part);
  decl_to_stack_part = NULL;
}

/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  tree old_cur_fun_decl = current_function_decl;
  referenced_var_iterator rvi;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  current_function_decl = node->decl;
  push_cfun (fn);

  gcc_checking_assert (gimple_referenced_vars (fn));
  FOR_EACH_REFERENCED_VAR (fn, var, rvi)
    size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  pop_cfun ();
  current_function_decl = old_cur_fun_decl;
  return size;
}

1f6d3a08 1513/* Expand all variables used in the function. */
727a31fa
RH
1514
1515static void
1516expand_used_vars (void)
1517{
c021f10b
NF
1518 tree var, outer_block = DECL_INITIAL (current_function_decl);
1519 VEC(tree,heap) *maybe_local_decls = NULL;
4e3825db 1520 unsigned i;
c021f10b 1521 unsigned len;
727a31fa 1522
1f6d3a08
RH
1523 /* Compute the phase of the stack frame for this function. */
1524 {
1525 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1526 int off = STARTING_FRAME_OFFSET % align;
1527 frame_phase = off ? align - off : 0;
1528 }
727a31fa 1529
ff28a94d 1530 init_vars_expansion ();
7d69de61 1531
4e3825db
MM
1532 for (i = 0; i < SA.map->num_partitions; i++)
1533 {
1534 tree var = partition_to_var (SA.map, i);
1535
1536 gcc_assert (is_gimple_reg (var));
1537 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1538 expand_one_var (var, true, true);
1539 else
1540 {
1541 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1542 contain the default def (representing the parm or result itself)
1543 we don't do anything here. But those which don't contain the
1544 default def (representing a temporary based on the parm/result)
1545 we need to allocate space just like for normal VAR_DECLs. */
1546 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1547 {
1548 expand_one_var (var, true, true);
1549 gcc_assert (SA.partition_to_pseudo[i]);
1550 }
1551 }
1552 }
1553
cb91fab0 1554 /* At this point all variables on the local_decls with TREE_USED
1f6d3a08 1555 set are not associated with any block scope. Lay them out. */
c021f10b
NF
1556
1557 len = VEC_length (tree, cfun->local_decls);
1558 FOR_EACH_LOCAL_DECL (cfun, i, var)
1f6d3a08 1559 {
1f6d3a08
RH
1560 bool expand_now = false;
1561
4e3825db
MM
1562 /* Expanded above already. */
1563 if (is_gimple_reg (var))
eb7adebc
MM
1564 {
1565 TREE_USED (var) = 0;
3adcf52c 1566 goto next;
eb7adebc 1567 }
1f6d3a08
RH
1568 /* We didn't set a block for static or extern because it's hard
1569 to tell the difference between a global variable (re)declared
1570 in a local scope, and one that's really declared there to
1571 begin with. And it doesn't really matter much, since we're
1572 not giving them stack space. Expand them now. */
4e3825db 1573 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1f6d3a08
RH
1574 expand_now = true;
1575
1576 /* If the variable is not associated with any block, then it
1577 was created by the optimizers, and could be live anywhere
1578 in the function. */
1579 else if (TREE_USED (var))
1580 expand_now = true;
1581
1582 /* Finally, mark all variables on the list as used. We'll use
1583 this in a moment when we expand those associated with scopes. */
1584 TREE_USED (var) = 1;
1585
1586 if (expand_now)
3adcf52c
JM
1587 expand_one_var (var, true, true);
1588
1589 next:
1590 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
802e9f8e 1591 {
3adcf52c
JM
1592 rtx rtl = DECL_RTL_IF_SET (var);
1593
1594 /* Keep artificial non-ignored vars in cfun->local_decls
1595 chain until instantiate_decls. */
1596 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 1597 add_local_decl (cfun, var);
6c6366f6 1598 else if (rtl == NULL_RTX)
c021f10b
NF
1599 /* If rtl isn't set yet, which can happen e.g. with
1600 -fstack-protector, retry before returning from this
1601 function. */
1602 VEC_safe_push (tree, heap, maybe_local_decls, var);
802e9f8e 1603 }
1f6d3a08 1604 }
1f6d3a08 1605
c021f10b
NF
1606 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1607
1608 +-----------------+-----------------+
1609 | ...processed... | ...duplicates...|
1610 +-----------------+-----------------+
1611 ^
1612 +-- LEN points here.
1613
1614 We just want the duplicates, as those are the artificial
1615 non-ignored vars that we want to keep until instantiate_decls.
1616 Move them down and truncate the array. */
1617 if (!VEC_empty (tree, cfun->local_decls))
1618 VEC_block_remove (tree, cfun->local_decls, 0, len);
1619
1f6d3a08
RH
1620 /* At this point, all variables within the block tree with TREE_USED
1621 set are actually used by the optimized function. Lay them out. */
1622 expand_used_vars_for_block (outer_block, true);
1623
1624 if (stack_vars_num > 0)
1625 {
47598145 1626 add_scope_conflicts ();
1f6d3a08 1627 /* Due to the way alias sets work, no variables with non-conflicting
c22cacf3 1628 alias sets may be assigned the same address. Add conflicts to
1f6d3a08
RH
1629 reflect this. */
1630 add_alias_set_conflicts ();
1631
c22cacf3 1632 /* If stack protection is enabled, we don't share space between
7d69de61
RH
1633 vulnerable data and non-vulnerable data. */
1634 if (flag_stack_protect)
1635 add_stack_protection_conflicts ();
1636
c22cacf3 1637 /* Now that we have collected all stack variables, and have computed a
1f6d3a08
RH
1638 minimal interference graph, attempt to save some stack space. */
1639 partition_stack_vars ();
1640 if (dump_file)
1641 dump_stack_var_partition ();
7d69de61
RH
1642 }
1643
1644 /* There are several conditions under which we should create a
1645 stack guard: protect-all, alloca used, protected decls present. */
1646 if (flag_stack_protect == 2
1647 || (flag_stack_protect
e3b5732b 1648 && (cfun->calls_alloca || has_protected_decls)))
7d69de61 1649 create_stack_guard ();
1f6d3a08 1650
7d69de61
RH
1651 /* Assign rtl to each variable based on these partitions. */
1652 if (stack_vars_num > 0)
1653 {
1654 /* Reorder decls to be protected by iterating over the variables
1655 array multiple times, and allocating out of each phase in turn. */
c22cacf3 1656 /* ??? We could probably integrate this into the qsort we did
7d69de61
RH
1657 earlier, such that we naturally see these variables first,
1658 and thus naturally allocate things in the right order. */
1659 if (has_protected_decls)
1660 {
1661 /* Phase 1 contains only character arrays. */
1662 expand_stack_vars (stack_protect_decl_phase_1);
1663
1664 /* Phase 2 contains other kinds of arrays. */
1665 if (flag_stack_protect == 2)
1666 expand_stack_vars (stack_protect_decl_phase_2);
1667 }
1668
1669 expand_stack_vars (NULL);
1f6d3a08 1670
ff28a94d 1671 fini_vars_expansion ();
1f6d3a08
RH
1672 }
1673
6c6366f6
JJ
1674 /* If there were any artificial non-ignored vars without rtl
1675 found earlier, see if deferred stack allocation hasn't assigned
1676 rtl to them. */
c021f10b 1677 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
6c6366f6 1678 {
6c6366f6
JJ
1679 rtx rtl = DECL_RTL_IF_SET (var);
1680
6c6366f6
JJ
1681 /* Keep artificial non-ignored vars in cfun->local_decls
1682 chain until instantiate_decls. */
1683 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 1684 add_local_decl (cfun, var);
6c6366f6 1685 }
c021f10b 1686 VEC_free (tree, heap, maybe_local_decls);
6c6366f6 1687
1f6d3a08
RH
1688 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1689 if (STACK_ALIGNMENT_NEEDED)
1690 {
1691 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1692 if (!FRAME_GROWS_DOWNWARD)
1693 frame_offset += align - 1;
1694 frame_offset &= -align;
1695 }
727a31fa
RH
1696}
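
/* A stand-alone sketch of the FRAME_OFFSET alignment arithmetic above,
   assuming ALIGN is a power of two and two's-complement arithmetic.
   The mask rounds the offset away from the locals already laid out:
   up for upward-growing frames, down (toward more negative offsets)
   when the frame grows downward.  */
#include <assert.h>

static long
align_frame_offset (long offset, long align, int frame_grows_downward)
{
  if (!frame_grows_downward)
    offset += align - 1;	/* Round up to the next multiple...  */
  return offset & -align;	/* ...or truncate downward when the frame
				   grows toward negative offsets.  */
}

int
main (void)
{
  assert (align_frame_offset (13, 8, 0) == 16);
  assert (align_frame_offset (-13, 8, 1) == -16);
  return 0;
}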
1697
1698
b7211528
SB
1699/* If we need to produce a detailed dump, print the tree representation
1700 for STMT to the dump file. SINCE is the last RTX after which the RTL
1701 generated for STMT should have been appended. */
1702
1703static void
726a989a 1704maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
b7211528
SB
1705{
1706 if (dump_file && (dump_flags & TDF_DETAILS))
1707 {
1708 fprintf (dump_file, "\n;; ");
b5b8b0ac
AO
1709 print_gimple_stmt (dump_file, stmt, 0,
1710 TDF_SLIM | (dump_flags & TDF_LINENO));
b7211528
SB
1711 fprintf (dump_file, "\n");
1712
1713 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1714 }
1715}
1716
8b11009b
ZD
1717/* Maps the blocks that do not contain tree labels to rtx labels. */
1718
1719static struct pointer_map_t *lab_rtx_for_bb;
1720
a9b77cd1
ZD
1721/* Returns the label_rtx expression for a label starting basic block BB. */
1722
1723static rtx
726a989a 1724label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
a9b77cd1 1725{
726a989a
RB
1726 gimple_stmt_iterator gsi;
1727 tree lab;
1728 gimple lab_stmt;
8b11009b 1729 void **elt;
a9b77cd1
ZD
1730
1731 if (bb->flags & BB_RTL)
1732 return block_label (bb);
1733
8b11009b
ZD
1734 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1735 if (elt)
ae50c0cb 1736 return (rtx) *elt;
8b11009b
ZD
1737
1738 /* Find the tree label if it is present. */
b8698a0f 1739
726a989a 1740 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
a9b77cd1 1741 {
726a989a
RB
1742 lab_stmt = gsi_stmt (gsi);
1743 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
a9b77cd1
ZD
1744 break;
1745
726a989a 1746 lab = gimple_label_label (lab_stmt);
a9b77cd1
ZD
1747 if (DECL_NONLOCAL (lab))
1748 break;
1749
1750 return label_rtx (lab);
1751 }
1752
8b11009b
ZD
1753 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1754 *elt = gen_label_rtx ();
ae50c0cb 1755 return (rtx) *elt;
a9b77cd1
ZD
1756}
1757
726a989a 1758
529ff441
MM
1759/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1760 of a basic block where we just expanded the conditional at the end,
315adeda
MM
1761 possibly clean up the CFG and instruction sequence. LAST is the
1762 last instruction before the just emitted jump sequence. */
529ff441
MM
1763
1764static void
315adeda 1765maybe_cleanup_end_of_block (edge e, rtx last)
529ff441
MM
1766{
1767 /* Special case: when jumpif decides that the condition is
1768 trivial it emits an unconditional jump (and the necessary
1769 barrier). But we still have two edges, the fallthru one is
1770 wrong. purge_dead_edges would clean this up later. Unfortunately
1771 we have to insert insns (and split edges) before
1772 find_many_sub_basic_blocks and hence before purge_dead_edges.
1773 But splitting edges might create new blocks which depend on the
1774 fact that if there are two edges there's no barrier. So the
1775 barrier would get lost and verify_flow_info would ICE. Instead
1776 of auditing all edge splitters to care for the barrier (which
1777 normally isn't there in a cleaned CFG), fix it here. */
1778 if (BARRIER_P (get_last_insn ()))
1779 {
529ff441
MM
1780 rtx insn;
1781 remove_edge (e);
1782 /* Now, we have a single successor block, if we have insns to
1783 insert on the remaining edge we potentially will insert
1784 it at the end of this block (if the dest block isn't feasible)
1785 in order to avoid splitting the edge. This insertion will take
1786 place in front of the last jump. But we might have emitted
1787 multiple jumps (conditional and one unconditional) to the
1788 same destination. Inserting in front of the last one then
1789 is a problem. See PR 40021. We fix this by deleting all
1790 jumps except the last unconditional one. */
1791 insn = PREV_INSN (get_last_insn ());
1792 /* Make sure we have an unconditional jump. Otherwise we're
1793 confused. */
1794 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
315adeda 1795 for (insn = PREV_INSN (insn); insn != last;)
529ff441
MM
1796 {
1797 insn = PREV_INSN (insn);
1798 if (JUMP_P (NEXT_INSN (insn)))
90eb3e33 1799 {
8a269cb7 1800 if (!any_condjump_p (NEXT_INSN (insn)))
90eb3e33
JJ
1801 {
1802 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1803 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1804 }
1805 delete_insn (NEXT_INSN (insn));
1806 }
529ff441
MM
1807 }
1808 }
1809}
1810
726a989a 1811/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
80c7a9eb
RH
1812 Returns a new basic block if we've terminated the current basic
1813 block and created a new one. */
1814
1815static basic_block
726a989a 1816expand_gimple_cond (basic_block bb, gimple stmt)
80c7a9eb
RH
1817{
1818 basic_block new_bb, dest;
1819 edge new_edge;
1820 edge true_edge;
1821 edge false_edge;
b7211528 1822 rtx last2, last;
28ed065e
MM
1823 enum tree_code code;
1824 tree op0, op1;
1825
1826 code = gimple_cond_code (stmt);
1827 op0 = gimple_cond_lhs (stmt);
1828 op1 = gimple_cond_rhs (stmt);
1829 /* We're sometimes presented with such code:
1830 D.123_1 = x < y;
1831 if (D.123_1 != 0)
1832 ...
1833 This would expand to two comparisons which then later might
1834 be cleaned up by combine. But some pattern matchers like if-conversion
1835 work better when there's only one compare, so make up for this
1836 here as a special exception if TER would have made the same change. */
1837 if (gimple_cond_single_var_p (stmt)
1838 && SA.values
1839 && TREE_CODE (op0) == SSA_NAME
1840 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1841 {
1842 gimple second = SSA_NAME_DEF_STMT (op0);
e83f4b68 1843 if (gimple_code (second) == GIMPLE_ASSIGN)
28ed065e 1844 {
e83f4b68
MM
1845 enum tree_code code2 = gimple_assign_rhs_code (second);
1846 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1847 {
1848 code = code2;
1849 op0 = gimple_assign_rhs1 (second);
1850 op1 = gimple_assign_rhs2 (second);
1851 }
1852 /* If jumps are cheap turn some more codes into
1853 jumpy sequences. */
1854 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1855 {
1856 if ((code2 == BIT_AND_EXPR
1857 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1858 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1859 || code2 == TRUTH_AND_EXPR)
1860 {
1861 code = TRUTH_ANDIF_EXPR;
1862 op0 = gimple_assign_rhs1 (second);
1863 op1 = gimple_assign_rhs2 (second);
1864 }
1865 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1866 {
1867 code = TRUTH_ORIF_EXPR;
1868 op0 = gimple_assign_rhs1 (second);
1869 op1 = gimple_assign_rhs2 (second);
1870 }
1871 }
28ed065e
MM
1872 }
1873 }
b7211528
SB
1874
1875 last2 = last = get_last_insn ();
80c7a9eb
RH
1876
1877 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
c82fee88
EB
1878 set_curr_insn_source_location (gimple_location (stmt));
1879 set_curr_insn_block (gimple_block (stmt));
80c7a9eb
RH
1880
1881 /* These flags have no purpose in RTL land. */
1882 true_edge->flags &= ~EDGE_TRUE_VALUE;
1883 false_edge->flags &= ~EDGE_FALSE_VALUE;
1884
1885 /* We can either have a pure conditional jump with one fallthru edge or
1886 two-way jump that needs to be decomposed into two basic blocks. */
a9b77cd1 1887 if (false_edge->dest == bb->next_bb)
80c7a9eb 1888 {
40e90eac
JJ
1889 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1890 true_edge->probability);
726a989a 1891 maybe_dump_rtl_for_gimple_stmt (stmt, last);
a9b77cd1 1892 if (true_edge->goto_locus)
7241571e
JJ
1893 {
1894 set_curr_insn_source_location (true_edge->goto_locus);
1895 set_curr_insn_block (true_edge->goto_block);
1896 true_edge->goto_locus = curr_insn_locator ();
1897 }
1898 true_edge->goto_block = NULL;
a9b77cd1 1899 false_edge->flags |= EDGE_FALLTHRU;
315adeda 1900 maybe_cleanup_end_of_block (false_edge, last);
80c7a9eb
RH
1901 return NULL;
1902 }
a9b77cd1 1903 if (true_edge->dest == bb->next_bb)
80c7a9eb 1904 {
40e90eac
JJ
1905 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1906 false_edge->probability);
726a989a 1907 maybe_dump_rtl_for_gimple_stmt (stmt, last);
a9b77cd1 1908 if (false_edge->goto_locus)
7241571e
JJ
1909 {
1910 set_curr_insn_source_location (false_edge->goto_locus);
1911 set_curr_insn_block (false_edge->goto_block);
1912 false_edge->goto_locus = curr_insn_locator ();
1913 }
1914 false_edge->goto_block = NULL;
a9b77cd1 1915 true_edge->flags |= EDGE_FALLTHRU;
315adeda 1916 maybe_cleanup_end_of_block (true_edge, last);
80c7a9eb
RH
1917 return NULL;
1918 }
80c7a9eb 1919
40e90eac
JJ
1920 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1921 true_edge->probability);
80c7a9eb 1922 last = get_last_insn ();
7241571e
JJ
1923 if (false_edge->goto_locus)
1924 {
1925 set_curr_insn_source_location (false_edge->goto_locus);
1926 set_curr_insn_block (false_edge->goto_block);
1927 false_edge->goto_locus = curr_insn_locator ();
1928 }
1929 false_edge->goto_block = NULL;
a9b77cd1 1930 emit_jump (label_rtx_for_bb (false_edge->dest));
80c7a9eb
RH
1931
1932 BB_END (bb) = last;
1933 if (BARRIER_P (BB_END (bb)))
1934 BB_END (bb) = PREV_INSN (BB_END (bb));
1935 update_bb_for_insn (bb);
1936
1937 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1938 dest = false_edge->dest;
1939 redirect_edge_succ (false_edge, new_bb);
1940 false_edge->flags |= EDGE_FALLTHRU;
1941 new_bb->count = false_edge->count;
1942 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1943 new_edge = make_edge (new_bb, dest, 0);
1944 new_edge->probability = REG_BR_PROB_BASE;
1945 new_edge->count = new_bb->count;
1946 if (BARRIER_P (BB_END (new_bb)))
1947 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1948 update_bb_for_insn (new_bb);
1949
726a989a 1950 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
c22cacf3 1951
7787b4aa
JJ
1952 if (true_edge->goto_locus)
1953 {
1954 set_curr_insn_source_location (true_edge->goto_locus);
1955 set_curr_insn_block (true_edge->goto_block);
1956 true_edge->goto_locus = curr_insn_locator ();
1957 }
1958 true_edge->goto_block = NULL;
1959
80c7a9eb
RH
1960 return new_bb;
1961}
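
/* A stand-alone sketch of the operand-level fact the BRANCH_COST
   conversion above relies on: for operands of single-bit precision,
   BIT_AND_EXPR / BIT_IOR_EXPR agree with the short-circuit
   TRUTH_ANDIF_EXPR / TRUTH_ORIF_EXPR forms, so the bitwise form may be
   expanded as a jumpy sequence when branches are cheap.  */
#include <assert.h>
#include <stdbool.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      {
	assert (((bool) a & (bool) b) == ((bool) a && (bool) b));
	assert (((bool) a | (bool) b) == ((bool) a || (bool) b));
      }
  return 0;
}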
1962
0a35513e
AH
1963/* Mark all calls that can have a transaction restart. */
1964
1965static void
1966mark_transaction_restart_calls (gimple stmt)
1967{
1968 struct tm_restart_node dummy;
1969 void **slot;
1970
1971 if (!cfun->gimple_df->tm_restart)
1972 return;
1973
1974 dummy.stmt = stmt;
1975 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
1976 if (slot)
1977 {
1978 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
1979 tree list = n->label_or_list;
1980 rtx insn;
1981
1982 for (insn = next_real_insn (get_last_insn ());
1983 !CALL_P (insn);
1984 insn = next_real_insn (insn))
1985 continue;
1986
1987 if (TREE_CODE (list) == LABEL_DECL)
1988 add_reg_note (insn, REG_TM, label_rtx (list));
1989 else
1990 for (; list ; list = TREE_CHAIN (list))
1991 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
1992 }
1993}
1994
28ed065e
MM
1995/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1996 statement STMT. */
1997
1998static void
1999expand_call_stmt (gimple stmt)
2000{
25583c4f 2001 tree exp, decl, lhs;
e23817b3 2002 bool builtin_p;
e7925582 2003 size_t i;
28ed065e 2004
25583c4f
RS
2005 if (gimple_call_internal_p (stmt))
2006 {
2007 expand_internal_call (stmt);
2008 return;
2009 }
2010
28ed065e
MM
2011 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2012
2013 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
e23817b3
RG
2014 decl = gimple_call_fndecl (stmt);
2015 builtin_p = decl && DECL_BUILT_IN (decl);
2016
e7925582
EB
2017 /* If this is not a builtin function, the function type through which the
2018 call is made may be different from the type of the function. */
2019 if (!builtin_p)
2020 CALL_EXPR_FN (exp)
b25aa0e8
EB
2021 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2022 CALL_EXPR_FN (exp));
e7925582 2023
28ed065e
MM
2024 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2025 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2026
2027 for (i = 0; i < gimple_call_num_args (stmt); i++)
e23817b3
RG
2028 {
2029 tree arg = gimple_call_arg (stmt, i);
2030 gimple def;
2031 /* TER substitutes addresses into arguments of builtin functions so we
2032 have a chance to infer more accurate alignment information. See PR39954. */
2033 if (builtin_p
2034 && TREE_CODE (arg) == SSA_NAME
2035 && (def = get_gimple_for_ssa_name (arg))
2036 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2037 arg = gimple_assign_rhs1 (def);
2038 CALL_EXPR_ARG (exp, i) = arg;
2039 }
28ed065e 2040
93f28ca7 2041 if (gimple_has_side_effects (stmt))
28ed065e
MM
2042 TREE_SIDE_EFFECTS (exp) = 1;
2043
93f28ca7 2044 if (gimple_call_nothrow_p (stmt))
28ed065e
MM
2045 TREE_NOTHROW (exp) = 1;
2046
2047 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2048 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
63d2a353
MM
2049 if (decl
2050 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13e49da9
TV
2051 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2052 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
63d2a353
MM
2053 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2054 else
2055 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
28ed065e
MM
2056 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2057 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2058 TREE_BLOCK (exp) = gimple_block (stmt);
2059
ddb555ed
JJ
2060 /* Ensure RTL is created for debug args. */
2061 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2062 {
2063 VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
2064 unsigned int ix;
2065 tree dtemp;
2066
2067 if (debug_args)
2068 for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
2069 {
2070 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2071 expand_debug_expr (dtemp);
2072 }
2073 }
2074
25583c4f 2075 lhs = gimple_call_lhs (stmt);
28ed065e
MM
2076 if (lhs)
2077 expand_assignment (lhs, exp, false);
2078 else
2079 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
0a35513e
AH
2080
2081 mark_transaction_restart_calls (stmt);
28ed065e
MM
2082}
2083
2084/* A subroutine of expand_gimple_stmt, expanding one gimple statement
2085 STMT that doesn't require special handling for outgoing edges. That
2086 is no tailcalls and no GIMPLE_COND. */
2087
2088static void
2089expand_gimple_stmt_1 (gimple stmt)
2090{
2091 tree op0;
c82fee88
EB
2092
2093 set_curr_insn_source_location (gimple_location (stmt));
2094 set_curr_insn_block (gimple_block (stmt));
2095
28ed065e
MM
2096 switch (gimple_code (stmt))
2097 {
2098 case GIMPLE_GOTO:
2099 op0 = gimple_goto_dest (stmt);
2100 if (TREE_CODE (op0) == LABEL_DECL)
2101 expand_goto (op0);
2102 else
2103 expand_computed_goto (op0);
2104 break;
2105 case GIMPLE_LABEL:
2106 expand_label (gimple_label_label (stmt));
2107 break;
2108 case GIMPLE_NOP:
2109 case GIMPLE_PREDICT:
2110 break;
28ed065e
MM
2111 case GIMPLE_SWITCH:
2112 expand_case (stmt);
2113 break;
2114 case GIMPLE_ASM:
2115 expand_asm_stmt (stmt);
2116 break;
2117 case GIMPLE_CALL:
2118 expand_call_stmt (stmt);
2119 break;
2120
2121 case GIMPLE_RETURN:
2122 op0 = gimple_return_retval (stmt);
2123
2124 if (op0 && op0 != error_mark_node)
2125 {
2126 tree result = DECL_RESULT (current_function_decl);
2127
2128 /* If we are not returning the current function's RESULT_DECL,
2129 build an assignment to it. */
2130 if (op0 != result)
2131 {
2132 /* I believe that a function's RESULT_DECL is unique. */
2133 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2134
2135 /* ??? We'd like to use simply expand_assignment here,
2136 but this fails if the value is of BLKmode but the return
2137 decl is a register. expand_return has special handling
2138 for this combination, which eventually should move
2139 to common code. See comments there. Until then, let's
2140 build a modify expression :-/ */
2141 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2142 result, op0);
2143 }
2144 }
2145 if (!op0)
2146 expand_null_return ();
2147 else
2148 expand_return (op0);
2149 break;
2150
2151 case GIMPLE_ASSIGN:
2152 {
2153 tree lhs = gimple_assign_lhs (stmt);
2154
2155 /* Tree expand used to fiddle with |= and &= of two bitfield
2156 COMPONENT_REFs here. This can't happen with gimple, the LHS
2157 of binary assigns must be a gimple reg. */
2158
2159 if (TREE_CODE (lhs) != SSA_NAME
2160 || get_gimple_rhs_class (gimple_expr_code (stmt))
2161 == GIMPLE_SINGLE_RHS)
2162 {
2163 tree rhs = gimple_assign_rhs1 (stmt);
2164 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2165 == GIMPLE_SINGLE_RHS);
2166 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2167 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
47598145
MM
2168 if (TREE_CLOBBER_P (rhs))
2169 /* This is a clobber to mark the going out of scope for
2170 this LHS. */
2171 ;
2172 else
2173 expand_assignment (lhs, rhs,
2174 gimple_assign_nontemporal_move_p (stmt));
28ed065e
MM
2175 }
2176 else
2177 {
2178 rtx target, temp;
2179 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2180 struct separate_ops ops;
2181 bool promoted = false;
2182
2183 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2184 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2185 promoted = true;
2186
2187 ops.code = gimple_assign_rhs_code (stmt);
2188 ops.type = TREE_TYPE (lhs);
2189 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2190 {
0354c0c7
BS
2191 case GIMPLE_TERNARY_RHS:
2192 ops.op2 = gimple_assign_rhs3 (stmt);
2193 /* Fallthru */
28ed065e
MM
2194 case GIMPLE_BINARY_RHS:
2195 ops.op1 = gimple_assign_rhs2 (stmt);
2196 /* Fallthru */
2197 case GIMPLE_UNARY_RHS:
2198 ops.op0 = gimple_assign_rhs1 (stmt);
2199 break;
2200 default:
2201 gcc_unreachable ();
2202 }
2203 ops.location = gimple_location (stmt);
2204
2205 /* If we want to use a nontemporal store, force the value to
2206 register first. If we store into a promoted register,
2207 don't directly expand to target. */
2208 temp = nontemporal || promoted ? NULL_RTX : target;
2209 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2210 EXPAND_NORMAL);
2211
2212 if (temp == target)
2213 ;
2214 else if (promoted)
2215 {
4e18a7d4 2216 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
28ed065e
MM
2217 /* If TEMP is a VOIDmode constant, use convert_modes to make
2218 sure that we properly convert it. */
2219 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2220 {
2221 temp = convert_modes (GET_MODE (target),
2222 TYPE_MODE (ops.type),
4e18a7d4 2223 temp, unsignedp);
28ed065e 2224 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4e18a7d4 2225 GET_MODE (target), temp, unsignedp);
28ed065e
MM
2226 }
2227
4e18a7d4 2228 convert_move (SUBREG_REG (target), temp, unsignedp);
28ed065e
MM
2229 }
2230 else if (nontemporal && emit_storent_insn (target, temp))
2231 ;
2232 else
2233 {
2234 temp = force_operand (temp, target);
2235 if (temp != target)
2236 emit_move_insn (target, temp);
2237 }
2238 }
2239 }
2240 break;
2241
2242 default:
2243 gcc_unreachable ();
2244 }
2245}
2246
2247/* Expand one gimple statement STMT and return the last RTL instruction
2248 before any of the newly generated ones.
2249
2250 In addition to generating the necessary RTL instructions this also
2251 sets REG_EH_REGION notes if necessary and sets the current source
2252 location for diagnostics. */
2253
2254static rtx
2255expand_gimple_stmt (gimple stmt)
2256{
28ed065e 2257 location_t saved_location = input_location;
c82fee88
EB
2258 rtx last = get_last_insn ();
2259 int lp_nr;
28ed065e 2260
28ed065e
MM
2261 gcc_assert (cfun);
2262
c82fee88
EB
2263 /* We need to save and restore the current source location so that errors
2264 discovered during expansion are emitted with the right location. But
2265 it would be better if the diagnostic routines used the source location
2266 embedded in the tree nodes rather than globals. */
28ed065e 2267 if (gimple_has_location (stmt))
c82fee88 2268 input_location = gimple_location (stmt);
28ed065e
MM
2269
2270 expand_gimple_stmt_1 (stmt);
c82fee88 2271
28ed065e
MM
2272 /* Free any temporaries used to evaluate this statement. */
2273 free_temp_slots ();
2274
2275 input_location = saved_location;
2276
2277 /* Mark all insns that may trap. */
1d65f45c
RH
2278 lp_nr = lookup_stmt_eh_lp (stmt);
2279 if (lp_nr)
28ed065e
MM
2280 {
2281 rtx insn;
2282 for (insn = next_real_insn (last); insn;
2283 insn = next_real_insn (insn))
2284 {
2285 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2286 /* If we want exceptions for non-call insns, any
2287 may_trap_p instruction may throw. */
2288 && GET_CODE (PATTERN (insn)) != CLOBBER
2289 && GET_CODE (PATTERN (insn)) != USE
1d65f45c
RH
2290 && insn_could_throw_p (insn))
2291 make_reg_eh_region_note (insn, 0, lp_nr);
28ed065e
MM
2292 }
2293 }
2294
2295 return last;
2296}
2297
726a989a 2298/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
224e770b
RH
2299 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2300 generated a tail call (something that might be denied by the ABI
cea49550
RH
2301 rules governing the call; see calls.c).
2302
2303 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2304 can still reach the rest of BB. The case here is __builtin_sqrt,
2305 where the NaN result goes through the external function (with a
2306 tailcall) and the normal result happens via a sqrt instruction. */
80c7a9eb
RH
2307
2308static basic_block
726a989a 2309expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
80c7a9eb 2310{
b7211528 2311 rtx last2, last;
224e770b 2312 edge e;
628f6a4e 2313 edge_iterator ei;
224e770b
RH
2314 int probability;
2315 gcov_type count;
80c7a9eb 2316
28ed065e 2317 last2 = last = expand_gimple_stmt (stmt);
80c7a9eb
RH
2318
2319 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
2320 if (CALL_P (last) && SIBLING_CALL_P (last))
2321 goto found;
80c7a9eb 2322
726a989a 2323 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2324
cea49550 2325 *can_fallthru = true;
224e770b 2326 return NULL;
80c7a9eb 2327
224e770b
RH
2328 found:
2329 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2330 Any instructions emitted here are about to be deleted. */
2331 do_pending_stack_adjust ();
2332
2333 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2334 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2335 EH or abnormal edges, we shouldn't have created a tail call in
2336 the first place. So it seems to me we should just be removing
2337 all edges here, or redirecting the existing fallthru edge to
2338 the exit block. */
2339
224e770b
RH
2340 probability = 0;
2341 count = 0;
224e770b 2342
628f6a4e
BE
2343 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2344 {
224e770b
RH
2345 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2346 {
2347 if (e->dest != EXIT_BLOCK_PTR)
80c7a9eb 2348 {
224e770b
RH
2349 e->dest->count -= e->count;
2350 e->dest->frequency -= EDGE_FREQUENCY (e);
2351 if (e->dest->count < 0)
c22cacf3 2352 e->dest->count = 0;
224e770b 2353 if (e->dest->frequency < 0)
c22cacf3 2354 e->dest->frequency = 0;
80c7a9eb 2355 }
224e770b
RH
2356 count += e->count;
2357 probability += e->probability;
2358 remove_edge (e);
80c7a9eb 2359 }
628f6a4e
BE
2360 else
2361 ei_next (&ei);
80c7a9eb
RH
2362 }
2363
224e770b
RH
2364 /* This is somewhat ugly: the call_expr expander often emits instructions
2365 after the sibcall (to perform the function return). These confuse the
12eff7b7 2366 find_many_sub_basic_blocks code, so we need to get rid of them. */
224e770b 2367 last = NEXT_INSN (last);
341c100f 2368 gcc_assert (BARRIER_P (last));
cea49550
RH
2369
2370 *can_fallthru = false;
224e770b
RH
2371 while (NEXT_INSN (last))
2372 {
 2373 /* For instance, an sqrt builtin expander may expand an if with a
 2374 sibcall in the then-arm and a label in the else-arm. */
2375 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
2376 {
2377 *can_fallthru = true;
2378 break;
2379 }
224e770b
RH
2380 delete_insn (NEXT_INSN (last));
2381 }
2382
2383 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2384 e->probability += probability;
2385 e->count += count;
2386 BB_END (bb) = last;
2387 update_bb_for_insn (bb);
2388
2389 if (NEXT_INSN (last))
2390 {
2391 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2392
2393 last = BB_END (bb);
2394 if (BARRIER_P (last))
2395 BB_END (bb) = PREV_INSN (last);
2396 }
2397
726a989a 2398 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2399
224e770b 2400 return bb;
80c7a9eb
RH
2401}
2402
b5b8b0ac
AO
2403/* Return the difference between the floor and the truncated result of
2404 a signed division by OP1 with remainder MOD. */
2405static rtx
2406floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2407{
2408 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2409 return gen_rtx_IF_THEN_ELSE
2410 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2411 gen_rtx_IF_THEN_ELSE
2412 (mode, gen_rtx_LT (BImode,
2413 gen_rtx_DIV (mode, op1, mod),
2414 const0_rtx),
2415 constm1_rtx, const0_rtx),
2416 const0_rtx);
2417}
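
/* A stand-alone sketch of the adjustment computed by floor_sdiv_adjust,
   written out on plain C integers: C division truncates toward zero, and
   the floor result differs by -1 exactly when the remainder is nonzero
   and op1 / mod is negative, i.e. remainder and divisor have opposite
   signs.  */
#include <assert.h>

static int
floor_div (int a, int b)
{
  int q = a / b;		/* Truncated quotient.  */
  int mod = a % b;		/* Remainder, same sign as A.  */
  int adj = (mod != 0 && (b ^ mod) < 0) ? -1 : 0;	/* op1 / mod < 0,
							   i.e. opposite signs.  */
  return q + adj;
}

int
main (void)
{
  assert (floor_div (-7, 2) == -4);	/* Truncation alone would give -3.  */
  assert (floor_div (7, -2) == -4);
  assert (floor_div (7, 2) == 3);
  assert (floor_div (-7, -2) == 3);
  return 0;
}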
2418
2419/* Return the difference between the ceil and the truncated result of
2420 a signed division by OP1 with remainder MOD. */
2421static rtx
2422ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2423{
2424 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2425 return gen_rtx_IF_THEN_ELSE
2426 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2427 gen_rtx_IF_THEN_ELSE
2428 (mode, gen_rtx_GT (BImode,
2429 gen_rtx_DIV (mode, op1, mod),
2430 const0_rtx),
2431 const1_rtx, const0_rtx),
2432 const0_rtx);
2433}
2434
2435/* Return the difference between the ceil and the truncated result of
2436 an unsigned division by OP1 with remainder MOD. */
2437static rtx
2438ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2439{
2440 /* (mod != 0 ? 1 : 0) */
2441 return gen_rtx_IF_THEN_ELSE
2442 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2443 const1_rtx, const0_rtx);
2444}
2445
2446/* Return the difference between the rounded and the truncated result
2447 of a signed division by OP1 with remainder MOD. Halfway cases are
2448 rounded away from zero, rather than to the nearest even number. */
2449static rtx
2450round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2451{
2452 /* (abs (mod) >= abs (op1) - abs (mod)
2453 ? (op1 / mod > 0 ? 1 : -1)
2454 : 0) */
2455 return gen_rtx_IF_THEN_ELSE
2456 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2457 gen_rtx_MINUS (mode,
2458 gen_rtx_ABS (mode, op1),
2459 gen_rtx_ABS (mode, mod))),
2460 gen_rtx_IF_THEN_ELSE
2461 (mode, gen_rtx_GT (BImode,
2462 gen_rtx_DIV (mode, op1, mod),
2463 const0_rtx),
2464 const1_rtx, constm1_rtx),
2465 const0_rtx);
2466}
2467
2468/* Return the difference between the rounded and the truncated result
2469 of an unsigned division by OP1 with remainder MOD. Halfway cases
2470 are rounded away from zero, rather than to the nearest even
2471 number. */
2472static rtx
2473round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2474{
2475 /* (mod >= op1 - mod ? 1 : 0) */
2476 return gen_rtx_IF_THEN_ELSE
2477 (mode, gen_rtx_GE (BImode, mod,
2478 gen_rtx_MINUS (mode, op1, mod)),
2479 const1_rtx, const0_rtx);
2480}
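
/* A stand-alone sketch of the unsigned ceiling and round-to-nearest
   adjustments above (ceil_udiv_adjust and round_udiv_adjust), written
   out on plain C unsigned integers.  */
#include <assert.h>

static unsigned
ceil_udiv (unsigned a, unsigned b)
{
  return a / b + (a % b != 0);		/* (mod != 0 ? 1 : 0)  */
}

static unsigned
round_udiv (unsigned a, unsigned b)
{
  unsigned mod = a % b;
  return a / b + (mod >= b - mod);	/* (mod >= op1 - mod ? 1 : 0)  */
}

int
main (void)
{
  assert (ceil_udiv (7, 2) == 4);
  assert (ceil_udiv (8, 2) == 4);
  assert (round_udiv (7, 2) == 4);	/* Halfway: away from zero.  */
  assert (round_udiv (9, 4) == 2);
  return 0;
}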
2481
dda2da58
AO
2482/* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
2483 any rtl. */
2484
2485static rtx
f61c6f34
JJ
2486convert_debug_memory_address (enum machine_mode mode, rtx x,
2487 addr_space_t as)
dda2da58
AO
2488{
2489 enum machine_mode xmode = GET_MODE (x);
2490
2491#ifndef POINTERS_EXTEND_UNSIGNED
f61c6f34
JJ
2492 gcc_assert (mode == Pmode
2493 || mode == targetm.addr_space.address_mode (as));
dda2da58
AO
2494 gcc_assert (xmode == mode || xmode == VOIDmode);
2495#else
f61c6f34 2496 rtx temp;
f61c6f34 2497
639d4bb8 2498 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
dda2da58
AO
2499
2500 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2501 return x;
2502
69660a70 2503 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
dda2da58
AO
2504 x = simplify_gen_subreg (mode, x, xmode,
2505 subreg_lowpart_offset
2506 (mode, xmode));
2507 else if (POINTERS_EXTEND_UNSIGNED > 0)
2508 x = gen_rtx_ZERO_EXTEND (mode, x);
2509 else if (!POINTERS_EXTEND_UNSIGNED)
2510 x = gen_rtx_SIGN_EXTEND (mode, x);
2511 else
f61c6f34
JJ
2512 {
2513 switch (GET_CODE (x))
2514 {
2515 case SUBREG:
2516 if ((SUBREG_PROMOTED_VAR_P (x)
2517 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2518 || (GET_CODE (SUBREG_REG (x)) == PLUS
2519 && REG_P (XEXP (SUBREG_REG (x), 0))
2520 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2521 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2522 && GET_MODE (SUBREG_REG (x)) == mode)
2523 return SUBREG_REG (x);
2524 break;
2525 case LABEL_REF:
2526 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2527 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2528 return temp;
2529 case SYMBOL_REF:
2530 temp = shallow_copy_rtx (x);
2531 PUT_MODE (temp, mode);
2532 return temp;
2533 case CONST:
2534 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2535 if (temp)
2536 temp = gen_rtx_CONST (mode, temp);
2537 return temp;
2538 case PLUS:
2539 case MINUS:
2540 if (CONST_INT_P (XEXP (x, 1)))
2541 {
2542 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2543 if (temp)
2544 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2545 }
2546 break;
2547 default:
2548 break;
2549 }
2550 /* Don't know how to express ptr_extend as operation in debug info. */
2551 return NULL;
2552 }
dda2da58
AO
2553#endif /* POINTERS_EXTEND_UNSIGNED */
2554
2555 return x;
2556}
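
/* A stand-alone sketch of what the widening branch above amounts to
   numerically: a narrow address value is zero-extended when
   POINTERS_EXTEND_UNSIGNED > 0 and sign-extended when it is 0; the
   target-specific ptr_extend case is not modeled, and the 32-to-64-bit
   widths here are only an example.  */
#include <assert.h>
#include <stdint.h>

static uint64_t
extend_address (uint32_t narrow, int pointers_extend_unsigned)
{
  if (pointers_extend_unsigned > 0)
    return (uint64_t) narrow;			/* ZERO_EXTEND  */
  /* Sign-extend bit 31 into the upper half.  */	/* SIGN_EXTEND  */
  return (narrow & 0x80000000u
	  ? (uint64_t) narrow | 0xffffffff00000000ull
	  : (uint64_t) narrow);
}

int
main (void)
{
  assert (extend_address (0x80000000u, 1) == 0x0000000080000000ull);
  assert (extend_address (0x80000000u, 0) == 0xffffffff80000000ull);
  return 0;
}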
2557
12c5ffe5
EB
2558/* Return an RTX equivalent to the value of the parameter DECL. */
2559
2560static rtx
2561expand_debug_parm_decl (tree decl)
2562{
2563 rtx incoming = DECL_INCOMING_RTL (decl);
2564
2565 if (incoming
2566 && GET_MODE (incoming) != BLKmode
2567 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2568 || (MEM_P (incoming)
2569 && REG_P (XEXP (incoming, 0))
2570 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2571 {
2572 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2573
2574#ifdef HAVE_window_save
2575 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2576 If the target machine has an explicit window save instruction, the
2577 actual entry value is the corresponding OUTGOING_REGNO instead. */
2578 if (REG_P (incoming)
2579 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2580 incoming
2581 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2582 OUTGOING_REGNO (REGNO (incoming)), 0);
2583 else if (MEM_P (incoming))
2584 {
2585 rtx reg = XEXP (incoming, 0);
2586 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2587 {
2588 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2589 incoming = replace_equiv_address_nv (incoming, reg);
2590 }
2591 }
2592#endif
2593
2594 ENTRY_VALUE_EXP (rtl) = incoming;
2595 return rtl;
2596 }
2597
2598 if (incoming
2599 && GET_MODE (incoming) != BLKmode
2600 && !TREE_ADDRESSABLE (decl)
2601 && MEM_P (incoming)
2602 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2603 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2604 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2605 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2606 return incoming;
2607
2608 return NULL_RTX;
2609}
2610
2611/* Return an RTX equivalent to the value of the tree expression EXP. */
b5b8b0ac
AO
2612
2613static rtx
2614expand_debug_expr (tree exp)
2615{
2616 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2617 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2ba172e0 2618 enum machine_mode inner_mode = VOIDmode;
b5b8b0ac 2619 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
09e881c9 2620 addr_space_t as;
b5b8b0ac
AO
2621
2622 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2623 {
2624 case tcc_expression:
2625 switch (TREE_CODE (exp))
2626 {
2627 case COND_EXPR:
7ece48b1 2628 case DOT_PROD_EXPR:
0354c0c7
BS
2629 case WIDEN_MULT_PLUS_EXPR:
2630 case WIDEN_MULT_MINUS_EXPR:
0f59b812 2631 case FMA_EXPR:
b5b8b0ac
AO
2632 goto ternary;
2633
2634 case TRUTH_ANDIF_EXPR:
2635 case TRUTH_ORIF_EXPR:
2636 case TRUTH_AND_EXPR:
2637 case TRUTH_OR_EXPR:
2638 case TRUTH_XOR_EXPR:
2639 goto binary;
2640
2641 case TRUTH_NOT_EXPR:
2642 goto unary;
2643
2644 default:
2645 break;
2646 }
2647 break;
2648
2649 ternary:
2650 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2651 if (!op2)
2652 return NULL_RTX;
2653 /* Fall through. */
2654
2655 binary:
2656 case tcc_binary:
2657 case tcc_comparison:
2658 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2659 if (!op1)
2660 return NULL_RTX;
2661 /* Fall through. */
2662
2663 unary:
2664 case tcc_unary:
2ba172e0 2665 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
2666 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2667 if (!op0)
2668 return NULL_RTX;
2669 break;
2670
2671 case tcc_type:
2672 case tcc_statement:
2673 gcc_unreachable ();
2674
2675 case tcc_constant:
2676 case tcc_exceptional:
2677 case tcc_declaration:
2678 case tcc_reference:
2679 case tcc_vl_exp:
2680 break;
2681 }
2682
2683 switch (TREE_CODE (exp))
2684 {
2685 case STRING_CST:
2686 if (!lookup_constant_def (exp))
2687 {
e1b243a8
JJ
2688 if (strlen (TREE_STRING_POINTER (exp)) + 1
2689 != (size_t) TREE_STRING_LENGTH (exp))
2690 return NULL_RTX;
b5b8b0ac
AO
2691 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2692 op0 = gen_rtx_MEM (BLKmode, op0);
2693 set_mem_attributes (op0, exp, 0);
2694 return op0;
2695 }
2696 /* Fall through... */
2697
2698 case INTEGER_CST:
2699 case REAL_CST:
2700 case FIXED_CST:
2701 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2702 return op0;
2703
2704 case COMPLEX_CST:
2705 gcc_assert (COMPLEX_MODE_P (mode));
2706 op0 = expand_debug_expr (TREE_REALPART (exp));
b5b8b0ac 2707 op1 = expand_debug_expr (TREE_IMAGPART (exp));
b5b8b0ac
AO
2708 return gen_rtx_CONCAT (mode, op0, op1);
2709
0ca5af51
AO
2710 case DEBUG_EXPR_DECL:
2711 op0 = DECL_RTL_IF_SET (exp);
2712
2713 if (op0)
2714 return op0;
2715
2716 op0 = gen_rtx_DEBUG_EXPR (mode);
e4fb38bd 2717 DEBUG_EXPR_TREE_DECL (op0) = exp;
0ca5af51
AO
2718 SET_DECL_RTL (exp, op0);
2719
2720 return op0;
2721
b5b8b0ac
AO
2722 case VAR_DECL:
2723 case PARM_DECL:
2724 case FUNCTION_DECL:
2725 case LABEL_DECL:
2726 case CONST_DECL:
2727 case RESULT_DECL:
2728 op0 = DECL_RTL_IF_SET (exp);
2729
2730 /* This decl was probably optimized away. */
2731 if (!op0)
e1b243a8
JJ
2732 {
2733 if (TREE_CODE (exp) != VAR_DECL
2734 || DECL_EXTERNAL (exp)
2735 || !TREE_STATIC (exp)
2736 || !DECL_NAME (exp)
0fba566c 2737 || DECL_HARD_REGISTER (exp)
7d5fc814 2738 || DECL_IN_CONSTANT_POOL (exp)
0fba566c 2739 || mode == VOIDmode)
e1b243a8
JJ
2740 return NULL;
2741
b1aa0655 2742 op0 = make_decl_rtl_for_debug (exp);
e1b243a8
JJ
2743 if (!MEM_P (op0)
2744 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2745 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2746 return NULL;
2747 }
2748 else
2749 op0 = copy_rtx (op0);
b5b8b0ac 2750
06796564
JJ
2751 if (GET_MODE (op0) == BLKmode
2752 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2753 below would ICE. While it is likely a FE bug,
2754 try to be robust here. See PR43166. */
132b4e82
JJ
2755 || mode == BLKmode
2756 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
b5b8b0ac
AO
2757 {
2758 gcc_assert (MEM_P (op0));
2759 op0 = adjust_address_nv (op0, mode, 0);
2760 return op0;
2761 }
2762
2763 /* Fall through. */
2764
2765 adjust_mode:
2766 case PAREN_EXPR:
2767 case NOP_EXPR:
2768 case CONVERT_EXPR:
2769 {
2ba172e0 2770 inner_mode = GET_MODE (op0);
b5b8b0ac
AO
2771
2772 if (mode == inner_mode)
2773 return op0;
2774
2775 if (inner_mode == VOIDmode)
2776 {
2a8e30fb
MM
2777 if (TREE_CODE (exp) == SSA_NAME)
2778 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2779 else
2780 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
2781 if (mode == inner_mode)
2782 return op0;
2783 }
2784
2785 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2786 {
2787 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2788 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2789 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2790 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2791 else
2792 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2793 }
2794 else if (FLOAT_MODE_P (mode))
2795 {
2a8e30fb 2796 gcc_assert (TREE_CODE (exp) != SSA_NAME);
b5b8b0ac
AO
2797 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2798 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2799 else
2800 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2801 }
2802 else if (FLOAT_MODE_P (inner_mode))
2803 {
2804 if (unsignedp)
2805 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2806 else
2807 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2808 }
2809 else if (CONSTANT_P (op0)
69660a70 2810 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
b5b8b0ac
AO
2811 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2812 subreg_lowpart_offset (mode,
2813 inner_mode));
1b47fe3f
JJ
2814 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2815 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2816 : unsignedp)
2ba172e0 2817 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
b5b8b0ac 2818 else
2ba172e0 2819 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
b5b8b0ac
AO
2820
2821 return op0;
2822 }
2823
70f34814 2824 case MEM_REF:
71f3a3f5
JJ
2825 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2826 {
2827 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2828 TREE_OPERAND (exp, 0),
2829 TREE_OPERAND (exp, 1));
2830 if (newexp)
2831 return expand_debug_expr (newexp);
2832 }
2833 /* FALLTHROUGH */
b5b8b0ac 2834 case INDIRECT_REF:
b5b8b0ac
AO
2835 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2836 if (!op0)
2837 return NULL;
2838
cb115041
JJ
2839 if (TREE_CODE (exp) == MEM_REF)
2840 {
583ac69c
JJ
2841 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2842 || (GET_CODE (op0) == PLUS
2843 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2844 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2845 Instead just use get_inner_reference. */
2846 goto component_ref;
2847
cb115041
JJ
2848 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2849 if (!op1 || !CONST_INT_P (op1))
2850 return NULL;
2851
2852 op0 = plus_constant (op0, INTVAL (op1));
2853 }
2854
09e881c9 2855 if (POINTER_TYPE_P (TREE_TYPE (exp)))
75421dcd 2856 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
09e881c9 2857 else
75421dcd 2858 as = ADDR_SPACE_GENERIC;
b5b8b0ac 2859
f61c6f34
JJ
2860 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2861 op0, as);
2862 if (op0 == NULL_RTX)
2863 return NULL;
b5b8b0ac 2864
f61c6f34 2865 op0 = gen_rtx_MEM (mode, op0);
b5b8b0ac 2866 set_mem_attributes (op0, exp, 0);
71f3a3f5
JJ
2867 if (TREE_CODE (exp) == MEM_REF
2868 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2869 set_mem_expr (op0, NULL_TREE);
09e881c9 2870 set_mem_addr_space (op0, as);
b5b8b0ac
AO
2871
2872 return op0;
2873
2874 case TARGET_MEM_REF:
4d948885
RG
2875 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2876 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
b5b8b0ac
AO
2877 return NULL;
2878
2879 op0 = expand_debug_expr
4e25ca6b 2880 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
b5b8b0ac
AO
2881 if (!op0)
2882 return NULL;
2883
f61c6f34
JJ
2884 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2885 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2886 else
2887 as = ADDR_SPACE_GENERIC;
2888
2889 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2890 op0, as);
2891 if (op0 == NULL_RTX)
2892 return NULL;
b5b8b0ac
AO
2893
2894 op0 = gen_rtx_MEM (mode, op0);
2895
2896 set_mem_attributes (op0, exp, 0);
09e881c9 2897 set_mem_addr_space (op0, as);
b5b8b0ac
AO
2898
2899 return op0;
2900
583ac69c 2901 component_ref:
b5b8b0ac
AO
2902 case ARRAY_REF:
2903 case ARRAY_RANGE_REF:
2904 case COMPONENT_REF:
2905 case BIT_FIELD_REF:
2906 case REALPART_EXPR:
2907 case IMAGPART_EXPR:
2908 case VIEW_CONVERT_EXPR:
2909 {
2910 enum machine_mode mode1;
2911 HOST_WIDE_INT bitsize, bitpos;
2912 tree offset;
2913 int volatilep = 0;
2914 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2915 &mode1, &unsignedp, &volatilep, false);
2916 rtx orig_op0;
2917
4f2a9af8
JJ
2918 if (bitsize == 0)
2919 return NULL;
2920
b5b8b0ac
AO
2921 orig_op0 = op0 = expand_debug_expr (tem);
2922
2923 if (!op0)
2924 return NULL;
2925
2926 if (offset)
2927 {
dda2da58
AO
2928 enum machine_mode addrmode, offmode;
2929
aa847cc8
JJ
2930 if (!MEM_P (op0))
2931 return NULL;
b5b8b0ac 2932
dda2da58
AO
2933 op0 = XEXP (op0, 0);
2934 addrmode = GET_MODE (op0);
2935 if (addrmode == VOIDmode)
2936 addrmode = Pmode;
2937
b5b8b0ac
AO
2938 op1 = expand_debug_expr (offset);
2939 if (!op1)
2940 return NULL;
2941
dda2da58
AO
2942 offmode = GET_MODE (op1);
2943 if (offmode == VOIDmode)
2944 offmode = TYPE_MODE (TREE_TYPE (offset));
2945
2946 if (addrmode != offmode)
2947 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2948 subreg_lowpart_offset (addrmode,
2949 offmode));
2950
2951 /* Don't use offset_address here; we don't need a
2952 recognizable address, and we don't want to generate
2953 code. */
2ba172e0
JJ
2954 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2955 op0, op1));
b5b8b0ac
AO
2956 }
2957
2958 if (MEM_P (op0))
2959 {
4f2a9af8
JJ
2960 if (mode1 == VOIDmode)
2961 /* Bitfield. */
2962 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
b5b8b0ac
AO
2963 if (bitpos >= BITS_PER_UNIT)
2964 {
2965 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2966 bitpos %= BITS_PER_UNIT;
2967 }
2968 else if (bitpos < 0)
2969 {
4f2a9af8
JJ
2970 HOST_WIDE_INT units
2971 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
b5b8b0ac
AO
2972 op0 = adjust_address_nv (op0, mode1, units);
2973 bitpos += units * BITS_PER_UNIT;
2974 }
2975 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2976 op0 = adjust_address_nv (op0, mode, 0);
2977 else if (GET_MODE (op0) != mode1)
2978 op0 = adjust_address_nv (op0, mode1, 0);
2979 else
2980 op0 = copy_rtx (op0);
2981 if (op0 == orig_op0)
2982 op0 = shallow_copy_rtx (op0);
2983 set_mem_attributes (op0, exp, 0);
2984 }
2985
2986 if (bitpos == 0 && mode == GET_MODE (op0))
2987 return op0;
2988
2d3fc6aa
JJ
2989 if (bitpos < 0)
2990 return NULL;
2991
88c04a5d
JJ
2992 if (GET_MODE (op0) == BLKmode)
2993 return NULL;
2994
b5b8b0ac
AO
2995 if ((bitpos % BITS_PER_UNIT) == 0
2996 && bitsize == GET_MODE_BITSIZE (mode1))
2997 {
2998 enum machine_mode opmode = GET_MODE (op0);
2999
b5b8b0ac 3000 if (opmode == VOIDmode)
9712cba0 3001 opmode = TYPE_MODE (TREE_TYPE (tem));
b5b8b0ac
AO
3002
3003 /* This condition may hold if we're expanding the address
3004 right past the end of an array that turned out not to
3005 be addressable (i.e., the address was only computed in
3006 debug stmts). The gen_subreg below would rightfully
3007 crash, and the address doesn't really exist, so just
3008 drop it. */
3009 if (bitpos >= GET_MODE_BITSIZE (opmode))
3010 return NULL;
3011
7d5d39bb
JJ
3012 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3013 return simplify_gen_subreg (mode, op0, opmode,
3014 bitpos / BITS_PER_UNIT);
b5b8b0ac
AO
3015 }
3016
3017 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3018 && TYPE_UNSIGNED (TREE_TYPE (exp))
3019 ? SIGN_EXTRACT
3020 : ZERO_EXTRACT, mode,
3021 GET_MODE (op0) != VOIDmode
9712cba0
JJ
3022 ? GET_MODE (op0)
3023 : TYPE_MODE (TREE_TYPE (tem)),
b5b8b0ac
AO
3024 op0, GEN_INT (bitsize), GEN_INT (bitpos));
3025 }
3026
b5b8b0ac 3027 case ABS_EXPR:
2ba172e0 3028 return simplify_gen_unary (ABS, mode, op0, mode);
b5b8b0ac
AO
3029
3030 case NEGATE_EXPR:
2ba172e0 3031 return simplify_gen_unary (NEG, mode, op0, mode);
b5b8b0ac
AO
3032
3033 case BIT_NOT_EXPR:
2ba172e0 3034 return simplify_gen_unary (NOT, mode, op0, mode);
b5b8b0ac
AO
3035
3036 case FLOAT_EXPR:
2ba172e0
JJ
3037 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3038 0)))
3039 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3040 inner_mode);
b5b8b0ac
AO
3041
3042 case FIX_TRUNC_EXPR:
2ba172e0
JJ
3043 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3044 inner_mode);
b5b8b0ac
AO
3045
3046 case POINTER_PLUS_EXPR:
576319a7
DD
3047 /* For the rare target where pointers are not the same size as
3048 size_t, we need to check for mis-matched modes and correct
3049 the addend. */
3050 if (op0 && op1
3051 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3052 && GET_MODE (op0) != GET_MODE (op1))
3053 {
3054 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2ba172e0
JJ
3055 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3056 GET_MODE (op1));
576319a7
DD
3057 else
3058 /* We always sign-extend, regardless of the signedness of
3059 the operand, because the operand is always unsigned
3060 here even if the original C expression is signed. */
2ba172e0
JJ
3061 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3062 GET_MODE (op1));
576319a7
DD
3063 }
3064 /* Fall through. */
b5b8b0ac 3065 case PLUS_EXPR:
2ba172e0 3066 return simplify_gen_binary (PLUS, mode, op0, op1);
b5b8b0ac
AO
3067
3068 case MINUS_EXPR:
2ba172e0 3069 return simplify_gen_binary (MINUS, mode, op0, op1);
b5b8b0ac
AO
3070
3071 case MULT_EXPR:
2ba172e0 3072 return simplify_gen_binary (MULT, mode, op0, op1);
b5b8b0ac
AO
3073
3074 case RDIV_EXPR:
3075 case TRUNC_DIV_EXPR:
3076 case EXACT_DIV_EXPR:
3077 if (unsignedp)
2ba172e0 3078 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac 3079 else
2ba172e0 3080 return simplify_gen_binary (DIV, mode, op0, op1);
b5b8b0ac
AO
3081
3082 case TRUNC_MOD_EXPR:
2ba172e0 3083 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
b5b8b0ac
AO
3084
3085 case FLOOR_DIV_EXPR:
3086 if (unsignedp)
2ba172e0 3087 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac
AO
3088 else
3089 {
2ba172e0
JJ
3090 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3091 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3092 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0 3093 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3094 }
3095
3096 case FLOOR_MOD_EXPR:
3097 if (unsignedp)
2ba172e0 3098 return simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac
AO
3099 else
3100 {
2ba172e0 3101 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3102 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
3103 adj = simplify_gen_unary (NEG, mode,
3104 simplify_gen_binary (MULT, mode, adj, op1),
3105 mode);
3106 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3107 }
3108
3109 case CEIL_DIV_EXPR:
3110 if (unsignedp)
3111 {
2ba172e0
JJ
3112 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3113 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 3114 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0 3115 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3116 }
3117 else
3118 {
2ba172e0
JJ
3119 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3120 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3121 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0 3122 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3123 }
3124
3125 case CEIL_MOD_EXPR:
3126 if (unsignedp)
3127 {
2ba172e0 3128 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 3129 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
3130 adj = simplify_gen_unary (NEG, mode,
3131 simplify_gen_binary (MULT, mode, adj, op1),
3132 mode);
3133 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3134 }
3135 else
3136 {
2ba172e0 3137 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3138 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
3139 adj = simplify_gen_unary (NEG, mode,
3140 simplify_gen_binary (MULT, mode, adj, op1),
3141 mode);
3142 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3143 }
3144
3145 case ROUND_DIV_EXPR:
3146 if (unsignedp)
3147 {
2ba172e0
JJ
3148 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3149 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 3150 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0 3151 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3152 }
3153 else
3154 {
2ba172e0
JJ
3155 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3156 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3157 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0 3158 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3159 }
3160
3161 case ROUND_MOD_EXPR:
3162 if (unsignedp)
3163 {
2ba172e0 3164 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 3165 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
3166 adj = simplify_gen_unary (NEG, mode,
3167 simplify_gen_binary (MULT, mode, adj, op1),
3168 mode);
3169 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3170 }
3171 else
3172 {
2ba172e0 3173 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3174 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
3175 adj = simplify_gen_unary (NEG, mode,
3176 simplify_gen_binary (MULT, mode, adj, op1),
3177 mode);
3178 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3179 }
3180
3181 case LSHIFT_EXPR:
2ba172e0 3182 return simplify_gen_binary (ASHIFT, mode, op0, op1);
b5b8b0ac
AO
3183
3184 case RSHIFT_EXPR:
3185 if (unsignedp)
2ba172e0 3186 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
b5b8b0ac 3187 else
2ba172e0 3188 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
b5b8b0ac
AO
3189
3190 case LROTATE_EXPR:
2ba172e0 3191 return simplify_gen_binary (ROTATE, mode, op0, op1);
b5b8b0ac
AO
3192
3193 case RROTATE_EXPR:
2ba172e0 3194 return simplify_gen_binary (ROTATERT, mode, op0, op1);
b5b8b0ac
AO
3195
3196 case MIN_EXPR:
2ba172e0 3197 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
b5b8b0ac
AO
3198
3199 case MAX_EXPR:
2ba172e0 3200 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
b5b8b0ac
AO
3201
3202 case BIT_AND_EXPR:
3203 case TRUTH_AND_EXPR:
2ba172e0 3204 return simplify_gen_binary (AND, mode, op0, op1);
b5b8b0ac
AO
3205
3206 case BIT_IOR_EXPR:
3207 case TRUTH_OR_EXPR:
2ba172e0 3208 return simplify_gen_binary (IOR, mode, op0, op1);
b5b8b0ac
AO
3209
3210 case BIT_XOR_EXPR:
3211 case TRUTH_XOR_EXPR:
2ba172e0 3212 return simplify_gen_binary (XOR, mode, op0, op1);
b5b8b0ac
AO
3213
3214 case TRUTH_ANDIF_EXPR:
3215 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3216
3217 case TRUTH_ORIF_EXPR:
3218 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3219
3220 case TRUTH_NOT_EXPR:
2ba172e0 3221 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
b5b8b0ac
AO
3222
3223 case LT_EXPR:
2ba172e0
JJ
3224 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3225 op0, op1);
b5b8b0ac
AO
3226
3227 case LE_EXPR:
2ba172e0
JJ
3228 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3229 op0, op1);
b5b8b0ac
AO
3230
3231 case GT_EXPR:
2ba172e0
JJ
3232 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3233 op0, op1);
b5b8b0ac
AO
3234
3235 case GE_EXPR:
2ba172e0
JJ
3236 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3237 op0, op1);
b5b8b0ac
AO
3238
3239 case EQ_EXPR:
2ba172e0 3240 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3241
3242 case NE_EXPR:
2ba172e0 3243 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3244
3245 case UNORDERED_EXPR:
2ba172e0 3246 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3247
3248 case ORDERED_EXPR:
2ba172e0 3249 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3250
3251 case UNLT_EXPR:
2ba172e0 3252 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3253
3254 case UNLE_EXPR:
2ba172e0 3255 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3256
3257 case UNGT_EXPR:
2ba172e0 3258 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3259
3260 case UNGE_EXPR:
2ba172e0 3261 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3262
3263 case UNEQ_EXPR:
2ba172e0 3264 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3265
3266 case LTGT_EXPR:
2ba172e0 3267 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3268
3269 case COND_EXPR:
3270 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3271
3272 case COMPLEX_EXPR:
3273 gcc_assert (COMPLEX_MODE_P (mode));
3274 if (GET_MODE (op0) == VOIDmode)
3275 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3276 if (GET_MODE (op1) == VOIDmode)
3277 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3278 return gen_rtx_CONCAT (mode, op0, op1);
3279
d02a5a4b
JJ
3280 case CONJ_EXPR:
3281 if (GET_CODE (op0) == CONCAT)
3282 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2ba172e0
JJ
3283 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3284 XEXP (op0, 1),
3285 GET_MODE_INNER (mode)));
d02a5a4b
JJ
3286 else
3287 {
3288 enum machine_mode imode = GET_MODE_INNER (mode);
3289 rtx re, im;
3290
3291 if (MEM_P (op0))
3292 {
3293 re = adjust_address_nv (op0, imode, 0);
3294 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3295 }
3296 else
3297 {
3298 enum machine_mode ifmode = int_mode_for_mode (mode);
3299 enum machine_mode ihmode = int_mode_for_mode (imode);
3300 rtx halfsize;
3301 if (ifmode == BLKmode || ihmode == BLKmode)
3302 return NULL;
3303 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3304 re = op0;
3305 if (mode != ifmode)
3306 re = gen_rtx_SUBREG (ifmode, re, 0);
3307 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3308 if (imode != ihmode)
3309 re = gen_rtx_SUBREG (imode, re, 0);
3310 im = copy_rtx (op0);
3311 if (mode != ifmode)
3312 im = gen_rtx_SUBREG (ifmode, im, 0);
3313 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3314 if (imode != ihmode)
3315 im = gen_rtx_SUBREG (imode, im, 0);
3316 }
3317 im = gen_rtx_NEG (imode, im);
3318 return gen_rtx_CONCAT (mode, re, im);
3319 }
3320
b5b8b0ac
AO
3321 case ADDR_EXPR:
3322 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3323 if (!op0 || !MEM_P (op0))
c8a27c40
JJ
3324 {
3325 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3326 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3327 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
f8cca67b
JJ
3328 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3329 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
c8a27c40
JJ
3330 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3331
3332 if (handled_component_p (TREE_OPERAND (exp, 0)))
3333 {
3334 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3335 tree decl
3336 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3337 &bitoffset, &bitsize, &maxsize);
3338 if ((TREE_CODE (decl) == VAR_DECL
3339 || TREE_CODE (decl) == PARM_DECL
3340 || TREE_CODE (decl) == RESULT_DECL)
f8cca67b
JJ
3341 && (!TREE_ADDRESSABLE (decl)
3342 || target_for_debug_bind (decl))
c8a27c40
JJ
3343 && (bitoffset % BITS_PER_UNIT) == 0
3344 && bitsize > 0
3345 && bitsize == maxsize)
3346 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3347 bitoffset / BITS_PER_UNIT);
3348 }
3349
3350 return NULL;
3351 }
b5b8b0ac 3352
f61c6f34
JJ
3353 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3354 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
dda2da58
AO
3355
3356 return op0;
b5b8b0ac
AO
3357
3358 case VECTOR_CST:
d2a12ae7
RG
3359 {
3360 unsigned i;
3361
3362 op0 = gen_rtx_CONCATN
3363 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3364
3365 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
3366 {
3367 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
3368 if (!op1)
3369 return NULL;
3370 XVECEXP (op0, 0, i) = op1;
3371 }
3372
3373 return op0;
3374 }
b5b8b0ac
AO
3375
3376 case CONSTRUCTOR:
47598145
MM
3377 if (TREE_CLOBBER_P (exp))
3378 return NULL;
3379 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
b5b8b0ac
AO
3380 {
3381 unsigned i;
3382 tree val;
3383
3384 op0 = gen_rtx_CONCATN
3385 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3386
3387 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3388 {
3389 op1 = expand_debug_expr (val);
3390 if (!op1)
3391 return NULL;
3392 XVECEXP (op0, 0, i) = op1;
3393 }
3394
3395 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3396 {
3397 op1 = expand_debug_expr
e8160c9a 3398 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
b5b8b0ac
AO
3399
3400 if (!op1)
3401 return NULL;
3402
3403 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3404 XVECEXP (op0, 0, i) = op1;
3405 }
3406
3407 return op0;
3408 }
3409 else
3410 goto flag_unsupported;
3411
3412 case CALL_EXPR:
3413 /* ??? Maybe handle some builtins? */
3414 return NULL;
3415
3416 case SSA_NAME:
3417 {
2a8e30fb
MM
3418 gimple g = get_gimple_for_ssa_name (exp);
3419 if (g)
3420 {
3421 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3422 if (!op0)
3423 return NULL;
3424 }
3425 else
3426 {
3427 int part = var_to_partition (SA.map, exp);
b5b8b0ac 3428
2a8e30fb 3429 if (part == NO_PARTITION)
a58a8e4b
JJ
3430 {
 3431		  /* If this is a reference to the incoming value of a
 3432		     parameter that is never used in the code, or whose
 3433		     incoming value is never used in the code, use the
 3434		     PARM_DECL's DECL_RTL if set.  */
3435 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3436 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3437 {
12c5ffe5
EB
3438 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3439 if (op0)
3440 goto adjust_mode;
a58a8e4b 3441 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
12c5ffe5
EB
3442 if (op0)
3443 goto adjust_mode;
a58a8e4b
JJ
3444 }
3445 return NULL;
3446 }
b5b8b0ac 3447
2a8e30fb 3448 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
b5b8b0ac 3449
abfea58d 3450 op0 = copy_rtx (SA.partition_to_pseudo[part]);
2a8e30fb 3451 }
b5b8b0ac
AO
3452 goto adjust_mode;
3453 }
3454
3455 case ERROR_MARK:
3456 return NULL;
3457
7ece48b1
JJ
3458 /* Vector stuff. For most of the codes we don't have rtl codes. */
3459 case REALIGN_LOAD_EXPR:
3460 case REDUC_MAX_EXPR:
3461 case REDUC_MIN_EXPR:
3462 case REDUC_PLUS_EXPR:
3463 case VEC_COND_EXPR:
7ece48b1
JJ
3464 case VEC_LSHIFT_EXPR:
3465 case VEC_PACK_FIX_TRUNC_EXPR:
3466 case VEC_PACK_SAT_EXPR:
3467 case VEC_PACK_TRUNC_EXPR:
3468 case VEC_RSHIFT_EXPR:
3469 case VEC_UNPACK_FLOAT_HI_EXPR:
3470 case VEC_UNPACK_FLOAT_LO_EXPR:
3471 case VEC_UNPACK_HI_EXPR:
3472 case VEC_UNPACK_LO_EXPR:
3473 case VEC_WIDEN_MULT_HI_EXPR:
3474 case VEC_WIDEN_MULT_LO_EXPR:
36ba4aae
IR
3475 case VEC_WIDEN_LSHIFT_HI_EXPR:
3476 case VEC_WIDEN_LSHIFT_LO_EXPR:
3f3af9df 3477 case VEC_PERM_EXPR:
7ece48b1
JJ
3478 return NULL;
3479
3480 /* Misc codes. */
3481 case ADDR_SPACE_CONVERT_EXPR:
3482 case FIXED_CONVERT_EXPR:
3483 case OBJ_TYPE_REF:
3484 case WITH_SIZE_EXPR:
3485 return NULL;
3486
3487 case DOT_PROD_EXPR:
3488 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3489 && SCALAR_INT_MODE_P (mode))
3490 {
2ba172e0
JJ
3491 op0
3492 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3493 0)))
3494 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3495 inner_mode);
3496 op1
3497 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3498 1)))
3499 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3500 inner_mode);
3501 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3502 return simplify_gen_binary (PLUS, mode, op0, op2);
7ece48b1
JJ
3503 }
3504 return NULL;
3505
3506 case WIDEN_MULT_EXPR:
0354c0c7
BS
3507 case WIDEN_MULT_PLUS_EXPR:
3508 case WIDEN_MULT_MINUS_EXPR:
7ece48b1
JJ
3509 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3510 && SCALAR_INT_MODE_P (mode))
3511 {
2ba172e0 3512 inner_mode = GET_MODE (op0);
7ece48b1 3513 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5b58b39b 3514 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
7ece48b1 3515 else
5b58b39b 3516 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
7ece48b1 3517 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5b58b39b 3518 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
7ece48b1 3519 else
5b58b39b 3520 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
2ba172e0 3521 op0 = simplify_gen_binary (MULT, mode, op0, op1);
0354c0c7
BS
3522 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3523 return op0;
3524 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
2ba172e0 3525 return simplify_gen_binary (PLUS, mode, op0, op2);
0354c0c7 3526 else
2ba172e0 3527 return simplify_gen_binary (MINUS, mode, op2, op0);
7ece48b1
JJ
3528 }
3529 return NULL;
3530
3531 case WIDEN_SUM_EXPR:
3f3af9df 3532 case WIDEN_LSHIFT_EXPR:
7ece48b1
JJ
3533 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3534 && SCALAR_INT_MODE_P (mode))
3535 {
2ba172e0
JJ
3536 op0
3537 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3538 0)))
3539 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3540 inner_mode);
3f3af9df
JJ
3541 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3542 ? ASHIFT : PLUS, mode, op0, op1);
7ece48b1
JJ
3543 }
3544 return NULL;
3545
0f59b812 3546 case FMA_EXPR:
2ba172e0 3547 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
0f59b812 3548
b5b8b0ac
AO
3549 default:
3550 flag_unsupported:
3551#ifdef ENABLE_CHECKING
3552 debug_tree (exp);
3553 gcc_unreachable ();
3554#else
3555 return NULL;
3556#endif
3557 }
3558}
3559
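/* As an illustration (not in the original sources): for a debug bind such as

     #DEBUG x => a_1 + 4

   expand_debug_expr builds side-effect-free RTL for the bound value, e.g.
   something of the shape (plus:SI (reg:SI <a_1>) (const_int 4)).  Whenever
   no faithful RTL form exists it returns NULL, and the caller then records
   the location of x as unknown instead of guessing.  */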
ddb555ed
JJ
3560/* Return an RTX equivalent to the source bind value of the tree expression
3561 EXP. */
3562
3563static rtx
3564expand_debug_source_expr (tree exp)
3565{
3566 rtx op0 = NULL_RTX;
3567 enum machine_mode mode = VOIDmode, inner_mode;
3568
3569 switch (TREE_CODE (exp))
3570 {
3571 case PARM_DECL:
3572 {
ddb555ed 3573 mode = DECL_MODE (exp);
12c5ffe5
EB
3574 op0 = expand_debug_parm_decl (exp);
3575 if (op0)
3576 break;
ddb555ed
JJ
3577 /* See if this isn't an argument that has been completely
3578 optimized out. */
3579 if (!DECL_RTL_SET_P (exp)
12c5ffe5 3580 && !DECL_INCOMING_RTL (exp)
ddb555ed
JJ
3581 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3582 {
3583 tree aexp = exp;
3584 if (DECL_ABSTRACT_ORIGIN (exp))
3585 aexp = DECL_ABSTRACT_ORIGIN (exp);
3586 if (DECL_CONTEXT (aexp)
3587 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3588 {
3589 VEC(tree, gc) **debug_args;
3590 unsigned int ix;
3591 tree ddecl;
3592#ifdef ENABLE_CHECKING
3593 tree parm;
3594 for (parm = DECL_ARGUMENTS (current_function_decl);
3595 parm; parm = DECL_CHAIN (parm))
3596 gcc_assert (parm != exp
3597 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3598#endif
3599 debug_args = decl_debug_args_lookup (current_function_decl);
3600 if (debug_args != NULL)
3601 {
3602 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3603 ix += 2)
3604 if (ddecl == aexp)
3605 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3606 }
3607 }
3608 }
3609 break;
3610 }
3611 default:
3612 break;
3613 }
3614
3615 if (op0 == NULL_RTX)
3616 return NULL_RTX;
3617
3618 inner_mode = GET_MODE (op0);
3619 if (mode == inner_mode)
3620 return op0;
3621
3622 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3623 {
3624 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3625 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3626 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3627 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3628 else
3629 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3630 }
3631 else if (FLOAT_MODE_P (mode))
3632 gcc_unreachable ();
3633 else if (FLOAT_MODE_P (inner_mode))
3634 {
3635 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3636 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3637 else
3638 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3639 }
3640 else if (CONSTANT_P (op0)
3641 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3642 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3643 subreg_lowpart_offset (mode, inner_mode));
3644 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3645 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3646 else
3647 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3648
3649 return op0;
3650}
3651
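/* Illustrative note: source binds cover parameters whose incoming value has
   been optimized away entirely, typically in clones or after inlining.  The
   expansion above maps such a PARM_DECL to a DEBUG_PARAMETER_REF for its
   abstract origin, so the debug info can still say which formal parameter
   the missing value corresponds to.  */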
b5b8b0ac
AO
3652/* Expand the _LOCs in debug insns. We run this after expanding all
3653 regular insns, so that any variables referenced in the function
3654 will have their DECL_RTLs set. */
3655
3656static void
3657expand_debug_locations (void)
3658{
3659 rtx insn;
3660 rtx last = get_last_insn ();
3661 int save_strict_alias = flag_strict_aliasing;
3662
3663 /* New alias sets while setting up memory attributes cause
3664 -fcompare-debug failures, even though it doesn't bring about any
3665 codegen changes. */
3666 flag_strict_aliasing = 0;
3667
3668 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3669 if (DEBUG_INSN_P (insn))
3670 {
3671 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3672 rtx val;
3673 enum machine_mode mode;
3674
3675 if (value == NULL_TREE)
3676 val = NULL_RTX;
3677 else
3678 {
ddb555ed
JJ
3679 if (INSN_VAR_LOCATION_STATUS (insn)
3680 == VAR_INIT_STATUS_UNINITIALIZED)
3681 val = expand_debug_source_expr (value);
3682 else
3683 val = expand_debug_expr (value);
b5b8b0ac
AO
3684 gcc_assert (last == get_last_insn ());
3685 }
3686
3687 if (!val)
3688 val = gen_rtx_UNKNOWN_VAR_LOC ();
3689 else
3690 {
3691 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3692
3693 gcc_assert (mode == GET_MODE (val)
3694 || (GET_MODE (val) == VOIDmode
3695 && (CONST_INT_P (val)
3696 || GET_CODE (val) == CONST_FIXED
3697 || GET_CODE (val) == CONST_DOUBLE
3698 || GET_CODE (val) == LABEL_REF)));
3699 }
3700
3701 INSN_VAR_LOCATION_LOC (insn) = val;
3702 }
3703
3704 flag_strict_aliasing = save_strict_alias;
3705}
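/* For example (illustrative only): after this loop every debug insn either
   carries an RTL location expression for its value or UNKNOWN_VAR_LOC when
   the value could not be represented; later RTL passes such as var-tracking
   then work entirely on those RTL locations.  */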
3706
242229bb
JH
3707/* Expand basic block BB from GIMPLE trees to RTL. */
3708
3709static basic_block
10d22567 3710expand_gimple_basic_block (basic_block bb)
242229bb 3711{
726a989a
RB
3712 gimple_stmt_iterator gsi;
3713 gimple_seq stmts;
3714 gimple stmt = NULL;
242229bb
JH
3715 rtx note, last;
3716 edge e;
628f6a4e 3717 edge_iterator ei;
8b11009b 3718 void **elt;
242229bb
JH
3719
3720 if (dump_file)
726a989a
RB
3721 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3722 bb->index);
3723
3724 /* Note that since we are now transitioning from GIMPLE to RTL, we
3725 cannot use the gsi_*_bb() routines because they expect the basic
3726 block to be in GIMPLE, instead of RTL. Therefore, we need to
3727 access the BB sequence directly. */
3728 stmts = bb_seq (bb);
3729 bb->il.gimple = NULL;
bf08ebeb 3730 rtl_profile_for_bb (bb);
5e2d947c
JH
3731 init_rtl_bb_info (bb);
3732 bb->flags |= BB_RTL;
3733
a9b77cd1
ZD
 3734	  /* Remove the RETURN_EXPR if we may fall through to the exit
3735 instead. */
726a989a
RB
3736 gsi = gsi_last (stmts);
3737 if (!gsi_end_p (gsi)
3738 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
a9b77cd1 3739 {
726a989a 3740 gimple ret_stmt = gsi_stmt (gsi);
a9b77cd1
ZD
3741
3742 gcc_assert (single_succ_p (bb));
3743 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3744
3745 if (bb->next_bb == EXIT_BLOCK_PTR
726a989a 3746 && !gimple_return_retval (ret_stmt))
a9b77cd1 3747 {
726a989a 3748 gsi_remove (&gsi, false);
a9b77cd1
ZD
3749 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3750 }
3751 }
3752
726a989a
RB
3753 gsi = gsi_start (stmts);
3754 if (!gsi_end_p (gsi))
8b11009b 3755 {
726a989a
RB
3756 stmt = gsi_stmt (gsi);
3757 if (gimple_code (stmt) != GIMPLE_LABEL)
3758 stmt = NULL;
8b11009b 3759 }
242229bb 3760
8b11009b
ZD
3761 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3762
3763 if (stmt || elt)
242229bb
JH
3764 {
3765 last = get_last_insn ();
3766
8b11009b
ZD
3767 if (stmt)
3768 {
28ed065e 3769 expand_gimple_stmt (stmt);
726a989a 3770 gsi_next (&gsi);
8b11009b
ZD
3771 }
3772
3773 if (elt)
ae50c0cb 3774 emit_label ((rtx) *elt);
242229bb 3775
caf93cb0 3776	  /* Java emits line number notes at the top of labels.
c22cacf3 3777 ??? Make this go away once line number notes are obsoleted. */
242229bb 3778 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 3779 if (NOTE_P (BB_HEAD (bb)))
242229bb 3780 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
242229bb 3781 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528 3782
726a989a 3783 maybe_dump_rtl_for_gimple_stmt (stmt, last);
242229bb
JH
3784 }
3785 else
3786 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3787
3788 NOTE_BASIC_BLOCK (note) = bb;
3789
726a989a 3790 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 3791 {
cea49550 3792 basic_block new_bb;
242229bb 3793
b5b8b0ac 3794 stmt = gsi_stmt (gsi);
2a8e30fb
MM
3795
 3796	      /* If this statement is a non-debug one and we generate debug
 3797		 insns, then this one might be the last real use of a TERed
 3798		 SSA_NAME while there are still debug uses of it further
 3799		 down.  Expanding the SSA name in those later debug uses by
 3800		 its RHS might lead to wrong debug info, as coalescing
 3801		 might place the operands of that RHS into the same
 3802		 pseudo as something else.  Like so:
3803 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3804 use(a_1);
3805 a_2 = ...
3806 #DEBUG ... => a_1
3807 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
 3808		 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3809 the write to a_2 would actually have clobbered the place which
3810 formerly held a_0.
3811
3812 So, instead of that, we recognize the situation, and generate
3813 debug temporaries at the last real use of TERed SSA names:
3814 a_1 = a_0 + 1;
3815 #DEBUG #D1 => a_1
3816 use(a_1);
3817 a_2 = ...
3818 #DEBUG ... => #D1
3819 */
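	      /* Illustrative aside: the temporary #D1 above is emitted as a
		 debug insn whose VAR_LOCATION binds it to the RTL for
		 a_0 + 1, and the later debug binds are rewritten to refer
		 to #D1 instead of a_1.  */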
3820 if (MAY_HAVE_DEBUG_INSNS
3821 && SA.values
3822 && !is_gimple_debug (stmt))
3823 {
3824 ssa_op_iter iter;
3825 tree op;
3826 gimple def;
3827
3828 location_t sloc = get_curr_insn_source_location ();
3829 tree sblock = get_curr_insn_block ();
3830
3831 /* Look for SSA names that have their last use here (TERed
3832 names always have only one real use). */
3833 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3834 if ((def = get_gimple_for_ssa_name (op)))
3835 {
3836 imm_use_iterator imm_iter;
3837 use_operand_p use_p;
3838 bool have_debug_uses = false;
3839
3840 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3841 {
3842 if (gimple_debug_bind_p (USE_STMT (use_p)))
3843 {
3844 have_debug_uses = true;
3845 break;
3846 }
3847 }
3848
3849 if (have_debug_uses)
3850 {
 3851		    /* OP is a TERed SSA name, DEF is its defining
 3852		       statement, and OP is used in further debug
3853 instructions. Generate a debug temporary, and
3854 replace all uses of OP in debug insns with that
3855 temporary. */
3856 gimple debugstmt;
3857 tree value = gimple_assign_rhs_to_tree (def);
3858 tree vexpr = make_node (DEBUG_EXPR_DECL);
3859 rtx val;
3860 enum machine_mode mode;
3861
3862 set_curr_insn_source_location (gimple_location (def));
3863 set_curr_insn_block (gimple_block (def));
3864
3865 DECL_ARTIFICIAL (vexpr) = 1;
3866 TREE_TYPE (vexpr) = TREE_TYPE (value);
3867 if (DECL_P (value))
3868 mode = DECL_MODE (value);
3869 else
3870 mode = TYPE_MODE (TREE_TYPE (value));
3871 DECL_MODE (vexpr) = mode;
3872
3873 val = gen_rtx_VAR_LOCATION
3874 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3875
e8c6bb74 3876 emit_debug_insn (val);
2a8e30fb
MM
3877
3878 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3879 {
3880 if (!gimple_debug_bind_p (debugstmt))
3881 continue;
3882
3883 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3884 SET_USE (use_p, vexpr);
3885
3886 update_stmt (debugstmt);
3887 }
3888 }
3889 }
3890 set_curr_insn_source_location (sloc);
3891 set_curr_insn_block (sblock);
3892 }
3893
a5883ba0 3894 currently_expanding_gimple_stmt = stmt;
b5b8b0ac 3895
242229bb
JH
3896 /* Expand this statement, then evaluate the resulting RTL and
3897 fixup the CFG accordingly. */
726a989a 3898 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 3899 {
726a989a 3900 new_bb = expand_gimple_cond (bb, stmt);
cea49550
RH
3901 if (new_bb)
3902 return new_bb;
3903 }
b5b8b0ac
AO
3904 else if (gimple_debug_bind_p (stmt))
3905 {
3906 location_t sloc = get_curr_insn_source_location ();
3907 tree sblock = get_curr_insn_block ();
3908 gimple_stmt_iterator nsi = gsi;
3909
3910 for (;;)
3911 {
3912 tree var = gimple_debug_bind_get_var (stmt);
3913 tree value;
3914 rtx val;
3915 enum machine_mode mode;
3916
ec8c1492
JJ
3917 if (TREE_CODE (var) != DEBUG_EXPR_DECL
3918 && TREE_CODE (var) != LABEL_DECL
3919 && !target_for_debug_bind (var))
3920 goto delink_debug_stmt;
3921
b5b8b0ac
AO
3922 if (gimple_debug_bind_has_value_p (stmt))
3923 value = gimple_debug_bind_get_value (stmt);
3924 else
3925 value = NULL_TREE;
3926
3927 last = get_last_insn ();
3928
3929 set_curr_insn_source_location (gimple_location (stmt));
3930 set_curr_insn_block (gimple_block (stmt));
3931
3932 if (DECL_P (var))
3933 mode = DECL_MODE (var);
3934 else
3935 mode = TYPE_MODE (TREE_TYPE (var));
3936
3937 val = gen_rtx_VAR_LOCATION
3938 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3939
e16b6fd0 3940 emit_debug_insn (val);
b5b8b0ac
AO
3941
3942 if (dump_file && (dump_flags & TDF_DETAILS))
3943 {
3944 /* We can't dump the insn with a TREE where an RTX
3945 is expected. */
e8c6bb74 3946 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
b5b8b0ac 3947 maybe_dump_rtl_for_gimple_stmt (stmt, last);
e8c6bb74 3948 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
b5b8b0ac
AO
3949 }
3950
ec8c1492 3951 delink_debug_stmt:
2a8e30fb
MM
3952 /* In order not to generate too many debug temporaries,
3953 we delink all uses of debug statements we already expanded.
3954 Therefore debug statements between definition and real
3955 use of TERed SSA names will continue to use the SSA name,
3956 and not be replaced with debug temps. */
3957 delink_stmt_imm_use (stmt);
3958
b5b8b0ac
AO
3959 gsi = nsi;
3960 gsi_next (&nsi);
3961 if (gsi_end_p (nsi))
3962 break;
3963 stmt = gsi_stmt (nsi);
3964 if (!gimple_debug_bind_p (stmt))
3965 break;
3966 }
3967
ddb555ed
JJ
3968 set_curr_insn_source_location (sloc);
3969 set_curr_insn_block (sblock);
3970 }
3971 else if (gimple_debug_source_bind_p (stmt))
3972 {
3973 location_t sloc = get_curr_insn_source_location ();
3974 tree sblock = get_curr_insn_block ();
3975 tree var = gimple_debug_source_bind_get_var (stmt);
3976 tree value = gimple_debug_source_bind_get_value (stmt);
3977 rtx val;
3978 enum machine_mode mode;
3979
3980 last = get_last_insn ();
3981
3982 set_curr_insn_source_location (gimple_location (stmt));
3983 set_curr_insn_block (gimple_block (stmt));
3984
3985 mode = DECL_MODE (var);
3986
3987 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3988 VAR_INIT_STATUS_UNINITIALIZED);
3989
3990 emit_debug_insn (val);
3991
3992 if (dump_file && (dump_flags & TDF_DETAILS))
3993 {
3994 /* We can't dump the insn with a TREE where an RTX
3995 is expected. */
3996 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3997 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3998 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3999 }
4000
b5b8b0ac
AO
4001 set_curr_insn_source_location (sloc);
4002 set_curr_insn_block (sblock);
4003 }
80c7a9eb 4004 else
242229bb 4005 {
726a989a 4006 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
cea49550
RH
4007 {
4008 bool can_fallthru;
4009 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
4010 if (new_bb)
4011 {
4012 if (can_fallthru)
4013 bb = new_bb;
4014 else
4015 return new_bb;
4016 }
4017 }
4d7a65ea 4018 else
b7211528 4019 {
4e3825db 4020 def_operand_p def_p;
4e3825db
MM
4021 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4022
4023 if (def_p != NULL)
4024 {
4025 /* Ignore this stmt if it is in the list of
4026 replaceable expressions. */
4027 if (SA.values
b8698a0f 4028 && bitmap_bit_p (SA.values,
e97809c6 4029 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4e3825db
MM
4030 continue;
4031 }
28ed065e 4032 last = expand_gimple_stmt (stmt);
726a989a 4033 maybe_dump_rtl_for_gimple_stmt (stmt, last);
b7211528 4034 }
242229bb
JH
4035 }
4036 }
4037
a5883ba0
MM
4038 currently_expanding_gimple_stmt = NULL;
4039
7241571e 4040 /* Expand implicit goto and convert goto_locus. */
a9b77cd1
ZD
4041 FOR_EACH_EDGE (e, ei, bb->succs)
4042 {
7241571e
JJ
4043 if (e->goto_locus && e->goto_block)
4044 {
4045 set_curr_insn_source_location (e->goto_locus);
4046 set_curr_insn_block (e->goto_block);
4047 e->goto_locus = curr_insn_locator ();
4048 }
4049 e->goto_block = NULL;
4050 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4051 {
4052 emit_jump (label_rtx_for_bb (e->dest));
4053 e->flags &= ~EDGE_FALLTHRU;
4054 }
a9b77cd1
ZD
4055 }
4056
ae761c45
AH
 4057	  /* Expanded RTL can create a jump in the last instruction of a block.
 4058	     Such a jump might later be assumed to be a jump to the successor and break edge insertion.
 4059	     We need to insert a dummy move to prevent this.  PR41440. */
4060 if (single_succ_p (bb)
4061 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4062 && (last = get_last_insn ())
4063 && JUMP_P (last))
4064 {
4065 rtx dummy = gen_reg_rtx (SImode);
4066 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
4067 }
4068
242229bb
JH
4069 do_pending_stack_adjust ();
4070
3f117656 4071 /* Find the block tail. The last insn in the block is the insn
242229bb
JH
4072 before a barrier and/or table jump insn. */
4073 last = get_last_insn ();
4b4bf941 4074 if (BARRIER_P (last))
242229bb
JH
4075 last = PREV_INSN (last);
4076 if (JUMP_TABLE_DATA_P (last))
4077 last = PREV_INSN (PREV_INSN (last));
4078 BB_END (bb) = last;
caf93cb0 4079
242229bb 4080 update_bb_for_insn (bb);
80c7a9eb 4081
242229bb
JH
4082 return bb;
4083}
4084
4085
4086/* Create a basic block for initialization code. */
4087
4088static basic_block
4089construct_init_block (void)
4090{
4091 basic_block init_block, first_block;
fd44f634
JH
4092 edge e = NULL;
4093 int flags;
275a4187 4094
fd44f634
JH
4095 /* Multiple entry points not supported yet. */
4096 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
5e2d947c
JH
4097 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4098 init_rtl_bb_info (EXIT_BLOCK_PTR);
4099 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4100 EXIT_BLOCK_PTR->flags |= BB_RTL;
242229bb 4101
fd44f634 4102 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
275a4187 4103
fd44f634
JH
4104 /* When entry edge points to first basic block, we don't need jump,
4105 otherwise we have to jump into proper target. */
4106 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4107 {
726a989a 4108 tree label = gimple_block_label (e->dest);
fd44f634
JH
4109
4110 emit_jump (label_rtx (label));
4111 flags = 0;
275a4187 4112 }
fd44f634
JH
4113 else
4114 flags = EDGE_FALLTHRU;
242229bb
JH
4115
4116 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4117 get_last_insn (),
4118 ENTRY_BLOCK_PTR);
4119 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4120 init_block->count = ENTRY_BLOCK_PTR->count;
4121 if (e)
4122 {
4123 first_block = e->dest;
4124 redirect_edge_succ (e, init_block);
fd44f634 4125 e = make_edge (init_block, first_block, flags);
242229bb
JH
4126 }
4127 else
4128 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4129 e->probability = REG_BR_PROB_BASE;
4130 e->count = ENTRY_BLOCK_PTR->count;
4131
4132 update_bb_for_insn (init_block);
4133 return init_block;
4134}
4135
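/* For instance (illustrative only): the init block created above collects
   the RTL already emitted for parameter setup, the optional __main call and
   the stack protector prologue, placing it between the entry block and the
   first real basic block and either falling through to it or jumping to
   it.  */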
55e092c4
JH
4136/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4137 found in the block tree. */
4138
4139static void
4140set_block_levels (tree block, int level)
4141{
4142 while (block)
4143 {
4144 BLOCK_NUMBER (block) = level;
4145 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4146 block = BLOCK_CHAIN (block);
4147 }
4148}
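/* For example (illustrative only), given the block tree

     DECL_INITIAL (fndecl)	depth 0
       BLOCK A			depth 1
	 BLOCK A1		depth 2
       BLOCK B			depth 1

   set_block_levels records the depth shown in BLOCK_NUMBER, which lets
   change_scope find the common parent of two scopes by comparing depths.  */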
242229bb
JH
4149
4150/* Create a block containing landing pads and similar stuff. */
4151
4152static void
4153construct_exit_block (void)
4154{
4155 rtx head = get_last_insn ();
4156 rtx end;
4157 basic_block exit_block;
628f6a4e
BE
4158 edge e, e2;
4159 unsigned ix;
4160 edge_iterator ei;
071a42f9 4161 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
242229bb 4162
bf08ebeb
JH
4163 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4164
caf93cb0 4165 /* Make sure the locus is set to the end of the function, so that
242229bb 4166 epilogue line numbers and warnings are set properly. */
6773e15f 4167 if (cfun->function_end_locus != UNKNOWN_LOCATION)
242229bb
JH
4168 input_location = cfun->function_end_locus;
4169
4170 /* The following insns belong to the top scope. */
55e092c4 4171 set_curr_insn_block (DECL_INITIAL (current_function_decl));
242229bb 4172
242229bb
JH
4173 /* Generate rtl for function exit. */
4174 expand_function_end ();
4175
4176 end = get_last_insn ();
4177 if (head == end)
4178 return;
071a42f9
JH
4179 /* While emitting the function end we could move end of the last basic block.
4180 */
4181 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4b4bf941 4182 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 4183 head = NEXT_INSN (head);
80c7a9eb
RH
4184 exit_block = create_basic_block (NEXT_INSN (head), end,
4185 EXIT_BLOCK_PTR->prev_bb);
242229bb
JH
4186 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4187 exit_block->count = EXIT_BLOCK_PTR->count;
628f6a4e
BE
4188
4189 ix = 0;
4190 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
242229bb 4191 {
8fb790fd 4192 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
242229bb 4193 if (!(e->flags & EDGE_ABNORMAL))
628f6a4e
BE
4194 redirect_edge_succ (e, exit_block);
4195 else
4196 ix++;
242229bb 4197 }
628f6a4e 4198
242229bb
JH
4199 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4200 e->probability = REG_BR_PROB_BASE;
4201 e->count = EXIT_BLOCK_PTR->count;
628f6a4e 4202 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
242229bb
JH
4203 if (e2 != e)
4204 {
c22cacf3 4205 e->count -= e2->count;
242229bb
JH
4206 exit_block->count -= e2->count;
4207 exit_block->frequency -= EDGE_FREQUENCY (e2);
4208 }
4209 if (e->count < 0)
4210 e->count = 0;
4211 if (exit_block->count < 0)
4212 exit_block->count = 0;
4213 if (exit_block->frequency < 0)
4214 exit_block->frequency = 0;
4215 update_bb_for_insn (exit_block);
4216}
4217
c22cacf3 4218/* Helper function for discover_nonconstant_array_refs.
a1b23b2f
UW
4219 Look for ARRAY_REF nodes with non-constant indexes and mark them
4220 addressable. */
4221
4222static tree
4223discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4224 void *data ATTRIBUTE_UNUSED)
4225{
4226 tree t = *tp;
4227
4228 if (IS_TYPE_OR_DECL_P (t))
4229 *walk_subtrees = 0;
4230 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4231 {
4232 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4233 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4234 && (!TREE_OPERAND (t, 2)
4235 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4236 || (TREE_CODE (t) == COMPONENT_REF
4237 && (!TREE_OPERAND (t,2)
4238 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4239 || TREE_CODE (t) == BIT_FIELD_REF
4240 || TREE_CODE (t) == REALPART_EXPR
4241 || TREE_CODE (t) == IMAGPART_EXPR
4242 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1043771b 4243 || CONVERT_EXPR_P (t))
a1b23b2f
UW
4244 t = TREE_OPERAND (t, 0);
4245
4246 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4247 {
4248 t = get_base_address (t);
6f11d690
RG
4249 if (t && DECL_P (t)
4250 && DECL_MODE (t) != BLKmode)
a1b23b2f
UW
4251 TREE_ADDRESSABLE (t) = 1;
4252 }
4253
4254 *walk_subtrees = 0;
4255 }
4256
4257 return NULL_TREE;
4258}
4259
4260/* RTL expansion is not able to compile array references with variable
4261 offsets for arrays stored in single register. Discover such
4262 expressions and mark variables as addressable to avoid this
4263 scenario. */
4264
4265static void
4266discover_nonconstant_array_refs (void)
4267{
4268 basic_block bb;
726a989a 4269 gimple_stmt_iterator gsi;
a1b23b2f
UW
4270
4271 FOR_EACH_BB (bb)
726a989a
RB
4272 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4273 {
4274 gimple stmt = gsi_stmt (gsi);
aa847cc8
JJ
4275 if (!is_gimple_debug (stmt))
4276 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
726a989a 4277 }
a1b23b2f
UW
4278}
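/* For instance (illustrative only): with

     int a[2];
     ...
     return a[i];		(I not a compile-time constant)

   A could otherwise be allocated to a single register; marking it
   TREE_ADDRESSABLE here forces it into memory so a[i] can be expanded as an
   ordinary address computation.  */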
4279
2e3f842f
L
4280/* This function sets crtl->args.internal_arg_pointer to a virtual
4281 register if DRAP is needed. Local register allocator will replace
4282 virtual_incoming_args_rtx with the virtual register. */
4283
4284static void
4285expand_stack_alignment (void)
4286{
4287 rtx drap_rtx;
e939805b 4288 unsigned int preferred_stack_boundary;
2e3f842f
L
4289
4290 if (! SUPPORTS_STACK_ALIGNMENT)
4291 return;
b8698a0f 4292
2e3f842f
L
4293 if (cfun->calls_alloca
4294 || cfun->has_nonlocal_label
4295 || crtl->has_nonlocal_goto)
4296 crtl->need_drap = true;
4297
890b9b96
L
4298 /* Call update_stack_boundary here again to update incoming stack
4299 boundary. It may set incoming stack alignment to a different
4300 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4301 use the minimum incoming stack alignment to check if it is OK
4302 to perform sibcall optimization since sibcall optimization will
4303 only align the outgoing stack to incoming stack boundary. */
4304 if (targetm.calls.update_stack_boundary)
4305 targetm.calls.update_stack_boundary ();
4306
4307 /* The incoming stack frame has to be aligned at least at
4308 parm_stack_boundary. */
4309 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
2e3f842f 4310
2e3f842f
L
4311 /* Update crtl->stack_alignment_estimated and use it later to align
4312 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4313 exceptions since callgraph doesn't collect incoming stack alignment
4314 in this case. */
8f4f502f 4315 if (cfun->can_throw_non_call_exceptions
2e3f842f
L
4316 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4317 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4318 else
4319 preferred_stack_boundary = crtl->preferred_stack_boundary;
4320 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4321 crtl->stack_alignment_estimated = preferred_stack_boundary;
4322 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4323 crtl->stack_alignment_needed = preferred_stack_boundary;
4324
890b9b96
L
4325 gcc_assert (crtl->stack_alignment_needed
4326 <= crtl->stack_alignment_estimated);
4327
2e3f842f 4328 crtl->stack_realign_needed
e939805b 4329 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
d2d93c32 4330 crtl->stack_realign_tried = crtl->stack_realign_needed;
2e3f842f
L
4331
4332 crtl->stack_realign_processed = true;
4333
4334 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4335 alignment. */
4336 gcc_assert (targetm.calls.get_drap_rtx != NULL);
b8698a0f 4337 drap_rtx = targetm.calls.get_drap_rtx ();
2e3f842f 4338
d015f7cc
L
4339 /* stack_realign_drap and drap_rtx must match. */
4340 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4341
2e3f842f
L
4342 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4343 if (NULL != drap_rtx)
4344 {
4345 crtl->args.internal_arg_pointer = drap_rtx;
4346
4347 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4348 needed. */
4349 fixup_tail_calls ();
4350 }
4351}
4352
242229bb
JH
4353/* Translate the intermediate representation contained in the CFG
4354 from GIMPLE trees to RTL.
4355
4356 We do conversion per basic block and preserve/update the tree CFG.
4357 This implies we have to do some magic as the CFG can simultaneously
4358 consist of basic blocks containing RTL and GIMPLE trees. This can
61ada8ae 4359 confuse the CFG hooks, so be careful to not manipulate CFG during
242229bb
JH
4360 the expansion. */
4361
c2924966 4362static unsigned int
726a989a 4363gimple_expand_cfg (void)
242229bb
JH
4364{
4365 basic_block bb, init_block;
4366 sbitmap blocks;
0ef90296
ZD
4367 edge_iterator ei;
4368 edge e;
3a42502d 4369 rtx var_seq;
4e3825db
MM
4370 unsigned i;
4371
f029db69 4372 timevar_push (TV_OUT_OF_SSA);
4e3825db 4373 rewrite_out_of_ssa (&SA);
f029db69 4374 timevar_pop (TV_OUT_OF_SSA);
4e3825db
MM
4375 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4376 sizeof (rtx));
242229bb 4377
be147e84
RG
4378 /* Make sure all values used by the optimization passes have sane
4379 defaults. */
4380 reg_renumber = 0;
4381
4586b4ca
SB
4382 /* Some backends want to know that we are expanding to RTL. */
4383 currently_expanding_to_rtl = 1;
cd7d9fd7
RG
4384 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
4385 free_dominance_info (CDI_DOMINATORS);
4586b4ca 4386
bf08ebeb
JH
4387 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4388
55e092c4 4389 insn_locators_alloc ();
fe8a7779 4390 if (!DECL_IS_BUILTIN (current_function_decl))
1751ecd6
AH
4391 {
4392 /* Eventually, all FEs should explicitly set function_start_locus. */
4393 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4394 set_curr_insn_source_location
4395 (DECL_SOURCE_LOCATION (current_function_decl));
4396 else
4397 set_curr_insn_source_location (cfun->function_start_locus);
4398 }
9ff70652
JJ
4399 else
4400 set_curr_insn_source_location (UNKNOWN_LOCATION);
55e092c4
JH
4401 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4402 prologue_locator = curr_insn_locator ();
4403
2b21299c
JJ
4404#ifdef INSN_SCHEDULING
4405 init_sched_attrs ();
4406#endif
4407
55e092c4
JH
4408 /* Make sure first insn is a note even if we don't want linenums.
4409 This makes sure the first insn will never be deleted.
4410 Also, final expects a note to appear there. */
4411 emit_note (NOTE_INSN_DELETED);
6429e3be 4412
a1b23b2f
UW
4413 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4414 discover_nonconstant_array_refs ();
4415
e41b2a33 4416 targetm.expand_to_rtl_hook ();
cb91fab0 4417 crtl->stack_alignment_needed = STACK_BOUNDARY;
2e3f842f 4418 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
890b9b96 4419 crtl->stack_alignment_estimated = 0;
cb91fab0
JH
4420 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4421 cfun->cfg->max_jumptable_ents = 0;
4422
ae9fd6b7
JH
 4423  /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
 4424     of the function section at expansion time to predict the distance of calls.  */
4425 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4426
727a31fa 4427 /* Expand the variables recorded during gimple lowering. */
f029db69 4428 timevar_push (TV_VAR_EXPAND);
3a42502d
RH
4429 start_sequence ();
4430
242229bb 4431 expand_used_vars ();
3a42502d
RH
4432
4433 var_seq = get_insns ();
4434 end_sequence ();
f029db69 4435 timevar_pop (TV_VAR_EXPAND);
242229bb 4436
7d69de61
RH
4437 /* Honor stack protection warnings. */
4438 if (warn_stack_protect)
4439 {
e3b5732b 4440 if (cfun->calls_alloca)
b8698a0f 4441 warning (OPT_Wstack_protector,
3b123595
SB
4442 "stack protector not protecting local variables: "
4443 "variable length buffer");
cb91fab0 4444 if (has_short_buffer && !crtl->stack_protect_guard)
b8698a0f 4445 warning (OPT_Wstack_protector,
3b123595
SB
4446 "stack protector not protecting function: "
4447 "all local arrays are less than %d bytes long",
7d69de61
RH
4448 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4449 }
4450
242229bb 4451 /* Set up parameters and prepare for return, for the function. */
b79c5284 4452 expand_function_start (current_function_decl);
242229bb 4453
3a42502d
RH
4454 /* If we emitted any instructions for setting up the variables,
4455 emit them before the FUNCTION_START note. */
4456 if (var_seq)
4457 {
4458 emit_insn_before (var_seq, parm_birth_insn);
4459
4460 /* In expand_function_end we'll insert the alloca save/restore
 4461	 before parm_birth_insn.  We've just inserted an alloca call.
4462 Adjust the pointer to match. */
4463 parm_birth_insn = var_seq;
4464 }
4465
4e3825db
MM
4466 /* Now that we also have the parameter RTXs, copy them over to our
4467 partitions. */
4468 for (i = 0; i < SA.map->num_partitions; i++)
4469 {
4470 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4471
4472 if (TREE_CODE (var) != VAR_DECL
4473 && !SA.partition_to_pseudo[i])
4474 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4475 gcc_assert (SA.partition_to_pseudo[i]);
eb7adebc
MM
4476
4477 /* If this decl was marked as living in multiple places, reset
4478 this now to NULL. */
4479 if (DECL_RTL_IF_SET (var) == pc_rtx)
4480 SET_DECL_RTL (var, NULL);
4481
4e3825db
MM
4482 /* Some RTL parts really want to look at DECL_RTL(x) when x
4483 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4484 SET_DECL_RTL here making this available, but that would mean
4485 to select one of the potentially many RTLs for one DECL. Instead
4486 of doing that we simply reset the MEM_EXPR of the RTL in question,
4487 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4488 if (!DECL_RTL_SET_P (var))
4489 {
4490 if (MEM_P (SA.partition_to_pseudo[i]))
4491 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4492 }
4493 }
4494
d466b407
MM
4495 /* If we have a class containing differently aligned pointers
4496 we need to merge those into the corresponding RTL pointer
4497 alignment. */
4498 for (i = 1; i < num_ssa_names; i++)
4499 {
4500 tree name = ssa_name (i);
4501 int part;
4502 rtx r;
4503
4504 if (!name
4505 || !POINTER_TYPE_P (TREE_TYPE (name))
4506 /* We might have generated new SSA names in
4507 update_alias_info_with_stack_vars. They will have a NULL
4508 defining statements, and won't be part of the partitioning,
4509 so ignore those. */
4510 || !SSA_NAME_DEF_STMT (name))
4511 continue;
4512 part = var_to_partition (SA.map, name);
4513 if (part == NO_PARTITION)
4514 continue;
4515 r = SA.partition_to_pseudo[part];
4516 if (REG_P (r))
4517 mark_reg_pointer (r, get_pointer_alignment (name));
4518 }
4519
242229bb
JH
4520 /* If this function is `main', emit a call to `__main'
4521 to run global initializers, etc. */
4522 if (DECL_NAME (current_function_decl)
4523 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4524 && DECL_FILE_SCOPE_P (current_function_decl))
4525 expand_main_function ();
4526
7d69de61
RH
4527 /* Initialize the stack_protect_guard field. This must happen after the
4528 call to __main (if any) so that the external decl is initialized. */
cb91fab0 4529 if (crtl->stack_protect_guard)
7d69de61
RH
4530 stack_protect_prologue ();
4531
4e3825db
MM
4532 expand_phi_nodes (&SA);
4533
3fbd86b1 4534 /* Register rtl specific functions for cfg. */
242229bb
JH
4535 rtl_register_cfg_hooks ();
4536
4537 init_block = construct_init_block ();
4538
0ef90296 4539 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4e3825db 4540 remaining edges later. */
0ef90296
ZD
4541 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4542 e->flags &= ~EDGE_EXECUTABLE;
4543
8b11009b 4544 lab_rtx_for_bb = pointer_map_create ();
242229bb 4545 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
10d22567 4546 bb = expand_gimple_basic_block (bb);
bf08ebeb 4547
b5b8b0ac
AO
4548 if (MAY_HAVE_DEBUG_INSNS)
4549 expand_debug_locations ();
4550
4e3825db 4551 execute_free_datastructures ();
f029db69 4552 timevar_push (TV_OUT_OF_SSA);
4e3825db 4553 finish_out_of_ssa (&SA);
f029db69 4554 timevar_pop (TV_OUT_OF_SSA);
4e3825db 4555
f029db69 4556 timevar_push (TV_POST_EXPAND);
91753e21
RG
4557 /* We are no longer in SSA form. */
4558 cfun->gimple_df->in_ssa_p = false;
4559
bf08ebeb
JH
 4560  /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4561 conservatively to true until they are all profile aware. */
8b11009b 4562 pointer_map_destroy (lab_rtx_for_bb);
cb91fab0 4563 free_histograms ();
242229bb
JH
4564
4565 construct_exit_block ();
55e092c4
JH
4566 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4567 insn_locators_finalize ();
242229bb 4568
1d65f45c 4569 /* Zap the tree EH table. */
e8a2a782 4570 set_eh_throw_stmt_table (cfun, NULL);
242229bb 4571
42821aff
MM
4572 /* We need JUMP_LABEL be set in order to redirect jumps, and hence
4573 split edges which edge insertions might do. */
242229bb 4574 rebuild_jump_labels (get_insns ());
242229bb 4575
4e3825db
MM
4576 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4577 {
4578 edge e;
4579 edge_iterator ei;
4580 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4581 {
4582 if (e->insns.r)
bc470c24 4583 {
42821aff 4584 rebuild_jump_labels_chain (e->insns.r);
bc470c24
JJ
4585 /* Avoid putting insns before parm_birth_insn. */
4586 if (e->src == ENTRY_BLOCK_PTR
4587 && single_succ_p (ENTRY_BLOCK_PTR)
4588 && parm_birth_insn)
4589 {
4590 rtx insns = e->insns.r;
4591 e->insns.r = NULL_RTX;
4592 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4593 }
4594 else
4595 commit_one_edge_insertion (e);
4596 }
4e3825db
MM
4597 else
4598 ei_next (&ei);
4599 }
4600 }
4601
4602 /* We're done expanding trees to RTL. */
4603 currently_expanding_to_rtl = 0;
4604
4605 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4606 {
4607 edge e;
4608 edge_iterator ei;
4609 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4610 {
4611 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4612 e->flags &= ~EDGE_EXECUTABLE;
4613
4614 /* At the moment not all abnormal edges match the RTL
4615 representation. It is safe to remove them here as
4616 find_many_sub_basic_blocks will rediscover them.
4617 In the future we should get this fixed properly. */
4618 if ((e->flags & EDGE_ABNORMAL)
4619 && !(e->flags & EDGE_SIBCALL))
4620 remove_edge (e);
4621 else
4622 ei_next (&ei);
4623 }
4624 }
4625
242229bb
JH
4626 blocks = sbitmap_alloc (last_basic_block);
4627 sbitmap_ones (blocks);
4628 find_many_sub_basic_blocks (blocks);
242229bb 4629 sbitmap_free (blocks);
4e3825db 4630 purge_all_dead_edges ();
242229bb 4631
2e3f842f
L
4632 expand_stack_alignment ();
4633
be147e84
RG
4634 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
4635 function. */
4636 if (crtl->tail_call_emit)
4637 fixup_tail_calls ();
4638
dac1fbf8
RG
4639 /* After initial rtl generation, call back to finish generating
4640 exception support code. We need to do this before cleaning up
4641 the CFG as the code does not expect dead landing pads. */
4642 if (cfun->eh->region_tree != NULL)
4643 finish_eh_generation ();
4644
 4645  /* Remove unreachable blocks; otherwise we cannot compute dominators
4646 which are needed for loop state verification. As a side-effect
4647 this also compacts blocks.
4648 ??? We cannot remove trivially dead insns here as for example
4649 the DRAP reg on i?86 is not magically live at this point.
4650 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
4651 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4652
242229bb 4653#ifdef ENABLE_CHECKING
62e5bf5d 4654 verify_flow_info ();
242229bb 4655#endif
9f8628ba 4656
be147e84
RG
4657 /* Initialize pseudos allocated for hard registers. */
4658 emit_initial_value_sets ();
4659
4660 /* And finally unshare all RTL. */
4661 unshare_all_rtl ();
4662
9f8628ba
PB
4663 /* There's no need to defer outputting this function any more; we
4664 know we want to output it. */
4665 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4666
4667 /* Now that we're done expanding trees to RTL, we shouldn't have any
4668 more CONCATs anywhere. */
4669 generating_concat_p = 0;
4670
b7211528
SB
4671 if (dump_file)
4672 {
4673 fprintf (dump_file,
4674 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4675 /* And the pass manager will dump RTL for us. */
4676 }
ef330312
PB
4677
4678 /* If we're emitting a nested function, make sure its parent gets
4679 emitted as well. Doing otherwise confuses debug info. */
c22cacf3 4680 {
ef330312
PB
4681 tree parent;
4682 for (parent = DECL_CONTEXT (current_function_decl);
c22cacf3
MS
4683 parent != NULL_TREE;
4684 parent = get_containing_scope (parent))
ef330312 4685 if (TREE_CODE (parent) == FUNCTION_DECL)
c22cacf3 4686 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
ef330312 4687 }
c22cacf3 4688
ef330312
PB
4689 /* We are now committed to emitting code for this function. Do any
4690 preparation, such as emitting abstract debug info for the inline
4691 before it gets mangled by optimization. */
4692 if (cgraph_function_possibly_inlined_p (current_function_decl))
4693 (*debug_hooks->outlining_inline_function) (current_function_decl);
4694
4695 TREE_ASM_WRITTEN (current_function_decl) = 1;
4bb1e037
AP
4696
4697 /* After expanding, the return labels are no longer needed. */
4698 return_label = NULL;
4699 naked_return_label = NULL;
0a35513e
AH
4700
4701 /* After expanding, the tm_restart map is no longer needed. */
4702 if (cfun->gimple_df->tm_restart)
4703 {
4704 htab_delete (cfun->gimple_df->tm_restart);
4705 cfun->gimple_df->tm_restart = NULL;
4706 }
4707
55e092c4
JH
4708 /* Tag the blocks with a depth number so that change_scope can find
4709 the common parent easily. */
4710 set_block_levels (DECL_INITIAL (cfun->decl), 0);
bf08ebeb 4711 default_rtl_profile ();
be147e84 4712
f029db69 4713 timevar_pop (TV_POST_EXPAND);
be147e84 4714
c2924966 4715 return 0;
242229bb
JH
4716}
4717
e3b5732b 4718struct rtl_opt_pass pass_expand =
242229bb 4719{
8ddbbcae 4720 {
e3b5732b 4721 RTL_PASS,
c22cacf3 4722 "expand", /* name */
242229bb 4723 NULL, /* gate */
726a989a 4724 gimple_expand_cfg, /* execute */
242229bb
JH
4725 NULL, /* sub */
4726 NULL, /* next */
4727 0, /* static_pass_number */
c22cacf3 4728 TV_EXPAND, /* tv_id */
688a482d
RG
4729 PROP_ssa | PROP_gimple_leh | PROP_cfg
4730 | PROP_gimple_lcx, /* properties_required */
242229bb 4731 PROP_rtl, /* properties_provided */
4e3825db
MM
4732 PROP_ssa | PROP_trees, /* properties_destroyed */
4733 TODO_verify_ssa | TODO_verify_flow
4734 | TODO_verify_stmts, /* todo_flags_start */
22c5fa5f 4735 TODO_ggc_collect /* todo_flags_finish */
8ddbbcae 4736 }
242229bb 4737};