/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
          && gimple_location (stmt) != EXPR_LOCATION (t))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}

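/* For instance, for a statement a_1 = b_2 + c_3 this rebuilds the
   expression PLUS_EXPR <b_2, c_3> with the type of a_1; parts of the
   expander below still consume trees rather than gimple.  */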

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

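/* For instance, if STARTING_FRAME_OFFSET were 8 with a preferred boundary
   of 16 bytes, the phase would be 8, and aligned objects would land at
   frame offsets congruent to 8 modulo 16.  (Illustrative values; the real
   ones are target-dependent.)  */
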
/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't satisfy given the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = LOCAL_DECL_ALIGNMENT (decl);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < align)
        {
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}

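/* E.g. with FRAME_GROWS_DOWNWARD, frame_phase 0, frame_offset -16, and a
   request for 12 bytes at 8-byte alignment: -16 - 12 = -28, which the
   masking rounds down to -32, so the object is placed at offset -32 and
   the frame grows to 32 bytes.  (Numbers purely illustrative.)  */
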
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* All variables initially conflict with no other.  */
  stack_vars[stack_vars_num].conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with LUIDs X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with LUIDs X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union-containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

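/* To make the PR25654 scenario above concrete: two stack temporaries of
   unrelated types, say struct { short s; } and struct { int i; }, have
   disjoint alias sets; without the explicit conflict added here they
   could be packed into one slot, and RTL alias analysis would then feel
   free to reorder a store to one across a load from the other.  (Types
   shown are hypothetical.)  */
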
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size and type of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  tree decla, declb;
  unsigned int uida, uidb;

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  decla = stack_vars[*(const size_t *)a].decl;
  declb = stack_vars[*(const size_t *)b].decl;
  /* For stack variables of the same size use an id of the decls
     to make the sort stable.  Two SSA names are compared by their
     version, SSA names come before non-SSA names, and two normal
     decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}

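/* So, for example, all 8-byte entries sort before any 16-byte entry, and
   among equal sizes SSA names (ordered by version) precede plain decls
   (ordered by DECL_UID), keeping the qsort result reproducible across
   runs.  */
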

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables, add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced.  */
          gcc_assert (DECL_P (decl)
                      && referenced_var_lookup (uid));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);
      add_partitioned_vars_to_ptset (&cfun->gimple_df->callused,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block that is easy to lay
   out within the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/
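
/* For instance, given objects of sizes 32, 16, 8 and 4 where only the
   16- and 8-byte objects conflict, the 16-byte object can be placed at
   offset 0 within the 32-byte partition and the 4-byte object at offset
   16, while the 8-byte object stays in a partition of its own.  (Sizes
   chosen purely for illustration.)  */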

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;
        }
    }

  if (optimize)
    update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  /* Alignment is unsigned.  */
  unsigned HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      offset -= frame_phase;
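      /* The lowest set bit of OFFSET is the largest power of two dividing
         it, which bounds the alignment this slot is guaranteed to have;
         e.g. an offset of 24 yields a byte alignment of 8.  */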
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0)
        align = STACK_BOUNDARY;
      else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = MAX_SUPPORTED_STACK_ALIGNMENT;

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
           : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   stack_vars[j].offset + offset);
        }
    }
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */

static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  align = get_decl_align_unit (SSAVAR (var));
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

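/* E.g. at -O0 a 4-byte int falls under the 32-byte cutoff and is given its
   stack slot immediately, while a 64-byte buffer is deferred and considered
   for slot sharing; with -fstack-protector everything is deferred.  */
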
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  tree origvar = var;
  var = SSAVAR (var);

  if (SUPPORTS_STACK_ALIGNMENT
      && TREE_TYPE (var) != error_mark_node
      && TREE_CODE (var) == VAR_DECL)
    {
      unsigned int align;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      if (crtl->stack_alignment_estimated < align)
        {
          /* stack_alignment_estimated shouldn't change after the stack
             realign decision is made.  */
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

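/* A consequence of the above: an array declared in an inner block conflicts
   with every variable of its enclosing blocks, but two arrays declared in
   sibling blocks do not conflict and may therefore share a stack slot.  */
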
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

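/* For example, with -fstack-protector-all (flag_stack_protect == 2) and
   the default ssp-buffer-size of 8, a char buf[64] lands in phase 1, an
   int arr[4] in phase 2, and a struct with a char-array member also in
   phase 2, since its SPCT_HAS_AGGREGATE bit keeps it out of phase 1.
   (Declarations are hypothetical.)  */
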
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* A subroutine of estimated_stack_frame_size.  Walk down through the
   BLOCK tree, totalling the stack usage that expanding each used variable
   would incur, without actually expanding anything.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  tree t;
  HOST_WIDE_INT size = 0;

  /* Account for all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  /* Account for all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  return size;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  /* Set TREE_USED on all variables in the local_decls.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}

/* Make a fair guess for the size of the stack frame of the current
   function.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (void)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }

  return size;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, next, outer_block = DECL_INITIAL (current_function_decl);
  tree maybe_local_decls = NULL_TREE;
  unsigned i;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls list with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  t = cfun->local_decls;
  cfun->local_decls = NULL_TREE;
  for (; t; t = next)
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      next = TREE_CHAIN (t);

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            {
              TREE_CHAIN (t) = cfun->local_decls;
              cfun->local_decls = t;
              continue;
            }
          else if (rtl == NULL_RTX)
            {
              /* If rtl isn't set yet, which can happen e.g. with
                 -fstack-protector, retry before returning from this
                 function.  */
              TREE_CHAIN (t) = maybe_local_decls;
              maybe_local_decls = t;
              continue;
            }
        }

      ggc_free (t);
    }

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation has assigned
     rtl to them.  */
  for (t = maybe_local_decls; t; t = next)
    {
      tree var = TREE_VALUE (t);
      rtx rtl = DECL_RTL_IF_SET (var);

      next = TREE_CHAIN (t);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        {
          TREE_CHAIN (t) = cfun->local_decls;
          cfun->local_decls = t;
          continue;
        }

      ggc_free (t);
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}

/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}

/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            delete_insn (NEXT_INSN (insn));
        }
    }
}

/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && SA.values
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
        {
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
            {
              code = code2;
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
            }
          /* If jumps are cheap turn some more codes into
             jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
            {
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                {
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                {
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
            }
        }
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (gimple_has_location (stmt))
    {
      set_curr_insn_source_location (gimple_location (stmt));
      set_curr_insn_block (gimple_block (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
                true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
        {
          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();
        }
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
        {
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
        }
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}

28ed065e
MM
1731/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1732 statement STMT. */
1733
1734static void
1735expand_call_stmt (gimple stmt)
1736{
1737 tree exp;
1738 tree lhs = gimple_call_lhs (stmt);
28ed065e 1739 size_t i;
e23817b3
RG
1740 bool builtin_p;
1741 tree decl;
28ed065e
MM
1742
1743 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1744
1745 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
e23817b3
RG
1746 decl = gimple_call_fndecl (stmt);
1747 builtin_p = decl && DECL_BUILT_IN (decl);
1748
28ed065e
MM
1749 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1750 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1751
1752 for (i = 0; i < gimple_call_num_args (stmt); i++)
e23817b3
RG
1753 {
1754 tree arg = gimple_call_arg (stmt, i);
1755 gimple def;
 1756	 /* TER forwards addresses into arguments of builtin functions so we have a
1757 chance to infer more correct alignment information. See PR39954. */
1758 if (builtin_p
1759 && TREE_CODE (arg) == SSA_NAME
1760 && (def = get_gimple_for_ssa_name (arg))
1761 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1762 arg = gimple_assign_rhs1 (def);
1763 CALL_EXPR_ARG (exp, i) = arg;
1764 }
28ed065e 1765
93f28ca7 1766 if (gimple_has_side_effects (stmt))
28ed065e
MM
1767 TREE_SIDE_EFFECTS (exp) = 1;
1768
93f28ca7 1769 if (gimple_call_nothrow_p (stmt))
28ed065e
MM
1770 TREE_NOTHROW (exp) = 1;
1771
1772 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1773 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1774 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1775 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1776 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1777 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1778 TREE_BLOCK (exp) = gimple_block (stmt);
1779
28ed065e
MM
1780 if (lhs)
1781 expand_assignment (lhs, exp, false);
1782 else
1783 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
1784}
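/* For example, a GIMPLE call
     lhs_1 = foo (a_2, b_3);
   is rebuilt above as the tree CALL_EXPR <foo, a_2, b_3> with the call
   flags (tailcall, return-slot, nothrow, ...) copied over, and is then
   expanded as the assignment lhs_1 = CALL_EXPR <...>.  */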
1785
1786/* A subroutine of expand_gimple_stmt, expanding one gimple statement
1787 STMT that doesn't require special handling for outgoing edges. That
1788 is no tailcalls and no GIMPLE_COND. */
1789
1790static void
1791expand_gimple_stmt_1 (gimple stmt)
1792{
1793 tree op0;
1794 switch (gimple_code (stmt))
1795 {
1796 case GIMPLE_GOTO:
1797 op0 = gimple_goto_dest (stmt);
1798 if (TREE_CODE (op0) == LABEL_DECL)
1799 expand_goto (op0);
1800 else
1801 expand_computed_goto (op0);
1802 break;
1803 case GIMPLE_LABEL:
1804 expand_label (gimple_label_label (stmt));
1805 break;
1806 case GIMPLE_NOP:
1807 case GIMPLE_PREDICT:
1808 break;
28ed065e
MM
1809 case GIMPLE_SWITCH:
1810 expand_case (stmt);
1811 break;
1812 case GIMPLE_ASM:
1813 expand_asm_stmt (stmt);
1814 break;
1815 case GIMPLE_CALL:
1816 expand_call_stmt (stmt);
1817 break;
1818
1819 case GIMPLE_RETURN:
1820 op0 = gimple_return_retval (stmt);
1821
1822 if (op0 && op0 != error_mark_node)
1823 {
1824 tree result = DECL_RESULT (current_function_decl);
1825
1826 /* If we are not returning the current function's RESULT_DECL,
1827 build an assignment to it. */
1828 if (op0 != result)
1829 {
1830 /* I believe that a function's RESULT_DECL is unique. */
1831 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1832
1833 /* ??? We'd like to use simply expand_assignment here,
1834 but this fails if the value is of BLKmode but the return
1835 decl is a register. expand_return has special handling
1836 for this combination, which eventually should move
1837 to common code. See comments there. Until then, let's
1838 build a modify expression :-/ */
1839 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1840 result, op0);
1841 }
1842 }
1843 if (!op0)
1844 expand_null_return ();
1845 else
1846 expand_return (op0);
1847 break;
1848
1849 case GIMPLE_ASSIGN:
1850 {
1851 tree lhs = gimple_assign_lhs (stmt);
1852
1853 /* Tree expand used to fiddle with |= and &= of two bitfield
 1854	 COMPONENT_REFs here. This can't happen with gimple; the LHS
1855 of binary assigns must be a gimple reg. */
1856
1857 if (TREE_CODE (lhs) != SSA_NAME
1858 || get_gimple_rhs_class (gimple_expr_code (stmt))
1859 == GIMPLE_SINGLE_RHS)
1860 {
1861 tree rhs = gimple_assign_rhs1 (stmt);
1862 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
1863 == GIMPLE_SINGLE_RHS);
1864 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
1865 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
1866 expand_assignment (lhs, rhs,
1867 gimple_assign_nontemporal_move_p (stmt));
1868 }
1869 else
1870 {
1871 rtx target, temp;
1872 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
1873 struct separate_ops ops;
1874 bool promoted = false;
1875
1876 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
1877 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
1878 promoted = true;
1879
1880 ops.code = gimple_assign_rhs_code (stmt);
1881 ops.type = TREE_TYPE (lhs);
1882 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
1883 {
1884 case GIMPLE_BINARY_RHS:
1885 ops.op1 = gimple_assign_rhs2 (stmt);
1886 /* Fallthru */
1887 case GIMPLE_UNARY_RHS:
1888 ops.op0 = gimple_assign_rhs1 (stmt);
1889 break;
1890 default:
1891 gcc_unreachable ();
1892 }
1893 ops.location = gimple_location (stmt);
1894
 1895	 /* If we want to use a nontemporal store, force the value into a
1896 register first. If we store into a promoted register,
1897 don't directly expand to target. */
1898 temp = nontemporal || promoted ? NULL_RTX : target;
1899 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
1900 EXPAND_NORMAL);
1901
1902 if (temp == target)
1903 ;
1904 else if (promoted)
1905 {
4e18a7d4 1906 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
28ed065e
MM
1907 /* If TEMP is a VOIDmode constant, use convert_modes to make
1908 sure that we properly convert it. */
1909 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1910 {
1911 temp = convert_modes (GET_MODE (target),
1912 TYPE_MODE (ops.type),
4e18a7d4 1913 temp, unsignedp);
28ed065e 1914 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4e18a7d4 1915 GET_MODE (target), temp, unsignedp);
28ed065e
MM
1916 }
1917
4e18a7d4 1918 convert_move (SUBREG_REG (target), temp, unsignedp);
28ed065e
MM
1919 }
1920 else if (nontemporal && emit_storent_insn (target, temp))
1921 ;
1922 else
1923 {
1924 temp = force_operand (temp, target);
1925 if (temp != target)
1926 emit_move_insn (target, temp);
1927 }
1928 }
1929 }
1930 break;
1931
1932 default:
1933 gcc_unreachable ();
1934 }
1935}
1936
1937/* Expand one gimple statement STMT and return the last RTL instruction
1938 before any of the newly generated ones.
1939
1940 In addition to generating the necessary RTL instructions this also
1941 sets REG_EH_REGION notes if necessary and sets the current source
1942 location for diagnostics. */
1943
1944static rtx
1945expand_gimple_stmt (gimple stmt)
1946{
1d65f45c 1947 int lp_nr = 0;
28ed065e
MM
1948 rtx last = NULL;
1949 location_t saved_location = input_location;
1950
1951 last = get_last_insn ();
1952
1953 /* If this is an expression of some kind and it has an associated line
1954 number, then emit the line number before expanding the expression.
1955
1956 We need to save and restore the file and line information so that
1957 errors discovered during expansion are emitted with the right
 1958	 information. It would be better if the diagnostic routines
1959 used the file/line information embedded in the tree nodes rather
1960 than globals. */
1961 gcc_assert (cfun);
1962
1963 if (gimple_has_location (stmt))
1964 {
1965 input_location = gimple_location (stmt);
1966 set_curr_insn_source_location (input_location);
1967
1968 /* Record where the insns produced belong. */
1969 set_curr_insn_block (gimple_block (stmt));
1970 }
1971
1972 expand_gimple_stmt_1 (stmt);
1973 /* Free any temporaries used to evaluate this statement. */
1974 free_temp_slots ();
1975
1976 input_location = saved_location;
1977
1978 /* Mark all insns that may trap. */
1d65f45c
RH
1979 lp_nr = lookup_stmt_eh_lp (stmt);
1980 if (lp_nr)
28ed065e
MM
1981 {
1982 rtx insn;
1983 for (insn = next_real_insn (last); insn;
1984 insn = next_real_insn (insn))
1985 {
1986 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1987 /* If we want exceptions for non-call insns, any
1988 may_trap_p instruction may throw. */
1989 && GET_CODE (PATTERN (insn)) != CLOBBER
1990 && GET_CODE (PATTERN (insn)) != USE
1d65f45c
RH
1991 && insn_could_throw_p (insn))
1992 make_reg_eh_region_note (insn, 0, lp_nr);
28ed065e
MM
1993 }
1994 }
1995
1996 return last;
1997}
1998
726a989a 1999/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
224e770b
RH
2000 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2001 generated a tail call (something that might be denied by the ABI
cea49550
RH
2002 rules governing the call; see calls.c).
2003
2004 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2005 can still reach the rest of BB. The case here is __builtin_sqrt,
2006 where the NaN result goes through the external function (with a
2007 tailcall) and the normal result happens via a sqrt instruction. */
80c7a9eb
RH
2008
2009static basic_block
726a989a 2010expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
80c7a9eb 2011{
b7211528 2012 rtx last2, last;
224e770b 2013 edge e;
628f6a4e 2014 edge_iterator ei;
224e770b
RH
2015 int probability;
2016 gcov_type count;
80c7a9eb 2017
28ed065e 2018 last2 = last = expand_gimple_stmt (stmt);
80c7a9eb
RH
2019
2020 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
2021 if (CALL_P (last) && SIBLING_CALL_P (last))
2022 goto found;
80c7a9eb 2023
726a989a 2024 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2025
cea49550 2026 *can_fallthru = true;
224e770b 2027 return NULL;
80c7a9eb 2028
224e770b
RH
2029 found:
2030 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2031 Any instructions emitted here are about to be deleted. */
2032 do_pending_stack_adjust ();
2033
2034 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2035 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2036 EH or abnormal edges, we shouldn't have created a tail call in
2037 the first place. So it seems to me we should just be removing
2038 all edges here, or redirecting the existing fallthru edge to
2039 the exit block. */
2040
224e770b
RH
2041 probability = 0;
2042 count = 0;
224e770b 2043
628f6a4e
BE
2044 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2045 {
224e770b
RH
2046 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2047 {
2048 if (e->dest != EXIT_BLOCK_PTR)
80c7a9eb 2049 {
224e770b
RH
2050 e->dest->count -= e->count;
2051 e->dest->frequency -= EDGE_FREQUENCY (e);
2052 if (e->dest->count < 0)
c22cacf3 2053 e->dest->count = 0;
224e770b 2054 if (e->dest->frequency < 0)
c22cacf3 2055 e->dest->frequency = 0;
80c7a9eb 2056 }
224e770b
RH
2057 count += e->count;
2058 probability += e->probability;
2059 remove_edge (e);
80c7a9eb 2060 }
628f6a4e
BE
2061 else
2062 ei_next (&ei);
80c7a9eb
RH
2063 }
2064
224e770b
RH
2065 /* This is somewhat ugly: the call_expr expander often emits instructions
2066 after the sibcall (to perform the function return). These confuse the
12eff7b7 2067 find_many_sub_basic_blocks code, so we need to get rid of these. */
224e770b 2068 last = NEXT_INSN (last);
341c100f 2069 gcc_assert (BARRIER_P (last));
cea49550
RH
2070
2071 *can_fallthru = false;
224e770b
RH
2072 while (NEXT_INSN (last))
2073 {
 2074	 /* For instance, a sqrt builtin expander expands an if with a
 2075	 sibcall in the then-arm and a label for the else-arm. */
2076 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
2077 {
2078 *can_fallthru = true;
2079 break;
2080 }
224e770b
RH
2081 delete_insn (NEXT_INSN (last));
2082 }
2083
2084 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2085 e->probability += probability;
2086 e->count += count;
2087 BB_END (bb) = last;
2088 update_bb_for_insn (bb);
2089
2090 if (NEXT_INSN (last))
2091 {
2092 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2093
2094 last = BB_END (bb);
2095 if (BARRIER_P (last))
2096 BB_END (bb) = PREV_INSN (last);
2097 }
2098
726a989a 2099 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2100
224e770b 2101 return bb;
80c7a9eb
RH
2102}
2103
b5b8b0ac
AO
2104/* Return the difference between the floor and the truncated result of
2105 a signed division by OP1 with remainder MOD. */
2106static rtx
2107floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2108{
2109 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2110 return gen_rtx_IF_THEN_ELSE
2111 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2112 gen_rtx_IF_THEN_ELSE
2113 (mode, gen_rtx_LT (BImode,
2114 gen_rtx_DIV (mode, op1, mod),
2115 const0_rtx),
2116 constm1_rtx, const0_rtx),
2117 const0_rtx);
2118}
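/* Worked example: for -7 / 2 truncation gives -3 with remainder -1;
   the remainder is nonzero and op1 / mod = 2 / -1 < 0, so the
   adjustment is -1 and the floor result is -3 + -1 = -4.  */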
2119
2120/* Return the difference between the ceil and the truncated result of
2121 a signed division by OP1 with remainder MOD. */
2122static rtx
2123ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2124{
2125 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2126 return gen_rtx_IF_THEN_ELSE
2127 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2128 gen_rtx_IF_THEN_ELSE
2129 (mode, gen_rtx_GT (BImode,
2130 gen_rtx_DIV (mode, op1, mod),
2131 const0_rtx),
2132 const1_rtx, const0_rtx),
2133 const0_rtx);
2134}
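/* Worked example: for 7 / 2 truncation gives 3 with remainder 1;
   the remainder is nonzero and op1 / mod = 2 / 1 > 0, so the
   adjustment is 1 and the ceiling result is 3 + 1 = 4.  */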
2135
2136/* Return the difference between the ceil and the truncated result of
2137 an unsigned division by OP1 with remainder MOD. */
2138static rtx
2139ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2140{
2141 /* (mod != 0 ? 1 : 0) */
2142 return gen_rtx_IF_THEN_ELSE
2143 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2144 const1_rtx, const0_rtx);
2145}
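/* Worked example: for 7u / 2u truncation gives 3 with remainder 1;
   the remainder is nonzero, so the adjustment is 1 and the ceiling
   result is 3 + 1 = 4.  */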
2146
2147/* Return the difference between the rounded and the truncated result
2148 of a signed division by OP1 with remainder MOD. Halfway cases are
2149 rounded away from zero, rather than to the nearest even number. */
2150static rtx
2151round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2152{
2153 /* (abs (mod) >= abs (op1) - abs (mod)
2154 ? (op1 / mod > 0 ? 1 : -1)
2155 : 0) */
2156 return gen_rtx_IF_THEN_ELSE
2157 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2158 gen_rtx_MINUS (mode,
2159 gen_rtx_ABS (mode, op1),
2160 gen_rtx_ABS (mode, mod))),
2161 gen_rtx_IF_THEN_ELSE
2162 (mode, gen_rtx_GT (BImode,
2163 gen_rtx_DIV (mode, op1, mod),
2164 const0_rtx),
2165 const1_rtx, constm1_rtx),
2166 const0_rtx);
2167}
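/* Worked example: for -7 / 2 truncation gives -3 with remainder -1;
   abs (mod) = 1 equals abs (op1) - abs (mod) = 1, so this is a
   halfway case, and op1 / mod = 2 / -1 < 0 selects -1: the rounded
   result is -3 + -1 = -4, i.e. -3.5 rounded away from zero.  */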
2168
2169/* Return the difference between the rounded and the truncated result
 2170	 of an unsigned division by OP1 with remainder MOD. Halfway cases
2171 are rounded away from zero, rather than to the nearest even
2172 number. */
2173static rtx
2174round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2175{
2176 /* (mod >= op1 - mod ? 1 : 0) */
2177 return gen_rtx_IF_THEN_ELSE
2178 (mode, gen_rtx_GE (BImode, mod,
2179 gen_rtx_MINUS (mode, op1, mod)),
2180 const1_rtx, const0_rtx);
2181}
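/* Worked example: for 7u / 2u the remainder 1 satisfies
   mod >= op1 - mod (1 >= 1), so the adjustment is 1 and the rounded
   result is 3 + 1 = 4, i.e. 3.5 rounded away from zero.  */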
2182
dda2da58
AO
 2183/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2184 any rtl. */
2185
2186static rtx
2187convert_debug_memory_address (enum machine_mode mode, rtx x)
2188{
2189 enum machine_mode xmode = GET_MODE (x);
2190
2191#ifndef POINTERS_EXTEND_UNSIGNED
2192 gcc_assert (mode == Pmode);
2193 gcc_assert (xmode == mode || xmode == VOIDmode);
2194#else
2195 gcc_assert (mode == Pmode || mode == ptr_mode);
2196
2197 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2198 return x;
2199
2200 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2201 x = simplify_gen_subreg (mode, x, xmode,
2202 subreg_lowpart_offset
2203 (mode, xmode));
2204 else if (POINTERS_EXTEND_UNSIGNED > 0)
2205 x = gen_rtx_ZERO_EXTEND (mode, x);
2206 else if (!POINTERS_EXTEND_UNSIGNED)
2207 x = gen_rtx_SIGN_EXTEND (mode, x);
2208 else
2209 gcc_unreachable ();
2210#endif /* POINTERS_EXTEND_UNSIGNED */
2211
2212 return x;
2213}
2214
b5b8b0ac
AO
2215/* Return an RTX equivalent to the value of the tree expression
2216 EXP. */
2217
2218static rtx
2219expand_debug_expr (tree exp)
2220{
2221 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2222 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2223 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
09e881c9 2224 addr_space_t as;
d4ebfa65 2225 enum machine_mode address_mode;
b5b8b0ac
AO
2226
2227 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2228 {
2229 case tcc_expression:
2230 switch (TREE_CODE (exp))
2231 {
2232 case COND_EXPR:
2233 goto ternary;
2234
2235 case TRUTH_ANDIF_EXPR:
2236 case TRUTH_ORIF_EXPR:
2237 case TRUTH_AND_EXPR:
2238 case TRUTH_OR_EXPR:
2239 case TRUTH_XOR_EXPR:
2240 goto binary;
2241
2242 case TRUTH_NOT_EXPR:
2243 goto unary;
2244
2245 default:
2246 break;
2247 }
2248 break;
2249
2250 ternary:
2251 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2252 if (!op2)
2253 return NULL_RTX;
2254 /* Fall through. */
2255
2256 binary:
2257 case tcc_binary:
2258 case tcc_comparison:
2259 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2260 if (!op1)
2261 return NULL_RTX;
2262 /* Fall through. */
2263
2264 unary:
2265 case tcc_unary:
2266 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2267 if (!op0)
2268 return NULL_RTX;
2269 break;
2270
2271 case tcc_type:
2272 case tcc_statement:
2273 gcc_unreachable ();
2274
2275 case tcc_constant:
2276 case tcc_exceptional:
2277 case tcc_declaration:
2278 case tcc_reference:
2279 case tcc_vl_exp:
2280 break;
2281 }
2282
2283 switch (TREE_CODE (exp))
2284 {
2285 case STRING_CST:
2286 if (!lookup_constant_def (exp))
2287 {
e1b243a8
JJ
2288 if (strlen (TREE_STRING_POINTER (exp)) + 1
2289 != (size_t) TREE_STRING_LENGTH (exp))
2290 return NULL_RTX;
b5b8b0ac
AO
2291 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2292 op0 = gen_rtx_MEM (BLKmode, op0);
2293 set_mem_attributes (op0, exp, 0);
2294 return op0;
2295 }
2296 /* Fall through... */
2297
2298 case INTEGER_CST:
2299 case REAL_CST:
2300 case FIXED_CST:
2301 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2302 return op0;
2303
2304 case COMPLEX_CST:
2305 gcc_assert (COMPLEX_MODE_P (mode));
2306 op0 = expand_debug_expr (TREE_REALPART (exp));
b5b8b0ac 2307 op1 = expand_debug_expr (TREE_IMAGPART (exp));
b5b8b0ac
AO
2308 return gen_rtx_CONCAT (mode, op0, op1);
2309
0ca5af51
AO
2310 case DEBUG_EXPR_DECL:
2311 op0 = DECL_RTL_IF_SET (exp);
2312
2313 if (op0)
2314 return op0;
2315
2316 op0 = gen_rtx_DEBUG_EXPR (mode);
e4fb38bd 2317 DEBUG_EXPR_TREE_DECL (op0) = exp;
0ca5af51
AO
2318 SET_DECL_RTL (exp, op0);
2319
2320 return op0;
2321
b5b8b0ac
AO
2322 case VAR_DECL:
2323 case PARM_DECL:
2324 case FUNCTION_DECL:
2325 case LABEL_DECL:
2326 case CONST_DECL:
2327 case RESULT_DECL:
2328 op0 = DECL_RTL_IF_SET (exp);
2329
2330 /* This decl was probably optimized away. */
2331 if (!op0)
e1b243a8
JJ
2332 {
2333 if (TREE_CODE (exp) != VAR_DECL
2334 || DECL_EXTERNAL (exp)
2335 || !TREE_STATIC (exp)
2336 || !DECL_NAME (exp)
0fba566c
JJ
2337 || DECL_HARD_REGISTER (exp)
2338 || mode == VOIDmode)
e1b243a8
JJ
2339 return NULL;
2340
2341 op0 = DECL_RTL (exp);
2342 SET_DECL_RTL (exp, NULL);
2343 if (!MEM_P (op0)
2344 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2345 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2346 return NULL;
2347 }
2348 else
2349 op0 = copy_rtx (op0);
b5b8b0ac 2350
06796564
JJ
2351 if (GET_MODE (op0) == BLKmode
2352 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2353 below would ICE. While it is likely a FE bug,
2354 try to be robust here. See PR43166. */
2355 || mode == BLKmode)
b5b8b0ac
AO
2356 {
2357 gcc_assert (MEM_P (op0));
2358 op0 = adjust_address_nv (op0, mode, 0);
2359 return op0;
2360 }
2361
2362 /* Fall through. */
2363
2364 adjust_mode:
2365 case PAREN_EXPR:
2366 case NOP_EXPR:
2367 case CONVERT_EXPR:
2368 {
2369 enum machine_mode inner_mode = GET_MODE (op0);
2370
2371 if (mode == inner_mode)
2372 return op0;
2373
2374 if (inner_mode == VOIDmode)
2375 {
2a8e30fb
MM
2376 if (TREE_CODE (exp) == SSA_NAME)
2377 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2378 else
2379 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
2380 if (mode == inner_mode)
2381 return op0;
2382 }
2383
2384 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2385 {
2386 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2387 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2388 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2389 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2390 else
2391 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2392 }
2393 else if (FLOAT_MODE_P (mode))
2394 {
2a8e30fb 2395 gcc_assert (TREE_CODE (exp) != SSA_NAME);
b5b8b0ac
AO
2396 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2397 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2398 else
2399 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2400 }
2401 else if (FLOAT_MODE_P (inner_mode))
2402 {
2403 if (unsignedp)
2404 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2405 else
2406 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2407 }
2408 else if (CONSTANT_P (op0)
2409 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2410 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2411 subreg_lowpart_offset (mode,
2412 inner_mode));
2413 else if (unsignedp)
2414 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2415 else
2416 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2417
2418 return op0;
2419 }
2420
2421 case INDIRECT_REF:
2422 case ALIGN_INDIRECT_REF:
2423 case MISALIGNED_INDIRECT_REF:
2424 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2425 if (!op0)
2426 return NULL;
2427
09e881c9 2428 if (POINTER_TYPE_P (TREE_TYPE (exp)))
4e25ca6b
EB
2429 {
2430 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2431 address_mode = targetm.addr_space.address_mode (as);
2432 }
09e881c9 2433 else
4e25ca6b
EB
2434 {
2435 as = ADDR_SPACE_GENERIC;
2436 address_mode = Pmode;
2437 }
b5b8b0ac
AO
2438
2439 if (TREE_CODE (exp) == ALIGN_INDIRECT_REF)
2440 {
2441 int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp));
d4ebfa65 2442 op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align));
b5b8b0ac
AO
2443 }
2444
2445 op0 = gen_rtx_MEM (mode, op0);
2446
2447 set_mem_attributes (op0, exp, 0);
09e881c9 2448 set_mem_addr_space (op0, as);
b5b8b0ac
AO
2449
2450 return op0;
2451
2452 case TARGET_MEM_REF:
2453 if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp)))
2454 return NULL;
2455
2456 op0 = expand_debug_expr
4e25ca6b 2457 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
b5b8b0ac
AO
2458 if (!op0)
2459 return NULL;
2460
09e881c9 2461 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
b5b8b0ac
AO
2462
2463 op0 = gen_rtx_MEM (mode, op0);
2464
2465 set_mem_attributes (op0, exp, 0);
09e881c9 2466 set_mem_addr_space (op0, as);
b5b8b0ac
AO
2467
2468 return op0;
2469
2470 case ARRAY_REF:
2471 case ARRAY_RANGE_REF:
2472 case COMPONENT_REF:
2473 case BIT_FIELD_REF:
2474 case REALPART_EXPR:
2475 case IMAGPART_EXPR:
2476 case VIEW_CONVERT_EXPR:
2477 {
2478 enum machine_mode mode1;
2479 HOST_WIDE_INT bitsize, bitpos;
2480 tree offset;
2481 int volatilep = 0;
2482 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2483 &mode1, &unsignedp, &volatilep, false);
2484 rtx orig_op0;
2485
4f2a9af8
JJ
2486 if (bitsize == 0)
2487 return NULL;
2488
b5b8b0ac
AO
2489 orig_op0 = op0 = expand_debug_expr (tem);
2490
2491 if (!op0)
2492 return NULL;
2493
2494 if (offset)
2495 {
dda2da58
AO
2496 enum machine_mode addrmode, offmode;
2497
b5b8b0ac
AO
2498 gcc_assert (MEM_P (op0));
2499
dda2da58
AO
2500 op0 = XEXP (op0, 0);
2501 addrmode = GET_MODE (op0);
2502 if (addrmode == VOIDmode)
2503 addrmode = Pmode;
2504
b5b8b0ac
AO
2505 op1 = expand_debug_expr (offset);
2506 if (!op1)
2507 return NULL;
2508
dda2da58
AO
2509 offmode = GET_MODE (op1);
2510 if (offmode == VOIDmode)
2511 offmode = TYPE_MODE (TREE_TYPE (offset));
2512
2513 if (addrmode != offmode)
2514 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2515 subreg_lowpart_offset (addrmode,
2516 offmode));
2517
 2518	 /* Don't use offset_address here; we don't need a
2519 recognizable address, and we don't want to generate
2520 code. */
2521 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
b5b8b0ac
AO
2522 }
2523
2524 if (MEM_P (op0))
2525 {
4f2a9af8
JJ
2526 if (mode1 == VOIDmode)
2527 /* Bitfield. */
2528 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
b5b8b0ac
AO
2529 if (bitpos >= BITS_PER_UNIT)
2530 {
2531 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2532 bitpos %= BITS_PER_UNIT;
2533 }
2534 else if (bitpos < 0)
2535 {
4f2a9af8
JJ
2536 HOST_WIDE_INT units
2537 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
b5b8b0ac
AO
2538 op0 = adjust_address_nv (op0, mode1, units);
2539 bitpos += units * BITS_PER_UNIT;
2540 }
2541 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2542 op0 = adjust_address_nv (op0, mode, 0);
2543 else if (GET_MODE (op0) != mode1)
2544 op0 = adjust_address_nv (op0, mode1, 0);
2545 else
2546 op0 = copy_rtx (op0);
2547 if (op0 == orig_op0)
2548 op0 = shallow_copy_rtx (op0);
2549 set_mem_attributes (op0, exp, 0);
2550 }
2551
2552 if (bitpos == 0 && mode == GET_MODE (op0))
2553 return op0;
2554
2d3fc6aa
JJ
2555 if (bitpos < 0)
2556 return NULL;
2557
b5b8b0ac
AO
2558 if ((bitpos % BITS_PER_UNIT) == 0
2559 && bitsize == GET_MODE_BITSIZE (mode1))
2560 {
2561 enum machine_mode opmode = GET_MODE (op0);
2562
2563 gcc_assert (opmode != BLKmode);
2564
2565 if (opmode == VOIDmode)
2566 opmode = mode1;
2567
2568 /* This condition may hold if we're expanding the address
2569 right past the end of an array that turned out not to
2570 be addressable (i.e., the address was only computed in
2571 debug stmts). The gen_subreg below would rightfully
2572 crash, and the address doesn't really exist, so just
2573 drop it. */
2574 if (bitpos >= GET_MODE_BITSIZE (opmode))
2575 return NULL;
2576
7d5d39bb
JJ
2577 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2578 return simplify_gen_subreg (mode, op0, opmode,
2579 bitpos / BITS_PER_UNIT);
b5b8b0ac
AO
2580 }
2581
2582 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2583 && TYPE_UNSIGNED (TREE_TYPE (exp))
2584 ? SIGN_EXTRACT
2585 : ZERO_EXTRACT, mode,
2586 GET_MODE (op0) != VOIDmode
2587 ? GET_MODE (op0) : mode1,
2588 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2589 }
2590
b5b8b0ac
AO
2591 case ABS_EXPR:
2592 return gen_rtx_ABS (mode, op0);
2593
2594 case NEGATE_EXPR:
2595 return gen_rtx_NEG (mode, op0);
2596
2597 case BIT_NOT_EXPR:
2598 return gen_rtx_NOT (mode, op0);
2599
2600 case FLOAT_EXPR:
2601 if (unsignedp)
2602 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2603 else
2604 return gen_rtx_FLOAT (mode, op0);
2605
2606 case FIX_TRUNC_EXPR:
2607 if (unsignedp)
2608 return gen_rtx_UNSIGNED_FIX (mode, op0);
2609 else
2610 return gen_rtx_FIX (mode, op0);
2611
2612 case POINTER_PLUS_EXPR:
2613 case PLUS_EXPR:
2614 return gen_rtx_PLUS (mode, op0, op1);
2615
2616 case MINUS_EXPR:
2617 return gen_rtx_MINUS (mode, op0, op1);
2618
2619 case MULT_EXPR:
2620 return gen_rtx_MULT (mode, op0, op1);
2621
2622 case RDIV_EXPR:
2623 case TRUNC_DIV_EXPR:
2624 case EXACT_DIV_EXPR:
2625 if (unsignedp)
2626 return gen_rtx_UDIV (mode, op0, op1);
2627 else
2628 return gen_rtx_DIV (mode, op0, op1);
2629
2630 case TRUNC_MOD_EXPR:
2631 if (unsignedp)
2632 return gen_rtx_UMOD (mode, op0, op1);
2633 else
2634 return gen_rtx_MOD (mode, op0, op1);
2635
2636 case FLOOR_DIV_EXPR:
2637 if (unsignedp)
2638 return gen_rtx_UDIV (mode, op0, op1);
2639 else
2640 {
2641 rtx div = gen_rtx_DIV (mode, op0, op1);
2642 rtx mod = gen_rtx_MOD (mode, op0, op1);
2643 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2644 return gen_rtx_PLUS (mode, div, adj);
2645 }
2646
2647 case FLOOR_MOD_EXPR:
2648 if (unsignedp)
2649 return gen_rtx_UMOD (mode, op0, op1);
2650 else
2651 {
2652 rtx mod = gen_rtx_MOD (mode, op0, op1);
2653 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2654 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2655 return gen_rtx_PLUS (mode, mod, adj);
2656 }
2657
2658 case CEIL_DIV_EXPR:
2659 if (unsignedp)
2660 {
2661 rtx div = gen_rtx_UDIV (mode, op0, op1);
2662 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2663 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2664 return gen_rtx_PLUS (mode, div, adj);
2665 }
2666 else
2667 {
2668 rtx div = gen_rtx_DIV (mode, op0, op1);
2669 rtx mod = gen_rtx_MOD (mode, op0, op1);
2670 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2671 return gen_rtx_PLUS (mode, div, adj);
2672 }
2673
2674 case CEIL_MOD_EXPR:
2675 if (unsignedp)
2676 {
2677 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2678 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2679 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2680 return gen_rtx_PLUS (mode, mod, adj);
2681 }
2682 else
2683 {
2684 rtx mod = gen_rtx_MOD (mode, op0, op1);
2685 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2686 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2687 return gen_rtx_PLUS (mode, mod, adj);
2688 }
2689
2690 case ROUND_DIV_EXPR:
2691 if (unsignedp)
2692 {
2693 rtx div = gen_rtx_UDIV (mode, op0, op1);
2694 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2695 rtx adj = round_udiv_adjust (mode, mod, op1);
2696 return gen_rtx_PLUS (mode, div, adj);
2697 }
2698 else
2699 {
2700 rtx div = gen_rtx_DIV (mode, op0, op1);
2701 rtx mod = gen_rtx_MOD (mode, op0, op1);
2702 rtx adj = round_sdiv_adjust (mode, mod, op1);
2703 return gen_rtx_PLUS (mode, div, adj);
2704 }
2705
2706 case ROUND_MOD_EXPR:
2707 if (unsignedp)
2708 {
2709 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2710 rtx adj = round_udiv_adjust (mode, mod, op1);
2711 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2712 return gen_rtx_PLUS (mode, mod, adj);
2713 }
2714 else
2715 {
2716 rtx mod = gen_rtx_MOD (mode, op0, op1);
2717 rtx adj = round_sdiv_adjust (mode, mod, op1);
2718 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2719 return gen_rtx_PLUS (mode, mod, adj);
2720 }
2721
2722 case LSHIFT_EXPR:
2723 return gen_rtx_ASHIFT (mode, op0, op1);
2724
2725 case RSHIFT_EXPR:
2726 if (unsignedp)
2727 return gen_rtx_LSHIFTRT (mode, op0, op1);
2728 else
2729 return gen_rtx_ASHIFTRT (mode, op0, op1);
2730
2731 case LROTATE_EXPR:
2732 return gen_rtx_ROTATE (mode, op0, op1);
2733
2734 case RROTATE_EXPR:
2735 return gen_rtx_ROTATERT (mode, op0, op1);
2736
2737 case MIN_EXPR:
2738 if (unsignedp)
2739 return gen_rtx_UMIN (mode, op0, op1);
2740 else
2741 return gen_rtx_SMIN (mode, op0, op1);
2742
2743 case MAX_EXPR:
2744 if (unsignedp)
2745 return gen_rtx_UMAX (mode, op0, op1);
2746 else
2747 return gen_rtx_SMAX (mode, op0, op1);
2748
2749 case BIT_AND_EXPR:
2750 case TRUTH_AND_EXPR:
2751 return gen_rtx_AND (mode, op0, op1);
2752
2753 case BIT_IOR_EXPR:
2754 case TRUTH_OR_EXPR:
2755 return gen_rtx_IOR (mode, op0, op1);
2756
2757 case BIT_XOR_EXPR:
2758 case TRUTH_XOR_EXPR:
2759 return gen_rtx_XOR (mode, op0, op1);
2760
2761 case TRUTH_ANDIF_EXPR:
2762 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2763
2764 case TRUTH_ORIF_EXPR:
2765 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2766
2767 case TRUTH_NOT_EXPR:
2768 return gen_rtx_EQ (mode, op0, const0_rtx);
2769
2770 case LT_EXPR:
2771 if (unsignedp)
2772 return gen_rtx_LTU (mode, op0, op1);
2773 else
2774 return gen_rtx_LT (mode, op0, op1);
2775
2776 case LE_EXPR:
2777 if (unsignedp)
2778 return gen_rtx_LEU (mode, op0, op1);
2779 else
2780 return gen_rtx_LE (mode, op0, op1);
2781
2782 case GT_EXPR:
2783 if (unsignedp)
2784 return gen_rtx_GTU (mode, op0, op1);
2785 else
2786 return gen_rtx_GT (mode, op0, op1);
2787
2788 case GE_EXPR:
2789 if (unsignedp)
2790 return gen_rtx_GEU (mode, op0, op1);
2791 else
2792 return gen_rtx_GE (mode, op0, op1);
2793
2794 case EQ_EXPR:
2795 return gen_rtx_EQ (mode, op0, op1);
2796
2797 case NE_EXPR:
2798 return gen_rtx_NE (mode, op0, op1);
2799
2800 case UNORDERED_EXPR:
2801 return gen_rtx_UNORDERED (mode, op0, op1);
2802
2803 case ORDERED_EXPR:
2804 return gen_rtx_ORDERED (mode, op0, op1);
2805
2806 case UNLT_EXPR:
2807 return gen_rtx_UNLT (mode, op0, op1);
2808
2809 case UNLE_EXPR:
2810 return gen_rtx_UNLE (mode, op0, op1);
2811
2812 case UNGT_EXPR:
2813 return gen_rtx_UNGT (mode, op0, op1);
2814
2815 case UNGE_EXPR:
2816 return gen_rtx_UNGE (mode, op0, op1);
2817
2818 case UNEQ_EXPR:
2819 return gen_rtx_UNEQ (mode, op0, op1);
2820
2821 case LTGT_EXPR:
2822 return gen_rtx_LTGT (mode, op0, op1);
2823
2824 case COND_EXPR:
2825 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
2826
2827 case COMPLEX_EXPR:
2828 gcc_assert (COMPLEX_MODE_P (mode));
2829 if (GET_MODE (op0) == VOIDmode)
2830 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
2831 if (GET_MODE (op1) == VOIDmode)
2832 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
2833 return gen_rtx_CONCAT (mode, op0, op1);
2834
d02a5a4b
JJ
2835 case CONJ_EXPR:
2836 if (GET_CODE (op0) == CONCAT)
2837 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2838 gen_rtx_NEG (GET_MODE_INNER (mode),
2839 XEXP (op0, 1)));
2840 else
2841 {
2842 enum machine_mode imode = GET_MODE_INNER (mode);
2843 rtx re, im;
2844
2845 if (MEM_P (op0))
2846 {
2847 re = adjust_address_nv (op0, imode, 0);
2848 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
2849 }
2850 else
2851 {
2852 enum machine_mode ifmode = int_mode_for_mode (mode);
2853 enum machine_mode ihmode = int_mode_for_mode (imode);
2854 rtx halfsize;
2855 if (ifmode == BLKmode || ihmode == BLKmode)
2856 return NULL;
2857 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
2858 re = op0;
2859 if (mode != ifmode)
2860 re = gen_rtx_SUBREG (ifmode, re, 0);
2861 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
2862 if (imode != ihmode)
2863 re = gen_rtx_SUBREG (imode, re, 0);
2864 im = copy_rtx (op0);
2865 if (mode != ifmode)
2866 im = gen_rtx_SUBREG (ifmode, im, 0);
2867 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
2868 if (imode != ihmode)
2869 im = gen_rtx_SUBREG (imode, im, 0);
2870 }
2871 im = gen_rtx_NEG (imode, im);
2872 return gen_rtx_CONCAT (mode, re, im);
2873 }
2874
b5b8b0ac
AO
2875 case ADDR_EXPR:
2876 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2877 if (!op0 || !MEM_P (op0))
2878 return NULL;
2879
dda2da58
AO
2880 op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
2881
2882 return op0;
b5b8b0ac
AO
2883
2884 case VECTOR_CST:
2885 exp = build_constructor_from_list (TREE_TYPE (exp),
2886 TREE_VECTOR_CST_ELTS (exp));
2887 /* Fall through. */
2888
2889 case CONSTRUCTOR:
2890 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
2891 {
2892 unsigned i;
2893 tree val;
2894
2895 op0 = gen_rtx_CONCATN
2896 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
2897
2898 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
2899 {
2900 op1 = expand_debug_expr (val);
2901 if (!op1)
2902 return NULL;
2903 XVECEXP (op0, 0, i) = op1;
2904 }
2905
2906 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
2907 {
2908 op1 = expand_debug_expr
2909 (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node));
2910
2911 if (!op1)
2912 return NULL;
2913
2914 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
2915 XVECEXP (op0, 0, i) = op1;
2916 }
2917
2918 return op0;
2919 }
2920 else
2921 goto flag_unsupported;
2922
2923 case CALL_EXPR:
2924 /* ??? Maybe handle some builtins? */
2925 return NULL;
2926
2927 case SSA_NAME:
2928 {
2a8e30fb
MM
2929 gimple g = get_gimple_for_ssa_name (exp);
2930 if (g)
2931 {
2932 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
2933 if (!op0)
2934 return NULL;
2935 }
2936 else
2937 {
2938 int part = var_to_partition (SA.map, exp);
b5b8b0ac 2939
2a8e30fb
MM
2940 if (part == NO_PARTITION)
2941 return NULL;
b5b8b0ac 2942
2a8e30fb 2943 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
b5b8b0ac 2944
2a8e30fb
MM
2945 op0 = SA.partition_to_pseudo[part];
2946 }
b5b8b0ac
AO
2947 goto adjust_mode;
2948 }
2949
2950 case ERROR_MARK:
2951 return NULL;
2952
2953 default:
2954 flag_unsupported:
2955#ifdef ENABLE_CHECKING
2956 debug_tree (exp);
2957 gcc_unreachable ();
2958#else
2959 return NULL;
2960#endif
2961 }
2962}
2963
2964/* Expand the _LOCs in debug insns. We run this after expanding all
2965 regular insns, so that any variables referenced in the function
2966 will have their DECL_RTLs set. */
2967
2968static void
2969expand_debug_locations (void)
2970{
2971 rtx insn;
2972 rtx last = get_last_insn ();
2973 int save_strict_alias = flag_strict_aliasing;
2974
2975 /* New alias sets while setting up memory attributes cause
 2976	 -fcompare-debug failures, even though they don't bring about any
2977 codegen changes. */
2978 flag_strict_aliasing = 0;
2979
2980 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2981 if (DEBUG_INSN_P (insn))
2982 {
2983 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
2984 rtx val;
2985 enum machine_mode mode;
2986
2987 if (value == NULL_TREE)
2988 val = NULL_RTX;
2989 else
2990 {
2991 val = expand_debug_expr (value);
2992 gcc_assert (last == get_last_insn ());
2993 }
2994
2995 if (!val)
2996 val = gen_rtx_UNKNOWN_VAR_LOC ();
2997 else
2998 {
2999 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3000
3001 gcc_assert (mode == GET_MODE (val)
3002 || (GET_MODE (val) == VOIDmode
3003 && (CONST_INT_P (val)
3004 || GET_CODE (val) == CONST_FIXED
3005 || GET_CODE (val) == CONST_DOUBLE
3006 || GET_CODE (val) == LABEL_REF)));
3007 }
3008
3009 INSN_VAR_LOCATION_LOC (insn) = val;
3010 }
3011
3012 flag_strict_aliasing = save_strict_alias;
3013}
3014
242229bb
JH
3015/* Expand basic block BB from GIMPLE trees to RTL. */
3016
3017static basic_block
10d22567 3018expand_gimple_basic_block (basic_block bb)
242229bb 3019{
726a989a
RB
3020 gimple_stmt_iterator gsi;
3021 gimple_seq stmts;
3022 gimple stmt = NULL;
242229bb
JH
3023 rtx note, last;
3024 edge e;
628f6a4e 3025 edge_iterator ei;
8b11009b 3026 void **elt;
242229bb
JH
3027
3028 if (dump_file)
726a989a
RB
3029 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3030 bb->index);
3031
3032 /* Note that since we are now transitioning from GIMPLE to RTL, we
3033 cannot use the gsi_*_bb() routines because they expect the basic
3034 block to be in GIMPLE, instead of RTL. Therefore, we need to
3035 access the BB sequence directly. */
3036 stmts = bb_seq (bb);
3037 bb->il.gimple = NULL;
bf08ebeb 3038 rtl_profile_for_bb (bb);
5e2d947c
JH
3039 init_rtl_bb_info (bb);
3040 bb->flags |= BB_RTL;
3041
a9b77cd1
ZD
 3042	 /* Remove the RETURN_EXPR if we may fall through to the exit
3043 instead. */
726a989a
RB
3044 gsi = gsi_last (stmts);
3045 if (!gsi_end_p (gsi)
3046 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
a9b77cd1 3047 {
726a989a 3048 gimple ret_stmt = gsi_stmt (gsi);
a9b77cd1
ZD
3049
3050 gcc_assert (single_succ_p (bb));
3051 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3052
3053 if (bb->next_bb == EXIT_BLOCK_PTR
726a989a 3054 && !gimple_return_retval (ret_stmt))
a9b77cd1 3055 {
726a989a 3056 gsi_remove (&gsi, false);
a9b77cd1
ZD
3057 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3058 }
3059 }
3060
726a989a
RB
3061 gsi = gsi_start (stmts);
3062 if (!gsi_end_p (gsi))
8b11009b 3063 {
726a989a
RB
3064 stmt = gsi_stmt (gsi);
3065 if (gimple_code (stmt) != GIMPLE_LABEL)
3066 stmt = NULL;
8b11009b 3067 }
242229bb 3068
8b11009b
ZD
3069 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3070
3071 if (stmt || elt)
242229bb
JH
3072 {
3073 last = get_last_insn ();
3074
8b11009b
ZD
3075 if (stmt)
3076 {
28ed065e 3077 expand_gimple_stmt (stmt);
726a989a 3078 gsi_next (&gsi);
8b11009b
ZD
3079 }
3080
3081 if (elt)
ae50c0cb 3082 emit_label ((rtx) *elt);
242229bb 3083
caf93cb0	 3084	 /* Java emits line number notes at the top of labels.
c22cacf3 3085 ??? Make this go away once line number notes are obsoleted. */
242229bb 3086 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 3087 if (NOTE_P (BB_HEAD (bb)))
242229bb 3088 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
242229bb 3089 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528 3090
726a989a 3091 maybe_dump_rtl_for_gimple_stmt (stmt, last);
242229bb
JH
3092 }
3093 else
3094 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3095
3096 NOTE_BASIC_BLOCK (note) = bb;
3097
726a989a 3098 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 3099 {
cea49550 3100 basic_block new_bb;
242229bb 3101
b5b8b0ac 3102 stmt = gsi_stmt (gsi);
2a8e30fb
MM
3103
3104 /* If this statement is a non-debug one, and we generate debug
3105 insns, then this one might be the last real use of a TERed
3106 SSA_NAME, but where there are still some debug uses further
3107 down. Expanding the current SSA name in such further debug
 3108	 uses by its RHS might lead to wrong debug info, as coalescing
3109 might make the operands of such RHS be placed into the same
3110 pseudo as something else. Like so:
3111 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3112 use(a_1);
3113 a_2 = ...
3114 #DEBUG ... => a_1
3115 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
 3116	 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3117 the write to a_2 would actually have clobbered the place which
3118 formerly held a_0.
3119
3120 So, instead of that, we recognize the situation, and generate
3121 debug temporaries at the last real use of TERed SSA names:
3122 a_1 = a_0 + 1;
3123 #DEBUG #D1 => a_1
3124 use(a_1);
3125 a_2 = ...
3126 #DEBUG ... => #D1
3127 */
3128 if (MAY_HAVE_DEBUG_INSNS
3129 && SA.values
3130 && !is_gimple_debug (stmt))
3131 {
3132 ssa_op_iter iter;
3133 tree op;
3134 gimple def;
3135
3136 location_t sloc = get_curr_insn_source_location ();
3137 tree sblock = get_curr_insn_block ();
3138
3139 /* Look for SSA names that have their last use here (TERed
3140 names always have only one real use). */
3141 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3142 if ((def = get_gimple_for_ssa_name (op)))
3143 {
3144 imm_use_iterator imm_iter;
3145 use_operand_p use_p;
3146 bool have_debug_uses = false;
3147
3148 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3149 {
3150 if (gimple_debug_bind_p (USE_STMT (use_p)))
3151 {
3152 have_debug_uses = true;
3153 break;
3154 }
3155 }
3156
3157 if (have_debug_uses)
3158 {
 3159	 /* OP is a TERed SSA name, with DEF its defining
3160 statement, and where OP is used in further debug
3161 instructions. Generate a debug temporary, and
3162 replace all uses of OP in debug insns with that
3163 temporary. */
3164 gimple debugstmt;
3165 tree value = gimple_assign_rhs_to_tree (def);
3166 tree vexpr = make_node (DEBUG_EXPR_DECL);
3167 rtx val;
3168 enum machine_mode mode;
3169
3170 set_curr_insn_source_location (gimple_location (def));
3171 set_curr_insn_block (gimple_block (def));
3172
3173 DECL_ARTIFICIAL (vexpr) = 1;
3174 TREE_TYPE (vexpr) = TREE_TYPE (value);
3175 if (DECL_P (value))
3176 mode = DECL_MODE (value);
3177 else
3178 mode = TYPE_MODE (TREE_TYPE (value));
3179 DECL_MODE (vexpr) = mode;
3180
3181 val = gen_rtx_VAR_LOCATION
3182 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3183
3184 val = emit_debug_insn (val);
3185
3186 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3187 {
3188 if (!gimple_debug_bind_p (debugstmt))
3189 continue;
3190
3191 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3192 SET_USE (use_p, vexpr);
3193
3194 update_stmt (debugstmt);
3195 }
3196 }
3197 }
3198 set_curr_insn_source_location (sloc);
3199 set_curr_insn_block (sblock);
3200 }
3201
a5883ba0 3202 currently_expanding_gimple_stmt = stmt;
b5b8b0ac 3203
242229bb
JH
3204 /* Expand this statement, then evaluate the resulting RTL and
3205 fixup the CFG accordingly. */
726a989a 3206 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 3207 {
726a989a 3208 new_bb = expand_gimple_cond (bb, stmt);
cea49550
RH
3209 if (new_bb)
3210 return new_bb;
3211 }
b5b8b0ac
AO
3212 else if (gimple_debug_bind_p (stmt))
3213 {
3214 location_t sloc = get_curr_insn_source_location ();
3215 tree sblock = get_curr_insn_block ();
3216 gimple_stmt_iterator nsi = gsi;
3217
3218 for (;;)
3219 {
3220 tree var = gimple_debug_bind_get_var (stmt);
3221 tree value;
3222 rtx val;
3223 enum machine_mode mode;
3224
3225 if (gimple_debug_bind_has_value_p (stmt))
3226 value = gimple_debug_bind_get_value (stmt);
3227 else
3228 value = NULL_TREE;
3229
3230 last = get_last_insn ();
3231
3232 set_curr_insn_source_location (gimple_location (stmt));
3233 set_curr_insn_block (gimple_block (stmt));
3234
3235 if (DECL_P (var))
3236 mode = DECL_MODE (var);
3237 else
3238 mode = TYPE_MODE (TREE_TYPE (var));
3239
3240 val = gen_rtx_VAR_LOCATION
3241 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3242
3243 val = emit_debug_insn (val);
3244
3245 if (dump_file && (dump_flags & TDF_DETAILS))
3246 {
3247 /* We can't dump the insn with a TREE where an RTX
3248 is expected. */
3249 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3250 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3251 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3252 }
3253
2a8e30fb
MM
3254 /* In order not to generate too many debug temporaries,
3255 we delink all uses of debug statements we already expanded.
3256 Therefore debug statements between definition and real
3257 use of TERed SSA names will continue to use the SSA name,
3258 and not be replaced with debug temps. */
3259 delink_stmt_imm_use (stmt);
3260
b5b8b0ac
AO
3261 gsi = nsi;
3262 gsi_next (&nsi);
3263 if (gsi_end_p (nsi))
3264 break;
3265 stmt = gsi_stmt (nsi);
3266 if (!gimple_debug_bind_p (stmt))
3267 break;
3268 }
3269
3270 set_curr_insn_source_location (sloc);
3271 set_curr_insn_block (sblock);
3272 }
80c7a9eb 3273 else
242229bb 3274 {
726a989a 3275 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
cea49550
RH
3276 {
3277 bool can_fallthru;
3278 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3279 if (new_bb)
3280 {
3281 if (can_fallthru)
3282 bb = new_bb;
3283 else
3284 return new_bb;
3285 }
3286 }
4d7a65ea 3287 else
b7211528 3288 {
4e3825db 3289 def_operand_p def_p;
4e3825db
MM
3290 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3291
3292 if (def_p != NULL)
3293 {
3294 /* Ignore this stmt if it is in the list of
3295 replaceable expressions. */
3296 if (SA.values
b8698a0f 3297 && bitmap_bit_p (SA.values,
e97809c6 3298 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4e3825db
MM
3299 continue;
3300 }
28ed065e 3301 last = expand_gimple_stmt (stmt);
726a989a 3302 maybe_dump_rtl_for_gimple_stmt (stmt, last);
b7211528 3303 }
242229bb
JH
3304 }
3305 }
3306
a5883ba0
MM
3307 currently_expanding_gimple_stmt = NULL;
3308
7241571e 3309 /* Expand implicit goto and convert goto_locus. */
a9b77cd1
ZD
3310 FOR_EACH_EDGE (e, ei, bb->succs)
3311 {
7241571e
JJ
3312 if (e->goto_locus && e->goto_block)
3313 {
3314 set_curr_insn_source_location (e->goto_locus);
3315 set_curr_insn_block (e->goto_block);
3316 e->goto_locus = curr_insn_locator ();
3317 }
3318 e->goto_block = NULL;
3319 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3320 {
3321 emit_jump (label_rtx_for_bb (e->dest));
3322 e->flags &= ~EDGE_FALLTHRU;
3323 }
a9b77cd1
ZD
3324 }
3325
ae761c45
AH
 3326	 /* Expanded RTL can create a jump in the last instruction of the block.
 3327	 This might later be assumed to be a jump to the successor and break edge insertion.
 3328	 We need to insert a dummy move to prevent this. PR41440. */
3329 if (single_succ_p (bb)
3330 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3331 && (last = get_last_insn ())
3332 && JUMP_P (last))
3333 {
3334 rtx dummy = gen_reg_rtx (SImode);
3335 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3336 }
3337
242229bb
JH
3338 do_pending_stack_adjust ();
3339
3f117656 3340 /* Find the block tail. The last insn in the block is the insn
242229bb
JH
3341 before a barrier and/or table jump insn. */
3342 last = get_last_insn ();
4b4bf941 3343 if (BARRIER_P (last))
242229bb
JH
3344 last = PREV_INSN (last);
3345 if (JUMP_TABLE_DATA_P (last))
3346 last = PREV_INSN (PREV_INSN (last));
3347 BB_END (bb) = last;
caf93cb0 3348
242229bb 3349 update_bb_for_insn (bb);
80c7a9eb 3350
242229bb
JH
3351 return bb;
3352}
3353
3354
3355/* Create a basic block for initialization code. */
3356
3357static basic_block
3358construct_init_block (void)
3359{
3360 basic_block init_block, first_block;
fd44f634
JH
3361 edge e = NULL;
3362 int flags;
275a4187 3363
fd44f634
JH
3364 /* Multiple entry points not supported yet. */
3365 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
5e2d947c
JH
3366 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3367 init_rtl_bb_info (EXIT_BLOCK_PTR);
3368 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3369 EXIT_BLOCK_PTR->flags |= BB_RTL;
242229bb 3370
fd44f634 3371 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
275a4187 3372
fd44f634
JH
 3373	 /* When the entry edge points to the first basic block, we don't need a jump;
 3374	 otherwise we have to jump to the proper target. */
3375 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3376 {
726a989a 3377 tree label = gimple_block_label (e->dest);
fd44f634
JH
3378
3379 emit_jump (label_rtx (label));
3380 flags = 0;
275a4187 3381 }
fd44f634
JH
3382 else
3383 flags = EDGE_FALLTHRU;
242229bb
JH
3384
3385 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3386 get_last_insn (),
3387 ENTRY_BLOCK_PTR);
3388 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3389 init_block->count = ENTRY_BLOCK_PTR->count;
3390 if (e)
3391 {
3392 first_block = e->dest;
3393 redirect_edge_succ (e, init_block);
fd44f634 3394 e = make_edge (init_block, first_block, flags);
242229bb
JH
3395 }
3396 else
3397 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3398 e->probability = REG_BR_PROB_BASE;
3399 e->count = ENTRY_BLOCK_PTR->count;
3400
3401 update_bb_for_insn (init_block);
3402 return init_block;
3403}
3404
55e092c4
JH
3405/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3406 found in the block tree. */
3407
3408static void
3409set_block_levels (tree block, int level)
3410{
3411 while (block)
3412 {
3413 BLOCK_NUMBER (block) = level;
3414 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3415 block = BLOCK_CHAIN (block);
3416 }
3417}
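/* For example, for a block tree A { B { C } D } and LEVEL 0 this
   assigns BLOCK_NUMBER 0 to A, 1 to the sibling blocks B and D, and
   2 to C.  */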
242229bb
JH
3418
3419/* Create a block containing landing pads and similar stuff. */
3420
3421static void
3422construct_exit_block (void)
3423{
3424 rtx head = get_last_insn ();
3425 rtx end;
3426 basic_block exit_block;
628f6a4e
BE
3427 edge e, e2;
3428 unsigned ix;
3429 edge_iterator ei;
071a42f9 3430 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
242229bb 3431
bf08ebeb
JH
3432 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3433
caf93cb0 3434 /* Make sure the locus is set to the end of the function, so that
242229bb 3435 epilogue line numbers and warnings are set properly. */
6773e15f 3436 if (cfun->function_end_locus != UNKNOWN_LOCATION)
242229bb
JH
3437 input_location = cfun->function_end_locus;
3438
3439 /* The following insns belong to the top scope. */
55e092c4 3440 set_curr_insn_block (DECL_INITIAL (current_function_decl));
242229bb 3441
242229bb
JH
3442 /* Generate rtl for function exit. */
3443 expand_function_end ();
3444
3445 end = get_last_insn ();
3446 if (head == end)
3447 return;
071a42f9
JH
 3448	 /* While emitting the function end we may have moved the end of the last
 3449	 basic block. */
3450 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4b4bf941 3451 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 3452 head = NEXT_INSN (head);
80c7a9eb
RH
3453 exit_block = create_basic_block (NEXT_INSN (head), end,
3454 EXIT_BLOCK_PTR->prev_bb);
242229bb
JH
3455 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3456 exit_block->count = EXIT_BLOCK_PTR->count;
628f6a4e
BE
3457
3458 ix = 0;
3459 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
242229bb 3460 {
8fb790fd 3461 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
242229bb 3462 if (!(e->flags & EDGE_ABNORMAL))
628f6a4e
BE
3463 redirect_edge_succ (e, exit_block);
3464 else
3465 ix++;
242229bb 3466 }
628f6a4e 3467
242229bb
JH
3468 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3469 e->probability = REG_BR_PROB_BASE;
3470 e->count = EXIT_BLOCK_PTR->count;
628f6a4e 3471 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
242229bb
JH
3472 if (e2 != e)
3473 {
c22cacf3 3474 e->count -= e2->count;
242229bb
JH
3475 exit_block->count -= e2->count;
3476 exit_block->frequency -= EDGE_FREQUENCY (e2);
3477 }
3478 if (e->count < 0)
3479 e->count = 0;
3480 if (exit_block->count < 0)
3481 exit_block->count = 0;
3482 if (exit_block->frequency < 0)
3483 exit_block->frequency = 0;
3484 update_bb_for_insn (exit_block);
3485}
3486
c22cacf3 3487/* Helper function for discover_nonconstant_array_refs.
a1b23b2f
UW
3488 Look for ARRAY_REF nodes with non-constant indexes and mark them
3489 addressable. */
3490
3491static tree
3492discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3493 void *data ATTRIBUTE_UNUSED)
3494{
3495 tree t = *tp;
3496
3497 if (IS_TYPE_OR_DECL_P (t))
3498 *walk_subtrees = 0;
3499 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3500 {
3501 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3502 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3503 && (!TREE_OPERAND (t, 2)
3504 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3505 || (TREE_CODE (t) == COMPONENT_REF
3506 && (!TREE_OPERAND (t,2)
3507 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3508 || TREE_CODE (t) == BIT_FIELD_REF
3509 || TREE_CODE (t) == REALPART_EXPR
3510 || TREE_CODE (t) == IMAGPART_EXPR
3511 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1043771b 3512 || CONVERT_EXPR_P (t))
a1b23b2f
UW
3513 t = TREE_OPERAND (t, 0);
3514
3515 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3516 {
3517 t = get_base_address (t);
6f11d690
RG
3518 if (t && DECL_P (t)
3519 && DECL_MODE (t) != BLKmode)
a1b23b2f
UW
3520 TREE_ADDRESSABLE (t) = 1;
3521 }
3522
3523 *walk_subtrees = 0;
3524 }
3525
3526 return NULL_TREE;
3527}
3528
3529/* RTL expansion is not able to compile array references with variable
 3530	 offsets for arrays stored in a single register. Discover such
3531 expressions and mark variables as addressable to avoid this
3532 scenario. */
3533
3534static void
3535discover_nonconstant_array_refs (void)
3536{
3537 basic_block bb;
726a989a 3538 gimple_stmt_iterator gsi;
a1b23b2f
UW
3539
3540 FOR_EACH_BB (bb)
726a989a
RB
3541 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3542 {
3543 gimple stmt = gsi_stmt (gsi);
3544 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
3545 }
a1b23b2f
UW
3546}
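/* For example, given
     int v[2]; ... v[i] = x;
   with a non-constant index I, the store cannot be expanded if V is
   kept in a single register, so V is marked TREE_ADDRESSABLE and
   thereby forced into memory.  */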
3547
2e3f842f
L
3548/* This function sets crtl->args.internal_arg_pointer to a virtual
3549 register if DRAP is needed. Local register allocator will replace
3550 virtual_incoming_args_rtx with the virtual register. */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (flag_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
	      <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
	 needed.  */
      fixup_tail_calls ();
    }
}

/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful not to manipulate the CFG during
   the expansion.  */
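
/* Roughly, the expansion below proceeds as follows: leave SSA form,
   expand the used variables, expand each basic block's statements,
   commit the insns queued on edges (e.g. PHI copies), rebuild the
   RTL-level CFG with find_many_sub_basic_blocks, and finally settle
   the stack alignment.  */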

static unsigned int
gimple_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;
  unsigned i;
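
  /* Leave SSA form.  After this, each SSA partition is represented by
     a single object; PARTITION_TO_PSEUDO records the pseudo register
     chosen for each partition and is filled in as expansion
     proceeds.  */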
  rewrite_out_of_ssa (&SA);
  SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
					   sizeof (rtx));

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  insn_locators_alloc ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (cfun->function_start_locus == UNKNOWN_LOCATION)
	set_curr_insn_source_location
	  (DECL_SOURCE_LOCATION (current_function_decl));
      else
	set_curr_insn_source_location (cfun->function_start_locus);
    }
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = 0;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
	warning (OPT_Wstack_protector,
		 "not protecting local variables: variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
	warning (OPT_Wstack_protector,
		 "not protecting function: no buffer at least %d bytes long",
		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }
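
  /* Illustrative: with the default --param ssp-buffer-size=8, a
     function whose only local buffer is "char buf[4]" is left
     unprotected by -fstack-protector, and the second warning above
     fires when -Wstack-protector is given.  */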

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* Now that we also have the parameter RTXs, copy them over to our
     partitions.  */
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));

      if (TREE_CODE (var) != VAR_DECL
	  && !SA.partition_to_pseudo[i])
	SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
      gcc_assert (SA.partition_to_pseudo[i]);

      /* If this decl was marked as living in multiple places, reset
	 this now to NULL.  */
      if (DECL_RTL_IF_SET (var) == pc_rtx)
	SET_DECL_RTL (var, NULL);

      /* Some RTL parts really want to look at DECL_RTL(x) when x
	 was a decl marked in REG_ATTR or MEM_ATTR.  We could use
	 SET_DECL_RTL here making this available, but that would mean
	 selecting one of the potentially many RTLs for one DECL.  Instead
	 of doing that we simply reset the MEM_EXPR of the RTL in question,
	 then nobody can get at it and hence nobody can call DECL_RTL on it.  */
      if (!DECL_RTL_SET_P (var))
	{
	  if (MEM_P (SA.partition_to_pseudo[i]))
	    set_mem_expr (SA.partition_to_pseudo[i], NULL);
	}
    }

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();
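
  /* Eliminate the remaining PHI nodes: for each PHI argument, a copy
     into the partition's pseudo is queued on the corresponding incoming
     edge; those queued insns are committed further below.  */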
  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;
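
  /* LAB_RTX_FOR_BB caches the RTL label created for a GIMPLE basic
     block, so that jumps emitted before their target block has been
     expanded can reuse a single label.  */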
  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  execute_free_datastructures ();
  finish_out_of_ssa (&SA);

  /* We are no longer in SSA form.  */
  cfun->gimple_df->in_ssa_p = false;

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (cfun, NULL);

  rebuild_jump_labels (get_insns ());
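
  /* Commit the insns queued on edges (such as the PHI copies queued by
     expand_phi_nodes above).  Committing may split an edge and create a
     new block, so the edge iterator is advanced only when nothing was
     inserted.  */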
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->insns.r)
	    commit_one_edge_insertion (e);
	  else
	    ei_next (&ei);
	}
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the
	     backend.  */
	  e->flags &= ~EDGE_EXECUTABLE;

	  /* At the moment not all abnormal edges match the RTL
	     representation.  It is safe to remove them here as
	     find_many_sub_basic_blocks will rediscover them.
	     In the future we should get this fixed properly.  */
	  if ((e->flags & EDGE_ABNORMAL)
	      && !(e->flags & EDGE_SIBCALL))
	    remove_edge (e);
	  else
	    ei_next (&ei);
	}
    }
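
  /* Expanding a single GIMPLE statement may have produced internal
     control flow (e.g. new jumps), so scan every block and let
     find_many_sub_basic_blocks split them into proper basic blocks.  */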
  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  compact_blocks ();

  expand_stack_alignment ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();
  return 0;
}

struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",				/* name */
  NULL,					/* gate */
  gimple_expand_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_EXPAND,				/* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx,			/* properties_required */
  PROP_rtl,				/* properties_provided */
  PROP_ssa | PROP_trees,		/* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,		/* todo_flags_start */
  TODO_dump_func
  | TODO_ggc_collect			/* todo_flags_finish */
 }
};