/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
26#include "rtl.h"
27#include "tm_p.h"
28#include "basic-block.h"
29#include "function.h"
30#include "expr.h"
31#include "langhooks.h"
32#include "tree-flow.h"
33#include "timevar.h"
34#include "tree-dump.h"
35#include "tree-pass.h"
36#include "except.h"
37#include "flags.h"
1f6d3a08
RH
38#include "diagnostic.h"
39#include "toplev.h"
ef330312 40#include "debug.h"
7d69de61 41#include "params.h"
ff28a94d 42#include "tree-inline.h"
6946b3f7 43#include "value-prof.h"
e41b2a33 44#include "target.h"
4e3825db 45#include "ssaexpand.h"
7d69de61 46
726a989a 47
4e3825db
MM
48/* This variable holds information helping the rewriting of SSA trees
49 into RTL. */
50struct ssaexpand SA;
51
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
          && gimple_location (stmt) != EXPR_LOCATION (t))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
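
/* For example, a GIMPLE assignment a_1 = b_2 + c_3 has a GIMPLE_BINARY_RHS
   and is rebuilt above as the tree PLUS_EXPR <b_2, c_3>, whose type is
   taken from the LHS a_1.  */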

/* Verify that there is exactly one jump instruction since LAST and attach
   a REG_BR_PROB note specifying the probability.
   ??? We really ought to pass the probability down to the RTL expanders
   and let them re-distribute it when the conditional expands into multiple
   conditionals.  This is however difficult to do.  */
void
add_reg_br_prob_note (rtx last, int probability)
{
  if (profile_status == PROFILE_ABSENT)
    return;
  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
    if (JUMP_P (last))
      {
        /* It is common to emit a condjump-around-jump sequence when we don't
           know how to reverse the conditional.  Special case this.  */
        if (!any_condjump_p (last)
            || !JUMP_P (NEXT_INSN (last))
            || !simplejump_p (NEXT_INSN (last))
            || !NEXT_INSN (NEXT_INSN (last))
            || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
            || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
            || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
            || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
          goto failed;
        gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
        add_reg_note (last, REG_BR_PROB,
                      GEN_INT (REG_BR_PROB_BASE - probability));
        return;
      }
  if (!last || !JUMP_P (last) || !any_condjump_p (last))
    goto failed;
  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
  add_reg_note (last, REG_BR_PROB, GEN_INT (probability));
  return;
failed:
  if (dump_file)
    fprintf (dump_file, "Failed to add probability note\n");
}


#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it already set to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;
};

#define EOC ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* We have an interference graph between such objects.  This graph
   is lower triangular.  */
static bool *stack_vars_conflict;
static size_t stack_vars_conflict_alloc;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't do with the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = LOCAL_DECL_ALIGNMENT (decl);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < align)
        {
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
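
/* For example, if FRAME_GROWS_DOWNWARD with a frame phase of 0 and
   frame_offset currently -16, a request for 12 bytes at 8-byte alignment
   computes -16 - 12 = -28, rounds down to -32, places the variable at
   offset -32, and leaves frame_offset at -32.  */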

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Compute the linear index of a lower-triangular coordinate (I, J).  */

static size_t
triangular_index (size_t i, size_t j)
{
  if (i < j)
    {
      size_t t;
      t = i, i = j, j = t;
    }
  return (i * (i + 1)) / 2 + j;
}
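
/* For example, triangular_index (5, 2) and triangular_index (2, 5) both
   yield 5*6/2 + 2 == 17, so each unordered pair (I, J) occupies exactly
   one slot of the lower-triangular conflict array.  */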

/* Ensure that STACK_VARS_CONFLICT is large enough for N objects.  */

static void
resize_stack_vars_conflict (size_t n)
{
  size_t size = triangular_index (n-1, n-1) + 1;

  if (size <= stack_vars_conflict_alloc)
    return;

  stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
  memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
          (size - stack_vars_conflict_alloc) * sizeof (bool));
  stack_vars_conflict_alloc = size;
}

/* Make the decls associated with LUIDs X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  stack_vars_conflict[index] = true;
}

/* Check whether the decls associated with LUIDs X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  return stack_vars_conflict[index];
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union-containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size and type of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  tree decla, declb;
  unsigned int uida, uidb;

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  decla = stack_vars[*(const size_t *)a].decl;
  declb = stack_vars[*(const size_t *)b].decl;
  /* For stack variables of the same size use an id of the decls
     to make the sort stable.  Two SSA names are compared by their
     version, SSA names come before non-SSA names, and two normal
     decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}


/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced.  */
          gcc_assert (DECL_P (decl)
                      && referenced_var_lookup (uid));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);
      add_partitioned_vars_to_ptset (&cfun->gimple_df->callused,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block that is easy to lay
   out within the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  for (last = stack_vars_num, i = 0; i < last; ++i)
    if (stack_var_conflict_p (b, i))
      add_stack_var_conflict (a, i);
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/
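
/* For example, assuming variables of sizes 16, 8 and 4 with no conflicts
   between them: the 4-byte object is first placed at offset 0 of the
   8-byte object's partition, and that partition is then merged at offset 0
   into the 16-byte object's partition, so all three share one 16-byte
   slot.  */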

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  /* Special case: detect when all variables conflict, and thus we can't
     do anything during the partitioning loop.  It isn't uncommon (with
     C code at least) to declare all variables at the top of the function,
     and if we're not inlining, then all variables will be in the same scope.
     Take advantage of very fast libc routines for this scan.  */
  gcc_assert (sizeof (bool) == sizeof (char));
  if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
    return;

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;
        }
    }

  if (optimize)
    update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  /* Alignment is unsigned.  */
  unsigned HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set the alignment we actually gave this decl if it isn't an SSA
         name.  If it is, we generate stack slots only accidentally, so
         it isn't as important; we'll simply use the alignment that is
         already set.  */
      offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0)
        align = STACK_BOUNDARY;
      else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = MAX_SUPPORTED_STACK_ALIGNMENT;

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
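
/* Note that OFFSET & -OFFSET above isolates the least significant set bit
   of the phase-adjusted offset, i.e. the largest power-of-two alignment
   the placement guarantees: e.g. a byte offset of 24 yields 8 bytes.  */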

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
           : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   stack_vars[j].offset + offset);
        }
    }
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  align = get_decl_align_unit (SSAVAR (var));
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  tree origvar = var;
  var = SSAVAR (var);

  if (SUPPORTS_STACK_ALIGNMENT
      && TREE_TYPE (var) != error_mark_node
      && TREE_CODE (var) == VAR_DECL)
    {
      unsigned int align;

      /* Because we don't know if VAR will be in a register or on the
         stack, we conservatively assume it will be on the stack even if
         VAR is eventually put into a register after the RA pass.  For
         non-automatic variables, which won't be on the stack, we collect
         the alignment of the type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      if (crtl->stack_alignment_estimated < align)
        {
          /* stack_alignment_estimated shouldn't change after the stack
             realign decision is made.  */
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at contained levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY 1
#define SPCT_HAS_SMALL_CHAR_ARRAY 2
#define SPCT_HAS_ARRAY 4
#define SPCT_HAS_AGGREGATE 8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
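
/* For instance, with -fstack-protector-all (flag_stack_protect == 2) and
   the default --param ssp-buffer-size=8, a local "char buf[64]" classifies
   as a large character array and is assigned phase 1, other arrays get
   phase 2, and everything else phase 0; phase 1 variables are allocated
   first so that they end up nearest the stack guard.  */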

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and account for the stack space used by the variables that would be
   expanded there.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;
  HOST_WIDE_INT size = 0;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Account for all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  this_sv_num = stack_vars_num;

  /* Account for all variables at contained levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
  return size;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  /* Set TREE_USED on all variables in the local_decls.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  XDELETEVEC (stack_vars_conflict);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  stack_vars_conflict = NULL;
  stack_vars_conflict_alloc = 0;
}

/* Make a fair guess for the size of the stack frame of the current
   function.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (void)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }

  return size;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, next, outer_block = DECL_INITIAL (current_function_decl);
  unsigned i;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
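
  /* E.g. on a hypothetical target with a 128-bit preferred stack boundary
     (ALIGN == 16) and STARTING_FRAME_OFFSET == 4, OFF is 4 and the frame
     phase is 12, so frame_offset + frame_phase is a multiple of 16.  */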

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  t = cfun->local_decls;
  cfun->local_decls = NULL_TREE;
  for (; t; t = next)
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      next = TREE_CHAIN (t);

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          ggc_free (t);
          continue;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        {
          expand_one_var (var, true, true);
          if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
            {
              rtx rtl = DECL_RTL_IF_SET (var);

              /* Keep artificial non-ignored vars in cfun->local_decls
                 chain until instantiate_decls.  */
              if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
                {
                  TREE_CHAIN (t) = cfun->local_decls;
                  cfun->local_decls = t;
                  continue;
                }
            }
        }

      ggc_free (t);
    }

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}


/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}


/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  */

static void
maybe_cleanup_end_of_block (edge e)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      basic_block bb = e->src;
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != BB_HEAD (bb);)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            delete_insn (NEXT_INSN (insn));
        }
    }
}

/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as a special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && SA.values
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN
          && TREE_CODE_CLASS (gimple_assign_rhs_code (second))
             == tcc_comparison)
        {
          code = gimple_assign_rhs_code (second);
          op0 = gimple_assign_rhs1 (second);
          op1 = gimple_assign_rhs2 (second);
        }
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (gimple_has_location (stmt))
    {
      set_curr_insn_source_location (gimple_location (stmt));
      set_curr_insn_block (gimple_block (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     a two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest));
      add_reg_br_prob_note (last, true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
        {
          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();
        }
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest));
      add_reg_br_prob_note (last, false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
        {
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
        }
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest));
  add_reg_br_prob_note (last, true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}

28ed065e
MM
1751/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1752 statement STMT. */
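/* For example (schematic GIMPLE; names are illustrative), a statement

     x = foo (a, b);

   is rebuilt as a CALL_EXPR tree with two arguments and then expanded
   through expand_assignment on the LHS `x'.  */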
1753
1754static void
1755expand_call_stmt (gimple stmt)
1756{
1757 tree exp;
1758 tree lhs = gimple_call_lhs (stmt);
1759 tree fndecl = gimple_call_fndecl (stmt);
1760 size_t i;
1761
1762 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1763
1764 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
1765 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1766 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1767
1768 for (i = 0; i < gimple_call_num_args (stmt); i++)
1769 CALL_EXPR_ARG (exp, i) = gimple_call_arg (stmt, i);
1770
1771 if (!(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
1772 TREE_SIDE_EFFECTS (exp) = 1;
1773
1774 if (gimple_call_flags (stmt) & ECF_NOTHROW)
1775 TREE_NOTHROW (exp) = 1;
1776
1777 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1778 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1779 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1780 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1781 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1782 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1783 TREE_BLOCK (exp) = gimple_block (stmt);
1784
1785 /* Record the original call statement, as it may be used
1786 to retrieve profile information during expansion. */
1787
1788 if (fndecl && DECL_BUILT_IN (fndecl))
1789 {
1790 tree_ann_common_t ann = get_tree_common_ann (exp);
1791 ann->stmt = stmt;
1792 }
1793
1794 if (lhs)
1795 expand_assignment (lhs, exp, false);
1796 else
1797 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
1798}
1799
1800/* A subroutine of expand_gimple_stmt, expanding one gimple statement
1801 STMT that doesn't require special handling for outgoing edges. That
1802   is, no tail calls and no GIMPLE_COND.  */
1803
1804static void
1805expand_gimple_stmt_1 (gimple stmt)
1806{
1807 tree op0;
1808 switch (gimple_code (stmt))
1809 {
1810 case GIMPLE_GOTO:
1811 op0 = gimple_goto_dest (stmt);
1812 if (TREE_CODE (op0) == LABEL_DECL)
1813 expand_goto (op0);
1814 else
1815 expand_computed_goto (op0);
1816 break;
1817 case GIMPLE_LABEL:
1818 expand_label (gimple_label_label (stmt));
1819 break;
1820 case GIMPLE_NOP:
1821 case GIMPLE_PREDICT:
1822 break;
28ed065e
MM
1823 case GIMPLE_SWITCH:
1824 expand_case (stmt);
1825 break;
1826 case GIMPLE_ASM:
1827 expand_asm_stmt (stmt);
1828 break;
1829 case GIMPLE_CALL:
1830 expand_call_stmt (stmt);
1831 break;
1832
1833 case GIMPLE_RETURN:
1834 op0 = gimple_return_retval (stmt);
1835
1836 if (op0 && op0 != error_mark_node)
1837 {
1838 tree result = DECL_RESULT (current_function_decl);
1839
1840 /* If we are not returning the current function's RESULT_DECL,
1841 build an assignment to it. */
1842 if (op0 != result)
1843 {
1844 /* I believe that a function's RESULT_DECL is unique. */
1845 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1846
1847 /* ??? We'd like to use simply expand_assignment here,
1848 but this fails if the value is of BLKmode but the return
1849 decl is a register. expand_return has special handling
1850 for this combination, which eventually should move
1851 to common code. See comments there. Until then, let's
1852 build a modify expression :-/ */
1853 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1854 result, op0);
1855 }
1856 }
1857 if (!op0)
1858 expand_null_return ();
1859 else
1860 expand_return (op0);
1861 break;
1862
1863 case GIMPLE_ASSIGN:
1864 {
1865 tree lhs = gimple_assign_lhs (stmt);
1866
1867 /* Tree expand used to fiddle with |= and &= of two bitfield
1868        COMPONENT_REFs here.  This can't happen with gimple; the LHS
1869 of binary assigns must be a gimple reg. */
1870
1871 if (TREE_CODE (lhs) != SSA_NAME
1872 || get_gimple_rhs_class (gimple_expr_code (stmt))
1873 == GIMPLE_SINGLE_RHS)
1874 {
1875 tree rhs = gimple_assign_rhs1 (stmt);
1876 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
1877 == GIMPLE_SINGLE_RHS);
1878 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
1879 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
1880 expand_assignment (lhs, rhs,
1881 gimple_assign_nontemporal_move_p (stmt));
1882 }
1883 else
1884 {
1885 rtx target, temp;
1886 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
1887 struct separate_ops ops;
1888 bool promoted = false;
1889
1890 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
1891 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
1892 promoted = true;
1893
1894 ops.code = gimple_assign_rhs_code (stmt);
1895 ops.type = TREE_TYPE (lhs);
1896 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
1897 {
1898 case GIMPLE_BINARY_RHS:
1899 ops.op1 = gimple_assign_rhs2 (stmt);
1900 /* Fallthru */
1901 case GIMPLE_UNARY_RHS:
1902 ops.op0 = gimple_assign_rhs1 (stmt);
1903 break;
1904 default:
1905 gcc_unreachable ();
1906 }
1907 ops.location = gimple_location (stmt);
1908
1909          /* If we want to use a nontemporal store, force the value into a
1910             register first.  If we store into a promoted register,
1911 don't directly expand to target. */
1912 temp = nontemporal || promoted ? NULL_RTX : target;
1913 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
1914 EXPAND_NORMAL);
1915
1916 if (temp == target)
1917 ;
1918 else if (promoted)
1919 {
4e18a7d4 1920 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
28ed065e
MM
1921 /* If TEMP is a VOIDmode constant, use convert_modes to make
1922 sure that we properly convert it. */
1923 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1924 {
1925 temp = convert_modes (GET_MODE (target),
1926 TYPE_MODE (ops.type),
4e18a7d4 1927 temp, unsignedp);
28ed065e 1928 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4e18a7d4 1929 GET_MODE (target), temp, unsignedp);
28ed065e
MM
1930 }
1931
4e18a7d4 1932 convert_move (SUBREG_REG (target), temp, unsignedp);
28ed065e
MM
1933 }
1934 else if (nontemporal && emit_storent_insn (target, temp))
1935 ;
1936 else
1937 {
1938 temp = force_operand (temp, target);
1939 if (temp != target)
1940 emit_move_insn (target, temp);
1941 }
1942 }
1943 }
1944 break;
1945
1946 default:
1947 gcc_unreachable ();
1948 }
1949}
1950
1951/* Expand one gimple statement STMT and return the last RTL instruction
1952 before any of the newly generated ones.
1953
1954 In addition to generating the necessary RTL instructions this also
1955 sets REG_EH_REGION notes if necessary and sets the current source
1956 location for diagnostics. */
1957
1958static rtx
1959expand_gimple_stmt (gimple stmt)
1960{
1d65f45c 1961 int lp_nr = 0;
28ed065e
MM
1962 rtx last = NULL;
1963 location_t saved_location = input_location;
1964
1965 last = get_last_insn ();
1966
1967 /* If this is an expression of some kind and it has an associated line
1968 number, then emit the line number before expanding the expression.
1969
1970 We need to save and restore the file and line information so that
1971 errors discovered during expansion are emitted with the right
1972      information.  It would be better if the diagnostic routines
1973 used the file/line information embedded in the tree nodes rather
1974 than globals. */
1975 gcc_assert (cfun);
1976
1977 if (gimple_has_location (stmt))
1978 {
1979 input_location = gimple_location (stmt);
1980 set_curr_insn_source_location (input_location);
1981
1982 /* Record where the insns produced belong. */
1983 set_curr_insn_block (gimple_block (stmt));
1984 }
1985
1986 expand_gimple_stmt_1 (stmt);
1987 /* Free any temporaries used to evaluate this statement. */
1988 free_temp_slots ();
1989
1990 input_location = saved_location;
1991
1992 /* Mark all insns that may trap. */
1d65f45c
RH
1993 lp_nr = lookup_stmt_eh_lp (stmt);
1994 if (lp_nr)
28ed065e
MM
1995 {
1996 rtx insn;
1997 for (insn = next_real_insn (last); insn;
1998 insn = next_real_insn (insn))
1999 {
2000 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2001 /* If we want exceptions for non-call insns, any
2002 may_trap_p instruction may throw. */
2003 && GET_CODE (PATTERN (insn)) != CLOBBER
2004 && GET_CODE (PATTERN (insn)) != USE
1d65f45c
RH
2005 && insn_could_throw_p (insn))
2006 make_reg_eh_region_note (insn, 0, lp_nr);
28ed065e
MM
2007 }
2008 }
2009
2010 return last;
2011}
2012
726a989a 2013/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
224e770b
RH
2014 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2015 generated a tail call (something that might be denied by the ABI
cea49550
RH
2016 rules governing the call; see calls.c).
2017
2018 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2019 can still reach the rest of BB. The case here is __builtin_sqrt,
2020 where the NaN result goes through the external function (with a
2021 tailcall) and the normal result happens via a sqrt instruction. */
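/* Illustrative sketch (target-dependent): for

     double f (double x) { return sqrt (x); }

   the expander may emit an inline sqrt instruction for the common case
   and a sibling call to the library sqrt for the NaN case, so the
   conditional tail call can still fall through into the rest of BB.  */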
80c7a9eb
RH
2022
2023static basic_block
726a989a 2024expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
80c7a9eb 2025{
b7211528 2026 rtx last2, last;
224e770b 2027 edge e;
628f6a4e 2028 edge_iterator ei;
224e770b
RH
2029 int probability;
2030 gcov_type count;
80c7a9eb 2031
28ed065e 2032 last2 = last = expand_gimple_stmt (stmt);
80c7a9eb
RH
2033
2034 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
2035 if (CALL_P (last) && SIBLING_CALL_P (last))
2036 goto found;
80c7a9eb 2037
726a989a 2038 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2039
cea49550 2040 *can_fallthru = true;
224e770b 2041 return NULL;
80c7a9eb 2042
224e770b
RH
2043 found:
2044 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2045 Any instructions emitted here are about to be deleted. */
2046 do_pending_stack_adjust ();
2047
2048 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2049 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2050 EH or abnormal edges, we shouldn't have created a tail call in
2051 the first place. So it seems to me we should just be removing
2052 all edges here, or redirecting the existing fallthru edge to
2053 the exit block. */
2054
224e770b
RH
2055 probability = 0;
2056 count = 0;
224e770b 2057
628f6a4e
BE
2058 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2059 {
224e770b
RH
2060 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2061 {
2062 if (e->dest != EXIT_BLOCK_PTR)
80c7a9eb 2063 {
224e770b
RH
2064 e->dest->count -= e->count;
2065 e->dest->frequency -= EDGE_FREQUENCY (e);
2066 if (e->dest->count < 0)
c22cacf3 2067 e->dest->count = 0;
224e770b 2068 if (e->dest->frequency < 0)
c22cacf3 2069 e->dest->frequency = 0;
80c7a9eb 2070 }
224e770b
RH
2071 count += e->count;
2072 probability += e->probability;
2073 remove_edge (e);
80c7a9eb 2074 }
628f6a4e
BE
2075 else
2076 ei_next (&ei);
80c7a9eb
RH
2077 }
2078
224e770b
RH
2079 /* This is somewhat ugly: the call_expr expander often emits instructions
2080 after the sibcall (to perform the function return). These confuse the
12eff7b7 2081     find_many_sub_basic_blocks code, so we need to get rid of them.  */
224e770b 2082 last = NEXT_INSN (last);
341c100f 2083 gcc_assert (BARRIER_P (last));
cea49550
RH
2084
2085 *can_fallthru = false;
224e770b
RH
2086 while (NEXT_INSN (last))
2087 {
2088      /* For instance, an sqrt builtin expander expands an if with a
2089         sibcall in the then-arm and a label for the else-arm.  */
2090 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
2091 {
2092 *can_fallthru = true;
2093 break;
2094 }
224e770b
RH
2095 delete_insn (NEXT_INSN (last));
2096 }
2097
2098 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2099 e->probability += probability;
2100 e->count += count;
2101 BB_END (bb) = last;
2102 update_bb_for_insn (bb);
2103
2104 if (NEXT_INSN (last))
2105 {
2106 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2107
2108 last = BB_END (bb);
2109 if (BARRIER_P (last))
2110 BB_END (bb) = PREV_INSN (last);
2111 }
2112
726a989a 2113 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2114
224e770b 2115 return bb;
80c7a9eb
RH
2116}
2117
b5b8b0ac
AO
2118/* Return the difference between the floor and the truncated result of
2119 a signed division by OP1 with remainder MOD. */
2120static rtx
2121floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2122{
2123 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2124 return gen_rtx_IF_THEN_ELSE
2125 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2126 gen_rtx_IF_THEN_ELSE
2127 (mode, gen_rtx_LT (BImode,
2128 gen_rtx_DIV (mode, op1, mod),
2129 const0_rtx),
2130 constm1_rtx, const0_rtx),
2131 const0_rtx);
2132}
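/* Worked example: -7 / 2 truncates to -3 with MOD = -1, while the
   floor is -4.  MOD != 0 and OP1 / MOD = 2 / -1 = -2 < 0, so the
   adjustment is -1.  For 7 / 2, OP1 / MOD = 2 > 0 and it is 0.  */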
2133
2134/* Return the difference between the ceil and the truncated result of
2135 a signed division by OP1 with remainder MOD. */
2136static rtx
2137ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2138{
2139 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2140 return gen_rtx_IF_THEN_ELSE
2141 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2142 gen_rtx_IF_THEN_ELSE
2143 (mode, gen_rtx_GT (BImode,
2144 gen_rtx_DIV (mode, op1, mod),
2145 const0_rtx),
2146 const1_rtx, const0_rtx),
2147 const0_rtx);
2148}
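/* Worked example: 7 / 2 truncates to 3 with MOD = 1, while the
   ceiling is 4.  MOD != 0 and OP1 / MOD = 2 / 1 = 2 > 0, so the
   adjustment is +1.  For -7 / 2, OP1 / MOD = -2 is not > 0, so the
   truncated result already equals the ceiling and it is 0.  */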
2149
2150/* Return the difference between the ceil and the truncated result of
2151 an unsigned division by OP1 with remainder MOD. */
2152static rtx
2153ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2154{
2155 /* (mod != 0 ? 1 : 0) */
2156 return gen_rtx_IF_THEN_ELSE
2157 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2158 const1_rtx, const0_rtx);
2159}
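/* Worked example: 7u / 2u truncates to 3 with MOD = 1; the ceiling
   is 4, so any nonzero remainder yields an adjustment of +1.  */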
2160
2161/* Return the difference between the rounded and the truncated result
2162 of a signed division by OP1 with remainder MOD. Halfway cases are
2163 rounded away from zero, rather than to the nearest even number. */
2164static rtx
2165round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2166{
2167 /* (abs (mod) >= abs (op1) - abs (mod)
2168 ? (op1 / mod > 0 ? 1 : -1)
2169 : 0) */
2170 return gen_rtx_IF_THEN_ELSE
2171 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2172 gen_rtx_MINUS (mode,
2173 gen_rtx_ABS (mode, op1),
2174 gen_rtx_ABS (mode, mod))),
2175 gen_rtx_IF_THEN_ELSE
2176 (mode, gen_rtx_GT (BImode,
2177 gen_rtx_DIV (mode, op1, mod),
2178 const0_rtx),
2179 const1_rtx, constm1_rtx),
2180 const0_rtx);
2181}
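/* Worked example: 7 / 2 gives MOD = 1 and abs (OP1) - abs (MOD) = 1,
   so the halfway test holds; OP1 / MOD = 2 > 0 yields +1
   (round (3.5) = 4).  For -7 / 2, OP1 / MOD = -2 yields -1
   (round (-3.5) = -4).  For 7 / 3 the test fails and it is 0.  */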
2182
2183/* Return the difference between the rounded and the truncated result
2184   of an unsigned division by OP1 with remainder MOD.  Halfway cases
2185 are rounded away from zero, rather than to the nearest even
2186 number. */
2187static rtx
2188round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2189{
2190 /* (mod >= op1 - mod ? 1 : 0) */
2191 return gen_rtx_IF_THEN_ELSE
2192 (mode, gen_rtx_GE (BImode, mod,
2193 gen_rtx_MINUS (mode, op1, mod)),
2194 const1_rtx, const0_rtx);
2195}
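/* Worked example: 7u / 2u gives MOD = 1 and OP1 - MOD = 1, so
   MOD >= OP1 - MOD holds and the adjustment is +1 (round (3.5) = 4);
   for 7u / 3u, 1 >= 2 fails and it is 0.  */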
2196
2197/* Wrap modeless constants in CONST:MODE. */
2198rtx
2199wrap_constant (enum machine_mode mode, rtx x)
2200{
2201 if (GET_MODE (x) != VOIDmode)
2202 return x;
2203
2204 if (CONST_INT_P (x)
2205 || GET_CODE (x) == CONST_FIXED
2206 || GET_CODE (x) == CONST_DOUBLE
2207 || GET_CODE (x) == LABEL_REF)
2208 {
2209 gcc_assert (mode != VOIDmode);
2210
2211 x = gen_rtx_CONST (mode, x);
2212 }
2213
2214 return x;
2215}
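/* For example, wrap_constant (SImode, GEN_INT (42)) yields
   (const:SI (const_int 42)), giving the otherwise modeless constant
   a mode that the debug expanders can rely on.  */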
2216
2217/* Remove the CONST wrapper added by wrap_constant ().  */
2218rtx
2219unwrap_constant (rtx x)
2220{
2221 rtx ret = x;
2222
2223 if (GET_CODE (x) != CONST)
2224 return x;
2225
2226 x = XEXP (x, 0);
2227
2228 if (CONST_INT_P (x)
2229 || GET_CODE (x) == CONST_FIXED
2230 || GET_CODE (x) == CONST_DOUBLE
2231 || GET_CODE (x) == LABEL_REF)
2232 ret = x;
2233
2234 return ret;
2235}
2236
dda2da58
AO
2237/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2238 any rtl. */
2239
2240static rtx
2241convert_debug_memory_address (enum machine_mode mode, rtx x)
2242{
2243 enum machine_mode xmode = GET_MODE (x);
2244
2245#ifndef POINTERS_EXTEND_UNSIGNED
2246 gcc_assert (mode == Pmode);
2247 gcc_assert (xmode == mode || xmode == VOIDmode);
2248#else
2249 gcc_assert (mode == Pmode || mode == ptr_mode);
2250
2251 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2252 return x;
2253
2254 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2255 x = simplify_gen_subreg (mode, x, xmode,
2256 subreg_lowpart_offset
2257 (mode, xmode));
2258 else if (POINTERS_EXTEND_UNSIGNED > 0)
2259 x = gen_rtx_ZERO_EXTEND (mode, x);
2260 else if (!POINTERS_EXTEND_UNSIGNED)
2261 x = gen_rtx_SIGN_EXTEND (mode, x);
2262 else
2263 gcc_unreachable ();
2264#endif /* POINTERS_EXTEND_UNSIGNED */
2265
2266 return x;
2267}
2268
b5b8b0ac
AO
2269/* Return an RTX equivalent to the value of the tree expression
2270 EXP. */
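/* For example (a sketch; register numbers are illustrative), for a
   GIMPLE expression a_1 + b_2 whose operands live in pseudos this
   returns something like (plus:SI (reg:SI 58) (reg:SI 59)) without
   emitting any insns; when an operand cannot be represented, NULL_RTX
   is returned and the caller drops the location.  */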
2271
2272static rtx
2273expand_debug_expr (tree exp)
2274{
2275 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2276 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2277 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2278
2279 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2280 {
2281 case tcc_expression:
2282 switch (TREE_CODE (exp))
2283 {
2284 case COND_EXPR:
2285 goto ternary;
2286
2287 case TRUTH_ANDIF_EXPR:
2288 case TRUTH_ORIF_EXPR:
2289 case TRUTH_AND_EXPR:
2290 case TRUTH_OR_EXPR:
2291 case TRUTH_XOR_EXPR:
2292 goto binary;
2293
2294 case TRUTH_NOT_EXPR:
2295 goto unary;
2296
2297 default:
2298 break;
2299 }
2300 break;
2301
2302 ternary:
2303 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2304 if (!op2)
2305 return NULL_RTX;
2306 /* Fall through. */
2307
2308 binary:
2309 case tcc_binary:
2310 case tcc_comparison:
2311 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2312 if (!op1)
2313 return NULL_RTX;
2314 /* Fall through. */
2315
2316 unary:
2317 case tcc_unary:
2318 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2319 if (!op0)
2320 return NULL_RTX;
2321 break;
2322
2323 case tcc_type:
2324 case tcc_statement:
2325 gcc_unreachable ();
2326
2327 case tcc_constant:
2328 case tcc_exceptional:
2329 case tcc_declaration:
2330 case tcc_reference:
2331 case tcc_vl_exp:
2332 break;
2333 }
2334
2335 switch (TREE_CODE (exp))
2336 {
2337 case STRING_CST:
2338 if (!lookup_constant_def (exp))
2339 {
e1b243a8
JJ
2340 if (strlen (TREE_STRING_POINTER (exp)) + 1
2341 != (size_t) TREE_STRING_LENGTH (exp))
2342 return NULL_RTX;
b5b8b0ac
AO
2343 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2344 op0 = gen_rtx_MEM (BLKmode, op0);
2345 set_mem_attributes (op0, exp, 0);
2346 return op0;
2347 }
2348 /* Fall through... */
2349
2350 case INTEGER_CST:
2351 case REAL_CST:
2352 case FIXED_CST:
2353 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2354 return op0;
2355
2356 case COMPLEX_CST:
2357 gcc_assert (COMPLEX_MODE_P (mode));
2358 op0 = expand_debug_expr (TREE_REALPART (exp));
2359 op0 = wrap_constant (GET_MODE_INNER (mode), op0);
2360 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2361 op1 = wrap_constant (GET_MODE_INNER (mode), op1);
2362 return gen_rtx_CONCAT (mode, op0, op1);
2363
2364 case VAR_DECL:
2365 case PARM_DECL:
2366 case FUNCTION_DECL:
2367 case LABEL_DECL:
2368 case CONST_DECL:
2369 case RESULT_DECL:
2370 op0 = DECL_RTL_IF_SET (exp);
2371
2372 /* This decl was probably optimized away. */
2373 if (!op0)
e1b243a8
JJ
2374 {
2375 if (TREE_CODE (exp) != VAR_DECL
2376 || DECL_EXTERNAL (exp)
2377 || !TREE_STATIC (exp)
2378 || !DECL_NAME (exp)
2379 || DECL_HARD_REGISTER (exp))
2380 return NULL;
2381
2382 op0 = DECL_RTL (exp);
2383 SET_DECL_RTL (exp, NULL);
2384 if (!MEM_P (op0)
2385 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2386 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2387 return NULL;
2388 }
2389 else
2390 op0 = copy_rtx (op0);
b5b8b0ac
AO
2391
2392 if (GET_MODE (op0) == BLKmode)
2393 {
2394 gcc_assert (MEM_P (op0));
2395 op0 = adjust_address_nv (op0, mode, 0);
2396 return op0;
2397 }
2398
2399 /* Fall through. */
2400
2401 adjust_mode:
2402 case PAREN_EXPR:
2403 case NOP_EXPR:
2404 case CONVERT_EXPR:
2405 {
2406 enum machine_mode inner_mode = GET_MODE (op0);
2407
2408 if (mode == inner_mode)
2409 return op0;
2410
2411 if (inner_mode == VOIDmode)
2412 {
2413 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2414 if (mode == inner_mode)
2415 return op0;
2416 }
2417
2418 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2419 {
2420 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2421 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2422 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2423 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2424 else
2425 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2426 }
2427 else if (FLOAT_MODE_P (mode))
2428 {
2429 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2430 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2431 else
2432 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2433 }
2434 else if (FLOAT_MODE_P (inner_mode))
2435 {
2436 if (unsignedp)
2437 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2438 else
2439 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2440 }
2441 else if (CONSTANT_P (op0)
2442 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2443 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2444 subreg_lowpart_offset (mode,
2445 inner_mode));
2446 else if (unsignedp)
2447 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2448 else
2449 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2450
2451 return op0;
2452 }
2453
2454 case INDIRECT_REF:
2455 case ALIGN_INDIRECT_REF:
2456 case MISALIGNED_INDIRECT_REF:
2457 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2458 if (!op0)
2459 return NULL;
2460
2461 gcc_assert (GET_MODE (op0) == Pmode
dda2da58 2462 || GET_MODE (op0) == ptr_mode
b5b8b0ac
AO
2463 || GET_CODE (op0) == CONST_INT
2464 || GET_CODE (op0) == CONST_DOUBLE);
2465
2466 if (TREE_CODE (exp) == ALIGN_INDIRECT_REF)
2467 {
2468 int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp));
2469 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
2470 }
2471
2472 op0 = gen_rtx_MEM (mode, op0);
2473
2474 set_mem_attributes (op0, exp, 0);
2475
2476 return op0;
2477
2478 case TARGET_MEM_REF:
2479 if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp)))
2480 return NULL;
2481
2482 op0 = expand_debug_expr
2483 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)),
2484 exp));
2485 if (!op0)
2486 return NULL;
2487
2488 gcc_assert (GET_MODE (op0) == Pmode
dda2da58 2489 || GET_MODE (op0) == ptr_mode
b5b8b0ac
AO
2490 || GET_CODE (op0) == CONST_INT
2491 || GET_CODE (op0) == CONST_DOUBLE);
2492
2493 op0 = gen_rtx_MEM (mode, op0);
2494
2495 set_mem_attributes (op0, exp, 0);
2496
2497 return op0;
2498
2499 case ARRAY_REF:
2500 case ARRAY_RANGE_REF:
2501 case COMPONENT_REF:
2502 case BIT_FIELD_REF:
2503 case REALPART_EXPR:
2504 case IMAGPART_EXPR:
2505 case VIEW_CONVERT_EXPR:
2506 {
2507 enum machine_mode mode1;
2508 HOST_WIDE_INT bitsize, bitpos;
2509 tree offset;
2510 int volatilep = 0;
2511 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2512 &mode1, &unsignedp, &volatilep, false);
2513 rtx orig_op0;
2514
2515 orig_op0 = op0 = expand_debug_expr (tem);
2516
2517 if (!op0)
2518 return NULL;
2519
2520 if (offset)
2521 {
dda2da58
AO
2522 enum machine_mode addrmode, offmode;
2523
b5b8b0ac
AO
2524 gcc_assert (MEM_P (op0));
2525
dda2da58
AO
2526 op0 = XEXP (op0, 0);
2527 addrmode = GET_MODE (op0);
2528 if (addrmode == VOIDmode)
2529 addrmode = Pmode;
2530
b5b8b0ac
AO
2531 op1 = expand_debug_expr (offset);
2532 if (!op1)
2533 return NULL;
2534
dda2da58
AO
2535 offmode = GET_MODE (op1);
2536 if (offmode == VOIDmode)
2537 offmode = TYPE_MODE (TREE_TYPE (offset));
2538
2539 if (addrmode != offmode)
2540 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2541 subreg_lowpart_offset (addrmode,
2542 offmode));
2543
2544	    /* Don't use offset_address here; we don't need a
2545 recognizable address, and we don't want to generate
2546 code. */
2547 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
b5b8b0ac
AO
2548 }
2549
2550 if (MEM_P (op0))
2551 {
2552 if (bitpos >= BITS_PER_UNIT)
2553 {
2554 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2555 bitpos %= BITS_PER_UNIT;
2556 }
2557 else if (bitpos < 0)
2558 {
2559 int units = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2560 op0 = adjust_address_nv (op0, mode1, units);
2561 bitpos += units * BITS_PER_UNIT;
2562 }
2563 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2564 op0 = adjust_address_nv (op0, mode, 0);
2565 else if (GET_MODE (op0) != mode1)
2566 op0 = adjust_address_nv (op0, mode1, 0);
2567 else
2568 op0 = copy_rtx (op0);
2569 if (op0 == orig_op0)
2570 op0 = shallow_copy_rtx (op0);
2571 set_mem_attributes (op0, exp, 0);
2572 }
2573
2574 if (bitpos == 0 && mode == GET_MODE (op0))
2575 return op0;
2576
2577 if ((bitpos % BITS_PER_UNIT) == 0
2578 && bitsize == GET_MODE_BITSIZE (mode1))
2579 {
2580 enum machine_mode opmode = GET_MODE (op0);
2581
2582 gcc_assert (opmode != BLKmode);
2583
2584 if (opmode == VOIDmode)
2585 opmode = mode1;
2586
2587 /* This condition may hold if we're expanding the address
2588 right past the end of an array that turned out not to
2589 be addressable (i.e., the address was only computed in
2590 debug stmts). The gen_subreg below would rightfully
2591 crash, and the address doesn't really exist, so just
2592 drop it. */
2593 if (bitpos >= GET_MODE_BITSIZE (opmode))
2594 return NULL;
2595
2596 return simplify_gen_subreg (mode, op0, opmode,
2597 bitpos / BITS_PER_UNIT);
2598 }
2599
2600 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2601 && TYPE_UNSIGNED (TREE_TYPE (exp))
2602 ? SIGN_EXTRACT
2603 : ZERO_EXTRACT, mode,
2604 GET_MODE (op0) != VOIDmode
2605 ? GET_MODE (op0) : mode1,
2606 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2607 }
2608
b5b8b0ac
AO
2609 case ABS_EXPR:
2610 return gen_rtx_ABS (mode, op0);
2611
2612 case NEGATE_EXPR:
2613 return gen_rtx_NEG (mode, op0);
2614
2615 case BIT_NOT_EXPR:
2616 return gen_rtx_NOT (mode, op0);
2617
2618 case FLOAT_EXPR:
2619 if (unsignedp)
2620 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2621 else
2622 return gen_rtx_FLOAT (mode, op0);
2623
2624 case FIX_TRUNC_EXPR:
2625 if (unsignedp)
2626 return gen_rtx_UNSIGNED_FIX (mode, op0);
2627 else
2628 return gen_rtx_FIX (mode, op0);
2629
2630 case POINTER_PLUS_EXPR:
2631 case PLUS_EXPR:
2632 return gen_rtx_PLUS (mode, op0, op1);
2633
2634 case MINUS_EXPR:
2635 return gen_rtx_MINUS (mode, op0, op1);
2636
2637 case MULT_EXPR:
2638 return gen_rtx_MULT (mode, op0, op1);
2639
2640 case RDIV_EXPR:
2641 case TRUNC_DIV_EXPR:
2642 case EXACT_DIV_EXPR:
2643 if (unsignedp)
2644 return gen_rtx_UDIV (mode, op0, op1);
2645 else
2646 return gen_rtx_DIV (mode, op0, op1);
2647
2648 case TRUNC_MOD_EXPR:
2649 if (unsignedp)
2650 return gen_rtx_UMOD (mode, op0, op1);
2651 else
2652 return gen_rtx_MOD (mode, op0, op1);
2653
2654 case FLOOR_DIV_EXPR:
2655 if (unsignedp)
2656 return gen_rtx_UDIV (mode, op0, op1);
2657 else
2658 {
2659 rtx div = gen_rtx_DIV (mode, op0, op1);
2660 rtx mod = gen_rtx_MOD (mode, op0, op1);
2661 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2662 return gen_rtx_PLUS (mode, div, adj);
2663 }
2664
2665 case FLOOR_MOD_EXPR:
2666 if (unsignedp)
2667 return gen_rtx_UMOD (mode, op0, op1);
2668 else
2669 {
2670 rtx mod = gen_rtx_MOD (mode, op0, op1);
2671 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2672 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2673 return gen_rtx_PLUS (mode, mod, adj);
2674 }
2675
2676 case CEIL_DIV_EXPR:
2677 if (unsignedp)
2678 {
2679 rtx div = gen_rtx_UDIV (mode, op0, op1);
2680 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2681 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2682 return gen_rtx_PLUS (mode, div, adj);
2683 }
2684 else
2685 {
2686 rtx div = gen_rtx_DIV (mode, op0, op1);
2687 rtx mod = gen_rtx_MOD (mode, op0, op1);
2688 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2689 return gen_rtx_PLUS (mode, div, adj);
2690 }
2691
2692 case CEIL_MOD_EXPR:
2693 if (unsignedp)
2694 {
2695 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2696 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2697 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2698 return gen_rtx_PLUS (mode, mod, adj);
2699 }
2700 else
2701 {
2702 rtx mod = gen_rtx_MOD (mode, op0, op1);
2703 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2704 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2705 return gen_rtx_PLUS (mode, mod, adj);
2706 }
2707
2708 case ROUND_DIV_EXPR:
2709 if (unsignedp)
2710 {
2711 rtx div = gen_rtx_UDIV (mode, op0, op1);
2712 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2713 rtx adj = round_udiv_adjust (mode, mod, op1);
2714 return gen_rtx_PLUS (mode, div, adj);
2715 }
2716 else
2717 {
2718 rtx div = gen_rtx_DIV (mode, op0, op1);
2719 rtx mod = gen_rtx_MOD (mode, op0, op1);
2720 rtx adj = round_sdiv_adjust (mode, mod, op1);
2721 return gen_rtx_PLUS (mode, div, adj);
2722 }
2723
2724 case ROUND_MOD_EXPR:
2725 if (unsignedp)
2726 {
2727 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2728 rtx adj = round_udiv_adjust (mode, mod, op1);
2729 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2730 return gen_rtx_PLUS (mode, mod, adj);
2731 }
2732 else
2733 {
2734 rtx mod = gen_rtx_MOD (mode, op0, op1);
2735 rtx adj = round_sdiv_adjust (mode, mod, op1);
2736 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2737 return gen_rtx_PLUS (mode, mod, adj);
2738 }
2739
2740 case LSHIFT_EXPR:
2741 return gen_rtx_ASHIFT (mode, op0, op1);
2742
2743 case RSHIFT_EXPR:
2744 if (unsignedp)
2745 return gen_rtx_LSHIFTRT (mode, op0, op1);
2746 else
2747 return gen_rtx_ASHIFTRT (mode, op0, op1);
2748
2749 case LROTATE_EXPR:
2750 return gen_rtx_ROTATE (mode, op0, op1);
2751
2752 case RROTATE_EXPR:
2753 return gen_rtx_ROTATERT (mode, op0, op1);
2754
2755 case MIN_EXPR:
2756 if (unsignedp)
2757 return gen_rtx_UMIN (mode, op0, op1);
2758 else
2759 return gen_rtx_SMIN (mode, op0, op1);
2760
2761 case MAX_EXPR:
2762 if (unsignedp)
2763 return gen_rtx_UMAX (mode, op0, op1);
2764 else
2765 return gen_rtx_SMAX (mode, op0, op1);
2766
2767 case BIT_AND_EXPR:
2768 case TRUTH_AND_EXPR:
2769 return gen_rtx_AND (mode, op0, op1);
2770
2771 case BIT_IOR_EXPR:
2772 case TRUTH_OR_EXPR:
2773 return gen_rtx_IOR (mode, op0, op1);
2774
2775 case BIT_XOR_EXPR:
2776 case TRUTH_XOR_EXPR:
2777 return gen_rtx_XOR (mode, op0, op1);
2778
2779 case TRUTH_ANDIF_EXPR:
2780 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2781
2782 case TRUTH_ORIF_EXPR:
2783 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2784
2785 case TRUTH_NOT_EXPR:
2786 return gen_rtx_EQ (mode, op0, const0_rtx);
2787
2788 case LT_EXPR:
2789 if (unsignedp)
2790 return gen_rtx_LTU (mode, op0, op1);
2791 else
2792 return gen_rtx_LT (mode, op0, op1);
2793
2794 case LE_EXPR:
2795 if (unsignedp)
2796 return gen_rtx_LEU (mode, op0, op1);
2797 else
2798 return gen_rtx_LE (mode, op0, op1);
2799
2800 case GT_EXPR:
2801 if (unsignedp)
2802 return gen_rtx_GTU (mode, op0, op1);
2803 else
2804 return gen_rtx_GT (mode, op0, op1);
2805
2806 case GE_EXPR:
2807 if (unsignedp)
2808 return gen_rtx_GEU (mode, op0, op1);
2809 else
2810 return gen_rtx_GE (mode, op0, op1);
2811
2812 case EQ_EXPR:
2813 return gen_rtx_EQ (mode, op0, op1);
2814
2815 case NE_EXPR:
2816 return gen_rtx_NE (mode, op0, op1);
2817
2818 case UNORDERED_EXPR:
2819 return gen_rtx_UNORDERED (mode, op0, op1);
2820
2821 case ORDERED_EXPR:
2822 return gen_rtx_ORDERED (mode, op0, op1);
2823
2824 case UNLT_EXPR:
2825 return gen_rtx_UNLT (mode, op0, op1);
2826
2827 case UNLE_EXPR:
2828 return gen_rtx_UNLE (mode, op0, op1);
2829
2830 case UNGT_EXPR:
2831 return gen_rtx_UNGT (mode, op0, op1);
2832
2833 case UNGE_EXPR:
2834 return gen_rtx_UNGE (mode, op0, op1);
2835
2836 case UNEQ_EXPR:
2837 return gen_rtx_UNEQ (mode, op0, op1);
2838
2839 case LTGT_EXPR:
2840 return gen_rtx_LTGT (mode, op0, op1);
2841
2842 case COND_EXPR:
2843 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
2844
2845 case COMPLEX_EXPR:
2846 gcc_assert (COMPLEX_MODE_P (mode));
2847 if (GET_MODE (op0) == VOIDmode)
2848 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
2849 if (GET_MODE (op1) == VOIDmode)
2850 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
2851 return gen_rtx_CONCAT (mode, op0, op1);
2852
2853 case ADDR_EXPR:
2854 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2855 if (!op0 || !MEM_P (op0))
2856 return NULL;
2857
dda2da58
AO
2858 op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
2859
2860 return op0;
b5b8b0ac
AO
2861
2862 case VECTOR_CST:
2863 exp = build_constructor_from_list (TREE_TYPE (exp),
2864 TREE_VECTOR_CST_ELTS (exp));
2865 /* Fall through. */
2866
2867 case CONSTRUCTOR:
2868 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
2869 {
2870 unsigned i;
2871 tree val;
2872
2873 op0 = gen_rtx_CONCATN
2874 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
2875
2876 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
2877 {
2878 op1 = expand_debug_expr (val);
2879 if (!op1)
2880 return NULL;
2881 XVECEXP (op0, 0, i) = op1;
2882 }
2883
2884 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
2885 {
2886 op1 = expand_debug_expr
2887 (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node));
2888
2889 if (!op1)
2890 return NULL;
2891
2892 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
2893 XVECEXP (op0, 0, i) = op1;
2894 }
2895
2896 return op0;
2897 }
2898 else
2899 goto flag_unsupported;
2900
2901 case CALL_EXPR:
2902 /* ??? Maybe handle some builtins? */
2903 return NULL;
2904
2905 case SSA_NAME:
2906 {
2907 int part = var_to_partition (SA.map, exp);
2908
2909 if (part == NO_PARTITION)
2910 return NULL;
2911
2912 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
2913
2914 op0 = SA.partition_to_pseudo[part];
2915 goto adjust_mode;
2916 }
2917
2918 case ERROR_MARK:
2919 return NULL;
2920
2921 default:
2922 flag_unsupported:
2923#ifdef ENABLE_CHECKING
2924 debug_tree (exp);
2925 gcc_unreachable ();
2926#else
2927 return NULL;
2928#endif
2929 }
2930}
2931
2932/* Expand the _LOCs in debug insns. We run this after expanding all
2933 regular insns, so that any variables referenced in the function
2934 will have their DECL_RTLs set. */
2935
2936static void
2937expand_debug_locations (void)
2938{
2939 rtx insn;
2940 rtx last = get_last_insn ();
2941 int save_strict_alias = flag_strict_aliasing;
2942
2943 /* New alias sets while setting up memory attributes cause
2944     -fcompare-debug failures, even though they don't bring about any
2945 codegen changes. */
2946 flag_strict_aliasing = 0;
2947
2948 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2949 if (DEBUG_INSN_P (insn))
2950 {
2951 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
2952 rtx val;
2953 enum machine_mode mode;
2954
2955 if (value == NULL_TREE)
2956 val = NULL_RTX;
2957 else
2958 {
2959 val = expand_debug_expr (value);
2960 gcc_assert (last == get_last_insn ());
2961 }
2962
2963 if (!val)
2964 val = gen_rtx_UNKNOWN_VAR_LOC ();
2965 else
2966 {
2967 mode = GET_MODE (INSN_VAR_LOCATION (insn));
2968
2969 gcc_assert (mode == GET_MODE (val)
2970 || (GET_MODE (val) == VOIDmode
2971 && (CONST_INT_P (val)
2972 || GET_CODE (val) == CONST_FIXED
2973 || GET_CODE (val) == CONST_DOUBLE
2974 || GET_CODE (val) == LABEL_REF)));
2975 }
2976
2977 INSN_VAR_LOCATION_LOC (insn) = val;
2978 }
2979
2980 flag_strict_aliasing = save_strict_alias;
2981}
2982
242229bb
JH
2983/* Expand basic block BB from GIMPLE trees to RTL. */
2984
2985static basic_block
10d22567 2986expand_gimple_basic_block (basic_block bb)
242229bb 2987{
726a989a
RB
2988 gimple_stmt_iterator gsi;
2989 gimple_seq stmts;
2990 gimple stmt = NULL;
242229bb
JH
2991 rtx note, last;
2992 edge e;
628f6a4e 2993 edge_iterator ei;
8b11009b 2994 void **elt;
242229bb
JH
2995
2996 if (dump_file)
726a989a
RB
2997 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
2998 bb->index);
2999
3000 /* Note that since we are now transitioning from GIMPLE to RTL, we
3001 cannot use the gsi_*_bb() routines because they expect the basic
3002 block to be in GIMPLE, instead of RTL. Therefore, we need to
3003 access the BB sequence directly. */
3004 stmts = bb_seq (bb);
3005 bb->il.gimple = NULL;
bf08ebeb 3006 rtl_profile_for_bb (bb);
5e2d947c
JH
3007 init_rtl_bb_info (bb);
3008 bb->flags |= BB_RTL;
3009
a9b77cd1
ZD
3010  /* Remove the RETURN_EXPR if we may fall through to the exit
3011 instead. */
726a989a
RB
3012 gsi = gsi_last (stmts);
3013 if (!gsi_end_p (gsi)
3014 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
a9b77cd1 3015 {
726a989a 3016 gimple ret_stmt = gsi_stmt (gsi);
a9b77cd1
ZD
3017
3018 gcc_assert (single_succ_p (bb));
3019 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3020
3021 if (bb->next_bb == EXIT_BLOCK_PTR
726a989a 3022 && !gimple_return_retval (ret_stmt))
a9b77cd1 3023 {
726a989a 3024 gsi_remove (&gsi, false);
a9b77cd1
ZD
3025 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3026 }
3027 }
3028
726a989a
RB
3029 gsi = gsi_start (stmts);
3030 if (!gsi_end_p (gsi))
8b11009b 3031 {
726a989a
RB
3032 stmt = gsi_stmt (gsi);
3033 if (gimple_code (stmt) != GIMPLE_LABEL)
3034 stmt = NULL;
8b11009b 3035 }
242229bb 3036
8b11009b
ZD
3037 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3038
3039 if (stmt || elt)
242229bb
JH
3040 {
3041 last = get_last_insn ();
3042
8b11009b
ZD
3043 if (stmt)
3044 {
28ed065e 3045 expand_gimple_stmt (stmt);
726a989a 3046 gsi_next (&gsi);
8b11009b
ZD
3047 }
3048
3049 if (elt)
ae50c0cb 3050 emit_label ((rtx) *elt);
242229bb 3051
caf93cb0 3052      /* Java emits line number notes at the top of labels.
c22cacf3 3053 ??? Make this go away once line number notes are obsoleted. */
242229bb 3054 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 3055 if (NOTE_P (BB_HEAD (bb)))
242229bb 3056 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
242229bb 3057 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528 3058
726a989a 3059 maybe_dump_rtl_for_gimple_stmt (stmt, last);
242229bb
JH
3060 }
3061 else
3062 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3063
3064 NOTE_BASIC_BLOCK (note) = bb;
3065
726a989a 3066 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 3067 {
cea49550 3068 basic_block new_bb;
242229bb 3069
b5b8b0ac
AO
3070 stmt = gsi_stmt (gsi);
3071
242229bb
JH
3072 /* Expand this statement, then evaluate the resulting RTL and
3073 fixup the CFG accordingly. */
726a989a 3074 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 3075 {
726a989a 3076 new_bb = expand_gimple_cond (bb, stmt);
cea49550
RH
3077 if (new_bb)
3078 return new_bb;
3079 }
b5b8b0ac
AO
3080 else if (gimple_debug_bind_p (stmt))
3081 {
3082 location_t sloc = get_curr_insn_source_location ();
3083 tree sblock = get_curr_insn_block ();
3084 gimple_stmt_iterator nsi = gsi;
3085
3086 for (;;)
3087 {
3088 tree var = gimple_debug_bind_get_var (stmt);
3089 tree value;
3090 rtx val;
3091 enum machine_mode mode;
3092
3093 if (gimple_debug_bind_has_value_p (stmt))
3094 value = gimple_debug_bind_get_value (stmt);
3095 else
3096 value = NULL_TREE;
3097
3098 last = get_last_insn ();
3099
3100 set_curr_insn_source_location (gimple_location (stmt));
3101 set_curr_insn_block (gimple_block (stmt));
3102
3103 if (DECL_P (var))
3104 mode = DECL_MODE (var);
3105 else
3106 mode = TYPE_MODE (TREE_TYPE (var));
3107
3108 val = gen_rtx_VAR_LOCATION
3109 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3110
3111 val = emit_debug_insn (val);
3112
3113 if (dump_file && (dump_flags & TDF_DETAILS))
3114 {
3115 /* We can't dump the insn with a TREE where an RTX
3116 is expected. */
3117 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3118 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3119 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3120 }
3121
3122 gsi = nsi;
3123 gsi_next (&nsi);
3124 if (gsi_end_p (nsi))
3125 break;
3126 stmt = gsi_stmt (nsi);
3127 if (!gimple_debug_bind_p (stmt))
3128 break;
3129 }
3130
3131 set_curr_insn_source_location (sloc);
3132 set_curr_insn_block (sblock);
3133 }
80c7a9eb 3134 else
242229bb 3135 {
726a989a 3136 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
cea49550
RH
3137 {
3138 bool can_fallthru;
3139 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3140 if (new_bb)
3141 {
3142 if (can_fallthru)
3143 bb = new_bb;
3144 else
3145 return new_bb;
3146 }
3147 }
4d7a65ea 3148 else
b7211528 3149 {
4e3825db 3150 def_operand_p def_p;
4e3825db
MM
3151 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3152
3153 if (def_p != NULL)
3154 {
3155 /* Ignore this stmt if it is in the list of
3156 replaceable expressions. */
3157 if (SA.values
e97809c6
MM
3158 && bitmap_bit_p (SA.values,
3159 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4e3825db
MM
3160 continue;
3161 }
28ed065e 3162 last = expand_gimple_stmt (stmt);
726a989a 3163 maybe_dump_rtl_for_gimple_stmt (stmt, last);
b7211528 3164 }
242229bb
JH
3165 }
3166 }
3167
7241571e 3168 /* Expand implicit goto and convert goto_locus. */
a9b77cd1
ZD
3169 FOR_EACH_EDGE (e, ei, bb->succs)
3170 {
7241571e
JJ
3171 if (e->goto_locus && e->goto_block)
3172 {
3173 set_curr_insn_source_location (e->goto_locus);
3174 set_curr_insn_block (e->goto_block);
3175 e->goto_locus = curr_insn_locator ();
3176 }
3177 e->goto_block = NULL;
3178 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3179 {
3180 emit_jump (label_rtx_for_bb (e->dest));
3181 e->flags &= ~EDGE_FALLTHRU;
3182 }
a9b77cd1
ZD
3183 }
3184
242229bb
JH
3185 do_pending_stack_adjust ();
3186
3f117656 3187 /* Find the block tail. The last insn in the block is the insn
242229bb
JH
3188 before a barrier and/or table jump insn. */
3189 last = get_last_insn ();
4b4bf941 3190 if (BARRIER_P (last))
242229bb
JH
3191 last = PREV_INSN (last);
3192 if (JUMP_TABLE_DATA_P (last))
3193 last = PREV_INSN (PREV_INSN (last));
3194 BB_END (bb) = last;
caf93cb0 3195
242229bb 3196 update_bb_for_insn (bb);
80c7a9eb 3197
242229bb
JH
3198 return bb;
3199}
3200
3201
3202/* Create a basic block for initialization code. */
3203
3204static basic_block
3205construct_init_block (void)
3206{
3207 basic_block init_block, first_block;
fd44f634
JH
3208 edge e = NULL;
3209 int flags;
275a4187 3210
fd44f634
JH
3211 /* Multiple entry points not supported yet. */
3212 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
5e2d947c
JH
3213 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3214 init_rtl_bb_info (EXIT_BLOCK_PTR);
3215 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3216 EXIT_BLOCK_PTR->flags |= BB_RTL;
242229bb 3217
fd44f634 3218 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
275a4187 3219
fd44f634
JH
3220  /* When the entry edge points to the first basic block, we don't need a
3221     jump; otherwise we have to jump to the proper target.  */
3222 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3223 {
726a989a 3224 tree label = gimple_block_label (e->dest);
fd44f634
JH
3225
3226 emit_jump (label_rtx (label));
3227 flags = 0;
275a4187 3228 }
fd44f634
JH
3229 else
3230 flags = EDGE_FALLTHRU;
242229bb
JH
3231
3232 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3233 get_last_insn (),
3234 ENTRY_BLOCK_PTR);
3235 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3236 init_block->count = ENTRY_BLOCK_PTR->count;
3237 if (e)
3238 {
3239 first_block = e->dest;
3240 redirect_edge_succ (e, init_block);
fd44f634 3241 e = make_edge (init_block, first_block, flags);
242229bb
JH
3242 }
3243 else
3244 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3245 e->probability = REG_BR_PROB_BASE;
3246 e->count = ENTRY_BLOCK_PTR->count;
3247
3248 update_bb_for_insn (init_block);
3249 return init_block;
3250}
3251
55e092c4
JH
3252/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3253 found in the block tree. */
3254
3255static void
3256set_block_levels (tree block, int level)
3257{
3258 while (block)
3259 {
3260 BLOCK_NUMBER (block) = level;
3261 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3262 block = BLOCK_CHAIN (block);
3263 }
3264}
242229bb
JH
3265
3266/* Create a block containing landing pads and similar stuff. */
3267
3268static void
3269construct_exit_block (void)
3270{
3271 rtx head = get_last_insn ();
3272 rtx end;
3273 basic_block exit_block;
628f6a4e
BE
3274 edge e, e2;
3275 unsigned ix;
3276 edge_iterator ei;
071a42f9 3277 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
242229bb 3278
bf08ebeb
JH
3279 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3280
caf93cb0 3281 /* Make sure the locus is set to the end of the function, so that
242229bb 3282 epilogue line numbers and warnings are set properly. */
6773e15f 3283 if (cfun->function_end_locus != UNKNOWN_LOCATION)
242229bb
JH
3284 input_location = cfun->function_end_locus;
3285
3286 /* The following insns belong to the top scope. */
55e092c4 3287 set_curr_insn_block (DECL_INITIAL (current_function_decl));
242229bb 3288
242229bb
JH
3289 /* Generate rtl for function exit. */
3290 expand_function_end ();
3291
3292 end = get_last_insn ();
3293 if (head == end)
3294 return;
071a42f9
JH
3295  /* While emitting the function end we may have moved the end of the last
3296     basic block, so restore it.  */
3297 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4b4bf941 3298 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 3299 head = NEXT_INSN (head);
80c7a9eb
RH
3300 exit_block = create_basic_block (NEXT_INSN (head), end,
3301 EXIT_BLOCK_PTR->prev_bb);
242229bb
JH
3302 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3303 exit_block->count = EXIT_BLOCK_PTR->count;
628f6a4e
BE
3304
3305 ix = 0;
3306 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
242229bb 3307 {
8fb790fd 3308 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
242229bb 3309 if (!(e->flags & EDGE_ABNORMAL))
628f6a4e
BE
3310 redirect_edge_succ (e, exit_block);
3311 else
3312 ix++;
242229bb 3313 }
628f6a4e 3314
242229bb
JH
3315 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3316 e->probability = REG_BR_PROB_BASE;
3317 e->count = EXIT_BLOCK_PTR->count;
628f6a4e 3318 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
242229bb
JH
3319 if (e2 != e)
3320 {
c22cacf3 3321 e->count -= e2->count;
242229bb
JH
3322 exit_block->count -= e2->count;
3323 exit_block->frequency -= EDGE_FREQUENCY (e2);
3324 }
3325 if (e->count < 0)
3326 e->count = 0;
3327 if (exit_block->count < 0)
3328 exit_block->count = 0;
3329 if (exit_block->frequency < 0)
3330 exit_block->frequency = 0;
3331 update_bb_for_insn (exit_block);
3332}
3333
c22cacf3 3334/* Helper function for discover_nonconstant_array_refs.
a1b23b2f
UW
3335 Look for ARRAY_REF nodes with non-constant indexes and mark them
3336 addressable. */
3337
3338static tree
3339discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3340 void *data ATTRIBUTE_UNUSED)
3341{
3342 tree t = *tp;
3343
3344 if (IS_TYPE_OR_DECL_P (t))
3345 *walk_subtrees = 0;
3346 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3347 {
3348 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3349 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3350 && (!TREE_OPERAND (t, 2)
3351 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3352 || (TREE_CODE (t) == COMPONENT_REF
3353 && (!TREE_OPERAND (t,2)
3354 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3355 || TREE_CODE (t) == BIT_FIELD_REF
3356 || TREE_CODE (t) == REALPART_EXPR
3357 || TREE_CODE (t) == IMAGPART_EXPR
3358 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1043771b 3359 || CONVERT_EXPR_P (t))
a1b23b2f
UW
3360 t = TREE_OPERAND (t, 0);
3361
3362 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3363 {
3364 t = get_base_address (t);
6f11d690
RG
3365 if (t && DECL_P (t)
3366 && DECL_MODE (t) != BLKmode)
a1b23b2f
UW
3367 TREE_ADDRESSABLE (t) = 1;
3368 }
3369
3370 *walk_subtrees = 0;
3371 }
3372
3373 return NULL_TREE;
3374}
3375
3376/* RTL expansion is not able to compile array references with variable
3377   offsets for arrays stored in a single register.  Discover such
3378 expressions and mark variables as addressable to avoid this
3379 scenario. */
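/* Illustrative example:

     int f (int i) { int v[2] = { 1, 2 }; return v[i]; }

   The variable index means V cannot be kept in a single register, so
   V is marked TREE_ADDRESSABLE and is given a stack slot instead.  */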
3380
3381static void
3382discover_nonconstant_array_refs (void)
3383{
3384 basic_block bb;
726a989a 3385 gimple_stmt_iterator gsi;
a1b23b2f
UW
3386
3387 FOR_EACH_BB (bb)
726a989a
RB
3388 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3389 {
3390 gimple stmt = gsi_stmt (gsi);
3391 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
3392 }
a1b23b2f
UW
3393}
3394
2e3f842f
L
3395/* This function sets crtl->args.internal_arg_pointer to a virtual
3396   register if DRAP is needed.  The local register allocator will replace
3397 virtual_incoming_args_rtx with the virtual register. */
3398
3399static void
3400expand_stack_alignment (void)
3401{
3402 rtx drap_rtx;
e939805b 3403 unsigned int preferred_stack_boundary;
2e3f842f
L
3404
3405 if (! SUPPORTS_STACK_ALIGNMENT)
3406 return;
3407
3408 if (cfun->calls_alloca
3409 || cfun->has_nonlocal_label
3410 || crtl->has_nonlocal_goto)
3411 crtl->need_drap = true;
3412
3413 gcc_assert (crtl->stack_alignment_needed
3414 <= crtl->stack_alignment_estimated);
3415
2e3f842f
L
3416 /* Update crtl->stack_alignment_estimated and use it later to align
3417 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
3418 exceptions since callgraph doesn't collect incoming stack alignment
3419 in this case. */
3420 if (flag_non_call_exceptions
3421 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
3422 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3423 else
3424 preferred_stack_boundary = crtl->preferred_stack_boundary;
3425 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
3426 crtl->stack_alignment_estimated = preferred_stack_boundary;
3427 if (preferred_stack_boundary > crtl->stack_alignment_needed)
3428 crtl->stack_alignment_needed = preferred_stack_boundary;
3429
3430 crtl->stack_realign_needed
e939805b 3431 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
d2d93c32 3432 crtl->stack_realign_tried = crtl->stack_realign_needed;
2e3f842f
L
3433
3434 crtl->stack_realign_processed = true;
3435
3436 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
3437 alignment. */
3438 gcc_assert (targetm.calls.get_drap_rtx != NULL);
3439 drap_rtx = targetm.calls.get_drap_rtx ();
3440
d015f7cc
L
3441 /* stack_realign_drap and drap_rtx must match. */
3442 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
3443
2e3f842f
L
3444 /* Do nothing if NULL is returned, which means DRAP is not needed. */
3445 if (NULL != drap_rtx)
3446 {
3447 crtl->args.internal_arg_pointer = drap_rtx;
3448
3449 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
3450 needed. */
3451 fixup_tail_calls ();
3452 }
3453}
3454
242229bb
JH
3455/* Translate the intermediate representation contained in the CFG
3456 from GIMPLE trees to RTL.
3457
3458 We do conversion per basic block and preserve/update the tree CFG.
3459 This implies we have to do some magic as the CFG can simultaneously
3460 consist of basic blocks containing RTL and GIMPLE trees. This can
61ada8ae 3461   confuse the CFG hooks, so be careful not to manipulate the CFG during
242229bb
JH
3462 the expansion. */
3463
c2924966 3464static unsigned int
726a989a 3465gimple_expand_cfg (void)
242229bb
JH
3466{
3467 basic_block bb, init_block;
3468 sbitmap blocks;
0ef90296
ZD
3469 edge_iterator ei;
3470 edge e;
4e3825db
MM
3471 unsigned i;
3472
3473 rewrite_out_of_ssa (&SA);
3474 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
3475 sizeof (rtx));
242229bb 3476
4586b4ca
SB
3477 /* Some backends want to know that we are expanding to RTL. */
3478 currently_expanding_to_rtl = 1;
3479
bf08ebeb
JH
3480 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
3481
55e092c4 3482 insn_locators_alloc ();
fe8a7779 3483 if (!DECL_IS_BUILTIN (current_function_decl))
1751ecd6
AH
3484 {
3485 /* Eventually, all FEs should explicitly set function_start_locus. */
3486 if (cfun->function_start_locus == UNKNOWN_LOCATION)
3487 set_curr_insn_source_location
3488 (DECL_SOURCE_LOCATION (current_function_decl));
3489 else
3490 set_curr_insn_source_location (cfun->function_start_locus);
3491 }
55e092c4
JH
3492 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3493 prologue_locator = curr_insn_locator ();
3494
3495  /* Make sure the first insn is a note even if we don't want linenums.
3496 This makes sure the first insn will never be deleted.
3497 Also, final expects a note to appear there. */
3498 emit_note (NOTE_INSN_DELETED);
6429e3be 3499
a1b23b2f
UW
3500 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
3501 discover_nonconstant_array_refs ();
3502
e41b2a33 3503 targetm.expand_to_rtl_hook ();
cb91fab0 3504 crtl->stack_alignment_needed = STACK_BOUNDARY;
2e3f842f
L
3505 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
3506 crtl->stack_alignment_estimated = STACK_BOUNDARY;
cb91fab0
JH
3507 crtl->preferred_stack_boundary = STACK_BOUNDARY;
3508 cfun->cfg->max_jumptable_ents = 0;
3509
e41b2a33 3510
727a31fa 3511 /* Expand the variables recorded during gimple lowering. */
242229bb
JH
3512 expand_used_vars ();
3513
7d69de61
RH
3514 /* Honor stack protection warnings. */
3515 if (warn_stack_protect)
3516 {
e3b5732b 3517 if (cfun->calls_alloca)
c5409249
MLI
3518 warning (OPT_Wstack_protector,
3519 "not protecting local variables: variable length buffer");
cb91fab0 3520 if (has_short_buffer && !crtl->stack_protect_guard)
c5409249
MLI
3521 warning (OPT_Wstack_protector,
3522 "not protecting function: no buffer at least %d bytes long",
7d69de61
RH
3523 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
3524 }
3525
242229bb 3526 /* Set up parameters and prepare for return, for the function. */
b79c5284 3527 expand_function_start (current_function_decl);
242229bb 3528
4e3825db
MM
3529 /* Now that we also have the parameter RTXs, copy them over to our
3530 partitions. */
3531 for (i = 0; i < SA.map->num_partitions; i++)
3532 {
3533 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
3534
3535 if (TREE_CODE (var) != VAR_DECL
3536 && !SA.partition_to_pseudo[i])
3537 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
3538 gcc_assert (SA.partition_to_pseudo[i]);
eb7adebc
MM
3539
3540 /* If this decl was marked as living in multiple places, reset
3541 this now to NULL. */
3542 if (DECL_RTL_IF_SET (var) == pc_rtx)
3543 SET_DECL_RTL (var, NULL);
3544
4e3825db
MM
3545 /* Some RTL parts really want to look at DECL_RTL(x) when x
3546 was a decl marked in REG_ATTR or MEM_ATTR. We could use
3547        SET_DECL_RTL here to make this available, but that would mean
3548        selecting one of the potentially many RTLs for one DECL.  Instead
3549 of doing that we simply reset the MEM_EXPR of the RTL in question,
3550 then nobody can get at it and hence nobody can call DECL_RTL on it. */
3551 if (!DECL_RTL_SET_P (var))
3552 {
3553 if (MEM_P (SA.partition_to_pseudo[i]))
3554 set_mem_expr (SA.partition_to_pseudo[i], NULL);
3555 }
3556 }
3557
242229bb
JH
3558 /* If this function is `main', emit a call to `__main'
3559 to run global initializers, etc. */
3560 if (DECL_NAME (current_function_decl)
3561 && MAIN_NAME_P (DECL_NAME (current_function_decl))
3562 && DECL_FILE_SCOPE_P (current_function_decl))
3563 expand_main_function ();
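 /* Added note (hedged): on targets without an init-section mechanism,
    the call emitted by expand_main_function makes main begin with
    "call __main"; libgcc's __main then runs the global constructors
    before any user code executes.  */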
3564
7d69de61
RH
3565 /* Initialize the stack_protect_guard field. This must happen after the
3566 call to __main (if any) so that the external decl is initialized. */
cb91fab0 3567 if (crtl->stack_protect_guard)
7d69de61
RH
3568 stack_protect_prologue ();
3569
e939805b
L
3570 /* Update stack boundary if needed. */
3571 if (SUPPORTS_STACK_ALIGNMENT)
3572 {
3573 /* Call update_stack_boundary here to update the incoming stack
3574 boundary before TARGET_FUNCTION_OK_FOR_SIBCALL is called.
3575 TARGET_FUNCTION_OK_FOR_SIBCALL needs the accurate incoming stack
3576 alignment to decide whether sibcall optimization is safe, since
3577 sibcall optimization only aligns the outgoing stack to the
3578 incoming stack boundary. */
3579 if (targetm.calls.update_stack_boundary)
3580 targetm.calls.update_stack_boundary ();
3581
3582 /* The incoming stack frame has to be aligned at least at
3583 parm_stack_boundary. */
3584 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
3585 }
3586
4e3825db
MM
3587 expand_phi_nodes (&SA);
3588
3fbd86b1 3589 /* Register rtl specific functions for cfg. */
242229bb
JH
3590 rtl_register_cfg_hooks ();
3591
3592 init_block = construct_init_block ();
3593
0ef90296 3594 /* Clear EDGE_EXECUTABLE on the entry edge(s). The flag is cleared from
4e3825db 3595 the remaining edges later. */
0ef90296
ZD
3596 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
3597 e->flags &= ~EDGE_EXECUTABLE;
3598
8b11009b 3599 lab_rtx_for_bb = pointer_map_create ();
242229bb 3600 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
10d22567 3601 bb = expand_gimple_basic_block (bb);
bf08ebeb 3602
b5b8b0ac
AO
3603 if (MAY_HAVE_DEBUG_INSNS)
3604 expand_debug_locations ();
3605
4e3825db
MM
3606 execute_free_datastructures ();
3607 finish_out_of_ssa (&SA);
3608
bf08ebeb
JH
3609 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
3610 conservatively to true until they are all profile-aware. */
8b11009b 3611 pointer_map_destroy (lab_rtx_for_bb);
cb91fab0 3612 free_histograms ();
242229bb
JH
3613
3614 construct_exit_block ();
55e092c4
JH
3615 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3616 insn_locators_finalize ();
242229bb 3617
1d65f45c 3618 /* Zap the tree EH table. */
e8a2a782 3619 set_eh_throw_stmt_table (cfun, NULL);
242229bb
JH
3620
3621 rebuild_jump_labels (get_insns ());
242229bb 3622
4e3825db
MM
3623 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3624 {
3625 edge e;
3626 edge_iterator ei;
3627 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3628 {
3629 if (e->insns.r)
3630 commit_one_edge_insertion (e);
3631 else
3632 ei_next (&ei);
3633 }
3634 }
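 /* Added note (hedged): commit_one_edge_insertion may split E and
    thereby reshuffle BB->succs, so the iterator is advanced only on
    the branch that leaves the edge vector untouched.  */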
3635
3636 /* We're done expanding trees to RTL. */
3637 currently_expanding_to_rtl = 0;
3638
3639 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
3640 {
3641 edge e;
3642 edge_iterator ei;
3643 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3644 {
3645 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
3646 e->flags &= ~EDGE_EXECUTABLE;
3647
3648 /* At the moment not all abnormal edges match the RTL
3649 representation. It is safe to remove them here, as
3650 find_many_sub_basic_blocks will rediscover them. In
3651 the future we should get this fixed properly. */
3652 if ((e->flags & EDGE_ABNORMAL)
3653 && !(e->flags & EDGE_SIBCALL))
3654 remove_edge (e);
3655 else
3656 ei_next (&ei);
3657 }
3658 }
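 /* Editor's sketch of the iteration idiom used in both loops above
    (hedged; should_remove_p is a hypothetical predicate): when the
    body may delete the current edge, ei_safe_edge plus a conditional
    ei_next keeps the walk valid while the vector shrinks:  */
#if 0
 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
   if (should_remove_p (e))
     remove_edge (e);		/* EI now indexes the next edge.  */
   else
     ei_next (&ei);
#endif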
3659
242229bb
JH
3660 blocks = sbitmap_alloc (last_basic_block);
3661 sbitmap_ones (blocks);
3662 find_many_sub_basic_blocks (blocks);
242229bb 3663 sbitmap_free (blocks);
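 /* Added note (hedged): the all-ones sbitmap asks
    find_many_sub_basic_blocks to rescan every block, splitting any
    block whose expanded RTL now contains internal control flow.  */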
4e3825db 3664 purge_all_dead_edges ();
242229bb
JH
3665
3666 compact_blocks ();
2e3f842f
L
3667
3668 expand_stack_alignment ();
3669
242229bb 3670#ifdef ENABLE_CHECKING
62e5bf5d 3671 verify_flow_info ();
242229bb 3672#endif
9f8628ba
PB
3673
3674 /* There's no need to defer outputting this function any more; we
3675 know we want to output it. */
3676 DECL_DEFER_OUTPUT (current_function_decl) = 0;
3677
3678 /* Now that we're done expanding trees to RTL, we shouldn't have any
3679 more CONCATs anywhere. */
3680 generating_concat_p = 0;
3681
b7211528
SB
3682 if (dump_file)
3683 {
3684 fprintf (dump_file,
3685 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
3686 /* And the pass manager will dump RTL for us. */
3687 }
ef330312
PB
3688
3689 /* If we're emitting a nested function, make sure its parent gets
3690 emitted as well. Doing otherwise confuses debug info. */
c22cacf3 3691 {
ef330312
PB
3692 tree parent;
3693 for (parent = DECL_CONTEXT (current_function_decl);
c22cacf3
MS
3694 parent != NULL_TREE;
3695 parent = get_containing_scope (parent))
ef330312 3696 if (TREE_CODE (parent) == FUNCTION_DECL)
c22cacf3 3697 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
ef330312 3698 }
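 /* Hedged example (editor's addition, GNU C nested functions): when
    expanding "inner" below, the loop above marks "outer" as referenced
    so that its debug info is emitted as well:  */
#if 0
 int outer (int x) { int inner (void) { return x; } return inner (); }
#endif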
c22cacf3 3699
ef330312
PB
3700 /* We are now committed to emitting code for this function. Do any
3701 preparation, such as emitting abstract debug info for the inline
3702 function, before it gets mangled by optimization. */
3703 if (cgraph_function_possibly_inlined_p (current_function_decl))
3704 (*debug_hooks->outlining_inline_function) (current_function_decl);
3705
3706 TREE_ASM_WRITTEN (current_function_decl) = 1;
4bb1e037
AP
3707
3708 /* After expanding, the return labels are no longer needed. */
3709 return_label = NULL;
3710 naked_return_label = NULL;
55e092c4
JH
3711 /* Tag the blocks with a depth number so that change_scope can find
3712 the common parent easily. */
3713 set_block_levels (DECL_INITIAL (cfun->decl), 0);
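 /* Editor's sketch (hedged) of what such a depth-tagging walk looks
    like, using the BLOCK_* accessors from tree.h; the real
    set_block_levels is defined earlier in this file:  */
#if 0
 static void
 set_block_levels_sketch (tree block, int level)
 {
   while (block)
     {
       BLOCK_NUMBER (block) = level;
       set_block_levels_sketch (BLOCK_SUBBLOCKS (block), level + 1);
       block = BLOCK_CHAIN (block);
     }
 }
#endif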
bf08ebeb 3714 default_rtl_profile ();
c2924966 3715 return 0;
242229bb
JH
3716}
3717
e3b5732b 3718struct rtl_opt_pass pass_expand =
242229bb 3719{
8ddbbcae 3720 {
e3b5732b 3721 RTL_PASS,
c22cacf3 3722 "expand", /* name */
242229bb 3723 NULL, /* gate */
726a989a 3724 gimple_expand_cfg, /* execute */
242229bb
JH
3725 NULL, /* sub */
3726 NULL, /* next */
3727 0, /* static_pass_number */
c22cacf3 3728 TV_EXPAND, /* tv_id */
4e3825db 3729 PROP_ssa | PROP_gimple_leh | PROP_cfg,/* properties_required */
242229bb 3730 PROP_rtl, /* properties_provided */
4e3825db
MM
3731 PROP_ssa | PROP_trees, /* properties_destroyed */
3732 TODO_verify_ssa | TODO_verify_flow
3733 | TODO_verify_stmts, /* todo_flags_start */
3734 TODO_dump_func
3735 | TODO_ggc_collect /* todo_flags_finish */
8ddbbcae 3736 }
242229bb 3737};