/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}


#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}

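/* For illustration, assuming FRAME_GROWS_DOWNWARD, frame_phase == 0,
   frame_offset == -16, and a request for SIZE 12 at ALIGN 8, the code
   above computes

       new_frame_offset = -16 - 12   =>  -28
       new_frame_offset &= -8        =>  -32
       offset = frame_offset = -32

   so the 12 bytes are placed at frame offset -32, which keeps the slot
   8-byte aligned below virtual_stack_vars_rtx.  */
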
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  v = &stack_vars[stack_vars_num];

  v->decl = decl;
  v->offset = 0;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

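/* For illustration: aggregate_contains_union_type is true for a type such
   as "struct { union { short s; int i; } u; }" (the union is found while
   walking the RECORD_TYPE's fields) and for an array whose element type
   is a union (via the ARRAY_TYPE branch), but false for a plain
   "struct { int a[10]; }".  */
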
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (See PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea < sizeb)
    return -1;
  if (sizea > sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}


/* If the points-to solution *PI points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert(visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced.  */
          gcc_assert (DECL_P (decl)
                      && referenced_var_lookup (DECL_UID (decl)));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block easy to lay out within
   the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;
        }
    }

  if (optimize)
    update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (decl))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   stack_vars[j].offset + offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}

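/* For illustration: on a hypothetical target whose
   MAX_SUPPORTED_STACK_ALIGNMENT is 256 bits, a partition representative
   with a 16-byte alignb takes the "small" path above and is carved out
   of the frame with alloc_stack_frame_space, while one requiring, say,
   4096-bit alignment is instead given a slice of the single
   allocate_dynamic_stack_space block set up during the pred == NULL
   pass.  */
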
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = get_decl_align_unit (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

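/* For illustration: with -fstack-protector every stack variable is
   deferred, so the strings can later be re-ordered to the top of the
   frame.  Without it, at -O0 a block-scope 4-byte "int" falls under the
   32-byte cutoff above and is allocated immediately, while a block-scope
   "char buf[64]" is deferred and goes through the partitioning code.  */
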
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.
*/

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made */
      gcc_assert(!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY 1
#define SPCT_HAS_SMALL_CHAR_ARRAY 2
#define SPCT_HAS_ARRAY 4
#define SPCT_HAS_AGGREGATE 8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  tree t;
  HOST_WIDE_INT size = 0;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  return size;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  unsigned ix;
  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
    TREE_USED (t) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}

/* Make a fair guess for the size of the stack frame of the decl
   passed.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (tree decl)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  unsigned ix;
  tree old_cur_fun_decl = current_function_decl;
  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (cfun, ix, var)
    {
      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  pop_cfun ();
  current_function_decl = old_cur_fun_decl;
  return size;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *maybe_local_decls = NULL;
  unsigned i;
  unsigned len;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = VEC_length (tree, cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            VEC_safe_push (tree, heap, maybe_local_decls, var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!VEC_empty (tree, cfun->local_decls))
    VEC_block_remove (tree, cfun->local_decls, 0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }
  VEC_free (tree, heap, maybe_local_decls);

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}


b7211528
SB
1596/* If we need to produce a detailed dump, print the tree representation
1597 for STMT to the dump file. SINCE is the last RTX after which the RTL
1598 generated for STMT should have been appended. */
1599
1600static void
726a989a 1601maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
b7211528
SB
1602{
1603 if (dump_file && (dump_flags & TDF_DETAILS))
1604 {
1605 fprintf (dump_file, "\n;; ");
b5b8b0ac
AO
1606 print_gimple_stmt (dump_file, stmt, 0,
1607 TDF_SLIM | (dump_flags & TDF_LINENO));
b7211528
SB
1608 fprintf (dump_file, "\n");
1609
1610 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1611 }
1612}
1613
8b11009b
ZD
1614/* Maps the blocks that do not contain tree labels to rtx labels. */
1615
1616static struct pointer_map_t *lab_rtx_for_bb;
1617
a9b77cd1
ZD
1618/* Returns the label_rtx expression for a label starting basic block BB. */
1619
1620static rtx
726a989a 1621label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
a9b77cd1 1622{
726a989a
RB
1623 gimple_stmt_iterator gsi;
1624 tree lab;
1625 gimple lab_stmt;
8b11009b 1626 void **elt;
a9b77cd1
ZD
1627
1628 if (bb->flags & BB_RTL)
1629 return block_label (bb);
1630
8b11009b
ZD
1631 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1632 if (elt)
ae50c0cb 1633 return (rtx) *elt;
8b11009b
ZD
1634
1635 /* Find the tree label if it is present. */
b8698a0f 1636
726a989a 1637 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
a9b77cd1 1638 {
726a989a
RB
1639 lab_stmt = gsi_stmt (gsi);
1640 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
a9b77cd1
ZD
1641 break;
1642
726a989a 1643 lab = gimple_label_label (lab_stmt);
a9b77cd1
ZD
1644 if (DECL_NONLOCAL (lab))
1645 break;
1646
1647 return label_rtx (lab);
1648 }
1649
8b11009b
ZD
1650 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1651 *elt = gen_label_rtx ();
ae50c0cb 1652 return (rtx) *elt;
a9b77cd1
ZD
1653}
1654
726a989a 1655
529ff441
MM
1656/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1657 of a basic block where we just expanded the conditional at the end,
315adeda
MM
1658 possibly clean up the CFG and instruction sequence. LAST is the
1659 last instruction before the just emitted jump sequence. */
529ff441
MM
1660
1661static void
315adeda 1662maybe_cleanup_end_of_block (edge e, rtx last)
529ff441
MM
1663{
1664 /* Special case: when jumpif decides that the condition is
1665 trivial it emits an unconditional jump (and the necessary
1666 barrier). But we still have two edges, the fallthru one is
1667 wrong. purge_dead_edges would clean this up later. Unfortunately
1668 we have to insert insns (and split edges) before
1669 find_many_sub_basic_blocks and hence before purge_dead_edges.
1670 But splitting edges might create new blocks which depend on the
1671 fact that if there are two edges there's no barrier. So the
1672 barrier would get lost and verify_flow_info would ICE. Instead
1673 of auditing all edge splitters to care for the barrier (which
1674 normally isn't there in a cleaned CFG), fix it here. */
1675 if (BARRIER_P (get_last_insn ()))
1676 {
529ff441
MM
1677 rtx insn;
1678 remove_edge (e);
 1679 /* Now we have a single successor block; if we have insns to
 1680 insert on the remaining edge we will potentially insert
 1681 them at the end of this block (if the dest block isn't feasible)
1682 in order to avoid splitting the edge. This insertion will take
1683 place in front of the last jump. But we might have emitted
1684 multiple jumps (conditional and one unconditional) to the
1685 same destination. Inserting in front of the last one then
1686 is a problem. See PR 40021. We fix this by deleting all
1687 jumps except the last unconditional one. */
1688 insn = PREV_INSN (get_last_insn ());
1689 /* Make sure we have an unconditional jump. Otherwise we're
1690 confused. */
1691 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
315adeda 1692 for (insn = PREV_INSN (insn); insn != last;)
529ff441
MM
1693 {
1694 insn = PREV_INSN (insn);
1695 if (JUMP_P (NEXT_INSN (insn)))
1696 delete_insn (NEXT_INSN (insn));
1697 }
1698 }
1699}
1700
726a989a 1701/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
80c7a9eb
RH
1702 Returns a new basic block if we've terminated the current basic
1703 block and created a new one. */
1704
1705static basic_block
726a989a 1706expand_gimple_cond (basic_block bb, gimple stmt)
80c7a9eb
RH
1707{
1708 basic_block new_bb, dest;
1709 edge new_edge;
1710 edge true_edge;
1711 edge false_edge;
b7211528 1712 rtx last2, last;
28ed065e
MM
1713 enum tree_code code;
1714 tree op0, op1;
1715
1716 code = gimple_cond_code (stmt);
1717 op0 = gimple_cond_lhs (stmt);
1718 op1 = gimple_cond_rhs (stmt);
1719 /* We're sometimes presented with such code:
1720 D.123_1 = x < y;
1721 if (D.123_1 != 0)
1722 ...
1723 This would expand to two comparisons which then later might
1724 be cleaned up by combine. But some pattern matchers like if-conversion
1725 work better when there's only one compare, so make up for this
 1726 here as a special exception if TER would have made the same change. */
1727 if (gimple_cond_single_var_p (stmt)
1728 && SA.values
1729 && TREE_CODE (op0) == SSA_NAME
1730 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1731 {
1732 gimple second = SSA_NAME_DEF_STMT (op0);
e83f4b68 1733 if (gimple_code (second) == GIMPLE_ASSIGN)
28ed065e 1734 {
e83f4b68
MM
1735 enum tree_code code2 = gimple_assign_rhs_code (second);
1736 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1737 {
1738 code = code2;
1739 op0 = gimple_assign_rhs1 (second);
1740 op1 = gimple_assign_rhs2 (second);
1741 }
1742 /* If jumps are cheap turn some more codes into
1743 jumpy sequences. */
1744 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1745 {
1746 if ((code2 == BIT_AND_EXPR
1747 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1748 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1749 || code2 == TRUTH_AND_EXPR)
1750 {
1751 code = TRUTH_ANDIF_EXPR;
1752 op0 = gimple_assign_rhs1 (second);
1753 op1 = gimple_assign_rhs2 (second);
1754 }
1755 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1756 {
1757 code = TRUTH_ORIF_EXPR;
1758 op0 = gimple_assign_rhs1 (second);
1759 op1 = gimple_assign_rhs2 (second);
1760 }
1761 }
28ed065e
MM
1762 }
1763 }
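  /* A sketch of the "jumpy sequence" case above, on hypothetical GIMPLE
     made up only for illustration:
	D.123_1 = a_2 & b_3;	/* 1-bit precision operands.  */
	if (D.123_1 != 0) goto <L1>; else goto <L2>;
     is expanded as if it had been written
	if (a_2 != 0 && b_3 != 0) goto <L1>; else goto <L2>;
     i.e. CODE becomes TRUTH_ANDIF_EXPR and jumpif_1/jumpifnot_1 emit a
     short-circuit jump sequence instead of materializing the AND.  */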
b7211528
SB
1764
1765 last2 = last = get_last_insn ();
80c7a9eb
RH
1766
1767 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
726a989a 1768 if (gimple_has_location (stmt))
80c7a9eb 1769 {
726a989a
RB
1770 set_curr_insn_source_location (gimple_location (stmt));
1771 set_curr_insn_block (gimple_block (stmt));
80c7a9eb
RH
1772 }
1773
1774 /* These flags have no purpose in RTL land. */
1775 true_edge->flags &= ~EDGE_TRUE_VALUE;
1776 false_edge->flags &= ~EDGE_FALSE_VALUE;
1777
1778 /* We can either have a pure conditional jump with one fallthru edge or
1779 two-way jump that needs to be decomposed into two basic blocks. */
a9b77cd1 1780 if (false_edge->dest == bb->next_bb)
80c7a9eb 1781 {
40e90eac
JJ
1782 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1783 true_edge->probability);
726a989a 1784 maybe_dump_rtl_for_gimple_stmt (stmt, last);
a9b77cd1 1785 if (true_edge->goto_locus)
7241571e
JJ
1786 {
1787 set_curr_insn_source_location (true_edge->goto_locus);
1788 set_curr_insn_block (true_edge->goto_block);
1789 true_edge->goto_locus = curr_insn_locator ();
1790 }
1791 true_edge->goto_block = NULL;
a9b77cd1 1792 false_edge->flags |= EDGE_FALLTHRU;
315adeda 1793 maybe_cleanup_end_of_block (false_edge, last);
80c7a9eb
RH
1794 return NULL;
1795 }
a9b77cd1 1796 if (true_edge->dest == bb->next_bb)
80c7a9eb 1797 {
40e90eac
JJ
1798 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1799 false_edge->probability);
726a989a 1800 maybe_dump_rtl_for_gimple_stmt (stmt, last);
a9b77cd1 1801 if (false_edge->goto_locus)
7241571e
JJ
1802 {
1803 set_curr_insn_source_location (false_edge->goto_locus);
1804 set_curr_insn_block (false_edge->goto_block);
1805 false_edge->goto_locus = curr_insn_locator ();
1806 }
1807 false_edge->goto_block = NULL;
a9b77cd1 1808 true_edge->flags |= EDGE_FALLTHRU;
315adeda 1809 maybe_cleanup_end_of_block (true_edge, last);
80c7a9eb
RH
1810 return NULL;
1811 }
80c7a9eb 1812
40e90eac
JJ
1813 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1814 true_edge->probability);
80c7a9eb 1815 last = get_last_insn ();
7241571e
JJ
1816 if (false_edge->goto_locus)
1817 {
1818 set_curr_insn_source_location (false_edge->goto_locus);
1819 set_curr_insn_block (false_edge->goto_block);
1820 false_edge->goto_locus = curr_insn_locator ();
1821 }
1822 false_edge->goto_block = NULL;
a9b77cd1 1823 emit_jump (label_rtx_for_bb (false_edge->dest));
80c7a9eb
RH
1824
1825 BB_END (bb) = last;
1826 if (BARRIER_P (BB_END (bb)))
1827 BB_END (bb) = PREV_INSN (BB_END (bb));
1828 update_bb_for_insn (bb);
1829
1830 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1831 dest = false_edge->dest;
1832 redirect_edge_succ (false_edge, new_bb);
1833 false_edge->flags |= EDGE_FALLTHRU;
1834 new_bb->count = false_edge->count;
1835 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1836 new_edge = make_edge (new_bb, dest, 0);
1837 new_edge->probability = REG_BR_PROB_BASE;
1838 new_edge->count = new_bb->count;
1839 if (BARRIER_P (BB_END (new_bb)))
1840 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1841 update_bb_for_insn (new_bb);
1842
726a989a 1843 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
c22cacf3 1844
7787b4aa
JJ
1845 if (true_edge->goto_locus)
1846 {
1847 set_curr_insn_source_location (true_edge->goto_locus);
1848 set_curr_insn_block (true_edge->goto_block);
1849 true_edge->goto_locus = curr_insn_locator ();
1850 }
1851 true_edge->goto_block = NULL;
1852
80c7a9eb
RH
1853 return new_bb;
1854}
1855
28ed065e
MM
1856/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1857 statement STMT. */
1858
1859static void
1860expand_call_stmt (gimple stmt)
1861{
1862 tree exp;
1863 tree lhs = gimple_call_lhs (stmt);
28ed065e 1864 size_t i;
e23817b3
RG
1865 bool builtin_p;
1866 tree decl;
28ed065e
MM
1867
1868 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1869
1870 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
e23817b3
RG
1871 decl = gimple_call_fndecl (stmt);
1872 builtin_p = decl && DECL_BUILT_IN (decl);
1873
28ed065e
MM
1874 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1875 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1876
1877 for (i = 0; i < gimple_call_num_args (stmt); i++)
e23817b3
RG
1878 {
1879 tree arg = gimple_call_arg (stmt, i);
1880 gimple def;
 1881 /* TER substitutes addresses into arguments of builtin functions so we have a
1882 chance to infer more correct alignment information. See PR39954. */
1883 if (builtin_p
1884 && TREE_CODE (arg) == SSA_NAME
1885 && (def = get_gimple_for_ssa_name (arg))
1886 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1887 arg = gimple_assign_rhs1 (def);
1888 CALL_EXPR_ARG (exp, i) = arg;
1889 }
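  /* A hypothetical GIMPLE fragment illustrating the substitution above
     (the names are made up):
	tmp_1 = &buf[0];
	__builtin_memcpy (dst_2, tmp_1, 32);
     With tmp_1 TERed to its defining ADDR_EXPR, the argument we build is
     &buf[0] itself, so the expander can derive the alignment of BUF
     rather than seeing an opaque SSA name (see PR39954).  */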
28ed065e 1890
93f28ca7 1891 if (gimple_has_side_effects (stmt))
28ed065e
MM
1892 TREE_SIDE_EFFECTS (exp) = 1;
1893
93f28ca7 1894 if (gimple_call_nothrow_p (stmt))
28ed065e
MM
1895 TREE_NOTHROW (exp) = 1;
1896
1897 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1898 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1899 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1900 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1901 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1902 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1903 TREE_BLOCK (exp) = gimple_block (stmt);
1904
28ed065e
MM
1905 if (lhs)
1906 expand_assignment (lhs, exp, false);
1907 else
1908 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
1909}
1910
1911/* A subroutine of expand_gimple_stmt, expanding one gimple statement
1912 STMT that doesn't require special handling for outgoing edges. That
1913 is no tailcalls and no GIMPLE_COND. */
1914
1915static void
1916expand_gimple_stmt_1 (gimple stmt)
1917{
1918 tree op0;
1919 switch (gimple_code (stmt))
1920 {
1921 case GIMPLE_GOTO:
1922 op0 = gimple_goto_dest (stmt);
1923 if (TREE_CODE (op0) == LABEL_DECL)
1924 expand_goto (op0);
1925 else
1926 expand_computed_goto (op0);
1927 break;
1928 case GIMPLE_LABEL:
1929 expand_label (gimple_label_label (stmt));
1930 break;
1931 case GIMPLE_NOP:
1932 case GIMPLE_PREDICT:
1933 break;
28ed065e
MM
1934 case GIMPLE_SWITCH:
1935 expand_case (stmt);
1936 break;
1937 case GIMPLE_ASM:
1938 expand_asm_stmt (stmt);
1939 break;
1940 case GIMPLE_CALL:
1941 expand_call_stmt (stmt);
1942 break;
1943
1944 case GIMPLE_RETURN:
1945 op0 = gimple_return_retval (stmt);
1946
1947 if (op0 && op0 != error_mark_node)
1948 {
1949 tree result = DECL_RESULT (current_function_decl);
1950
1951 /* If we are not returning the current function's RESULT_DECL,
1952 build an assignment to it. */
1953 if (op0 != result)
1954 {
1955 /* I believe that a function's RESULT_DECL is unique. */
1956 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1957
1958 /* ??? We'd like to use simply expand_assignment here,
1959 but this fails if the value is of BLKmode but the return
1960 decl is a register. expand_return has special handling
1961 for this combination, which eventually should move
1962 to common code. See comments there. Until then, let's
1963 build a modify expression :-/ */
1964 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1965 result, op0);
1966 }
1967 }
1968 if (!op0)
1969 expand_null_return ();
1970 else
1971 expand_return (op0);
1972 break;
1973
1974 case GIMPLE_ASSIGN:
1975 {
1976 tree lhs = gimple_assign_lhs (stmt);
1977
1978 /* Tree expand used to fiddle with |= and &= of two bitfield
1979 COMPONENT_REFs here. This can't happen with gimple, the LHS
1980 of binary assigns must be a gimple reg. */
1981
1982 if (TREE_CODE (lhs) != SSA_NAME
1983 || get_gimple_rhs_class (gimple_expr_code (stmt))
1984 == GIMPLE_SINGLE_RHS)
1985 {
1986 tree rhs = gimple_assign_rhs1 (stmt);
1987 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
1988 == GIMPLE_SINGLE_RHS);
1989 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
1990 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
1991 expand_assignment (lhs, rhs,
1992 gimple_assign_nontemporal_move_p (stmt));
1993 }
1994 else
1995 {
1996 rtx target, temp;
1997 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
1998 struct separate_ops ops;
1999 bool promoted = false;
2000
2001 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2002 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2003 promoted = true;
2004
2005 ops.code = gimple_assign_rhs_code (stmt);
2006 ops.type = TREE_TYPE (lhs);
2007 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2008 {
0354c0c7
BS
2009 case GIMPLE_TERNARY_RHS:
2010 ops.op2 = gimple_assign_rhs3 (stmt);
2011 /* Fallthru */
28ed065e
MM
2012 case GIMPLE_BINARY_RHS:
2013 ops.op1 = gimple_assign_rhs2 (stmt);
2014 /* Fallthru */
2015 case GIMPLE_UNARY_RHS:
2016 ops.op0 = gimple_assign_rhs1 (stmt);
2017 break;
2018 default:
2019 gcc_unreachable ();
2020 }
2021 ops.location = gimple_location (stmt);
2022
2023 /* If we want to use a nontemporal store, force the value to
2024 register first. If we store into a promoted register,
2025 don't directly expand to target. */
2026 temp = nontemporal || promoted ? NULL_RTX : target;
2027 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2028 EXPAND_NORMAL);
2029
2030 if (temp == target)
2031 ;
2032 else if (promoted)
2033 {
4e18a7d4 2034 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
28ed065e
MM
2035 /* If TEMP is a VOIDmode constant, use convert_modes to make
2036 sure that we properly convert it. */
2037 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2038 {
2039 temp = convert_modes (GET_MODE (target),
2040 TYPE_MODE (ops.type),
4e18a7d4 2041 temp, unsignedp);
28ed065e 2042 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4e18a7d4 2043 GET_MODE (target), temp, unsignedp);
28ed065e
MM
2044 }
2045
4e18a7d4 2046 convert_move (SUBREG_REG (target), temp, unsignedp);
28ed065e
MM
2047 }
2048 else if (nontemporal && emit_storent_insn (target, temp))
2049 ;
2050 else
2051 {
2052 temp = force_operand (temp, target);
2053 if (temp != target)
2054 emit_move_insn (target, temp);
2055 }
2056 }
2057 }
2058 break;
2059
2060 default:
2061 gcc_unreachable ();
2062 }
2063}
2064
2065/* Expand one gimple statement STMT and return the last RTL instruction
2066 before any of the newly generated ones.
2067
2068 In addition to generating the necessary RTL instructions this also
2069 sets REG_EH_REGION notes if necessary and sets the current source
2070 location for diagnostics. */
2071
2072static rtx
2073expand_gimple_stmt (gimple stmt)
2074{
1d65f45c 2075 int lp_nr = 0;
28ed065e
MM
2076 rtx last = NULL;
2077 location_t saved_location = input_location;
2078
2079 last = get_last_insn ();
2080
2081 /* If this is an expression of some kind and it has an associated line
2082 number, then emit the line number before expanding the expression.
2083
2084 We need to save and restore the file and line information so that
2085 errors discovered during expansion are emitted with the right
 2086 information. It would be better if the diagnostic routines
2087 used the file/line information embedded in the tree nodes rather
2088 than globals. */
2089 gcc_assert (cfun);
2090
2091 if (gimple_has_location (stmt))
2092 {
2093 input_location = gimple_location (stmt);
2094 set_curr_insn_source_location (input_location);
2095
2096 /* Record where the insns produced belong. */
2097 set_curr_insn_block (gimple_block (stmt));
2098 }
2099
2100 expand_gimple_stmt_1 (stmt);
2101 /* Free any temporaries used to evaluate this statement. */
2102 free_temp_slots ();
2103
2104 input_location = saved_location;
2105
2106 /* Mark all insns that may trap. */
1d65f45c
RH
2107 lp_nr = lookup_stmt_eh_lp (stmt);
2108 if (lp_nr)
28ed065e
MM
2109 {
2110 rtx insn;
2111 for (insn = next_real_insn (last); insn;
2112 insn = next_real_insn (insn))
2113 {
2114 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2115 /* If we want exceptions for non-call insns, any
2116 may_trap_p instruction may throw. */
2117 && GET_CODE (PATTERN (insn)) != CLOBBER
2118 && GET_CODE (PATTERN (insn)) != USE
1d65f45c
RH
2119 && insn_could_throw_p (insn))
2120 make_reg_eh_region_note (insn, 0, lp_nr);
28ed065e
MM
2121 }
2122 }
2123
2124 return last;
2125}
2126
726a989a 2127/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
224e770b
RH
2128 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2129 generated a tail call (something that might be denied by the ABI
cea49550
RH
2130 rules governing the call; see calls.c).
2131
2132 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2133 can still reach the rest of BB. The case here is __builtin_sqrt,
2134 where the NaN result goes through the external function (with a
2135 tailcall) and the normal result happens via a sqrt instruction. */
80c7a9eb
RH
2136
2137static basic_block
726a989a 2138expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
80c7a9eb 2139{
b7211528 2140 rtx last2, last;
224e770b 2141 edge e;
628f6a4e 2142 edge_iterator ei;
224e770b
RH
2143 int probability;
2144 gcov_type count;
80c7a9eb 2145
28ed065e 2146 last2 = last = expand_gimple_stmt (stmt);
80c7a9eb
RH
2147
2148 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
2149 if (CALL_P (last) && SIBLING_CALL_P (last))
2150 goto found;
80c7a9eb 2151
726a989a 2152 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2153
cea49550 2154 *can_fallthru = true;
224e770b 2155 return NULL;
80c7a9eb 2156
224e770b
RH
2157 found:
2158 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2159 Any instructions emitted here are about to be deleted. */
2160 do_pending_stack_adjust ();
2161
2162 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2163 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2164 EH or abnormal edges, we shouldn't have created a tail call in
2165 the first place. So it seems to me we should just be removing
2166 all edges here, or redirecting the existing fallthru edge to
2167 the exit block. */
2168
224e770b
RH
2169 probability = 0;
2170 count = 0;
224e770b 2171
628f6a4e
BE
2172 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2173 {
224e770b
RH
2174 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2175 {
2176 if (e->dest != EXIT_BLOCK_PTR)
80c7a9eb 2177 {
224e770b
RH
2178 e->dest->count -= e->count;
2179 e->dest->frequency -= EDGE_FREQUENCY (e);
2180 if (e->dest->count < 0)
c22cacf3 2181 e->dest->count = 0;
224e770b 2182 if (e->dest->frequency < 0)
c22cacf3 2183 e->dest->frequency = 0;
80c7a9eb 2184 }
224e770b
RH
2185 count += e->count;
2186 probability += e->probability;
2187 remove_edge (e);
80c7a9eb 2188 }
628f6a4e
BE
2189 else
2190 ei_next (&ei);
80c7a9eb
RH
2191 }
2192
224e770b
RH
2193 /* This is somewhat ugly: the call_expr expander often emits instructions
2194 after the sibcall (to perform the function return). These confuse the
12eff7b7 2195 find_many_sub_basic_blocks code, so we need to get rid of them. */
224e770b 2196 last = NEXT_INSN (last);
341c100f 2197 gcc_assert (BARRIER_P (last));
cea49550
RH
2198
2199 *can_fallthru = false;
224e770b
RH
2200 while (NEXT_INSN (last))
2201 {
 2202 /* For instance, an sqrt builtin expander expands an if with a
 2203 sibcall in the then arm and a label for the else arm. */
2204 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
2205 {
2206 *can_fallthru = true;
2207 break;
2208 }
224e770b
RH
2209 delete_insn (NEXT_INSN (last));
2210 }
2211
2212 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2213 e->probability += probability;
2214 e->count += count;
2215 BB_END (bb) = last;
2216 update_bb_for_insn (bb);
2217
2218 if (NEXT_INSN (last))
2219 {
2220 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2221
2222 last = BB_END (bb);
2223 if (BARRIER_P (last))
2224 BB_END (bb) = PREV_INSN (last);
2225 }
2226
726a989a 2227 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2228
224e770b 2229 return bb;
80c7a9eb
RH
2230}
2231
b5b8b0ac
AO
2232/* Return the difference between the floor and the truncated result of
2233 a signed division by OP1 with remainder MOD. */
2234static rtx
2235floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2236{
2237 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2238 return gen_rtx_IF_THEN_ELSE
2239 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2240 gen_rtx_IF_THEN_ELSE
2241 (mode, gen_rtx_LT (BImode,
2242 gen_rtx_DIV (mode, op1, mod),
2243 const0_rtx),
2244 constm1_rtx, const0_rtx),
2245 const0_rtx);
2246}
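/* A worked example for floor_sdiv_adjust, with illustrative numbers:
   for -7 / 2, truncation gives -3 with remainder MOD == -1; OP1 / MOD
   == 2 / -1 is negative, so the adjustment is -1 and -3 + -1 == -4 ==
   floor (-7 / 2).  When MOD is zero or has the same sign as OP1, the
   truncated and floor results already agree and the adjustment is 0.  */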
2247
2248/* Return the difference between the ceil and the truncated result of
2249 a signed division by OP1 with remainder MOD. */
2250static rtx
2251ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2252{
2253 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2254 return gen_rtx_IF_THEN_ELSE
2255 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2256 gen_rtx_IF_THEN_ELSE
2257 (mode, gen_rtx_GT (BImode,
2258 gen_rtx_DIV (mode, op1, mod),
2259 const0_rtx),
2260 const1_rtx, const0_rtx),
2261 const0_rtx);
2262}
2263
2264/* Return the difference between the ceil and the truncated result of
2265 an unsigned division by OP1 with remainder MOD. */
2266static rtx
2267ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2268{
2269 /* (mod != 0 ? 1 : 0) */
2270 return gen_rtx_IF_THEN_ELSE
2271 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2272 const1_rtx, const0_rtx);
2273}
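/* For instance, for the unsigned division 7 / 2, truncation gives 3 with
   remainder 1, the adjustment is 1 and 3 + 1 == 4 == ceil (7 / 2); an
   exact division has MOD == 0 and no adjustment.  (Illustrative numbers
   only.)  */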
2274
2275/* Return the difference between the rounded and the truncated result
2276 of a signed division by OP1 with remainder MOD. Halfway cases are
2277 rounded away from zero, rather than to the nearest even number. */
2278static rtx
2279round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2280{
2281 /* (abs (mod) >= abs (op1) - abs (mod)
2282 ? (op1 / mod > 0 ? 1 : -1)
2283 : 0) */
2284 return gen_rtx_IF_THEN_ELSE
2285 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2286 gen_rtx_MINUS (mode,
2287 gen_rtx_ABS (mode, op1),
2288 gen_rtx_ABS (mode, mod))),
2289 gen_rtx_IF_THEN_ELSE
2290 (mode, gen_rtx_GT (BImode,
2291 gen_rtx_DIV (mode, op1, mod),
2292 const0_rtx),
2293 const1_rtx, constm1_rtx),
2294 const0_rtx);
2295}
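/* A worked example with illustrative numbers: for -7 / 2, truncation
   gives -3 with remainder MOD == -1.  abs (MOD) == 1 equals
   abs (OP1) - abs (MOD) == 1, so the halfway case is adjusted;
   OP1 / MOD == -2 is not positive, so the adjustment is -1 and the
   result is -3 + -1 == -4, i.e. -3.5 rounded away from zero rather
   than to the nearest even value.  */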
2296
2297/* Return the difference between the rounded and the truncated result
 2298 of an unsigned division by OP1 with remainder MOD. Halfway cases
2299 are rounded away from zero, rather than to the nearest even
2300 number. */
2301static rtx
2302round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2303{
2304 /* (mod >= op1 - mod ? 1 : 0) */
2305 return gen_rtx_IF_THEN_ELSE
2306 (mode, gen_rtx_GE (BImode, mod,
2307 gen_rtx_MINUS (mode, op1, mod)),
2308 const1_rtx, const0_rtx);
2309}
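/* Likewise for the unsigned case (illustrative numbers): for 7 / 2,
   MOD == 1 and OP1 - MOD == 1, so MOD >= OP1 - MOD holds, the
   adjustment is 1 and the rounded result is 3 + 1 == 4.  */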
2310
dda2da58
AO
2311/* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
2312 any rtl. */
2313
2314static rtx
2315convert_debug_memory_address (enum machine_mode mode, rtx x)
2316{
2317 enum machine_mode xmode = GET_MODE (x);
2318
2319#ifndef POINTERS_EXTEND_UNSIGNED
2320 gcc_assert (mode == Pmode);
2321 gcc_assert (xmode == mode || xmode == VOIDmode);
2322#else
2323 gcc_assert (mode == Pmode || mode == ptr_mode);
2324
2325 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2326 return x;
2327
2328 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2329 x = simplify_gen_subreg (mode, x, xmode,
2330 subreg_lowpart_offset
2331 (mode, xmode));
2332 else if (POINTERS_EXTEND_UNSIGNED > 0)
2333 x = gen_rtx_ZERO_EXTEND (mode, x);
2334 else if (!POINTERS_EXTEND_UNSIGNED)
2335 x = gen_rtx_SIGN_EXTEND (mode, x);
2336 else
2337 gcc_unreachable ();
2338#endif /* POINTERS_EXTEND_UNSIGNED */
2339
2340 return x;
2341}
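/* For example, on a (hypothetical) target where ptr_mode is SImode,
   Pmode is DImode and POINTERS_EXTEND_UNSIGNED is 1, an SImode address
   X requested in DImode comes back as (zero_extend:DI X), and no insns
   are emitted in the process.  */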
2342
b5b8b0ac
AO
2343/* Return an RTX equivalent to the value of the tree expression
2344 EXP. */
2345
2346static rtx
2347expand_debug_expr (tree exp)
2348{
2349 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2350 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2351 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
09e881c9 2352 addr_space_t as;
b5b8b0ac
AO
2353
2354 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2355 {
2356 case tcc_expression:
2357 switch (TREE_CODE (exp))
2358 {
2359 case COND_EXPR:
7ece48b1 2360 case DOT_PROD_EXPR:
0354c0c7
BS
2361 case WIDEN_MULT_PLUS_EXPR:
2362 case WIDEN_MULT_MINUS_EXPR:
0f59b812 2363 case FMA_EXPR:
b5b8b0ac
AO
2364 goto ternary;
2365
2366 case TRUTH_ANDIF_EXPR:
2367 case TRUTH_ORIF_EXPR:
2368 case TRUTH_AND_EXPR:
2369 case TRUTH_OR_EXPR:
2370 case TRUTH_XOR_EXPR:
2371 goto binary;
2372
2373 case TRUTH_NOT_EXPR:
2374 goto unary;
2375
2376 default:
2377 break;
2378 }
2379 break;
2380
2381 ternary:
2382 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2383 if (!op2)
2384 return NULL_RTX;
2385 /* Fall through. */
2386
2387 binary:
2388 case tcc_binary:
2389 case tcc_comparison:
2390 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2391 if (!op1)
2392 return NULL_RTX;
2393 /* Fall through. */
2394
2395 unary:
2396 case tcc_unary:
2397 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2398 if (!op0)
2399 return NULL_RTX;
2400 break;
2401
2402 case tcc_type:
2403 case tcc_statement:
2404 gcc_unreachable ();
2405
2406 case tcc_constant:
2407 case tcc_exceptional:
2408 case tcc_declaration:
2409 case tcc_reference:
2410 case tcc_vl_exp:
2411 break;
2412 }
2413
2414 switch (TREE_CODE (exp))
2415 {
2416 case STRING_CST:
2417 if (!lookup_constant_def (exp))
2418 {
e1b243a8
JJ
2419 if (strlen (TREE_STRING_POINTER (exp)) + 1
2420 != (size_t) TREE_STRING_LENGTH (exp))
2421 return NULL_RTX;
b5b8b0ac
AO
2422 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2423 op0 = gen_rtx_MEM (BLKmode, op0);
2424 set_mem_attributes (op0, exp, 0);
2425 return op0;
2426 }
2427 /* Fall through... */
2428
2429 case INTEGER_CST:
2430 case REAL_CST:
2431 case FIXED_CST:
2432 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2433 return op0;
2434
2435 case COMPLEX_CST:
2436 gcc_assert (COMPLEX_MODE_P (mode));
2437 op0 = expand_debug_expr (TREE_REALPART (exp));
b5b8b0ac 2438 op1 = expand_debug_expr (TREE_IMAGPART (exp));
b5b8b0ac
AO
2439 return gen_rtx_CONCAT (mode, op0, op1);
2440
0ca5af51
AO
2441 case DEBUG_EXPR_DECL:
2442 op0 = DECL_RTL_IF_SET (exp);
2443
2444 if (op0)
2445 return op0;
2446
2447 op0 = gen_rtx_DEBUG_EXPR (mode);
e4fb38bd 2448 DEBUG_EXPR_TREE_DECL (op0) = exp;
0ca5af51
AO
2449 SET_DECL_RTL (exp, op0);
2450
2451 return op0;
2452
b5b8b0ac
AO
2453 case VAR_DECL:
2454 case PARM_DECL:
2455 case FUNCTION_DECL:
2456 case LABEL_DECL:
2457 case CONST_DECL:
2458 case RESULT_DECL:
2459 op0 = DECL_RTL_IF_SET (exp);
2460
2461 /* This decl was probably optimized away. */
2462 if (!op0)
e1b243a8
JJ
2463 {
2464 if (TREE_CODE (exp) != VAR_DECL
2465 || DECL_EXTERNAL (exp)
2466 || !TREE_STATIC (exp)
2467 || !DECL_NAME (exp)
0fba566c
JJ
2468 || DECL_HARD_REGISTER (exp)
2469 || mode == VOIDmode)
e1b243a8
JJ
2470 return NULL;
2471
b1aa0655 2472 op0 = make_decl_rtl_for_debug (exp);
e1b243a8
JJ
2473 if (!MEM_P (op0)
2474 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2475 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2476 return NULL;
2477 }
2478 else
2479 op0 = copy_rtx (op0);
b5b8b0ac 2480
06796564
JJ
2481 if (GET_MODE (op0) == BLKmode
2482 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2483 below would ICE. While it is likely a FE bug,
2484 try to be robust here. See PR43166. */
132b4e82
JJ
2485 || mode == BLKmode
2486 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
b5b8b0ac
AO
2487 {
2488 gcc_assert (MEM_P (op0));
2489 op0 = adjust_address_nv (op0, mode, 0);
2490 return op0;
2491 }
2492
2493 /* Fall through. */
2494
2495 adjust_mode:
2496 case PAREN_EXPR:
2497 case NOP_EXPR:
2498 case CONVERT_EXPR:
2499 {
2500 enum machine_mode inner_mode = GET_MODE (op0);
2501
2502 if (mode == inner_mode)
2503 return op0;
2504
2505 if (inner_mode == VOIDmode)
2506 {
2a8e30fb
MM
2507 if (TREE_CODE (exp) == SSA_NAME)
2508 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2509 else
2510 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
2511 if (mode == inner_mode)
2512 return op0;
2513 }
2514
2515 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2516 {
2517 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2518 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2519 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2520 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2521 else
2522 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2523 }
2524 else if (FLOAT_MODE_P (mode))
2525 {
2a8e30fb 2526 gcc_assert (TREE_CODE (exp) != SSA_NAME);
b5b8b0ac
AO
2527 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2528 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2529 else
2530 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2531 }
2532 else if (FLOAT_MODE_P (inner_mode))
2533 {
2534 if (unsignedp)
2535 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2536 else
2537 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2538 }
2539 else if (CONSTANT_P (op0)
2540 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2541 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2542 subreg_lowpart_offset (mode,
2543 inner_mode));
1b47fe3f
JJ
2544 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2545 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2546 : unsignedp)
b5b8b0ac
AO
2547 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2548 else
2549 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2550
2551 return op0;
2552 }
2553
70f34814
RG
2554 case MEM_REF:
2555 /* ??? FIXME. */
2556 if (!integer_zerop (TREE_OPERAND (exp, 1)))
2557 return NULL;
2558 /* Fallthru. */
b5b8b0ac 2559 case INDIRECT_REF:
b5b8b0ac
AO
2560 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2561 if (!op0)
2562 return NULL;
2563
09e881c9 2564 if (POINTER_TYPE_P (TREE_TYPE (exp)))
75421dcd 2565 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
09e881c9 2566 else
75421dcd 2567 as = ADDR_SPACE_GENERIC;
b5b8b0ac
AO
2568
2569 op0 = gen_rtx_MEM (mode, op0);
2570
2571 set_mem_attributes (op0, exp, 0);
09e881c9 2572 set_mem_addr_space (op0, as);
b5b8b0ac
AO
2573
2574 return op0;
2575
2576 case TARGET_MEM_REF:
4d948885
RG
2577 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2578 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
b5b8b0ac
AO
2579 return NULL;
2580
2581 op0 = expand_debug_expr
4e25ca6b 2582 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
b5b8b0ac
AO
2583 if (!op0)
2584 return NULL;
2585
09e881c9 2586 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
b5b8b0ac
AO
2587
2588 op0 = gen_rtx_MEM (mode, op0);
2589
2590 set_mem_attributes (op0, exp, 0);
09e881c9 2591 set_mem_addr_space (op0, as);
b5b8b0ac
AO
2592
2593 return op0;
2594
2595 case ARRAY_REF:
2596 case ARRAY_RANGE_REF:
2597 case COMPONENT_REF:
2598 case BIT_FIELD_REF:
2599 case REALPART_EXPR:
2600 case IMAGPART_EXPR:
2601 case VIEW_CONVERT_EXPR:
2602 {
2603 enum machine_mode mode1;
2604 HOST_WIDE_INT bitsize, bitpos;
2605 tree offset;
2606 int volatilep = 0;
2607 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2608 &mode1, &unsignedp, &volatilep, false);
2609 rtx orig_op0;
2610
4f2a9af8
JJ
2611 if (bitsize == 0)
2612 return NULL;
2613
b5b8b0ac
AO
2614 orig_op0 = op0 = expand_debug_expr (tem);
2615
2616 if (!op0)
2617 return NULL;
2618
2619 if (offset)
2620 {
dda2da58
AO
2621 enum machine_mode addrmode, offmode;
2622
aa847cc8
JJ
2623 if (!MEM_P (op0))
2624 return NULL;
b5b8b0ac 2625
dda2da58
AO
2626 op0 = XEXP (op0, 0);
2627 addrmode = GET_MODE (op0);
2628 if (addrmode == VOIDmode)
2629 addrmode = Pmode;
2630
b5b8b0ac
AO
2631 op1 = expand_debug_expr (offset);
2632 if (!op1)
2633 return NULL;
2634
dda2da58
AO
2635 offmode = GET_MODE (op1);
2636 if (offmode == VOIDmode)
2637 offmode = TYPE_MODE (TREE_TYPE (offset));
2638
2639 if (addrmode != offmode)
2640 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2641 subreg_lowpart_offset (addrmode,
2642 offmode));
2643
2644 /* Don't use offset_address here, we don't need a
2645 recognizable address, and we don't want to generate
2646 code. */
2647 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
b5b8b0ac
AO
2648 }
2649
2650 if (MEM_P (op0))
2651 {
4f2a9af8
JJ
2652 if (mode1 == VOIDmode)
2653 /* Bitfield. */
2654 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
b5b8b0ac
AO
2655 if (bitpos >= BITS_PER_UNIT)
2656 {
2657 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2658 bitpos %= BITS_PER_UNIT;
2659 }
2660 else if (bitpos < 0)
2661 {
4f2a9af8
JJ
2662 HOST_WIDE_INT units
2663 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
b5b8b0ac
AO
2664 op0 = adjust_address_nv (op0, mode1, units);
2665 bitpos += units * BITS_PER_UNIT;
2666 }
2667 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2668 op0 = adjust_address_nv (op0, mode, 0);
2669 else if (GET_MODE (op0) != mode1)
2670 op0 = adjust_address_nv (op0, mode1, 0);
2671 else
2672 op0 = copy_rtx (op0);
2673 if (op0 == orig_op0)
2674 op0 = shallow_copy_rtx (op0);
2675 set_mem_attributes (op0, exp, 0);
2676 }
2677
2678 if (bitpos == 0 && mode == GET_MODE (op0))
2679 return op0;
2680
2d3fc6aa
JJ
2681 if (bitpos < 0)
2682 return NULL;
2683
88c04a5d
JJ
2684 if (GET_MODE (op0) == BLKmode)
2685 return NULL;
2686
b5b8b0ac
AO
2687 if ((bitpos % BITS_PER_UNIT) == 0
2688 && bitsize == GET_MODE_BITSIZE (mode1))
2689 {
2690 enum machine_mode opmode = GET_MODE (op0);
2691
b5b8b0ac
AO
2692 if (opmode == VOIDmode)
2693 opmode = mode1;
2694
2695 /* This condition may hold if we're expanding the address
2696 right past the end of an array that turned out not to
2697 be addressable (i.e., the address was only computed in
2698 debug stmts). The gen_subreg below would rightfully
2699 crash, and the address doesn't really exist, so just
2700 drop it. */
2701 if (bitpos >= GET_MODE_BITSIZE (opmode))
2702 return NULL;
2703
7d5d39bb
JJ
2704 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2705 return simplify_gen_subreg (mode, op0, opmode,
2706 bitpos / BITS_PER_UNIT);
b5b8b0ac
AO
2707 }
2708
2709 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2710 && TYPE_UNSIGNED (TREE_TYPE (exp))
2711 ? SIGN_EXTRACT
2712 : ZERO_EXTRACT, mode,
2713 GET_MODE (op0) != VOIDmode
2714 ? GET_MODE (op0) : mode1,
2715 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2716 }
2717
b5b8b0ac
AO
2718 case ABS_EXPR:
2719 return gen_rtx_ABS (mode, op0);
2720
2721 case NEGATE_EXPR:
2722 return gen_rtx_NEG (mode, op0);
2723
2724 case BIT_NOT_EXPR:
2725 return gen_rtx_NOT (mode, op0);
2726
2727 case FLOAT_EXPR:
2728 if (unsignedp)
2729 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2730 else
2731 return gen_rtx_FLOAT (mode, op0);
2732
2733 case FIX_TRUNC_EXPR:
2734 if (unsignedp)
2735 return gen_rtx_UNSIGNED_FIX (mode, op0);
2736 else
2737 return gen_rtx_FIX (mode, op0);
2738
2739 case POINTER_PLUS_EXPR:
576319a7
DD
2740 /* For the rare target where pointers are not the same size as
2741 size_t, we need to check for mis-matched modes and correct
2742 the addend. */
2743 if (op0 && op1
2744 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2745 && GET_MODE (op0) != GET_MODE (op1))
2746 {
2747 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2748 op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
2749 else
2750 /* We always sign-extend, regardless of the signedness of
2751 the operand, because the operand is always unsigned
2752 here even if the original C expression is signed. */
2753 op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
2754 }
2755 /* Fall through. */
b5b8b0ac
AO
2756 case PLUS_EXPR:
2757 return gen_rtx_PLUS (mode, op0, op1);
2758
2759 case MINUS_EXPR:
2760 return gen_rtx_MINUS (mode, op0, op1);
2761
2762 case MULT_EXPR:
2763 return gen_rtx_MULT (mode, op0, op1);
2764
2765 case RDIV_EXPR:
2766 case TRUNC_DIV_EXPR:
2767 case EXACT_DIV_EXPR:
2768 if (unsignedp)
2769 return gen_rtx_UDIV (mode, op0, op1);
2770 else
2771 return gen_rtx_DIV (mode, op0, op1);
2772
2773 case TRUNC_MOD_EXPR:
2774 if (unsignedp)
2775 return gen_rtx_UMOD (mode, op0, op1);
2776 else
2777 return gen_rtx_MOD (mode, op0, op1);
2778
2779 case FLOOR_DIV_EXPR:
2780 if (unsignedp)
2781 return gen_rtx_UDIV (mode, op0, op1);
2782 else
2783 {
2784 rtx div = gen_rtx_DIV (mode, op0, op1);
2785 rtx mod = gen_rtx_MOD (mode, op0, op1);
2786 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2787 return gen_rtx_PLUS (mode, div, adj);
2788 }
2789
2790 case FLOOR_MOD_EXPR:
2791 if (unsignedp)
2792 return gen_rtx_UMOD (mode, op0, op1);
2793 else
2794 {
2795 rtx mod = gen_rtx_MOD (mode, op0, op1);
2796 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2797 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2798 return gen_rtx_PLUS (mode, mod, adj);
2799 }
2800
2801 case CEIL_DIV_EXPR:
2802 if (unsignedp)
2803 {
2804 rtx div = gen_rtx_UDIV (mode, op0, op1);
2805 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2806 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2807 return gen_rtx_PLUS (mode, div, adj);
2808 }
2809 else
2810 {
2811 rtx div = gen_rtx_DIV (mode, op0, op1);
2812 rtx mod = gen_rtx_MOD (mode, op0, op1);
2813 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2814 return gen_rtx_PLUS (mode, div, adj);
2815 }
2816
2817 case CEIL_MOD_EXPR:
2818 if (unsignedp)
2819 {
2820 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2821 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2822 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2823 return gen_rtx_PLUS (mode, mod, adj);
2824 }
2825 else
2826 {
2827 rtx mod = gen_rtx_MOD (mode, op0, op1);
2828 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2829 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2830 return gen_rtx_PLUS (mode, mod, adj);
2831 }
2832
2833 case ROUND_DIV_EXPR:
2834 if (unsignedp)
2835 {
2836 rtx div = gen_rtx_UDIV (mode, op0, op1);
2837 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2838 rtx adj = round_udiv_adjust (mode, mod, op1);
2839 return gen_rtx_PLUS (mode, div, adj);
2840 }
2841 else
2842 {
2843 rtx div = gen_rtx_DIV (mode, op0, op1);
2844 rtx mod = gen_rtx_MOD (mode, op0, op1);
2845 rtx adj = round_sdiv_adjust (mode, mod, op1);
2846 return gen_rtx_PLUS (mode, div, adj);
2847 }
2848
2849 case ROUND_MOD_EXPR:
2850 if (unsignedp)
2851 {
2852 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2853 rtx adj = round_udiv_adjust (mode, mod, op1);
2854 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2855 return gen_rtx_PLUS (mode, mod, adj);
2856 }
2857 else
2858 {
2859 rtx mod = gen_rtx_MOD (mode, op0, op1);
2860 rtx adj = round_sdiv_adjust (mode, mod, op1);
2861 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2862 return gen_rtx_PLUS (mode, mod, adj);
2863 }
2864
2865 case LSHIFT_EXPR:
2866 return gen_rtx_ASHIFT (mode, op0, op1);
2867
2868 case RSHIFT_EXPR:
2869 if (unsignedp)
2870 return gen_rtx_LSHIFTRT (mode, op0, op1);
2871 else
2872 return gen_rtx_ASHIFTRT (mode, op0, op1);
2873
2874 case LROTATE_EXPR:
2875 return gen_rtx_ROTATE (mode, op0, op1);
2876
2877 case RROTATE_EXPR:
2878 return gen_rtx_ROTATERT (mode, op0, op1);
2879
2880 case MIN_EXPR:
2881 if (unsignedp)
2882 return gen_rtx_UMIN (mode, op0, op1);
2883 else
2884 return gen_rtx_SMIN (mode, op0, op1);
2885
2886 case MAX_EXPR:
2887 if (unsignedp)
2888 return gen_rtx_UMAX (mode, op0, op1);
2889 else
2890 return gen_rtx_SMAX (mode, op0, op1);
2891
2892 case BIT_AND_EXPR:
2893 case TRUTH_AND_EXPR:
2894 return gen_rtx_AND (mode, op0, op1);
2895
2896 case BIT_IOR_EXPR:
2897 case TRUTH_OR_EXPR:
2898 return gen_rtx_IOR (mode, op0, op1);
2899
2900 case BIT_XOR_EXPR:
2901 case TRUTH_XOR_EXPR:
2902 return gen_rtx_XOR (mode, op0, op1);
2903
2904 case TRUTH_ANDIF_EXPR:
2905 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2906
2907 case TRUTH_ORIF_EXPR:
2908 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2909
2910 case TRUTH_NOT_EXPR:
2911 return gen_rtx_EQ (mode, op0, const0_rtx);
2912
2913 case LT_EXPR:
2914 if (unsignedp)
2915 return gen_rtx_LTU (mode, op0, op1);
2916 else
2917 return gen_rtx_LT (mode, op0, op1);
2918
2919 case LE_EXPR:
2920 if (unsignedp)
2921 return gen_rtx_LEU (mode, op0, op1);
2922 else
2923 return gen_rtx_LE (mode, op0, op1);
2924
2925 case GT_EXPR:
2926 if (unsignedp)
2927 return gen_rtx_GTU (mode, op0, op1);
2928 else
2929 return gen_rtx_GT (mode, op0, op1);
2930
2931 case GE_EXPR:
2932 if (unsignedp)
2933 return gen_rtx_GEU (mode, op0, op1);
2934 else
2935 return gen_rtx_GE (mode, op0, op1);
2936
2937 case EQ_EXPR:
2938 return gen_rtx_EQ (mode, op0, op1);
2939
2940 case NE_EXPR:
2941 return gen_rtx_NE (mode, op0, op1);
2942
2943 case UNORDERED_EXPR:
2944 return gen_rtx_UNORDERED (mode, op0, op1);
2945
2946 case ORDERED_EXPR:
2947 return gen_rtx_ORDERED (mode, op0, op1);
2948
2949 case UNLT_EXPR:
2950 return gen_rtx_UNLT (mode, op0, op1);
2951
2952 case UNLE_EXPR:
2953 return gen_rtx_UNLE (mode, op0, op1);
2954
2955 case UNGT_EXPR:
2956 return gen_rtx_UNGT (mode, op0, op1);
2957
2958 case UNGE_EXPR:
2959 return gen_rtx_UNGE (mode, op0, op1);
2960
2961 case UNEQ_EXPR:
2962 return gen_rtx_UNEQ (mode, op0, op1);
2963
2964 case LTGT_EXPR:
2965 return gen_rtx_LTGT (mode, op0, op1);
2966
2967 case COND_EXPR:
2968 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
2969
2970 case COMPLEX_EXPR:
2971 gcc_assert (COMPLEX_MODE_P (mode));
2972 if (GET_MODE (op0) == VOIDmode)
2973 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
2974 if (GET_MODE (op1) == VOIDmode)
2975 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
2976 return gen_rtx_CONCAT (mode, op0, op1);
2977
d02a5a4b
JJ
2978 case CONJ_EXPR:
2979 if (GET_CODE (op0) == CONCAT)
2980 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2981 gen_rtx_NEG (GET_MODE_INNER (mode),
2982 XEXP (op0, 1)));
2983 else
2984 {
2985 enum machine_mode imode = GET_MODE_INNER (mode);
2986 rtx re, im;
2987
2988 if (MEM_P (op0))
2989 {
2990 re = adjust_address_nv (op0, imode, 0);
2991 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
2992 }
2993 else
2994 {
2995 enum machine_mode ifmode = int_mode_for_mode (mode);
2996 enum machine_mode ihmode = int_mode_for_mode (imode);
2997 rtx halfsize;
2998 if (ifmode == BLKmode || ihmode == BLKmode)
2999 return NULL;
3000 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3001 re = op0;
3002 if (mode != ifmode)
3003 re = gen_rtx_SUBREG (ifmode, re, 0);
3004 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3005 if (imode != ihmode)
3006 re = gen_rtx_SUBREG (imode, re, 0);
3007 im = copy_rtx (op0);
3008 if (mode != ifmode)
3009 im = gen_rtx_SUBREG (ifmode, im, 0);
3010 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3011 if (imode != ihmode)
3012 im = gen_rtx_SUBREG (imode, im, 0);
3013 }
3014 im = gen_rtx_NEG (imode, im);
3015 return gen_rtx_CONCAT (mode, re, im);
3016 }
3017
b5b8b0ac
AO
3018 case ADDR_EXPR:
3019 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3020 if (!op0 || !MEM_P (op0))
c8a27c40
JJ
3021 {
3022 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3023 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3024 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3025 && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)))
3026 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3027
3028 if (handled_component_p (TREE_OPERAND (exp, 0)))
3029 {
3030 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3031 tree decl
3032 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3033 &bitoffset, &bitsize, &maxsize);
3034 if ((TREE_CODE (decl) == VAR_DECL
3035 || TREE_CODE (decl) == PARM_DECL
3036 || TREE_CODE (decl) == RESULT_DECL)
3037 && !TREE_ADDRESSABLE (decl)
3038 && (bitoffset % BITS_PER_UNIT) == 0
3039 && bitsize > 0
3040 && bitsize == maxsize)
3041 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3042 bitoffset / BITS_PER_UNIT);
3043 }
3044
3045 return NULL;
3046 }
b5b8b0ac 3047
dda2da58
AO
3048 op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
3049
3050 return op0;
b5b8b0ac
AO
3051
3052 case VECTOR_CST:
3053 exp = build_constructor_from_list (TREE_TYPE (exp),
3054 TREE_VECTOR_CST_ELTS (exp));
3055 /* Fall through. */
3056
3057 case CONSTRUCTOR:
3058 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3059 {
3060 unsigned i;
3061 tree val;
3062
3063 op0 = gen_rtx_CONCATN
3064 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3065
3066 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3067 {
3068 op1 = expand_debug_expr (val);
3069 if (!op1)
3070 return NULL;
3071 XVECEXP (op0, 0, i) = op1;
3072 }
3073
3074 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3075 {
3076 op1 = expand_debug_expr
e8160c9a 3077 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
b5b8b0ac
AO
3078
3079 if (!op1)
3080 return NULL;
3081
3082 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3083 XVECEXP (op0, 0, i) = op1;
3084 }
3085
3086 return op0;
3087 }
3088 else
3089 goto flag_unsupported;
3090
3091 case CALL_EXPR:
3092 /* ??? Maybe handle some builtins? */
3093 return NULL;
3094
3095 case SSA_NAME:
3096 {
2a8e30fb
MM
3097 gimple g = get_gimple_for_ssa_name (exp);
3098 if (g)
3099 {
3100 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3101 if (!op0)
3102 return NULL;
3103 }
3104 else
3105 {
3106 int part = var_to_partition (SA.map, exp);
b5b8b0ac 3107
2a8e30fb
MM
3108 if (part == NO_PARTITION)
3109 return NULL;
b5b8b0ac 3110
2a8e30fb 3111 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
b5b8b0ac 3112
2a8e30fb
MM
3113 op0 = SA.partition_to_pseudo[part];
3114 }
b5b8b0ac
AO
3115 goto adjust_mode;
3116 }
3117
3118 case ERROR_MARK:
3119 return NULL;
3120
7ece48b1
JJ
3121 /* Vector stuff. For most of the codes we don't have rtl codes. */
3122 case REALIGN_LOAD_EXPR:
3123 case REDUC_MAX_EXPR:
3124 case REDUC_MIN_EXPR:
3125 case REDUC_PLUS_EXPR:
3126 case VEC_COND_EXPR:
3127 case VEC_EXTRACT_EVEN_EXPR:
3128 case VEC_EXTRACT_ODD_EXPR:
3129 case VEC_INTERLEAVE_HIGH_EXPR:
3130 case VEC_INTERLEAVE_LOW_EXPR:
3131 case VEC_LSHIFT_EXPR:
3132 case VEC_PACK_FIX_TRUNC_EXPR:
3133 case VEC_PACK_SAT_EXPR:
3134 case VEC_PACK_TRUNC_EXPR:
3135 case VEC_RSHIFT_EXPR:
3136 case VEC_UNPACK_FLOAT_HI_EXPR:
3137 case VEC_UNPACK_FLOAT_LO_EXPR:
3138 case VEC_UNPACK_HI_EXPR:
3139 case VEC_UNPACK_LO_EXPR:
3140 case VEC_WIDEN_MULT_HI_EXPR:
3141 case VEC_WIDEN_MULT_LO_EXPR:
3142 return NULL;
3143
3144 /* Misc codes. */
3145 case ADDR_SPACE_CONVERT_EXPR:
3146 case FIXED_CONVERT_EXPR:
3147 case OBJ_TYPE_REF:
3148 case WITH_SIZE_EXPR:
3149 return NULL;
3150
3151 case DOT_PROD_EXPR:
3152 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3153 && SCALAR_INT_MODE_P (mode))
3154 {
3155 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3156 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3157 else
3158 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3159 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3160 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3161 else
3162 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3163 op0 = gen_rtx_MULT (mode, op0, op1);
3164 return gen_rtx_PLUS (mode, op0, op2);
3165 }
3166 return NULL;
3167
3168 case WIDEN_MULT_EXPR:
0354c0c7
BS
3169 case WIDEN_MULT_PLUS_EXPR:
3170 case WIDEN_MULT_MINUS_EXPR:
7ece48b1
JJ
3171 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3172 && SCALAR_INT_MODE_P (mode))
3173 {
5b58b39b 3174 enum machine_mode inner_mode = GET_MODE (op0);
7ece48b1 3175 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5b58b39b 3176 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
7ece48b1 3177 else
5b58b39b 3178 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
7ece48b1 3179 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5b58b39b 3180 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
7ece48b1 3181 else
5b58b39b 3182 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
0354c0c7
BS
3183 op0 = gen_rtx_MULT (mode, op0, op1);
3184 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3185 return op0;
3186 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3187 return gen_rtx_PLUS (mode, op0, op2);
3188 else
3189 return gen_rtx_MINUS (mode, op2, op0);
7ece48b1
JJ
3190 }
3191 return NULL;
3192
3193 case WIDEN_SUM_EXPR:
3194 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3195 && SCALAR_INT_MODE_P (mode))
3196 {
3197 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3198 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3199 else
3200 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3201 return gen_rtx_PLUS (mode, op0, op1);
3202 }
3203 return NULL;
3204
0f59b812
JJ
3205 case FMA_EXPR:
3206 return gen_rtx_FMA (mode, op0, op1, op2);
3207
b5b8b0ac
AO
3208 default:
3209 flag_unsupported:
3210#ifdef ENABLE_CHECKING
3211 debug_tree (exp);
3212 gcc_unreachable ();
3213#else
3214 return NULL;
3215#endif
3216 }
3217}
3218
3219/* Expand the _LOCs in debug insns. We run this after expanding all
3220 regular insns, so that any variables referenced in the function
3221 will have their DECL_RTLs set. */
3222
3223static void
3224expand_debug_locations (void)
3225{
3226 rtx insn;
3227 rtx last = get_last_insn ();
3228 int save_strict_alias = flag_strict_aliasing;
3229
3230 /* New alias sets while setting up memory attributes cause
 3231 -fcompare-debug failures, even though they don't bring about any
3232 codegen changes. */
3233 flag_strict_aliasing = 0;
3234
3235 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3236 if (DEBUG_INSN_P (insn))
3237 {
3238 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3239 rtx val;
3240 enum machine_mode mode;
3241
3242 if (value == NULL_TREE)
3243 val = NULL_RTX;
3244 else
3245 {
3246 val = expand_debug_expr (value);
3247 gcc_assert (last == get_last_insn ());
3248 }
3249
3250 if (!val)
3251 val = gen_rtx_UNKNOWN_VAR_LOC ();
3252 else
3253 {
3254 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3255
3256 gcc_assert (mode == GET_MODE (val)
3257 || (GET_MODE (val) == VOIDmode
3258 && (CONST_INT_P (val)
3259 || GET_CODE (val) == CONST_FIXED
3260 || GET_CODE (val) == CONST_DOUBLE
3261 || GET_CODE (val) == LABEL_REF)));
3262 }
3263
3264 INSN_VAR_LOCATION_LOC (insn) = val;
3265 }
3266
3267 flag_strict_aliasing = save_strict_alias;
3268}
3269
242229bb
JH
3270/* Expand basic block BB from GIMPLE trees to RTL. */
3271
3272static basic_block
10d22567 3273expand_gimple_basic_block (basic_block bb)
242229bb 3274{
726a989a
RB
3275 gimple_stmt_iterator gsi;
3276 gimple_seq stmts;
3277 gimple stmt = NULL;
242229bb
JH
3278 rtx note, last;
3279 edge e;
628f6a4e 3280 edge_iterator ei;
8b11009b 3281 void **elt;
242229bb
JH
3282
3283 if (dump_file)
726a989a
RB
3284 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3285 bb->index);
3286
3287 /* Note that since we are now transitioning from GIMPLE to RTL, we
3288 cannot use the gsi_*_bb() routines because they expect the basic
3289 block to be in GIMPLE, instead of RTL. Therefore, we need to
3290 access the BB sequence directly. */
3291 stmts = bb_seq (bb);
3292 bb->il.gimple = NULL;
bf08ebeb 3293 rtl_profile_for_bb (bb);
5e2d947c
JH
3294 init_rtl_bb_info (bb);
3295 bb->flags |= BB_RTL;
3296
a9b77cd1
ZD
 3297 /* Remove the RETURN_EXPR if we may fall through to the exit
3298 instead. */
726a989a
RB
3299 gsi = gsi_last (stmts);
3300 if (!gsi_end_p (gsi)
3301 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
a9b77cd1 3302 {
726a989a 3303 gimple ret_stmt = gsi_stmt (gsi);
a9b77cd1
ZD
3304
3305 gcc_assert (single_succ_p (bb));
3306 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3307
3308 if (bb->next_bb == EXIT_BLOCK_PTR
726a989a 3309 && !gimple_return_retval (ret_stmt))
a9b77cd1 3310 {
726a989a 3311 gsi_remove (&gsi, false);
a9b77cd1
ZD
3312 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3313 }
3314 }
3315
726a989a
RB
3316 gsi = gsi_start (stmts);
3317 if (!gsi_end_p (gsi))
8b11009b 3318 {
726a989a
RB
3319 stmt = gsi_stmt (gsi);
3320 if (gimple_code (stmt) != GIMPLE_LABEL)
3321 stmt = NULL;
8b11009b 3322 }
242229bb 3323
8b11009b
ZD
3324 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3325
3326 if (stmt || elt)
242229bb
JH
3327 {
3328 last = get_last_insn ();
3329
8b11009b
ZD
3330 if (stmt)
3331 {
28ed065e 3332 expand_gimple_stmt (stmt);
726a989a 3333 gsi_next (&gsi);
8b11009b
ZD
3334 }
3335
3336 if (elt)
ae50c0cb 3337 emit_label ((rtx) *elt);
242229bb 3338
caf93cb0 3339 /* Java emits line number notes at the top of labels.
c22cacf3 3340 ??? Make this go away once line number notes are obsoleted. */
242229bb 3341 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 3342 if (NOTE_P (BB_HEAD (bb)))
242229bb 3343 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
242229bb 3344 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528 3345
726a989a 3346 maybe_dump_rtl_for_gimple_stmt (stmt, last);
242229bb
JH
3347 }
3348 else
3349 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3350
3351 NOTE_BASIC_BLOCK (note) = bb;
3352
726a989a 3353 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 3354 {
cea49550 3355 basic_block new_bb;
242229bb 3356
b5b8b0ac 3357 stmt = gsi_stmt (gsi);
2a8e30fb
MM
3358
3359 /* If this statement is a non-debug one, and we generate debug
3360 insns, then this one might be the last real use of a TERed
 3361 SSA_NAME, while there are still some debug uses further
 3362 down. Expanding the current SSA name in such further debug
 3363 uses by its RHS might lead to wrong debug info, as coalescing
3364 might make the operands of such RHS be placed into the same
3365 pseudo as something else. Like so:
3366 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3367 use(a_1);
3368 a_2 = ...
3369 #DEBUG ... => a_1
3370 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
 3371 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3372 the write to a_2 would actually have clobbered the place which
3373 formerly held a_0.
3374
3375 So, instead of that, we recognize the situation, and generate
3376 debug temporaries at the last real use of TERed SSA names:
3377 a_1 = a_0 + 1;
3378 #DEBUG #D1 => a_1
3379 use(a_1);
3380 a_2 = ...
3381 #DEBUG ... => #D1
3382 */
3383 if (MAY_HAVE_DEBUG_INSNS
3384 && SA.values
3385 && !is_gimple_debug (stmt))
3386 {
3387 ssa_op_iter iter;
3388 tree op;
3389 gimple def;
3390
3391 location_t sloc = get_curr_insn_source_location ();
3392 tree sblock = get_curr_insn_block ();
3393
3394 /* Look for SSA names that have their last use here (TERed
3395 names always have only one real use). */
3396 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3397 if ((def = get_gimple_for_ssa_name (op)))
3398 {
3399 imm_use_iterator imm_iter;
3400 use_operand_p use_p;
3401 bool have_debug_uses = false;
3402
3403 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3404 {
3405 if (gimple_debug_bind_p (USE_STMT (use_p)))
3406 {
3407 have_debug_uses = true;
3408 break;
3409 }
3410 }
3411
3412 if (have_debug_uses)
3413 {
3414 /* OP is a TERed SSA name, with DEF it's defining
3415 statement, and where OP is used in further debug
3416 instructions. Generate a debug temporary, and
3417 replace all uses of OP in debug insns with that
3418 temporary. */
3419 gimple debugstmt;
3420 tree value = gimple_assign_rhs_to_tree (def);
3421 tree vexpr = make_node (DEBUG_EXPR_DECL);
3422 rtx val;
3423 enum machine_mode mode;
3424
3425 set_curr_insn_source_location (gimple_location (def));
3426 set_curr_insn_block (gimple_block (def));
3427
3428 DECL_ARTIFICIAL (vexpr) = 1;
3429 TREE_TYPE (vexpr) = TREE_TYPE (value);
3430 if (DECL_P (value))
3431 mode = DECL_MODE (value);
3432 else
3433 mode = TYPE_MODE (TREE_TYPE (value));
3434 DECL_MODE (vexpr) = mode;
3435
3436 val = gen_rtx_VAR_LOCATION
3437 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3438
3439 val = emit_debug_insn (val);
3440
3441 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3442 {
3443 if (!gimple_debug_bind_p (debugstmt))
3444 continue;
3445
3446 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3447 SET_USE (use_p, vexpr);
3448
3449 update_stmt (debugstmt);
3450 }
3451 }
3452 }
3453 set_curr_insn_source_location (sloc);
3454 set_curr_insn_block (sblock);
3455 }
3456
a5883ba0 3457 currently_expanding_gimple_stmt = stmt;
b5b8b0ac 3458
242229bb
JH
3459 /* Expand this statement, then evaluate the resulting RTL and
3460 fixup the CFG accordingly. */
726a989a 3461 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 3462 {
726a989a 3463 new_bb = expand_gimple_cond (bb, stmt);
cea49550
RH
3464 if (new_bb)
3465 return new_bb;
3466 }
b5b8b0ac
AO
3467 else if (gimple_debug_bind_p (stmt))
3468 {
3469 location_t sloc = get_curr_insn_source_location ();
3470 tree sblock = get_curr_insn_block ();
3471 gimple_stmt_iterator nsi = gsi;
3472
3473 for (;;)
3474 {
3475 tree var = gimple_debug_bind_get_var (stmt);
3476 tree value;
3477 rtx val;
3478 enum machine_mode mode;
3479
3480 if (gimple_debug_bind_has_value_p (stmt))
3481 value = gimple_debug_bind_get_value (stmt);
3482 else
3483 value = NULL_TREE;
3484
3485 last = get_last_insn ();
3486
3487 set_curr_insn_source_location (gimple_location (stmt));
3488 set_curr_insn_block (gimple_block (stmt));
3489
3490 if (DECL_P (var))
3491 mode = DECL_MODE (var);
3492 else
3493 mode = TYPE_MODE (TREE_TYPE (var));
3494
3495 val = gen_rtx_VAR_LOCATION
3496 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3497
3498 val = emit_debug_insn (val);
3499
3500 if (dump_file && (dump_flags & TDF_DETAILS))
3501 {
3502 /* We can't dump the insn with a TREE where an RTX
3503 is expected. */
3504 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3505 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3506 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3507 }
3508
2a8e30fb
MM
 3509 /* In order not to generate too many debug temporaries,
 3510 we delink all uses in debug statements we have already expanded.
 3511 Therefore debug statements between the definition and the real
 3512 use of TERed SSA names will continue to use the SSA name,
 3513 and will not be replaced with debug temps. */
3514 delink_stmt_imm_use (stmt);
3515
b5b8b0ac
AO
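 /* Advance to the next statement; keep looping as long as it is another debug bind so a run of consecutive binds is expanded in one go.  */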
3516 gsi = nsi;
3517 gsi_next (&nsi);
3518 if (gsi_end_p (nsi))
3519 break;
3520 stmt = gsi_stmt (nsi);
3521 if (!gimple_debug_bind_p (stmt))
3522 break;
3523 }
3524
3525 set_curr_insn_source_location (sloc);
3526 set_curr_insn_block (sblock);
3527 }
80c7a9eb 3528 else
242229bb 3529 {
726a989a 3530 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
cea49550
RH
3531 {
3532 bool can_fallthru;
3533 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3534 if (new_bb)
3535 {
3536 if (can_fallthru)
3537 bb = new_bb;
3538 else
3539 return new_bb;
3540 }
3541 }
4d7a65ea 3542 else
b7211528 3543 {
4e3825db 3544 def_operand_p def_p;
4e3825db
MM
3545 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3546
3547 if (def_p != NULL)
3548 {
3549 /* Ignore this stmt if it is in the list of
3550 replaceable expressions. */
3551 if (SA.values
b8698a0f 3552 && bitmap_bit_p (SA.values,
e97809c6 3553 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4e3825db
MM
3554 continue;
3555 }
28ed065e 3556 last = expand_gimple_stmt (stmt);
726a989a 3557 maybe_dump_rtl_for_gimple_stmt (stmt, last);
b7211528 3558 }
242229bb
JH
3559 }
3560 }
3561
a5883ba0
MM
3562 currently_expanding_gimple_stmt = NULL;
3563
7241571e 3564 /* Expand implicit goto and convert goto_locus. */
a9b77cd1
ZD
3565 FOR_EACH_EDGE (e, ei, bb->succs)
3566 {
7241571e
JJ
3567 if (e->goto_locus && e->goto_block)
3568 {
3569 set_curr_insn_source_location (e->goto_locus);
3570 set_curr_insn_block (e->goto_block);
3571 e->goto_locus = curr_insn_locator ();
3572 }
3573 e->goto_block = NULL;
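 /* A fallthru edge whose destination is not the textually next block needs an explicit jump; emit it and clear the fallthru flag.  */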
3574 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3575 {
3576 emit_jump (label_rtx_for_bb (e->dest));
3577 e->flags &= ~EDGE_FALLTHRU;
3578 }
a9b77cd1
ZD
3579 }
3580
ae761c45
AH
 3581 /* Expanded RTL can create a jump in the last instruction of the block.
 3582 Such a jump might later be assumed to be a jump to the successor and break edge insertion.
 3583 We need to insert a dummy move to prevent this. PR41440. */
3584 if (single_succ_p (bb)
3585 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3586 && (last = get_last_insn ())
3587 && JUMP_P (last))
3588 {
3589 rtx dummy = gen_reg_rtx (SImode);
3590 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3591 }
3592
242229bb
JH
3593 do_pending_stack_adjust ();
3594
3f117656 3595 /* Find the block tail. The last insn in the block is the insn
242229bb
JH
3596 before a barrier and/or table jump insn. */
3597 last = get_last_insn ();
4b4bf941 3598 if (BARRIER_P (last))
242229bb
JH
3599 last = PREV_INSN (last);
3600 if (JUMP_TABLE_DATA_P (last))
3601 last = PREV_INSN (PREV_INSN (last));
3602 BB_END (bb) = last;
caf93cb0 3603
242229bb 3604 update_bb_for_insn (bb);
80c7a9eb 3605
242229bb
JH
3606 return bb;
3607}
3608
3609
3610/* Create a basic block for initialization code. */
3611
3612static basic_block
3613construct_init_block (void)
3614{
3615 basic_block init_block, first_block;
fd44f634
JH
3616 edge e = NULL;
3617 int flags;
275a4187 3618
fd44f634
JH
3619 /* Multiple entry points not supported yet. */
3620 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
5e2d947c
JH
3621 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3622 init_rtl_bb_info (EXIT_BLOCK_PTR);
3623 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3624 EXIT_BLOCK_PTR->flags |= BB_RTL;
242229bb 3625
fd44f634 3626 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
275a4187 3627
fd44f634
JH
 3628 /* When the entry edge points to the first basic block, we don't need a jump;
 3629 otherwise we have to jump to the proper target. */
3630 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3631 {
726a989a 3632 tree label = gimple_block_label (e->dest);
fd44f634
JH
3633
3634 emit_jump (label_rtx (label));
3635 flags = 0;
275a4187 3636 }
fd44f634
JH
3637 else
3638 flags = EDGE_FALLTHRU;
242229bb
JH
3639
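 /* Package the insns emitted so far (skipping the initial note) into a new basic block placed right after the entry block.  */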
3640 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3641 get_last_insn (),
3642 ENTRY_BLOCK_PTR);
3643 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3644 init_block->count = ENTRY_BLOCK_PTR->count;
3645 if (e)
3646 {
3647 first_block = e->dest;
3648 redirect_edge_succ (e, init_block);
fd44f634 3649 e = make_edge (init_block, first_block, flags);
242229bb
JH
3650 }
3651 else
3652 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3653 e->probability = REG_BR_PROB_BASE;
3654 e->count = ENTRY_BLOCK_PTR->count;
3655
3656 update_bb_for_insn (init_block);
3657 return init_block;
3658}
3659
55e092c4
JH
3660/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3661 found in the block tree. */
3662
3663static void
3664set_block_levels (tree block, int level)
3665{
3666 while (block)
3667 {
3668 BLOCK_NUMBER (block) = level;
3669 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3670 block = BLOCK_CHAIN (block);
3671 }
3672}
242229bb
JH
3673
3674/* Create a block containing landing pads and similar stuff. */
3675
3676static void
3677construct_exit_block (void)
3678{
3679 rtx head = get_last_insn ();
3680 rtx end;
3681 basic_block exit_block;
628f6a4e
BE
3682 edge e, e2;
3683 unsigned ix;
3684 edge_iterator ei;
071a42f9 3685 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
242229bb 3686
bf08ebeb
JH
3687 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3688
caf93cb0 3689 /* Make sure the locus is set to the end of the function, so that
242229bb 3690 epilogue line numbers and warnings are set properly. */
6773e15f 3691 if (cfun->function_end_locus != UNKNOWN_LOCATION)
242229bb
JH
3692 input_location = cfun->function_end_locus;
3693
3694 /* The following insns belong to the top scope. */
55e092c4 3695 set_curr_insn_block (DECL_INITIAL (current_function_decl));
242229bb 3696
242229bb
JH
3697 /* Generate rtl for function exit. */
3698 expand_function_end ();
3699
3700 end = get_last_insn ();
3701 if (head == end)
3702 return;
071a42f9
JH
 3703 /* While emitting the function end we could have moved the end of the
 3704 last basic block; restore it. */
3705 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4b4bf941 3706 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 3707 head = NEXT_INSN (head);
80c7a9eb
RH
3708 exit_block = create_basic_block (NEXT_INSN (head), end,
3709 EXIT_BLOCK_PTR->prev_bb);
242229bb
JH
3710 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3711 exit_block->count = EXIT_BLOCK_PTR->count;
628f6a4e
BE
3712
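 /* Redirect every non-abnormal edge that currently enters EXIT_BLOCK_PTR to the new exit_block; redirected edges drop out of the predecessor vector, so IX is only advanced past the abnormal edges that stay.  */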
3713 ix = 0;
3714 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
242229bb 3715 {
8fb790fd 3716 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
242229bb 3717 if (!(e->flags & EDGE_ABNORMAL))
628f6a4e
BE
3718 redirect_edge_succ (e, exit_block);
3719 else
3720 ix++;
242229bb 3721 }
628f6a4e 3722
242229bb
JH
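 /* Create the fallthru edge from the new exit_block and give it the exit count minus whatever still flows in through the remaining (abnormal) predecessor edges.  */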
3723 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3724 e->probability = REG_BR_PROB_BASE;
3725 e->count = EXIT_BLOCK_PTR->count;
628f6a4e 3726 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
242229bb
JH
3727 if (e2 != e)
3728 {
c22cacf3 3729 e->count -= e2->count;
242229bb
JH
3730 exit_block->count -= e2->count;
3731 exit_block->frequency -= EDGE_FREQUENCY (e2);
3732 }
3733 if (e->count < 0)
3734 e->count = 0;
3735 if (exit_block->count < 0)
3736 exit_block->count = 0;
3737 if (exit_block->frequency < 0)
3738 exit_block->frequency = 0;
3739 update_bb_for_insn (exit_block);
3740}
3741
c22cacf3 3742/* Helper function for discover_nonconstant_array_refs.
a1b23b2f
UW
3743 Look for ARRAY_REF nodes with non-constant indexes and mark them
3744 addressable. */
3745
3746static tree
3747discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3748 void *data ATTRIBUTE_UNUSED)
3749{
3750 tree t = *tp;
3751
3752 if (IS_TYPE_OR_DECL_P (t))
3753 *walk_subtrees = 0;
3754 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3755 {
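 /* Peel off reference components whose offsets are compile-time invariant; if what remains is still an ARRAY_REF or ARRAY_RANGE_REF, its index is non-constant, so the base must be kept in memory.  */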
3756 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3757 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3758 && (!TREE_OPERAND (t, 2)
3759 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3760 || (TREE_CODE (t) == COMPONENT_REF
3761 && (!TREE_OPERAND (t,2)
3762 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3763 || TREE_CODE (t) == BIT_FIELD_REF
3764 || TREE_CODE (t) == REALPART_EXPR
3765 || TREE_CODE (t) == IMAGPART_EXPR
3766 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1043771b 3767 || CONVERT_EXPR_P (t))
a1b23b2f
UW
3768 t = TREE_OPERAND (t, 0);
3769
3770 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3771 {
3772 t = get_base_address (t);
6f11d690
RG
3773 if (t && DECL_P (t)
3774 && DECL_MODE (t) != BLKmode)
a1b23b2f
UW
3775 TREE_ADDRESSABLE (t) = 1;
3776 }
3777
3778 *walk_subtrees = 0;
3779 }
3780
3781 return NULL_TREE;
3782}
3783
3784/* RTL expansion is not able to compile array references with variable
 3785 offsets for arrays stored in a single register. Discover such
 3786 expressions and mark the variables as addressable to avoid this
 3787 scenario. */
3788
3789static void
3790discover_nonconstant_array_refs (void)
3791{
3792 basic_block bb;
726a989a 3793 gimple_stmt_iterator gsi;
a1b23b2f
UW
3794
3795 FOR_EACH_BB (bb)
726a989a
RB
3796 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3797 {
3798 gimple stmt = gsi_stmt (gsi);
aa847cc8
JJ
3799 if (!is_gimple_debug (stmt))
3800 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
726a989a 3801 }
a1b23b2f
UW
3802}
3803
2e3f842f
L
3804/* This function sets crtl->args.internal_arg_pointer to a virtual
 3805 register if DRAP is needed. The local register allocator will replace
3806 virtual_incoming_args_rtx with the virtual register. */
3807
3808static void
3809expand_stack_alignment (void)
3810{
3811 rtx drap_rtx;
e939805b 3812 unsigned int preferred_stack_boundary;
2e3f842f
L
3813
3814 if (! SUPPORTS_STACK_ALIGNMENT)
3815 return;
b8698a0f 3816
2e3f842f
L
3817 if (cfun->calls_alloca
3818 || cfun->has_nonlocal_label
3819 || crtl->has_nonlocal_goto)
3820 crtl->need_drap = true;
3821
890b9b96
L
3822 /* Call update_stack_boundary here again to update incoming stack
3823 boundary. It may set incoming stack alignment to a different
3824 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
3825 use the minimum incoming stack alignment to check if it is OK
3826 to perform sibcall optimization since sibcall optimization will
3827 only align the outgoing stack to incoming stack boundary. */
3828 if (targetm.calls.update_stack_boundary)
3829 targetm.calls.update_stack_boundary ();
3830
3831 /* The incoming stack frame has to be aligned at least at
3832 parm_stack_boundary. */
3833 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
2e3f842f 3834
2e3f842f
L
3835 /* Update crtl->stack_alignment_estimated and use it later to align
3836 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
3837 exceptions since callgraph doesn't collect incoming stack alignment
3838 in this case. */
8f4f502f 3839 if (cfun->can_throw_non_call_exceptions
2e3f842f
L
3840 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
3841 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3842 else
3843 preferred_stack_boundary = crtl->preferred_stack_boundary;
3844 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
3845 crtl->stack_alignment_estimated = preferred_stack_boundary;
3846 if (preferred_stack_boundary > crtl->stack_alignment_needed)
3847 crtl->stack_alignment_needed = preferred_stack_boundary;
3848
890b9b96
L
3849 gcc_assert (crtl->stack_alignment_needed
3850 <= crtl->stack_alignment_estimated);
3851
2e3f842f 3852 crtl->stack_realign_needed
e939805b 3853 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
d2d93c32 3854 crtl->stack_realign_tried = crtl->stack_realign_needed;
2e3f842f
L
3855
3856 crtl->stack_realign_processed = true;
3857
3858 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
3859 alignment. */
3860 gcc_assert (targetm.calls.get_drap_rtx != NULL);
b8698a0f 3861 drap_rtx = targetm.calls.get_drap_rtx ();
2e3f842f 3862
d015f7cc
L
3863 /* stack_realign_drap and drap_rtx must match. */
3864 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
3865
2e3f842f
L
3866 /* Do nothing if NULL is returned, which means DRAP is not needed. */
3867 if (NULL != drap_rtx)
3868 {
3869 crtl->args.internal_arg_pointer = drap_rtx;
3870
3871 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
3872 needed. */
3873 fixup_tail_calls ();
3874 }
3875}
3876
242229bb
JH
3877/* Translate the intermediate representation contained in the CFG
3878 from GIMPLE trees to RTL.
3879
3880 We do conversion per basic block and preserve/update the tree CFG.
3881 This implies we have to do some magic as the CFG can simultaneously
3882 consist of basic blocks containing RTL and GIMPLE trees. This can
61ada8ae 3883 confuse the CFG hooks, so be careful not to manipulate the CFG during
242229bb
JH
3884 the expansion. */
3885
c2924966 3886static unsigned int
726a989a 3887gimple_expand_cfg (void)
242229bb
JH
3888{
3889 basic_block bb, init_block;
3890 sbitmap blocks;
0ef90296
ZD
3891 edge_iterator ei;
3892 edge e;
3a42502d 3893 rtx var_seq;
4e3825db
MM
3894 unsigned i;
3895
f029db69 3896 timevar_push (TV_OUT_OF_SSA);
4e3825db 3897 rewrite_out_of_ssa (&SA);
f029db69 3898 timevar_pop (TV_OUT_OF_SSA);
4e3825db
MM
3899 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
3900 sizeof (rtx));
242229bb 3901
4586b4ca
SB
3902 /* Some backends want to know that we are expanding to RTL. */
3903 currently_expanding_to_rtl = 1;
3904
bf08ebeb
JH
3905 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
3906
55e092c4 3907 insn_locators_alloc ();
fe8a7779 3908 if (!DECL_IS_BUILTIN (current_function_decl))
1751ecd6
AH
3909 {
3910 /* Eventually, all FEs should explicitly set function_start_locus. */
3911 if (cfun->function_start_locus == UNKNOWN_LOCATION)
3912 set_curr_insn_source_location
3913 (DECL_SOURCE_LOCATION (current_function_decl));
3914 else
3915 set_curr_insn_source_location (cfun->function_start_locus);
3916 }
55e092c4
JH
3917 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3918 prologue_locator = curr_insn_locator ();
3919
2b21299c
JJ
3920#ifdef INSN_SCHEDULING
3921 init_sched_attrs ();
3922#endif
3923
55e092c4
JH
 3924 /* Make sure the first insn is a note even if we don't want line numbers.
 3925 This makes sure the first insn will never be deleted.
 3926 Also, final expects a note to appear there. */
3927 emit_note (NOTE_INSN_DELETED);
6429e3be 3928
a1b23b2f
UW
3929 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
3930 discover_nonconstant_array_refs ();
3931
e41b2a33 3932 targetm.expand_to_rtl_hook ();
cb91fab0 3933 crtl->stack_alignment_needed = STACK_BOUNDARY;
2e3f842f 3934 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
890b9b96 3935 crtl->stack_alignment_estimated = 0;
cb91fab0
JH
3936 crtl->preferred_stack_boundary = STACK_BOUNDARY;
3937 cfun->cfg->max_jumptable_ents = 0;
3938
727a31fa 3939 /* Expand the variables recorded during gimple lowering. */
f029db69 3940 timevar_push (TV_VAR_EXPAND);
3a42502d
RH
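 /* Collect the insns generated while expanding variables in their own sequence; if non-empty, it is inserted before parm_birth_insn once the parameter setup exists.  */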
3941 start_sequence ();
3942
242229bb 3943 expand_used_vars ();
3a42502d
RH
3944
3945 var_seq = get_insns ();
3946 end_sequence ();
f029db69 3947 timevar_pop (TV_VAR_EXPAND);
242229bb 3948
7d69de61
RH
3949 /* Honor stack protection warnings. */
3950 if (warn_stack_protect)
3951 {
e3b5732b 3952 if (cfun->calls_alloca)
b8698a0f 3953 warning (OPT_Wstack_protector,
3b123595
SB
3954 "stack protector not protecting local variables: "
3955 "variable length buffer");
cb91fab0 3956 if (has_short_buffer && !crtl->stack_protect_guard)
b8698a0f 3957 warning (OPT_Wstack_protector,
3b123595
SB
3958 "stack protector not protecting function: "
3959 "all local arrays are less than %d bytes long",
7d69de61
RH
3960 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
3961 }
3962
242229bb 3963 /* Set up parameters and prepare for return, for the function. */
b79c5284 3964 expand_function_start (current_function_decl);
242229bb 3965
3a42502d
RH
3966 /* If we emitted any instructions for setting up the variables,
3967 emit them before the FUNCTION_START note. */
3968 if (var_seq)
3969 {
3970 emit_insn_before (var_seq, parm_birth_insn);
3971
3972 /* In expand_function_end we'll insert the alloca save/restore
 3973 before parm_birth_insn. We've just inserted an alloca call.
3974 Adjust the pointer to match. */
3975 parm_birth_insn = var_seq;
3976 }
3977
4e3825db
MM
3978 /* Now that we also have the parameter RTXs, copy them over to our
3979 partitions. */
3980 for (i = 0; i < SA.map->num_partitions; i++)
3981 {
3982 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
3983
3984 if (TREE_CODE (var) != VAR_DECL
3985 && !SA.partition_to_pseudo[i])
3986 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
3987 gcc_assert (SA.partition_to_pseudo[i]);
eb7adebc
MM
3988
3989 /* If this decl was marked as living in multiple places, reset
3990 this now to NULL. */
3991 if (DECL_RTL_IF_SET (var) == pc_rtx)
3992 SET_DECL_RTL (var, NULL);
3993
4e3825db
MM
 3994 /* Some RTL parts really want to look at DECL_RTL(x) when x
 3995 was a decl marked in REG_ATTR or MEM_ATTR. We could use
 3996 SET_DECL_RTL here to make this available, but that would mean
 3997 selecting one of the potentially many RTLs for one DECL. Instead
 3998 of doing that we simply reset the MEM_EXPR of the RTL in question,
 3999 so nobody can get at it and hence nobody can call DECL_RTL on it. */
4000 if (!DECL_RTL_SET_P (var))
4001 {
4002 if (MEM_P (SA.partition_to_pseudo[i]))
4003 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4004 }
4005 }
4006
242229bb
JH
4007 /* If this function is `main', emit a call to `__main'
4008 to run global initializers, etc. */
4009 if (DECL_NAME (current_function_decl)
4010 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4011 && DECL_FILE_SCOPE_P (current_function_decl))
4012 expand_main_function ();
4013
7d69de61
RH
4014 /* Initialize the stack_protect_guard field. This must happen after the
4015 call to __main (if any) so that the external decl is initialized. */
cb91fab0 4016 if (crtl->stack_protect_guard)
7d69de61
RH
4017 stack_protect_prologue ();
4018
4e3825db
MM
4019 expand_phi_nodes (&SA);
4020
3fbd86b1 4021 /* Register rtl specific functions for cfg. */
242229bb
JH
4022 rtl_register_cfg_hooks ();
4023
4024 init_block = construct_init_block ();
4025
0ef90296 4026 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4e3825db 4027 remaining edges later. */
0ef90296
ZD
4028 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4029 e->flags &= ~EDGE_EXECUTABLE;
4030
8b11009b 4031 lab_rtx_for_bb = pointer_map_create ();
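 /* Expand each basic block in turn; expand_gimple_basic_block may split the block (e.g. for conditionals and tail calls) and returns the last block it produced, which is where iteration continues.  */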
242229bb 4032 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
10d22567 4033 bb = expand_gimple_basic_block (bb);
bf08ebeb 4034
b5b8b0ac
AO
4035 if (MAY_HAVE_DEBUG_INSNS)
4036 expand_debug_locations ();
4037
4e3825db 4038 execute_free_datastructures ();
f029db69 4039 timevar_push (TV_OUT_OF_SSA);
4e3825db 4040 finish_out_of_ssa (&SA);
f029db69 4041 timevar_pop (TV_OUT_OF_SSA);
4e3825db 4042
f029db69 4043 timevar_push (TV_POST_EXPAND);
91753e21
RG
4044 /* We are no longer in SSA form. */
4045 cfun->gimple_df->in_ssa_p = false;
4046
bf08ebeb
JH
 4047 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
 4048 conservatively to true until they are all profile-aware. */
8b11009b 4049 pointer_map_destroy (lab_rtx_for_bb);
cb91fab0 4050 free_histograms ();
242229bb
JH
4051
4052 construct_exit_block ();
55e092c4
JH
4053 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4054 insn_locators_finalize ();
242229bb 4055
1d65f45c 4056 /* Zap the tree EH table. */
e8a2a782 4057 set_eh_throw_stmt_table (cfun, NULL);
242229bb
JH
4058
4059 rebuild_jump_labels (get_insns ());
242229bb 4060
4e3825db
MM
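 /* Commit the insn sequences queued on edges, typically the copies generated for PHI nodes by out-of-SSA. Committing an edge's insns may replace that edge in the successor vector, so the iterator is only advanced when nothing was committed.  */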
4061 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4062 {
4063 edge e;
4064 edge_iterator ei;
4065 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4066 {
4067 if (e->insns.r)
4068 commit_one_edge_insertion (e);
4069 else
4070 ei_next (&ei);
4071 }
4072 }
4073
4074 /* We're done expanding trees to RTL. */
4075 currently_expanding_to_rtl = 0;
4076
4077 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4078 {
4079 edge e;
4080 edge_iterator ei;
4081 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4082 {
4083 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4084 e->flags &= ~EDGE_EXECUTABLE;
4085
4086 /* At the moment not all abnormal edges match the RTL
4087 representation. It is safe to remove them here as
4088 find_many_sub_basic_blocks will rediscover them.
4089 In the future we should get this fixed properly. */
4090 if ((e->flags & EDGE_ABNORMAL)
4091 && !(e->flags & EDGE_SIBCALL))
4092 remove_edge (e);
4093 else
4094 ei_next (&ei);
4095 }
4096 }
4097
242229bb
JH
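 /* Expansion can leave control flow inside what the CFG still considers a single block; mark all blocks and let find_many_sub_basic_blocks split them and recreate the edges, including the abnormal ones removed above.  */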
4098 blocks = sbitmap_alloc (last_basic_block);
4099 sbitmap_ones (blocks);
4100 find_many_sub_basic_blocks (blocks);
242229bb 4101 sbitmap_free (blocks);
4e3825db 4102 purge_all_dead_edges ();
242229bb
JH
4103
4104 compact_blocks ();
2e3f842f
L
4105
4106 expand_stack_alignment ();
4107
242229bb 4108#ifdef ENABLE_CHECKING
62e5bf5d 4109 verify_flow_info ();
242229bb 4110#endif
9f8628ba
PB
4111
4112 /* There's no need to defer outputting this function any more; we
4113 know we want to output it. */
4114 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4115
4116 /* Now that we're done expanding trees to RTL, we shouldn't have any
4117 more CONCATs anywhere. */
4118 generating_concat_p = 0;
4119
b7211528
SB
4120 if (dump_file)
4121 {
4122 fprintf (dump_file,
4123 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4124 /* And the pass manager will dump RTL for us. */
4125 }
ef330312
PB
4126
4127 /* If we're emitting a nested function, make sure its parent gets
4128 emitted as well. Doing otherwise confuses debug info. */
c22cacf3 4129 {
ef330312
PB
4130 tree parent;
4131 for (parent = DECL_CONTEXT (current_function_decl);
c22cacf3
MS
4132 parent != NULL_TREE;
4133 parent = get_containing_scope (parent))
ef330312 4134 if (TREE_CODE (parent) == FUNCTION_DECL)
c22cacf3 4135 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
ef330312 4136 }
c22cacf3 4137
ef330312
PB
4138 /* We are now committed to emitting code for this function. Do any
 4139 preparation, such as emitting abstract debug info for the inline
 4140 function before it gets mangled by optimization. */
4141 if (cgraph_function_possibly_inlined_p (current_function_decl))
4142 (*debug_hooks->outlining_inline_function) (current_function_decl);
4143
4144 TREE_ASM_WRITTEN (current_function_decl) = 1;
4bb1e037
AP
4145
4146 /* After expanding, the return labels are no longer needed. */
4147 return_label = NULL;
4148 naked_return_label = NULL;
55e092c4
JH
4149 /* Tag the blocks with a depth number so that change_scope can find
4150 the common parent easily. */
4151 set_block_levels (DECL_INITIAL (cfun->decl), 0);
bf08ebeb 4152 default_rtl_profile ();
f029db69 4153 timevar_pop (TV_POST_EXPAND);
c2924966 4154 return 0;
242229bb
JH
4155}
4156
e3b5732b 4157struct rtl_opt_pass pass_expand =
242229bb 4158{
8ddbbcae 4159 {
e3b5732b 4160 RTL_PASS,
c22cacf3 4161 "expand", /* name */
242229bb 4162 NULL, /* gate */
726a989a 4163 gimple_expand_cfg, /* execute */
242229bb
JH
4164 NULL, /* sub */
4165 NULL, /* next */
4166 0, /* static_pass_number */
c22cacf3 4167 TV_EXPAND, /* tv_id */
688a482d
RG
4168 PROP_ssa | PROP_gimple_leh | PROP_cfg
4169 | PROP_gimple_lcx, /* properties_required */
242229bb 4170 PROP_rtl, /* properties_provided */
4e3825db
MM
4171 PROP_ssa | PROP_trees, /* properties_destroyed */
4172 TODO_verify_ssa | TODO_verify_flow
4173 | TODO_verify_stmts, /* todo_flags_start */
4174 TODO_dump_func
4175 | TODO_ggc_collect /* todo_flags_finish */
8ddbbcae 4176 }
242229bb 4177};