/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}
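
/* Illustrative example (editorial note, not used by the compiler): for a
   GIMPLE assignment "x_1 = a_2 + b_3" the expression code PLUS_EXPR is
   classified as GIMPLE_BINARY_RHS, so the function above rebuilds the RHS
   as build2 (PLUS_EXPR, TREE_TYPE (x_1), a_2, b_3).  A plain copy
   "x_1 = a_2" is GIMPLE_SINGLE_RHS and returns rhs1 itself, first copying
   the node when a different location or block must be attached to it.  */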

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't satisfy given the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
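
/* Worked example (editorial sketch, assuming a downward-growing frame,
   frame_phase == 0 and a power-of-two ALIGN): with frame_offset == -20,
   size == 8 and align == 16, new_frame_offset starts at -28; masking
   with -align rounds it down to -32, which is the 16-byte aligned slot
   returned, and the next allocation continues below it.  The "& -align"
   trick relies on two's complement: -align has zeros in exactly the low
   bits that must be cleared.  */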

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  v = &stack_vars[stack_vars_num];

  v->decl = decl;
  v->offset = 0;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
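
/* Editorial note with a small example: the zero-size bump above matters
   for objects such as GNU C's zero-sized "struct empty {} a, b;" when
   both are simultaneously live; giving each a nominal size of one byte
   guarantees the layout code below never assigns them the same frame
   offset, preserving &a != &b for distinct objects.  */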

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}
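
/* Illustrative sketch (editorial, not compiled into the pass): the
   predicate above answers true for a union reached through any mix of
   arrays and structs, e.g.

     union u { short s; int i; };
     struct outer { int tag; union u payload[4]; };   -> true
     struct plain { int a; double b; };               -> false

   which is exactly the shape involved in the PR25654 store-reordering
   problem handled by add_alias_set_conflicts below.  */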

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union-containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing  */
  if (sizea < sizeb)
    return -1;
  if (sizea > sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}

/* If the points-to solution *PI points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced, except
             for -O0 where we are preserving even unreferenced variables.  */
          gcc_assert (DECL_P (decl)
                      && (!optimize
                          || referenced_var_lookup (DECL_UID (decl))));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block easy to lay out within
   the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
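
/* Editorial sketch of the list splice above, with hypothetical indices:
   if partition A is the chain 0 -> 2 -> EOC and partition B is the chain
   1 -> 3 -> EOC, then after union_stack_vars (0, 1, 16) every element of
   B has its offset raised by 16 and its representative set to 0, and the
   chains are spliced into 0 -> 1 -> 3 -> 2 -> EOC (B is inserted right
   after the representative, not appended at the end).  */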

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/
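
/* Worked example of the pseudocode above (editorial, hypothetical sizes,
   alignment refinement ignored for brevity): with mutually non-conflicting
   objects of sizes 16, 8, 7 and 1, taking A as the 16-byte object gives
   S = 16; it absorbs the 8-byte object at offset 0 (S becomes 8), the
   7-byte object at offset 8 (S becomes 1), and the 1-byte object at
   offset 15, packing all four into one 16-byte partition instead of four
   separate stack slots.  */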

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;
        }
    }

  update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
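
/* Editorial worked example for "align = offset & -offset" above: the
   expression isolates the lowest set bit of OFFSET, which is the largest
   power of two dividing it.  For offset == 24 (binary 11000), -offset is
   ...101000 in two's complement, so 24 & -24 == 8: a slot 24 bytes from
   an aligned base is guaranteed 8-byte aligned but no more.  An offset
   of 0 yields 0, which is why the align == 0 case falls back to
   BASE_ALIGN.  */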

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (decl))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   stack_vars[j].offset + offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = get_decl_align_unit (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that the in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
         realign decision is made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at contained sub-levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
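
/* Editorial example of the classification, assuming the historical default
   of --param ssp-buffer-size=8: "char buf[4]" yields
   SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, "char buf[64]" yields
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, "int v[16]" yields just
   SPCT_HAS_ARRAY, and "struct { char name[64]; }" yields
   SPCT_HAS_AGGREGATE | SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, which
   stack_protect_decl_phase below maps to phases 1 or 2.  */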

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   accounting for the sizes of used variables, without actually expanding
   any of them.  Used by estimated_stack_frame_size below.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  tree t;
  HOST_WIDE_INT size = 0;

  /* Account for all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  /* Account for all variables at contained sub-levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  return size;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  unsigned ix;
  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
    TREE_USED (t) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}

/* Make a fair guess for the size of the stack frame of the decl
   passed.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (tree decl)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  unsigned ix;
  tree old_cur_fun_decl = current_function_decl;
  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (cfun, ix, var)
    {
      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  pop_cfun ();
  current_function_decl = old_cur_fun_decl;
  return size;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *maybe_local_decls = NULL;
  unsigned i;
  unsigned len;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
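
  /* Editorial worked example (hypothetical target values): with a
     PREFERRED_STACK_BOUNDARY of 128 bits, align is 16 bytes.  A target
     with STARTING_FRAME_OFFSET == 8 gives off == 8 and frame_phase == 8,
     so (frame_offset + frame_phase) % 16 == 0 holds at the initial
     frame offset, matching the invariant documented at frame_phase's
     declaration.  The common case STARTING_FRAME_OFFSET == 0 yields
     frame_phase == 0.  */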

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = VEC_length (tree, cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            VEC_safe_push (tree, heap, maybe_local_decls, var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!VEC_empty (tree, cfun->local_decls))
    VEC_block_remove (tree, cfun->local_decls, 0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }
  VEC_free (tree, heap, maybe_local_decls);

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}


/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}
1655
726a989a 1656
529ff441
MM
1657/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1658 of a basic block where we just expanded the conditional at the end,
315adeda
MM
1659 possibly clean up the CFG and instruction sequence. LAST is the
1660 last instruction before the just emitted jump sequence. */
529ff441
MM
1661
1662static void
315adeda 1663maybe_cleanup_end_of_block (edge e, rtx last)
529ff441
MM
1664{
1665 /* Special case: when jumpif decides that the condition is
1666 trivial, it emits an unconditional jump (and the necessary
1667 barrier). But we still have two edges, and the fallthru one is
1668 wrong; purge_dead_edges would clean this up later. Unfortunately
1669 we have to insert insns (and split edges) before
1670 find_many_sub_basic_blocks and hence before purge_dead_edges.
1671 But splitting edges might create new blocks which depend on the
1672 fact that if there are two edges there's no barrier. So the
1673 barrier would get lost and verify_flow_info would ICE. Instead
1674 of auditing all edge splitters to care for the barrier (which
1675 normally isn't there in a cleaned CFG), fix it here. */
1676 if (BARRIER_P (get_last_insn ()))
1677 {
529ff441
MM
1678 rtx insn;
1679 remove_edge (e);
1680 /* Now we have a single successor block; if we have insns to
1681 insert on the remaining edge, we will potentially insert
1682 them at the end of this block (if the dest block isn't feasible)
1683 in order to avoid splitting the edge. This insertion will take
1684 place in front of the last jump. But we might have emitted
1685 multiple jumps (conditional ones and one unconditional) to the
1686 same destination. Inserting in front of the last one is then
1687 a problem. See PR 40021. We fix this by deleting all
1688 jumps except the last unconditional one. */
1689 insn = PREV_INSN (get_last_insn ());
1690 /* Make sure we have an unconditional jump. Otherwise we're
1691 confused. */
1692 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
315adeda 1693 for (insn = PREV_INSN (insn); insn != last;)
529ff441
MM
1694 {
1695 insn = PREV_INSN (insn);
1696 if (JUMP_P (NEXT_INSN (insn)))
90eb3e33
JJ
1697 {
1698 if (!any_condjump_p (insn))
1699 {
1700 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1701 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1702 }
1703 delete_insn (NEXT_INSN (insn));
1704 }
529ff441
MM
1705 }
1706 }
1707}
1708
726a989a 1709/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
80c7a9eb
RH
1710 Returns a new basic block if we've terminated the current basic
1711 block and created a new one. */
1712
1713static basic_block
726a989a 1714expand_gimple_cond (basic_block bb, gimple stmt)
80c7a9eb
RH
1715{
1716 basic_block new_bb, dest;
1717 edge new_edge;
1718 edge true_edge;
1719 edge false_edge;
b7211528 1720 rtx last2, last;
28ed065e
MM
1721 enum tree_code code;
1722 tree op0, op1;
1723
1724 code = gimple_cond_code (stmt);
1725 op0 = gimple_cond_lhs (stmt);
1726 op1 = gimple_cond_rhs (stmt);
1727 /* We're sometimes presented with code such as:
1728 D.123_1 = x < y;
1729 if (D.123_1 != 0)
1730 ...
1731 This would expand to two comparisons which then later might
1732 be cleaned up by combine. But some pattern matchers like if-conversion
1733 work better when there's only one compare, so make up for this
1734 here as a special exception if TER would have made the same change. */
1735 if (gimple_cond_single_var_p (stmt)
1736 && SA.values
1737 && TREE_CODE (op0) == SSA_NAME
1738 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1739 {
1740 gimple second = SSA_NAME_DEF_STMT (op0);
e83f4b68 1741 if (gimple_code (second) == GIMPLE_ASSIGN)
28ed065e 1742 {
e83f4b68
MM
1743 enum tree_code code2 = gimple_assign_rhs_code (second);
1744 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1745 {
1746 code = code2;
1747 op0 = gimple_assign_rhs1 (second);
1748 op1 = gimple_assign_rhs2 (second);
1749 }
1750 /* If jumps are cheap, turn some more codes into
1751 jumpy sequences. */
1752 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1753 {
1754 if ((code2 == BIT_AND_EXPR
1755 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1756 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1757 || code2 == TRUTH_AND_EXPR)
1758 {
1759 code = TRUTH_ANDIF_EXPR;
1760 op0 = gimple_assign_rhs1 (second);
1761 op1 = gimple_assign_rhs2 (second);
1762 }
1763 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1764 {
1765 code = TRUTH_ORIF_EXPR;
1766 op0 = gimple_assign_rhs1 (second);
1767 op1 = gimple_assign_rhs2 (second);
1768 }
1769 }
28ed065e
MM
1770 }
1771 }
b7211528
SB
1772
1773 last2 = last = get_last_insn ();
80c7a9eb
RH
1774
1775 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
726a989a 1776 if (gimple_has_location (stmt))
80c7a9eb 1777 {
726a989a
RB
1778 set_curr_insn_source_location (gimple_location (stmt));
1779 set_curr_insn_block (gimple_block (stmt));
80c7a9eb
RH
1780 }
1781
1782 /* These flags have no purpose in RTL land. */
1783 true_edge->flags &= ~EDGE_TRUE_VALUE;
1784 false_edge->flags &= ~EDGE_FALSE_VALUE;
1785
1786 /* We can either have a pure conditional jump with one fallthru edge or
1787 a two-way jump that needs to be decomposed into two basic blocks. */
a9b77cd1 1788 if (false_edge->dest == bb->next_bb)
80c7a9eb 1789 {
40e90eac
JJ
1790 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1791 true_edge->probability);
726a989a 1792 maybe_dump_rtl_for_gimple_stmt (stmt, last);
a9b77cd1 1793 if (true_edge->goto_locus)
7241571e
JJ
1794 {
1795 set_curr_insn_source_location (true_edge->goto_locus);
1796 set_curr_insn_block (true_edge->goto_block);
1797 true_edge->goto_locus = curr_insn_locator ();
1798 }
1799 true_edge->goto_block = NULL;
a9b77cd1 1800 false_edge->flags |= EDGE_FALLTHRU;
315adeda 1801 maybe_cleanup_end_of_block (false_edge, last);
80c7a9eb
RH
1802 return NULL;
1803 }
a9b77cd1 1804 if (true_edge->dest == bb->next_bb)
80c7a9eb 1805 {
40e90eac
JJ
1806 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1807 false_edge->probability);
726a989a 1808 maybe_dump_rtl_for_gimple_stmt (stmt, last);
a9b77cd1 1809 if (false_edge->goto_locus)
7241571e
JJ
1810 {
1811 set_curr_insn_source_location (false_edge->goto_locus);
1812 set_curr_insn_block (false_edge->goto_block);
1813 false_edge->goto_locus = curr_insn_locator ();
1814 }
1815 false_edge->goto_block = NULL;
a9b77cd1 1816 true_edge->flags |= EDGE_FALLTHRU;
315adeda 1817 maybe_cleanup_end_of_block (true_edge, last);
80c7a9eb
RH
1818 return NULL;
1819 }
80c7a9eb 1820
40e90eac
JJ
1821 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1822 true_edge->probability);
80c7a9eb 1823 last = get_last_insn ();
7241571e
JJ
1824 if (false_edge->goto_locus)
1825 {
1826 set_curr_insn_source_location (false_edge->goto_locus);
1827 set_curr_insn_block (false_edge->goto_block);
1828 false_edge->goto_locus = curr_insn_locator ();
1829 }
1830 false_edge->goto_block = NULL;
a9b77cd1 1831 emit_jump (label_rtx_for_bb (false_edge->dest));
80c7a9eb
RH
1832
1833 BB_END (bb) = last;
1834 if (BARRIER_P (BB_END (bb)))
1835 BB_END (bb) = PREV_INSN (BB_END (bb));
1836 update_bb_for_insn (bb);
1837
1838 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1839 dest = false_edge->dest;
1840 redirect_edge_succ (false_edge, new_bb);
1841 false_edge->flags |= EDGE_FALLTHRU;
1842 new_bb->count = false_edge->count;
1843 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1844 new_edge = make_edge (new_bb, dest, 0);
1845 new_edge->probability = REG_BR_PROB_BASE;
1846 new_edge->count = new_bb->count;
1847 if (BARRIER_P (BB_END (new_bb)))
1848 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1849 update_bb_for_insn (new_bb);
1850
726a989a 1851 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
c22cacf3 1852
7787b4aa
JJ
1853 if (true_edge->goto_locus)
1854 {
1855 set_curr_insn_source_location (true_edge->goto_locus);
1856 set_curr_insn_block (true_edge->goto_block);
1857 true_edge->goto_locus = curr_insn_locator ();
1858 }
1859 true_edge->goto_block = NULL;
1860
80c7a9eb
RH
1861 return new_bb;
1862}
1863
28ed065e
MM
1864/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1865 statement STMT. */
1866
1867static void
1868expand_call_stmt (gimple stmt)
1869{
1870 tree exp;
1871 tree lhs = gimple_call_lhs (stmt);
28ed065e 1872 size_t i;
e23817b3
RG
1873 bool builtin_p;
1874 tree decl;
28ed065e
MM
1875
1876 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1877
1878 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
e23817b3
RG
1879 decl = gimple_call_fndecl (stmt);
1880 builtin_p = decl && DECL_BUILT_IN (decl);
1881
28ed065e
MM
1882 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1883 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1884
1885 for (i = 0; i < gimple_call_num_args (stmt); i++)
e23817b3
RG
1886 {
1887 tree arg = gimple_call_arg (stmt, i);
1888 gimple def;
1889 /* TER forwards addresses into arguments of builtin functions so we
1890 have a chance to infer more correct alignment information. See PR39954. */
1891 if (builtin_p
1892 && TREE_CODE (arg) == SSA_NAME
1893 && (def = get_gimple_for_ssa_name (arg))
1894 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1895 arg = gimple_assign_rhs1 (def);
1896 CALL_EXPR_ARG (exp, i) = arg;
1897 }
28ed065e 1898
93f28ca7 1899 if (gimple_has_side_effects (stmt))
28ed065e
MM
1900 TREE_SIDE_EFFECTS (exp) = 1;
1901
93f28ca7 1902 if (gimple_call_nothrow_p (stmt))
28ed065e
MM
1903 TREE_NOTHROW (exp) = 1;
1904
1905 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1906 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1907 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1908 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1909 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1910 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1911 TREE_BLOCK (exp) = gimple_block (stmt);
1912
28ed065e
MM
1913 if (lhs)
1914 expand_assignment (lhs, exp, false);
1915 else
1916 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
1917}
1918
1919/* A subroutine of expand_gimple_stmt, expanding one gimple statement
1920 STMT that doesn't require special handling for outgoing edges. That
1921 is no tailcalls and no GIMPLE_COND. */
1922
1923static void
1924expand_gimple_stmt_1 (gimple stmt)
1925{
1926 tree op0;
1927 switch (gimple_code (stmt))
1928 {
1929 case GIMPLE_GOTO:
1930 op0 = gimple_goto_dest (stmt);
1931 if (TREE_CODE (op0) == LABEL_DECL)
1932 expand_goto (op0);
1933 else
1934 expand_computed_goto (op0);
1935 break;
1936 case GIMPLE_LABEL:
1937 expand_label (gimple_label_label (stmt));
1938 break;
1939 case GIMPLE_NOP:
1940 case GIMPLE_PREDICT:
1941 break;
28ed065e
MM
1942 case GIMPLE_SWITCH:
1943 expand_case (stmt);
1944 break;
1945 case GIMPLE_ASM:
1946 expand_asm_stmt (stmt);
1947 break;
1948 case GIMPLE_CALL:
1949 expand_call_stmt (stmt);
1950 break;
1951
1952 case GIMPLE_RETURN:
1953 op0 = gimple_return_retval (stmt);
1954
1955 if (op0 && op0 != error_mark_node)
1956 {
1957 tree result = DECL_RESULT (current_function_decl);
1958
1959 /* If we are not returning the current function's RESULT_DECL,
1960 build an assignment to it. */
1961 if (op0 != result)
1962 {
1963 /* I believe that a function's RESULT_DECL is unique. */
1964 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1965
1966 /* ??? We'd like to simply use expand_assignment here,
1967 but this fails if the value is of BLKmode but the return
1968 decl is a register. expand_return has special handling
1969 for this combination, which eventually should move
1970 to common code. See comments there. Until then, let's
1971 build a modify expression :-/ */
1972 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1973 result, op0);
1974 }
1975 }
1976 if (!op0)
1977 expand_null_return ();
1978 else
1979 expand_return (op0);
1980 break;
1981
1982 case GIMPLE_ASSIGN:
1983 {
1984 tree lhs = gimple_assign_lhs (stmt);
1985
1986 /* Tree expand used to fiddle with |= and &= of two bitfield
1987 COMPONENT_REFs here. This can't happen with gimple, since the LHS
1988 of binary assigns must be a gimple reg. */
1989
1990 if (TREE_CODE (lhs) != SSA_NAME
1991 || get_gimple_rhs_class (gimple_expr_code (stmt))
1992 == GIMPLE_SINGLE_RHS)
1993 {
1994 tree rhs = gimple_assign_rhs1 (stmt);
1995 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
1996 == GIMPLE_SINGLE_RHS);
1997 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
1998 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
1999 expand_assignment (lhs, rhs,
2000 gimple_assign_nontemporal_move_p (stmt));
2001 }
2002 else
2003 {
2004 rtx target, temp;
2005 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2006 struct separate_ops ops;
2007 bool promoted = false;
2008
2009 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2010 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2011 promoted = true;
2012
2013 ops.code = gimple_assign_rhs_code (stmt);
2014 ops.type = TREE_TYPE (lhs);
2015 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2016 {
0354c0c7
BS
2017 case GIMPLE_TERNARY_RHS:
2018 ops.op2 = gimple_assign_rhs3 (stmt);
2019 /* Fallthru */
28ed065e
MM
2020 case GIMPLE_BINARY_RHS:
2021 ops.op1 = gimple_assign_rhs2 (stmt);
2022 /* Fallthru */
2023 case GIMPLE_UNARY_RHS:
2024 ops.op0 = gimple_assign_rhs1 (stmt);
2025 break;
2026 default:
2027 gcc_unreachable ();
2028 }
2029 ops.location = gimple_location (stmt);
2030
2031 /* If we want to use a nontemporal store, force the value into a
2032 register first. If we store into a promoted register,
2033 don't expand directly to target. */
2034 temp = nontemporal || promoted ? NULL_RTX : target;
2035 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2036 EXPAND_NORMAL);
2037
2038 if (temp == target)
2039 ;
2040 else if (promoted)
2041 {
4e18a7d4 2042 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
28ed065e
MM
2043 /* If TEMP is a VOIDmode constant, use convert_modes to make
2044 sure that we properly convert it. */
2045 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2046 {
2047 temp = convert_modes (GET_MODE (target),
2048 TYPE_MODE (ops.type),
4e18a7d4 2049 temp, unsignedp);
28ed065e 2050 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4e18a7d4 2051 GET_MODE (target), temp, unsignedp);
28ed065e
MM
2052 }
2053
4e18a7d4 2054 convert_move (SUBREG_REG (target), temp, unsignedp);
28ed065e
MM
2055 }
2056 else if (nontemporal && emit_storent_insn (target, temp))
2057 ;
2058 else
2059 {
2060 temp = force_operand (temp, target);
2061 if (temp != target)
2062 emit_move_insn (target, temp);
2063 }
2064 }
2065 }
2066 break;
2067
2068 default:
2069 gcc_unreachable ();
2070 }
2071}
2072
2073/* Expand one gimple statement STMT and return the last RTL instruction
2074 before any of the newly generated ones.
2075
2076 In addition to generating the necessary RTL instructions this also
2077 sets REG_EH_REGION notes if necessary and sets the current source
2078 location for diagnostics. */
2079
2080static rtx
2081expand_gimple_stmt (gimple stmt)
2082{
1d65f45c 2083 int lp_nr = 0;
28ed065e
MM
2084 rtx last = NULL;
2085 location_t saved_location = input_location;
2086
2087 last = get_last_insn ();
2088
2089 /* If this is an expression of some kind and it has an associated line
2090 number, then emit the line number before expanding the expression.
2091
2092 We need to save and restore the file and line information so that
2093 errors discovered during expansion are emitted with the right
2094 information. It would be better if the diagnostic routines
2095 used the file/line information embedded in the tree nodes rather
2096 than globals. */
2097 gcc_assert (cfun);
2098
2099 if (gimple_has_location (stmt))
2100 {
2101 input_location = gimple_location (stmt);
2102 set_curr_insn_source_location (input_location);
2103
2104 /* Record where the insns produced belong. */
2105 set_curr_insn_block (gimple_block (stmt));
2106 }
2107
2108 expand_gimple_stmt_1 (stmt);
2109 /* Free any temporaries used to evaluate this statement. */
2110 free_temp_slots ();
2111
2112 input_location = saved_location;
2113
2114 /* Mark all insns that may trap. */
1d65f45c
RH
2115 lp_nr = lookup_stmt_eh_lp (stmt);
2116 if (lp_nr)
28ed065e
MM
2117 {
2118 rtx insn;
2119 for (insn = next_real_insn (last); insn;
2120 insn = next_real_insn (insn))
2121 {
2122 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2123 /* If we want exceptions for non-call insns, any
2124 may_trap_p instruction may throw. */
2125 && GET_CODE (PATTERN (insn)) != CLOBBER
2126 && GET_CODE (PATTERN (insn)) != USE
1d65f45c
RH
2127 && insn_could_throw_p (insn))
2128 make_reg_eh_region_note (insn, 0, lp_nr);
28ed065e
MM
2129 }
2130 }
2131
2132 return last;
2133}
2134
726a989a 2135/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
224e770b
RH
2136 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2137 generated a tail call (something that might be denied by the ABI
cea49550
RH
2138 rules governing the call; see calls.c).
2139
2140 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2141 can still reach the rest of BB. The case here is __builtin_sqrt,
2142 where the NaN result goes through the external function (with a
2143 tailcall) and the normal result happens via a sqrt instruction. */
80c7a9eb
RH
2144
2145static basic_block
726a989a 2146expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
80c7a9eb 2147{
b7211528 2148 rtx last2, last;
224e770b 2149 edge e;
628f6a4e 2150 edge_iterator ei;
224e770b
RH
2151 int probability;
2152 gcov_type count;
80c7a9eb 2153
28ed065e 2154 last2 = last = expand_gimple_stmt (stmt);
80c7a9eb
RH
2155
2156 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
2157 if (CALL_P (last) && SIBLING_CALL_P (last))
2158 goto found;
80c7a9eb 2159
726a989a 2160 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2161
cea49550 2162 *can_fallthru = true;
224e770b 2163 return NULL;
80c7a9eb 2164
224e770b
RH
2165 found:
2166 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2167 Any instructions emitted here are about to be deleted. */
2168 do_pending_stack_adjust ();
2169
2170 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2171 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2172 EH or abnormal edges, we shouldn't have created a tail call in
2173 the first place. So it seems to me we should just be removing
2174 all edges here, or redirecting the existing fallthru edge to
2175 the exit block. */
2176
224e770b
RH
2177 probability = 0;
2178 count = 0;
224e770b 2179
628f6a4e
BE
2180 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2181 {
224e770b
RH
2182 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2183 {
2184 if (e->dest != EXIT_BLOCK_PTR)
80c7a9eb 2185 {
224e770b
RH
2186 e->dest->count -= e->count;
2187 e->dest->frequency -= EDGE_FREQUENCY (e);
2188 if (e->dest->count < 0)
c22cacf3 2189 e->dest->count = 0;
224e770b 2190 if (e->dest->frequency < 0)
c22cacf3 2191 e->dest->frequency = 0;
80c7a9eb 2192 }
224e770b
RH
2193 count += e->count;
2194 probability += e->probability;
2195 remove_edge (e);
80c7a9eb 2196 }
628f6a4e
BE
2197 else
2198 ei_next (&ei);
80c7a9eb
RH
2199 }
2200
224e770b
RH
2201 /* This is somewhat ugly: the call_expr expander often emits instructions
2202 after the sibcall (to perform the function return). These confuse the
12eff7b7 2203 find_many_sub_basic_blocks code, so we need to get rid of them. */
224e770b 2204 last = NEXT_INSN (last);
341c100f 2205 gcc_assert (BARRIER_P (last));
cea49550
RH
2206
2207 *can_fallthru = false;
224e770b
RH
2208 while (NEXT_INSN (last))
2209 {
2210 /* For instance, an sqrt builtin expander expands an if with a
2211 sibcall in the then arm and a label for the else arm. */
2212 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
2213 {
2214 *can_fallthru = true;
2215 break;
2216 }
224e770b
RH
2217 delete_insn (NEXT_INSN (last));
2218 }
2219
2220 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2221 e->probability += probability;
2222 e->count += count;
2223 BB_END (bb) = last;
2224 update_bb_for_insn (bb);
2225
2226 if (NEXT_INSN (last))
2227 {
2228 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2229
2230 last = BB_END (bb);
2231 if (BARRIER_P (last))
2232 BB_END (bb) = PREV_INSN (last);
2233 }
2234
726a989a 2235 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2236
224e770b 2237 return bb;
80c7a9eb
RH
2238}
2239
b5b8b0ac
AO
2240/* Return the difference between the floor and the truncated result of
2241 a signed division by OP1 with remainder MOD. */
2242static rtx
2243floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2244{
2245 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
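 /* For example: op0 = -7, op1 = 2 truncates to -3 with mod = -1;
 op1 / mod = -2 < 0, so the adjustment is -1 and the floor result
 is -3 + -1 = -4. When the signs agree (e.g. 7 / 2), the
 adjustment is 0 and truncation already equals the floor. */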
2246 return gen_rtx_IF_THEN_ELSE
2247 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2248 gen_rtx_IF_THEN_ELSE
2249 (mode, gen_rtx_LT (BImode,
2250 gen_rtx_DIV (mode, op1, mod),
2251 const0_rtx),
2252 constm1_rtx, const0_rtx),
2253 const0_rtx);
2254}
2255
2256/* Return the difference between the ceil and the truncated result of
2257 a signed division by OP1 with remainder MOD. */
2258static rtx
2259ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2260{
2261 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
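 /* For example: op0 = 7, op1 = 2 truncates to 3 with mod = 1;
 op1 / mod = 2 > 0, so the adjustment is 1 and the ceiling result
 is 3 + 1 = 4. For -7 / 2 the signs differ, the adjustment is 0,
 and truncation (-3) already equals the ceiling. */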
2262 return gen_rtx_IF_THEN_ELSE
2263 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2264 gen_rtx_IF_THEN_ELSE
2265 (mode, gen_rtx_GT (BImode,
2266 gen_rtx_DIV (mode, op1, mod),
2267 const0_rtx),
2268 const1_rtx, const0_rtx),
2269 const0_rtx);
2270}
2271
2272/* Return the difference between the ceil and the truncated result of
2273 an unsigned division by OP1 with remainder MOD. */
2274static rtx
2275ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2276{
2277 /* (mod != 0 ? 1 : 0) */
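 /* For example: 7u / 2u truncates to 3 with mod = 1, so the
 adjustment is 1 and the ceiling result is 4; any nonzero
 remainder bumps an unsigned quotient up by one. */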
2278 return gen_rtx_IF_THEN_ELSE
2279 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2280 const1_rtx, const0_rtx);
2281}
2282
2283/* Return the difference between the rounded and the truncated result
2284 of a signed division by OP1 with remainder MOD. Halfway cases are
2285 rounded away from zero, rather than to the nearest even number. */
2286static rtx
2287round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2288{
2289 /* (abs (mod) >= abs (op1) - abs (mod)
2290 ? (op1 / mod > 0 ? 1 : -1)
2291 : 0) */
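 /* For example: 7 / 2 gives mod = 1 and abs (op1) - abs (mod) = 1,
 so 1 >= 1 holds and the adjustment is +1, rounding 3.5 away from
 zero to 4; for 5 / 4, mod = 1 < 3, so the adjustment is 0 and the
 truncated quotient 1 stands. */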
2292 return gen_rtx_IF_THEN_ELSE
2293 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2294 gen_rtx_MINUS (mode,
2295 gen_rtx_ABS (mode, op1),
2296 gen_rtx_ABS (mode, mod))),
2297 gen_rtx_IF_THEN_ELSE
2298 (mode, gen_rtx_GT (BImode,
2299 gen_rtx_DIV (mode, op1, mod),
2300 const0_rtx),
2301 const1_rtx, constm1_rtx),
2302 const0_rtx);
2303}
2304
2305/* Return the difference between the rounded and the truncated result
2306 of an unsigned division by OP1 with remainder MOD. Halfway cases
2307 are rounded away from zero, rather than to the nearest even
2308 number. */
2309static rtx
2310round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2311{
2312 /* (mod >= op1 - mod ? 1 : 0) */
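 /* For example: 7u / 2u gives mod = 1 and op1 - mod = 1, so the
 adjustment is 1 and the result rounds up to 4; 5u / 4u gives
 mod = 1 < 3, so the truncated quotient 1 stands. */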
2313 return gen_rtx_IF_THEN_ELSE
2314 (mode, gen_rtx_GE (BImode, mod,
2315 gen_rtx_MINUS (mode, op1, mod)),
2316 const1_rtx, const0_rtx);
2317}
2318
dda2da58
AO
2319 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2320 any rtl. */
2321
2322static rtx
2323convert_debug_memory_address (enum machine_mode mode, rtx x)
2324{
2325 enum machine_mode xmode = GET_MODE (x);
2326
2327#ifndef POINTERS_EXTEND_UNSIGNED
2328 gcc_assert (mode == Pmode);
2329 gcc_assert (xmode == mode || xmode == VOIDmode);
2330#else
2331 gcc_assert (mode == Pmode || mode == ptr_mode);
2332
2333 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2334 return x;
2335
2336 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2337 x = simplify_gen_subreg (mode, x, xmode,
2338 subreg_lowpart_offset
2339 (mode, xmode));
2340 else if (POINTERS_EXTEND_UNSIGNED > 0)
2341 x = gen_rtx_ZERO_EXTEND (mode, x);
2342 else if (!POINTERS_EXTEND_UNSIGNED)
2343 x = gen_rtx_SIGN_EXTEND (mode, x);
2344 else
2345 gcc_unreachable ();
2346#endif /* POINTERS_EXTEND_UNSIGNED */
2347
2348 return x;
2349}
2350
b5b8b0ac
AO
2351/* Return an RTX equivalent to the value of the tree expression
2352 EXP. */
2353
2354static rtx
2355expand_debug_expr (tree exp)
2356{
2357 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2358 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2359 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
09e881c9 2360 addr_space_t as;
b5b8b0ac
AO
2361
2362 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2363 {
2364 case tcc_expression:
2365 switch (TREE_CODE (exp))
2366 {
2367 case COND_EXPR:
7ece48b1 2368 case DOT_PROD_EXPR:
0354c0c7
BS
2369 case WIDEN_MULT_PLUS_EXPR:
2370 case WIDEN_MULT_MINUS_EXPR:
0f59b812 2371 case FMA_EXPR:
b5b8b0ac
AO
2372 goto ternary;
2373
2374 case TRUTH_ANDIF_EXPR:
2375 case TRUTH_ORIF_EXPR:
2376 case TRUTH_AND_EXPR:
2377 case TRUTH_OR_EXPR:
2378 case TRUTH_XOR_EXPR:
2379 goto binary;
2380
2381 case TRUTH_NOT_EXPR:
2382 goto unary;
2383
2384 default:
2385 break;
2386 }
2387 break;
2388
2389 ternary:
2390 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2391 if (!op2)
2392 return NULL_RTX;
2393 /* Fall through. */
2394
2395 binary:
2396 case tcc_binary:
2397 case tcc_comparison:
2398 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2399 if (!op1)
2400 return NULL_RTX;
2401 /* Fall through. */
2402
2403 unary:
2404 case tcc_unary:
2405 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2406 if (!op0)
2407 return NULL_RTX;
2408 break;
2409
2410 case tcc_type:
2411 case tcc_statement:
2412 gcc_unreachable ();
2413
2414 case tcc_constant:
2415 case tcc_exceptional:
2416 case tcc_declaration:
2417 case tcc_reference:
2418 case tcc_vl_exp:
2419 break;
2420 }
2421
2422 switch (TREE_CODE (exp))
2423 {
2424 case STRING_CST:
2425 if (!lookup_constant_def (exp))
2426 {
e1b243a8
JJ
2427 if (strlen (TREE_STRING_POINTER (exp)) + 1
2428 != (size_t) TREE_STRING_LENGTH (exp))
2429 return NULL_RTX;
b5b8b0ac
AO
2430 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2431 op0 = gen_rtx_MEM (BLKmode, op0);
2432 set_mem_attributes (op0, exp, 0);
2433 return op0;
2434 }
2435 /* Fall through... */
2436
2437 case INTEGER_CST:
2438 case REAL_CST:
2439 case FIXED_CST:
2440 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2441 return op0;
2442
2443 case COMPLEX_CST:
2444 gcc_assert (COMPLEX_MODE_P (mode));
2445 op0 = expand_debug_expr (TREE_REALPART (exp));
b5b8b0ac 2446 op1 = expand_debug_expr (TREE_IMAGPART (exp));
b5b8b0ac
AO
2447 return gen_rtx_CONCAT (mode, op0, op1);
2448
0ca5af51
AO
2449 case DEBUG_EXPR_DECL:
2450 op0 = DECL_RTL_IF_SET (exp);
2451
2452 if (op0)
2453 return op0;
2454
2455 op0 = gen_rtx_DEBUG_EXPR (mode);
e4fb38bd 2456 DEBUG_EXPR_TREE_DECL (op0) = exp;
0ca5af51
AO
2457 SET_DECL_RTL (exp, op0);
2458
2459 return op0;
2460
b5b8b0ac
AO
2461 case VAR_DECL:
2462 case PARM_DECL:
2463 case FUNCTION_DECL:
2464 case LABEL_DECL:
2465 case CONST_DECL:
2466 case RESULT_DECL:
2467 op0 = DECL_RTL_IF_SET (exp);
2468
2469 /* This decl was probably optimized away. */
2470 if (!op0)
e1b243a8
JJ
2471 {
2472 if (TREE_CODE (exp) != VAR_DECL
2473 || DECL_EXTERNAL (exp)
2474 || !TREE_STATIC (exp)
2475 || !DECL_NAME (exp)
0fba566c
JJ
2476 || DECL_HARD_REGISTER (exp)
2477 || mode == VOIDmode)
e1b243a8
JJ
2478 return NULL;
2479
b1aa0655 2480 op0 = make_decl_rtl_for_debug (exp);
e1b243a8
JJ
2481 if (!MEM_P (op0)
2482 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2483 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2484 return NULL;
2485 }
2486 else
2487 op0 = copy_rtx (op0);
b5b8b0ac 2488
06796564
JJ
2489 if (GET_MODE (op0) == BLKmode
2490 /* If op0 is not BLKmode but MODE is BLKmode, adjust_mode
2491 below would ICE. While it is likely a FE bug,
2492 try to be robust here. See PR43166. */
132b4e82
JJ
2493 || mode == BLKmode
2494 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
b5b8b0ac
AO
2495 {
2496 gcc_assert (MEM_P (op0));
2497 op0 = adjust_address_nv (op0, mode, 0);
2498 return op0;
2499 }
2500
2501 /* Fall through. */
2502
2503 adjust_mode:
2504 case PAREN_EXPR:
2505 case NOP_EXPR:
2506 case CONVERT_EXPR:
2507 {
2508 enum machine_mode inner_mode = GET_MODE (op0);
2509
2510 if (mode == inner_mode)
2511 return op0;
2512
2513 if (inner_mode == VOIDmode)
2514 {
2a8e30fb
MM
2515 if (TREE_CODE (exp) == SSA_NAME)
2516 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2517 else
2518 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
2519 if (mode == inner_mode)
2520 return op0;
2521 }
2522
2523 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2524 {
2525 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2526 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2527 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2528 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2529 else
2530 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2531 }
2532 else if (FLOAT_MODE_P (mode))
2533 {
2a8e30fb 2534 gcc_assert (TREE_CODE (exp) != SSA_NAME);
b5b8b0ac
AO
2535 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2536 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2537 else
2538 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2539 }
2540 else if (FLOAT_MODE_P (inner_mode))
2541 {
2542 if (unsignedp)
2543 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2544 else
2545 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2546 }
2547 else if (CONSTANT_P (op0)
2548 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2549 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2550 subreg_lowpart_offset (mode,
2551 inner_mode));
1b47fe3f
JJ
2552 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2553 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2554 : unsignedp)
b5b8b0ac
AO
2555 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2556 else
2557 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2558
2559 return op0;
2560 }
2561
70f34814 2562 case MEM_REF:
b5b8b0ac 2563 case INDIRECT_REF:
b5b8b0ac
AO
2564 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2565 if (!op0)
2566 return NULL;
2567
cb115041
JJ
2568 if (TREE_CODE (exp) == MEM_REF)
2569 {
2570 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2571 if (!op1 || !CONST_INT_P (op1))
2572 return NULL;
2573
2574 op0 = plus_constant (op0, INTVAL (op1));
2575 }
2576
09e881c9 2577 if (POINTER_TYPE_P (TREE_TYPE (exp)))
75421dcd 2578 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
09e881c9 2579 else
75421dcd 2580 as = ADDR_SPACE_GENERIC;
b5b8b0ac
AO
2581
2582 op0 = gen_rtx_MEM (mode, op0);
2583
2584 set_mem_attributes (op0, exp, 0);
09e881c9 2585 set_mem_addr_space (op0, as);
b5b8b0ac
AO
2586
2587 return op0;
2588
2589 case TARGET_MEM_REF:
4d948885
RG
2590 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2591 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
b5b8b0ac
AO
2592 return NULL;
2593
2594 op0 = expand_debug_expr
4e25ca6b 2595 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
b5b8b0ac
AO
2596 if (!op0)
2597 return NULL;
2598
09e881c9 2599 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
b5b8b0ac
AO
2600
2601 op0 = gen_rtx_MEM (mode, op0);
2602
2603 set_mem_attributes (op0, exp, 0);
09e881c9 2604 set_mem_addr_space (op0, as);
b5b8b0ac
AO
2605
2606 return op0;
2607
2608 case ARRAY_REF:
2609 case ARRAY_RANGE_REF:
2610 case COMPONENT_REF:
2611 case BIT_FIELD_REF:
2612 case REALPART_EXPR:
2613 case IMAGPART_EXPR:
2614 case VIEW_CONVERT_EXPR:
2615 {
2616 enum machine_mode mode1;
2617 HOST_WIDE_INT bitsize, bitpos;
2618 tree offset;
2619 int volatilep = 0;
2620 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2621 &mode1, &unsignedp, &volatilep, false);
2622 rtx orig_op0;
2623
4f2a9af8
JJ
2624 if (bitsize == 0)
2625 return NULL;
2626
b5b8b0ac
AO
2627 orig_op0 = op0 = expand_debug_expr (tem);
2628
2629 if (!op0)
2630 return NULL;
2631
2632 if (offset)
2633 {
dda2da58
AO
2634 enum machine_mode addrmode, offmode;
2635
aa847cc8
JJ
2636 if (!MEM_P (op0))
2637 return NULL;
b5b8b0ac 2638
dda2da58
AO
2639 op0 = XEXP (op0, 0);
2640 addrmode = GET_MODE (op0);
2641 if (addrmode == VOIDmode)
2642 addrmode = Pmode;
2643
b5b8b0ac
AO
2644 op1 = expand_debug_expr (offset);
2645 if (!op1)
2646 return NULL;
2647
dda2da58
AO
2648 offmode = GET_MODE (op1);
2649 if (offmode == VOIDmode)
2650 offmode = TYPE_MODE (TREE_TYPE (offset));
2651
2652 if (addrmode != offmode)
2653 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2654 subreg_lowpart_offset (addrmode,
2655 offmode));
2656
2657 /* Don't use offset_address here; we don't need a
2658 recognizable address, and we don't want to generate
2659 code. */
2660 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
b5b8b0ac
AO
2661 }
2662
2663 if (MEM_P (op0))
2664 {
4f2a9af8
JJ
2665 if (mode1 == VOIDmode)
2666 /* Bitfield. */
2667 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
b5b8b0ac
AO
2668 if (bitpos >= BITS_PER_UNIT)
2669 {
2670 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2671 bitpos %= BITS_PER_UNIT;
2672 }
2673 else if (bitpos < 0)
2674 {
4f2a9af8
JJ
2675 HOST_WIDE_INT units
2676 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
b5b8b0ac
AO
2677 op0 = adjust_address_nv (op0, mode1, units);
2678 bitpos += units * BITS_PER_UNIT;
2679 }
2680 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2681 op0 = adjust_address_nv (op0, mode, 0);
2682 else if (GET_MODE (op0) != mode1)
2683 op0 = adjust_address_nv (op0, mode1, 0);
2684 else
2685 op0 = copy_rtx (op0);
2686 if (op0 == orig_op0)
2687 op0 = shallow_copy_rtx (op0);
2688 set_mem_attributes (op0, exp, 0);
2689 }
2690
2691 if (bitpos == 0 && mode == GET_MODE (op0))
2692 return op0;
2693
2d3fc6aa
JJ
2694 if (bitpos < 0)
2695 return NULL;
2696
88c04a5d
JJ
2697 if (GET_MODE (op0) == BLKmode)
2698 return NULL;
2699
b5b8b0ac
AO
2700 if ((bitpos % BITS_PER_UNIT) == 0
2701 && bitsize == GET_MODE_BITSIZE (mode1))
2702 {
2703 enum machine_mode opmode = GET_MODE (op0);
2704
b5b8b0ac
AO
2705 if (opmode == VOIDmode)
2706 opmode = mode1;
2707
2708 /* This condition may hold if we're expanding the address
2709 right past the end of an array that turned out not to
2710 be addressable (i.e., the address was only computed in
2711 debug stmts). The gen_subreg below would rightfully
2712 crash, and the address doesn't really exist, so just
2713 drop it. */
2714 if (bitpos >= GET_MODE_BITSIZE (opmode))
2715 return NULL;
2716
7d5d39bb
JJ
2717 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2718 return simplify_gen_subreg (mode, op0, opmode,
2719 bitpos / BITS_PER_UNIT);
b5b8b0ac
AO
2720 }
2721
2722 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2723 && TYPE_UNSIGNED (TREE_TYPE (exp))
2724 ? SIGN_EXTRACT
2725 : ZERO_EXTRACT, mode,
2726 GET_MODE (op0) != VOIDmode
2727 ? GET_MODE (op0) : mode1,
2728 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2729 }
2730
b5b8b0ac
AO
2731 case ABS_EXPR:
2732 return gen_rtx_ABS (mode, op0);
2733
2734 case NEGATE_EXPR:
2735 return gen_rtx_NEG (mode, op0);
2736
2737 case BIT_NOT_EXPR:
2738 return gen_rtx_NOT (mode, op0);
2739
2740 case FLOAT_EXPR:
2741 if (unsignedp)
2742 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2743 else
2744 return gen_rtx_FLOAT (mode, op0);
2745
2746 case FIX_TRUNC_EXPR:
2747 if (unsignedp)
2748 return gen_rtx_UNSIGNED_FIX (mode, op0);
2749 else
2750 return gen_rtx_FIX (mode, op0);
2751
2752 case POINTER_PLUS_EXPR:
576319a7
DD
2753 /* For the rare target where pointers are not the same size as
2754 size_t, we need to check for mis-matched modes and correct
2755 the addend. */
2756 if (op0 && op1
2757 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2758 && GET_MODE (op0) != GET_MODE (op1))
2759 {
2760 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2761 op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
2762 else
2763 /* We always sign-extend, regardless of the signedness of
2764 the operand, because the operand is always unsigned
2765 here even if the original C expression is signed. */
2766 op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
2767 }
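 /* For example: if op0 is SImode (Pmode on a 32-bit target) and op1
 is DImode, the addend is truncated to SImode here; a narrower
 addend is instead sign-extended to the pointer's mode. */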
2768 /* Fall through. */
b5b8b0ac
AO
2769 case PLUS_EXPR:
2770 return gen_rtx_PLUS (mode, op0, op1);
2771
2772 case MINUS_EXPR:
2773 return gen_rtx_MINUS (mode, op0, op1);
2774
2775 case MULT_EXPR:
2776 return gen_rtx_MULT (mode, op0, op1);
2777
2778 case RDIV_EXPR:
2779 case TRUNC_DIV_EXPR:
2780 case EXACT_DIV_EXPR:
2781 if (unsignedp)
2782 return gen_rtx_UDIV (mode, op0, op1);
2783 else
2784 return gen_rtx_DIV (mode, op0, op1);
2785
2786 case TRUNC_MOD_EXPR:
2787 if (unsignedp)
2788 return gen_rtx_UMOD (mode, op0, op1);
2789 else
2790 return gen_rtx_MOD (mode, op0, op1);
2791
2792 case FLOOR_DIV_EXPR:
2793 if (unsignedp)
2794 return gen_rtx_UDIV (mode, op0, op1);
2795 else
2796 {
2797 rtx div = gen_rtx_DIV (mode, op0, op1);
2798 rtx mod = gen_rtx_MOD (mode, op0, op1);
2799 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2800 return gen_rtx_PLUS (mode, div, adj);
2801 }
2802
2803 case FLOOR_MOD_EXPR:
2804 if (unsignedp)
2805 return gen_rtx_UMOD (mode, op0, op1);
2806 else
2807 {
2808 rtx mod = gen_rtx_MOD (mode, op0, op1);
2809 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2810 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2811 return gen_rtx_PLUS (mode, mod, adj);
2812 }
2813
2814 case CEIL_DIV_EXPR:
2815 if (unsignedp)
2816 {
2817 rtx div = gen_rtx_UDIV (mode, op0, op1);
2818 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2819 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2820 return gen_rtx_PLUS (mode, div, adj);
2821 }
2822 else
2823 {
2824 rtx div = gen_rtx_DIV (mode, op0, op1);
2825 rtx mod = gen_rtx_MOD (mode, op0, op1);
2826 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2827 return gen_rtx_PLUS (mode, div, adj);
2828 }
2829
2830 case CEIL_MOD_EXPR:
2831 if (unsignedp)
2832 {
2833 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2834 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2835 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2836 return gen_rtx_PLUS (mode, mod, adj);
2837 }
2838 else
2839 {
2840 rtx mod = gen_rtx_MOD (mode, op0, op1);
2841 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2842 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2843 return gen_rtx_PLUS (mode, mod, adj);
2844 }
2845
2846 case ROUND_DIV_EXPR:
2847 if (unsignedp)
2848 {
2849 rtx div = gen_rtx_UDIV (mode, op0, op1);
2850 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2851 rtx adj = round_udiv_adjust (mode, mod, op1);
2852 return gen_rtx_PLUS (mode, div, adj);
2853 }
2854 else
2855 {
2856 rtx div = gen_rtx_DIV (mode, op0, op1);
2857 rtx mod = gen_rtx_MOD (mode, op0, op1);
2858 rtx adj = round_sdiv_adjust (mode, mod, op1);
2859 return gen_rtx_PLUS (mode, div, adj);
2860 }
2861
2862 case ROUND_MOD_EXPR:
2863 if (unsignedp)
2864 {
2865 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2866 rtx adj = round_udiv_adjust (mode, mod, op1);
2867 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2868 return gen_rtx_PLUS (mode, mod, adj);
2869 }
2870 else
2871 {
2872 rtx mod = gen_rtx_MOD (mode, op0, op1);
2873 rtx adj = round_sdiv_adjust (mode, mod, op1);
2874 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2875 return gen_rtx_PLUS (mode, mod, adj);
2876 }
2877
2878 case LSHIFT_EXPR:
2879 return gen_rtx_ASHIFT (mode, op0, op1);
2880
2881 case RSHIFT_EXPR:
2882 if (unsignedp)
2883 return gen_rtx_LSHIFTRT (mode, op0, op1);
2884 else
2885 return gen_rtx_ASHIFTRT (mode, op0, op1);
2886
2887 case LROTATE_EXPR:
2888 return gen_rtx_ROTATE (mode, op0, op1);
2889
2890 case RROTATE_EXPR:
2891 return gen_rtx_ROTATERT (mode, op0, op1);
2892
2893 case MIN_EXPR:
2894 if (unsignedp)
2895 return gen_rtx_UMIN (mode, op0, op1);
2896 else
2897 return gen_rtx_SMIN (mode, op0, op1);
2898
2899 case MAX_EXPR:
2900 if (unsignedp)
2901 return gen_rtx_UMAX (mode, op0, op1);
2902 else
2903 return gen_rtx_SMAX (mode, op0, op1);
2904
2905 case BIT_AND_EXPR:
2906 case TRUTH_AND_EXPR:
2907 return gen_rtx_AND (mode, op0, op1);
2908
2909 case BIT_IOR_EXPR:
2910 case TRUTH_OR_EXPR:
2911 return gen_rtx_IOR (mode, op0, op1);
2912
2913 case BIT_XOR_EXPR:
2914 case TRUTH_XOR_EXPR:
2915 return gen_rtx_XOR (mode, op0, op1);
2916
2917 case TRUTH_ANDIF_EXPR:
2918 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2919
2920 case TRUTH_ORIF_EXPR:
2921 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2922
2923 case TRUTH_NOT_EXPR:
2924 return gen_rtx_EQ (mode, op0, const0_rtx);
2925
2926 case LT_EXPR:
2927 if (unsignedp)
2928 return gen_rtx_LTU (mode, op0, op1);
2929 else
2930 return gen_rtx_LT (mode, op0, op1);
2931
2932 case LE_EXPR:
2933 if (unsignedp)
2934 return gen_rtx_LEU (mode, op0, op1);
2935 else
2936 return gen_rtx_LE (mode, op0, op1);
2937
2938 case GT_EXPR:
2939 if (unsignedp)
2940 return gen_rtx_GTU (mode, op0, op1);
2941 else
2942 return gen_rtx_GT (mode, op0, op1);
2943
2944 case GE_EXPR:
2945 if (unsignedp)
2946 return gen_rtx_GEU (mode, op0, op1);
2947 else
2948 return gen_rtx_GE (mode, op0, op1);
2949
2950 case EQ_EXPR:
2951 return gen_rtx_EQ (mode, op0, op1);
2952
2953 case NE_EXPR:
2954 return gen_rtx_NE (mode, op0, op1);
2955
2956 case UNORDERED_EXPR:
2957 return gen_rtx_UNORDERED (mode, op0, op1);
2958
2959 case ORDERED_EXPR:
2960 return gen_rtx_ORDERED (mode, op0, op1);
2961
2962 case UNLT_EXPR:
2963 return gen_rtx_UNLT (mode, op0, op1);
2964
2965 case UNLE_EXPR:
2966 return gen_rtx_UNLE (mode, op0, op1);
2967
2968 case UNGT_EXPR:
2969 return gen_rtx_UNGT (mode, op0, op1);
2970
2971 case UNGE_EXPR:
2972 return gen_rtx_UNGE (mode, op0, op1);
2973
2974 case UNEQ_EXPR:
2975 return gen_rtx_UNEQ (mode, op0, op1);
2976
2977 case LTGT_EXPR:
2978 return gen_rtx_LTGT (mode, op0, op1);
2979
2980 case COND_EXPR:
2981 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
2982
2983 case COMPLEX_EXPR:
2984 gcc_assert (COMPLEX_MODE_P (mode));
2985 if (GET_MODE (op0) == VOIDmode)
2986 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
2987 if (GET_MODE (op1) == VOIDmode)
2988 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
2989 return gen_rtx_CONCAT (mode, op0, op1);
2990
d02a5a4b
JJ
2991 case CONJ_EXPR:
2992 if (GET_CODE (op0) == CONCAT)
2993 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2994 gen_rtx_NEG (GET_MODE_INNER (mode),
2995 XEXP (op0, 1)));
2996 else
2997 {
2998 enum machine_mode imode = GET_MODE_INNER (mode);
2999 rtx re, im;
3000
3001 if (MEM_P (op0))
3002 {
3003 re = adjust_address_nv (op0, imode, 0);
3004 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3005 }
3006 else
3007 {
3008 enum machine_mode ifmode = int_mode_for_mode (mode);
3009 enum machine_mode ihmode = int_mode_for_mode (imode);
3010 rtx halfsize;
3011 if (ifmode == BLKmode || ihmode == BLKmode)
3012 return NULL;
3013 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3014 re = op0;
3015 if (mode != ifmode)
3016 re = gen_rtx_SUBREG (ifmode, re, 0);
3017 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3018 if (imode != ihmode)
3019 re = gen_rtx_SUBREG (imode, re, 0);
3020 im = copy_rtx (op0);
3021 if (mode != ifmode)
3022 im = gen_rtx_SUBREG (ifmode, im, 0);
3023 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3024 if (imode != ihmode)
3025 im = gen_rtx_SUBREG (imode, im, 0);
3026 }
3027 im = gen_rtx_NEG (imode, im);
3028 return gen_rtx_CONCAT (mode, re, im);
3029 }
3030
b5b8b0ac
AO
3031 case ADDR_EXPR:
3032 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3033 if (!op0 || !MEM_P (op0))
c8a27c40
JJ
3034 {
3035 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3036 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3037 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3038 && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)))
3039 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3040
3041 if (handled_component_p (TREE_OPERAND (exp, 0)))
3042 {
3043 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3044 tree decl
3045 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3046 &bitoffset, &bitsize, &maxsize);
3047 if ((TREE_CODE (decl) == VAR_DECL
3048 || TREE_CODE (decl) == PARM_DECL
3049 || TREE_CODE (decl) == RESULT_DECL)
3050 && !TREE_ADDRESSABLE (decl)
3051 && (bitoffset % BITS_PER_UNIT) == 0
3052 && bitsize > 0
3053 && bitsize == maxsize)
3054 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3055 bitoffset / BITS_PER_UNIT);
3056 }
3057
3058 return NULL;
3059 }
b5b8b0ac 3060
dda2da58
AO
3061 op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
3062
3063 return op0;
b5b8b0ac
AO
3064
3065 case VECTOR_CST:
3066 exp = build_constructor_from_list (TREE_TYPE (exp),
3067 TREE_VECTOR_CST_ELTS (exp));
3068 /* Fall through. */
3069
3070 case CONSTRUCTOR:
3071 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3072 {
3073 unsigned i;
3074 tree val;
3075
3076 op0 = gen_rtx_CONCATN
3077 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3078
3079 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3080 {
3081 op1 = expand_debug_expr (val);
3082 if (!op1)
3083 return NULL;
3084 XVECEXP (op0, 0, i) = op1;
3085 }
3086
3087 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3088 {
3089 op1 = expand_debug_expr
e8160c9a 3090 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
b5b8b0ac
AO
3091
3092 if (!op1)
3093 return NULL;
3094
3095 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3096 XVECEXP (op0, 0, i) = op1;
3097 }
3098
3099 return op0;
3100 }
3101 else
3102 goto flag_unsupported;
3103
3104 case CALL_EXPR:
3105 /* ??? Maybe handle some builtins? */
3106 return NULL;
3107
3108 case SSA_NAME:
3109 {
2a8e30fb
MM
3110 gimple g = get_gimple_for_ssa_name (exp);
3111 if (g)
3112 {
3113 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3114 if (!op0)
3115 return NULL;
3116 }
3117 else
3118 {
3119 int part = var_to_partition (SA.map, exp);
b5b8b0ac 3120
2a8e30fb
MM
3121 if (part == NO_PARTITION)
3122 return NULL;
b5b8b0ac 3123
2a8e30fb 3124 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
b5b8b0ac 3125
2a8e30fb
MM
3126 op0 = SA.partition_to_pseudo[part];
3127 }
b5b8b0ac
AO
3128 goto adjust_mode;
3129 }
3130
3131 case ERROR_MARK:
3132 return NULL;
3133
7ece48b1
JJ
3134 /* Vector stuff. For most of these tree codes there are no RTL codes. */
3135 case REALIGN_LOAD_EXPR:
3136 case REDUC_MAX_EXPR:
3137 case REDUC_MIN_EXPR:
3138 case REDUC_PLUS_EXPR:
3139 case VEC_COND_EXPR:
3140 case VEC_EXTRACT_EVEN_EXPR:
3141 case VEC_EXTRACT_ODD_EXPR:
3142 case VEC_INTERLEAVE_HIGH_EXPR:
3143 case VEC_INTERLEAVE_LOW_EXPR:
3144 case VEC_LSHIFT_EXPR:
3145 case VEC_PACK_FIX_TRUNC_EXPR:
3146 case VEC_PACK_SAT_EXPR:
3147 case VEC_PACK_TRUNC_EXPR:
3148 case VEC_RSHIFT_EXPR:
3149 case VEC_UNPACK_FLOAT_HI_EXPR:
3150 case VEC_UNPACK_FLOAT_LO_EXPR:
3151 case VEC_UNPACK_HI_EXPR:
3152 case VEC_UNPACK_LO_EXPR:
3153 case VEC_WIDEN_MULT_HI_EXPR:
3154 case VEC_WIDEN_MULT_LO_EXPR:
3155 return NULL;
3156
3157 /* Misc codes. */
3158 case ADDR_SPACE_CONVERT_EXPR:
3159 case FIXED_CONVERT_EXPR:
3160 case OBJ_TYPE_REF:
3161 case WITH_SIZE_EXPR:
3162 return NULL;
3163
3164 case DOT_PROD_EXPR:
3165 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3166 && SCALAR_INT_MODE_P (mode))
3167 {
3168 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3169 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3170 else
3171 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3172 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3173 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3174 else
3175 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3176 op0 = gen_rtx_MULT (mode, op0, op1);
3177 return gen_rtx_PLUS (mode, op0, op2);
3178 }
3179 return NULL;
3180
3181 case WIDEN_MULT_EXPR:
0354c0c7
BS
3182 case WIDEN_MULT_PLUS_EXPR:
3183 case WIDEN_MULT_MINUS_EXPR:
7ece48b1
JJ
3184 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3185 && SCALAR_INT_MODE_P (mode))
3186 {
5b58b39b 3187 enum machine_mode inner_mode = GET_MODE (op0);
7ece48b1 3188 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5b58b39b 3189 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
7ece48b1 3190 else
5b58b39b 3191 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
7ece48b1 3192 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5b58b39b 3193 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
7ece48b1 3194 else
5b58b39b 3195 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
0354c0c7
BS
3196 op0 = gen_rtx_MULT (mode, op0, op1);
3197 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3198 return op0;
3199 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3200 return gen_rtx_PLUS (mode, op0, op2);
3201 else
3202 return gen_rtx_MINUS (mode, op2, op0);
7ece48b1
JJ
3203 }
3204 return NULL;
3205
3206 case WIDEN_SUM_EXPR:
3207 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3208 && SCALAR_INT_MODE_P (mode))
3209 {
3210 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3211 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3212 else
3213 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3214 return gen_rtx_PLUS (mode, op0, op1);
3215 }
3216 return NULL;
3217
0f59b812
JJ
3218 case FMA_EXPR:
3219 return gen_rtx_FMA (mode, op0, op1, op2);
3220
b5b8b0ac
AO
3221 default:
3222 flag_unsupported:
3223#ifdef ENABLE_CHECKING
3224 debug_tree (exp);
3225 gcc_unreachable ();
3226#else
3227 return NULL;
3228#endif
3229 }
3230}
3231
3232/* Expand the _LOCs in debug insns. We run this after expanding all
3233 regular insns, so that any variables referenced in the function
3234 will have their DECL_RTLs set. */
3235
3236static void
3237expand_debug_locations (void)
3238{
3239 rtx insn;
3240 rtx last = get_last_insn ();
3241 int save_strict_alias = flag_strict_aliasing;
3242
3243 /* New alias sets created while setting up memory attributes cause
3244 -fcompare-debug failures, even though they don't bring about any
3245 codegen changes. */
3246 flag_strict_aliasing = 0;
3247
3248 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3249 if (DEBUG_INSN_P (insn))
3250 {
3251 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3252 rtx val;
3253 enum machine_mode mode;
3254
3255 if (value == NULL_TREE)
3256 val = NULL_RTX;
3257 else
3258 {
3259 val = expand_debug_expr (value);
3260 gcc_assert (last == get_last_insn ());
3261 }
3262
3263 if (!val)
3264 val = gen_rtx_UNKNOWN_VAR_LOC ();
3265 else
3266 {
3267 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3268
3269 gcc_assert (mode == GET_MODE (val)
3270 || (GET_MODE (val) == VOIDmode
3271 && (CONST_INT_P (val)
3272 || GET_CODE (val) == CONST_FIXED
3273 || GET_CODE (val) == CONST_DOUBLE
3274 || GET_CODE (val) == LABEL_REF)));
3275 }
3276
3277 INSN_VAR_LOCATION_LOC (insn) = val;
3278 }
3279
3280 flag_strict_aliasing = save_strict_alias;
3281}
3282
242229bb
JH
3283/* Expand basic block BB from GIMPLE trees to RTL. */
3284
3285static basic_block
10d22567 3286expand_gimple_basic_block (basic_block bb)
242229bb 3287{
726a989a
RB
3288 gimple_stmt_iterator gsi;
3289 gimple_seq stmts;
3290 gimple stmt = NULL;
242229bb
JH
3291 rtx note, last;
3292 edge e;
628f6a4e 3293 edge_iterator ei;
8b11009b 3294 void **elt;
242229bb
JH
3295
3296 if (dump_file)
726a989a
RB
3297 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3298 bb->index);
3299
3300 /* Note that since we are now transitioning from GIMPLE to RTL, we
3301 cannot use the gsi_*_bb() routines because they expect the basic
3302 block to be in GIMPLE, instead of RTL. Therefore, we need to
3303 access the BB sequence directly. */
3304 stmts = bb_seq (bb);
3305 bb->il.gimple = NULL;
bf08ebeb 3306 rtl_profile_for_bb (bb);
5e2d947c
JH
3307 init_rtl_bb_info (bb);
3308 bb->flags |= BB_RTL;
3309
a9b77cd1
ZD
3310 /* Remove the RETURN_EXPR if we may fall through to the exit
3311 instead. */
726a989a
RB
3312 gsi = gsi_last (stmts);
3313 if (!gsi_end_p (gsi)
3314 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
a9b77cd1 3315 {
726a989a 3316 gimple ret_stmt = gsi_stmt (gsi);
a9b77cd1
ZD
3317
3318 gcc_assert (single_succ_p (bb));
3319 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3320
3321 if (bb->next_bb == EXIT_BLOCK_PTR
726a989a 3322 && !gimple_return_retval (ret_stmt))
a9b77cd1 3323 {
726a989a 3324 gsi_remove (&gsi, false);
a9b77cd1
ZD
3325 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3326 }
3327 }
3328
726a989a
RB
3329 gsi = gsi_start (stmts);
3330 if (!gsi_end_p (gsi))
8b11009b 3331 {
726a989a
RB
3332 stmt = gsi_stmt (gsi);
3333 if (gimple_code (stmt) != GIMPLE_LABEL)
3334 stmt = NULL;
8b11009b 3335 }
242229bb 3336
8b11009b
ZD
3337 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3338
3339 if (stmt || elt)
242229bb
JH
3340 {
3341 last = get_last_insn ();
3342
8b11009b
ZD
3343 if (stmt)
3344 {
28ed065e 3345 expand_gimple_stmt (stmt);
726a989a 3346 gsi_next (&gsi);
8b11009b
ZD
3347 }
3348
3349 if (elt)
ae50c0cb 3350 emit_label ((rtx) *elt);
242229bb 3351
caf93cb0 3352 /* Java emits line number notes at the top of labels.
c22cacf3 3353 ??? Make this go away once line number notes are obsoleted. */
242229bb 3354 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 3355 if (NOTE_P (BB_HEAD (bb)))
242229bb 3356 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
242229bb 3357 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528 3358
726a989a 3359 maybe_dump_rtl_for_gimple_stmt (stmt, last);
242229bb
JH
3360 }
3361 else
3362 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3363
3364 NOTE_BASIC_BLOCK (note) = bb;
3365
726a989a 3366 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 3367 {
cea49550 3368 basic_block new_bb;
242229bb 3369
b5b8b0ac 3370 stmt = gsi_stmt (gsi);
2a8e30fb
MM
3371
3372 /* If this statement is a non-debug one, and we generate debug
3373 insns, then this one might be the last real use of a TERed
3374 SSA_NAME, but where there are still some debug uses further
3375 down. Expanding the current SSA name in such further debug
3376 uses by their RHS might lead to wrong debug info, as coalescing
3377 might make the operands of such RHS be placed into the same
3378 pseudo as something else. Like so:
3379 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3380 use(a_1);
3381 a_2 = ...
3382 #DEBUG ... => a_1
3383 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3384 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3385 the write to a_2 would actually have clobbered the place which
3386 formerly held a_0.
3387
3388 So, instead of that, we recognize the situation, and generate
3389 debug temporaries at the last real use of TERed SSA names:
3390 a_1 = a_0 + 1;
3391 #DEBUG #D1 => a_1
3392 use(a_1);
3393 a_2 = ...
3394 #DEBUG ... => #D1
3395 */
3396 if (MAY_HAVE_DEBUG_INSNS
3397 && SA.values
3398 && !is_gimple_debug (stmt))
3399 {
3400 ssa_op_iter iter;
3401 tree op;
3402 gimple def;
3403
3404 location_t sloc = get_curr_insn_source_location ();
3405 tree sblock = get_curr_insn_block ();
3406
3407 /* Look for SSA names that have their last use here (TERed
3408 names always have only one real use). */
3409 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3410 if ((def = get_gimple_for_ssa_name (op)))
3411 {
3412 imm_use_iterator imm_iter;
3413 use_operand_p use_p;
3414 bool have_debug_uses = false;
3415
3416 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3417 {
3418 if (gimple_debug_bind_p (USE_STMT (use_p)))
3419 {
3420 have_debug_uses = true;
3421 break;
3422 }
3423 }
3424
3425 if (have_debug_uses)
3426 {
3427 /* OP is a TERed SSA name, with DEF its defining
3428 statement, and where OP is used in further debug
3429 instructions. Generate a debug temporary, and
3430 replace all uses of OP in debug insns with that
3431 temporary. */
3432 gimple debugstmt;
3433 tree value = gimple_assign_rhs_to_tree (def);
3434 tree vexpr = make_node (DEBUG_EXPR_DECL);
3435 rtx val;
3436 enum machine_mode mode;
3437
3438 set_curr_insn_source_location (gimple_location (def));
3439 set_curr_insn_block (gimple_block (def));
3440
3441 DECL_ARTIFICIAL (vexpr) = 1;
3442 TREE_TYPE (vexpr) = TREE_TYPE (value);
3443 if (DECL_P (value))
3444 mode = DECL_MODE (value);
3445 else
3446 mode = TYPE_MODE (TREE_TYPE (value));
3447 DECL_MODE (vexpr) = mode;
3448
3449 val = gen_rtx_VAR_LOCATION
3450 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3451
3452 val = emit_debug_insn (val);
3453
3454 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3455 {
3456 if (!gimple_debug_bind_p (debugstmt))
3457 continue;
3458
3459 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3460 SET_USE (use_p, vexpr);
3461
3462 update_stmt (debugstmt);
3463 }
3464 }
3465 }
3466 set_curr_insn_source_location (sloc);
3467 set_curr_insn_block (sblock);
3468 }
3469
a5883ba0 3470 currently_expanding_gimple_stmt = stmt;
b5b8b0ac 3471
242229bb
JH
3472 /* Expand this statement, then evaluate the resulting RTL and
3473 fixup the CFG accordingly. */
726a989a 3474 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 3475 {
726a989a 3476 new_bb = expand_gimple_cond (bb, stmt);
cea49550
RH
3477 if (new_bb)
3478 return new_bb;
3479 }
      else if (gimple_debug_bind_p (stmt))
	{
	  location_t sloc = get_curr_insn_source_location ();
	  tree sblock = get_curr_insn_block ();
	  gimple_stmt_iterator nsi = gsi;

	  for (;;)
	    {
	      tree var = gimple_debug_bind_get_var (stmt);
	      tree value;
	      rtx val;
	      enum machine_mode mode;

	      if (gimple_debug_bind_has_value_p (stmt))
		value = gimple_debug_bind_get_value (stmt);
	      else
		value = NULL_TREE;

	      last = get_last_insn ();

	      set_curr_insn_source_location (gimple_location (stmt));
	      set_curr_insn_block (gimple_block (stmt));

	      if (DECL_P (var))
		mode = DECL_MODE (var);
	      else
		mode = TYPE_MODE (TREE_TYPE (var));

	      val = gen_rtx_VAR_LOCATION
		(mode, var, (rtx) value, VAR_INIT_STATUS_INITIALIZED);

	      val = emit_debug_insn (val);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  /* We can't dump the insn with a TREE where an RTX
		     is expected.  */
		  INSN_VAR_LOCATION_LOC (val) = const0_rtx;
		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
		  INSN_VAR_LOCATION_LOC (val) = (rtx) value;
		}

	      /* In order not to generate too many debug temporaries,
		 we delink all uses of debug statements we already expanded.
		 Therefore debug statements between definition and real
		 use of TERed SSA names will continue to use the SSA name,
		 and not be replaced with debug temps.  */
	      delink_stmt_imm_use (stmt);

	      gsi = nsi;
	      gsi_next (&nsi);
	      if (gsi_end_p (nsi))
		break;
	      stmt = gsi_stmt (nsi);
	      if (!gimple_debug_bind_p (stmt))
		break;
	    }

	  set_curr_insn_source_location (sloc);
	  set_curr_insn_block (sblock);
	}
      else
	{
	  if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
	    {
	      bool can_fallthru;
	      new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
	      if (new_bb)
		{
		  if (can_fallthru)
		    bb = new_bb;
		  else
		    return new_bb;
		}
	    }
	  else
	    {
	      def_operand_p def_p;
	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);

	      if (def_p != NULL)
		{
		  /* Ignore this stmt if it is in the list of
		     replaceable expressions.  */
		  if (SA.values
		      && bitmap_bit_p (SA.values,
				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
		    continue;
		}
	      last = expand_gimple_stmt (stmt);
	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
	    }
	}
    }

  currently_expanding_gimple_stmt = NULL;

  /* Expand implicit goto and convert goto_locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->goto_locus && e->goto_block)
	{
	  set_curr_insn_source_location (e->goto_locus);
	  set_curr_insn_block (e->goto_block);
	  e->goto_locus = curr_insn_locator ();
	}
      e->goto_block = NULL;
      if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
	{
	  emit_jump (label_rtx_for_bb (e->dest));
	  e->flags &= ~EDGE_FALLTHRU;
	}
    }

  /* Expanded RTL can create a jump in the last instruction of a block.
     Such a jump might later be assumed to be a jump to the successor,
     which breaks edge insertion.  We need to insert a dummy move to
     prevent this.  PR41440.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
      && (last = get_last_insn ())
      && JUMP_P (last))
    {
      rtx dummy = gen_reg_rtx (SImode);
      emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}


/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* When the entry edge points to the first basic block, we don't need
     a jump; otherwise we have to jump to the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
    {
      tree label = gimple_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
				   get_last_insn (),
				   ENTRY_BLOCK_PTR);
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);
  return init_block;
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
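
/* As an illustration: for a function whose outermost lexical block has
   two sibling subblocks, each with one nested block,
   set_block_levels (DECL_INITIAL (fndecl), 0) numbers the outermost
   block 0, both siblings 1 (reached via BLOCK_CHAIN), and the nested
   blocks 2 (reached via BLOCK_SUBBLOCKS).  */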

/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx head = get_last_insn ();
  rtx end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;
  rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);

  rtl_profile_for_bb (EXIT_BLOCK_PTR);

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  set_curr_insn_block (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  /* While emitting the function end we could move the end of the last
     basic block.  */
  BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end,
				   EXIT_BLOCK_PTR->prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
  exit_block->count = EXIT_BLOCK_PTR->count;

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
      if (!(e->flags & EDGE_ABNORMAL))
	redirect_edge_succ (e, exit_block);
      else
	ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
    if (e2 != e)
      {
	e->count -= e2->count;
	exit_block->count -= e2->count;
	exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (e->count < 0)
    e->count = 0;
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}

/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree *tp, int *walk_subtrees,
				   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
	      && (!TREE_OPERAND (t, 2)
		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || (TREE_CODE (t) == COMPONENT_REF
		 && (!TREE_OPERAND (t, 2)
		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || TREE_CODE (t) == BIT_FIELD_REF
	     || TREE_CODE (t) == REALPART_EXPR
	     || TREE_CODE (t) == IMAGPART_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || CONVERT_EXPR_P (t))
	t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  t = get_base_address (t);
	  if (t && DECL_P (t)
	      && DECL_MODE (t) != BLKmode)
	    TREE_ADDRESSABLE (t) = 1;
	}

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}
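
/* For instance, given
       int a[2];
       ...
       tmp = a[i];
   with a non-constant index I, the loop above cannot strip the
   ARRAY_REF (its index is not gimple-invariant), so the base A is
   marked TREE_ADDRESSABLE whenever its DECL_MODE is not BLKmode,
   i.e. whenever A might otherwise have been promoted into a single
   register.  (Illustrative example; the exact modes involved are
   target-dependent.)  */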

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple stmt = gsi_stmt (gsi);
	if (!is_gimple_debug (stmt))
	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}

/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  Local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (cfun->can_throw_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
	      <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
	 needed.  */
      fixup_tail_calls ();
    }
}
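
/* For example, if a local variable raises crtl->stack_alignment_estimated
   above INCOMING_STACK_BOUNDARY (say a 32-byte-aligned local on a target
   whose incoming boundary is only 16 bytes), stack_realign_needed is set
   above, and the target may return a DRAP register from get_drap_rtx so
   that incoming arguments stay reachable after the frame is realigned.
   (Illustrative; the details are target-specific.)  */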

/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful not to manipulate the CFG during
   the expansion.  */

static unsigned int
gimple_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;
  rtx var_seq;
  unsigned i;

  timevar_push (TV_OUT_OF_SSA);
  rewrite_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);
  SA.partition_to_pseudo = (rtx *) xcalloc (SA.map->num_partitions,
					    sizeof (rtx));

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  insn_locators_alloc ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (cfun->function_start_locus == UNKNOWN_LOCATION)
	set_curr_insn_source_location
	  (DECL_SOURCE_LOCATION (current_function_decl));
      else
	set_curr_insn_source_location (cfun->function_start_locus);
    }
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

#ifdef INSN_SCHEDULING
  init_sched_attrs ();
#endif

  /* Make sure the first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = 0;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Expand the variables recorded during gimple lowering.  */
  timevar_push (TV_VAR_EXPAND);
  start_sequence ();

  expand_used_vars ();

  var_seq = get_insns ();
  end_sequence ();
  timevar_pop (TV_VAR_EXPAND);

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting local variables: "
		 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting function: "
		 "all local arrays are less than %d bytes long",
		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
  if (var_seq)
    {
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
	 before parm_birth_insn.  We've just inserted an alloca call.
	 Adjust the pointer to match.  */
      parm_birth_insn = var_seq;
    }

  /* Now that we also have the parameter RTXs, copy them over to our
     partitions.  */
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));

      if (TREE_CODE (var) != VAR_DECL
	  && !SA.partition_to_pseudo[i])
	SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
      gcc_assert (SA.partition_to_pseudo[i]);

      /* If this decl was marked as living in multiple places, reset
	 this now to NULL.  */
      if (DECL_RTL_IF_SET (var) == pc_rtx)
	SET_DECL_RTL (var, NULL);

      /* Some RTL parts really want to look at DECL_RTL(x) when x
	 was a decl marked in REG_ATTR or MEM_ATTR.  We could use
	 SET_DECL_RTL here making this available, but that would mean
	 selecting one of the potentially many RTLs for one DECL.  Instead
	 of doing that we simply reset the MEM_EXPR of the RTL in question,
	 then nobody can get at it and hence nobody can call DECL_RTL on it.  */
      if (!DECL_RTL_SET_P (var))
	{
	  if (MEM_P (SA.partition_to_pseudo[i]))
	    set_mem_expr (SA.partition_to_pseudo[i], NULL);
	}
    }
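
  /* At this point every out-of-SSA partition has an RTL home.  For
     instance, if i_1 and i_3 (two versions of user variable i) were
     coalesced into one partition by rewrite_out_of_ssa, they now share
     the single pseudo recorded in SA.partition_to_pseudo[i].
     (Illustrative; the exact grouping is decided during out-of-SSA.)  */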

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();

  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  execute_free_datastructures ();
  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  cfun->gimple_df->in_ssa_p = false;

  /* Expansion is used by optimization passes too; set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (cfun, NULL);

  rebuild_jump_labels (get_insns ());

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->insns.r)
	    commit_one_edge_insertion (e);
	  else
	    ei_next (&ei);
	}
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
	  e->flags &= ~EDGE_EXECUTABLE;

	  /* At the moment not all abnormal edges match the RTL
	     representation.  It is safe to remove them here as
	     find_many_sub_basic_blocks will rediscover them.
	     In the future we should get this fixed properly.  */
	  if ((e->flags & EDGE_ABNORMAL)
	      && !(e->flags & EDGE_SIBCALL))
	    remove_edge (e);
	  else
	    ei_next (&ei);
	}
    }

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  compact_blocks ();

  expand_stack_alignment ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();
  timevar_pop (TV_POST_EXPAND);
  return 0;
}

struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",				/* name */
  NULL,					/* gate */
  gimple_expand_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_EXPAND,				/* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx,			/* properties_required */
  PROP_rtl,				/* properties_provided */
  PROP_ssa | PROP_trees,		/* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,		/* todo_flags_start */
  TODO_dump_func
    | TODO_ggc_collect			/* todo_flags_finish */
 }
};