/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
26#include "rtl.h"
27#include "tm_p.h"
28#include "basic-block.h"
29#include "function.h"
30#include "expr.h"
31#include "langhooks.h"
32#include "tree-flow.h"
33#include "timevar.h"
34#include "tree-dump.h"
35#include "tree-pass.h"
36#include "except.h"
37#include "flags.h"
1f6d3a08 38#include "diagnostic.h"
cf835838
JM
39#include "tree-pretty-print.h"
40#include "gimple-pretty-print.h"
1f6d3a08 41#include "toplev.h"
ef330312 42#include "debug.h"
7d69de61 43#include "params.h"
ff28a94d 44#include "tree-inline.h"
6946b3f7 45#include "value-prof.h"
e41b2a33 46#include "target.h"
4e3825db 47#include "ssaexpand.h"
7a8cba34
SB
48#include "bitmap.h"
49#include "sbitmap.h"
2b21299c 50#include "insn-attr.h" /* For INSN_SCHEDULING. */
726a989a 51
4e3825db
MM
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

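/* pc_rtx is used as a sentinel in two distinct ways in this file:
   set_rtl above records it as the "multiple places" marker on a base
   DECL whose partitions were given different locations, and
   add_stack_var below installs it as a placeholder meaning "queued in
   stack_vars but not yet assigned a location" (see the pc_rtx checks
   in expand_stack_vars).  */
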
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices into stack_vars, ordered by stack_var_cmp:
   "large"-alignment variables first, then by decreasing size.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}

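/* For example, with FRAME_GROWS_DOWNWARD, frame_offset == 0,
   frame_phase == 0, size == 12 and align == 8:

     new_frame_offset = 0 - 12;     -> -12
     new_frame_offset &= -8;        -> -16

   so the variable is placed at frame offset -16 and occupies the
   twelve bytes [-16, -4).  In the upward-growing case the offset is
   instead rounded up to the next multiple of ALIGN before SIZE is
   added.  */
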
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  v = &stack_vars[stack_vars_num];

  v->decl = decl;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

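/* The 3/2 growth factor above makes the reallocation schedule
   geometric, so a long sequence of add_stack_var calls costs
   amortized O(1) per variable even though XRESIZEVEC copies the
   whole array each time it grows.  */
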
/* Make the decls associated with LUIDs X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with LUIDs X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

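/* For example, given

     union u { short s; int i; };
     struct r { union u f; };

   both u and r, and any array of either, make this function return
   true: the ARRAY_TYPE and RECORD_TYPE cases recurse into the
   element and field types.  */
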
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (See PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case when
                 -fstrict-aliasing is used.  */
              || (contains_union && flag_strict_aliasing))
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}

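/* The final ID comparison above matters because the C library's qsort
   is not guaranteed to be stable: without a total order, variables
   that tie on alignment class, size and alignment could come out in a
   different order from run to run or host to host, perturbing the
   stack layout and thus the generated code.  */
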
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables, add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced, except
             for -O0 where we are preserving even unreferenced variables.  */
          gcc_assert (DECL_P (decl)
                      && (!optimize
                          || referenced_var_lookup (cfun, DECL_UID (decl))));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

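/* For example, if partition A is the list a -> x -> EOC and B is the
   single entry b, then after union_stack_vars (a, b) the list reads
   a -> b -> x -> EOC, b's representative is a, and each variable that
   conflicted with b now has its partition representative marked as
   conflicting with a.  */
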
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            continue;

          /* UNION the objects, placing J in I's partition.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}

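/* For example, take three variables A (32 bytes), B (16 bytes) and
   C (8 bytes), all of "small" alignment, where A conflicts with B
   but C conflicts with neither.  After sorting the order is A, B, C.
   The pass over A skips B (conflict) and merges C, leaving the
   partitions {A, C} and {B}: A and C share one stack slot while B
   gets its own.  */
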
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}

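/* In the non-SSA case above, "offset & -offset" isolates the lowest
   set bit of the offset, i.e. the largest power of two dividing it:
   an offset of 24 bytes yields 8, so that slot is known to be 8-byte
   aligned relative to the frame base.  */
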
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (decl))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on its location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that the in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
         realign decision has been made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

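/* For example, with -fstack-protector-all (flag_stack_protect == 2)
   and the default ssp-buffer-size of 8: "char buf[64]" classifies as
   a large character array and is given phase 1, "int arr[10]" has
   only SPCT_HAS_ARRAY and is given phase 2, and a plain scalar stays
   in phase 0.  With plain -fstack-protector only the large character
   array is segregated.  */
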
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  unsigned ix;
  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
    TREE_USED (t) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}

/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  tree old_cur_fun_decl = current_function_decl;
  referenced_var_iterator rvi;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  current_function_decl = node->decl;
  push_cfun (fn);

  gcc_checking_assert (gimple_referenced_vars (fn));
  FOR_EACH_REFERENCED_VAR (fn, var, rvi)
    size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  pop_cfun ();
  current_function_decl = old_cur_fun_decl;
  return size;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *maybe_local_decls = NULL;
  unsigned i;
  unsigned len;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
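
  /* For example, with a preferred boundary of 16 bytes and
     STARTING_FRAME_OFFSET == 8, off is 8 and frame_phase becomes 8:
     frame offsets X with (X + 8) % 16 == 0 are the ones that are
     actually 16-byte aligned.  */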

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = VEC_length (tree, cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            VEC_safe_push (tree, heap, maybe_local_decls, var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!VEC_empty (tree, cfun->local_decls))
    VEC_block_remove (tree, cfun->local_decls, 0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }
  VEC_free (tree, heap, maybe_local_decls);

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}

/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}

726a989a 1598
529ff441
MM
1599/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1600 of a basic block where we just expanded the conditional at the end,
315adeda
MM
1601 possibly clean up the CFG and instruction sequence. LAST is the
1602 last instruction before the just emitted jump sequence. */
529ff441
MM
1603
1604static void
315adeda 1605maybe_cleanup_end_of_block (edge e, rtx last)
529ff441
MM
1606{
1607 /* Special case: when jumpif decides that the condition is
1608 trivial it emits an unconditional jump (and the necessary
1609 barrier). But we still have two edges, the fallthru one is
1610 wrong. purge_dead_edges would clean this up later. Unfortunately
1611 we have to insert insns (and split edges) before
1612 find_many_sub_basic_blocks and hence before purge_dead_edges.
1613 But splitting edges might create new blocks which depend on the
1614 fact that if there are two edges there's no barrier. So the
1615 barrier would get lost and verify_flow_info would ICE. Instead
1616 of auditing all edge splitters to care for the barrier (which
1617 normally isn't there in a cleaned CFG), fix it here. */
1618 if (BARRIER_P (get_last_insn ()))
1619 {
529ff441
MM
1620 rtx insn;
1621 remove_edge (e);
1622 /* Now, we have a single successor block, if we have insns to
1623 insert on the remaining edge we potentially will insert
1624 it at the end of this block (if the dest block isn't feasible)
1625 in order to avoid splitting the edge. This insertion will take
1626 place in front of the last jump. But we might have emitted
1627 multiple jumps (conditional and one unconditional) to the
1628 same destination. Inserting in front of the last one then
1629 is a problem. See PR 40021. We fix this by deleting all
1630 jumps except the last unconditional one. */
1631 insn = PREV_INSN (get_last_insn ());
1632 /* Make sure we have an unconditional jump. Otherwise we're
1633 confused. */
1634 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
315adeda 1635 for (insn = PREV_INSN (insn); insn != last;)
529ff441
MM
1636 {
1637 insn = PREV_INSN (insn);
1638 if (JUMP_P (NEXT_INSN (insn)))
1639 {
1640 if (!any_condjump_p (NEXT_INSN (insn)))
1641 {
1642 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1643 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1644 }
1645 delete_insn (NEXT_INSN (insn));
1646 }
1647 }
1648 }
1649 }
1650
1651 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1652 Returns a new basic block if we've terminated the current basic
1653 block and created a new one. */
1654
1655 static basic_block
1656 expand_gimple_cond (basic_block bb, gimple stmt)
1657 {
1658 basic_block new_bb, dest;
1659 edge new_edge;
1660 edge true_edge;
1661 edge false_edge;
1662 rtx last2, last;
1663 enum tree_code code;
1664 tree op0, op1;
1665
1666 code = gimple_cond_code (stmt);
1667 op0 = gimple_cond_lhs (stmt);
1668 op1 = gimple_cond_rhs (stmt);
1669 /* We're sometimes presented with such code:
1670 D.123_1 = x < y;
1671 if (D.123_1 != 0)
1672 ...
1673 This would expand to two comparisons which then later might
1674 be cleaned up by combine. But some pattern matchers like if-conversion
1675 work better when there's only one compare, so make up for this
1676 here as a special exception if TER would have made the same change. */
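/* Concretely, when the conditions below hold, the example above is
   expanded as a single "if (x < y)" jump, with no separate rtl
   computed for the D.123_1 temporary.  */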
1677 if (gimple_cond_single_var_p (stmt)
1678 && SA.values
1679 && TREE_CODE (op0) == SSA_NAME
1680 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1681 {
1682 gimple second = SSA_NAME_DEF_STMT (op0);
1683 if (gimple_code (second) == GIMPLE_ASSIGN)
1684 {
1685 enum tree_code code2 = gimple_assign_rhs_code (second);
1686 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1687 {
1688 code = code2;
1689 op0 = gimple_assign_rhs1 (second);
1690 op1 = gimple_assign_rhs2 (second);
1691 }
1692 /* If jumps are cheap turn some more codes into
1693 jumpy sequences. */
1694 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1695 {
1696 if ((code2 == BIT_AND_EXPR
1697 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1698 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1699 || code2 == TRUTH_AND_EXPR)
1700 {
1701 code = TRUTH_ANDIF_EXPR;
1702 op0 = gimple_assign_rhs1 (second);
1703 op1 = gimple_assign_rhs2 (second);
1704 }
1705 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1706 {
1707 code = TRUTH_ORIF_EXPR;
1708 op0 = gimple_assign_rhs1 (second);
1709 op1 = gimple_assign_rhs2 (second);
1710 }
1711 }
1712 }
1713 }
1714
1715 last2 = last = get_last_insn ();
1716
1717 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1718 set_curr_insn_source_location (gimple_location (stmt));
1719 set_curr_insn_block (gimple_block (stmt));
1720
1721 /* These flags have no purpose in RTL land. */
1722 true_edge->flags &= ~EDGE_TRUE_VALUE;
1723 false_edge->flags &= ~EDGE_FALSE_VALUE;
1724
1725 /* We can either have a pure conditional jump with one fallthru edge or
1726 two-way jump that needs to be decomposed into two basic blocks. */
1727 if (false_edge->dest == bb->next_bb)
1728 {
1729 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1730 true_edge->probability);
1731 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1732 if (true_edge->goto_locus)
1733 {
1734 set_curr_insn_source_location (true_edge->goto_locus);
1735 set_curr_insn_block (true_edge->goto_block);
1736 true_edge->goto_locus = curr_insn_locator ();
1737 }
1738 true_edge->goto_block = NULL;
1739 false_edge->flags |= EDGE_FALLTHRU;
1740 maybe_cleanup_end_of_block (false_edge, last);
1741 return NULL;
1742 }
1743 if (true_edge->dest == bb->next_bb)
1744 {
1745 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1746 false_edge->probability);
1747 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1748 if (false_edge->goto_locus)
1749 {
1750 set_curr_insn_source_location (false_edge->goto_locus);
1751 set_curr_insn_block (false_edge->goto_block);
1752 false_edge->goto_locus = curr_insn_locator ();
1753 }
1754 false_edge->goto_block = NULL;
1755 true_edge->flags |= EDGE_FALLTHRU;
1756 maybe_cleanup_end_of_block (true_edge, last);
1757 return NULL;
1758 }
1759
1760 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1761 true_edge->probability);
1762 last = get_last_insn ();
1763 if (false_edge->goto_locus)
1764 {
1765 set_curr_insn_source_location (false_edge->goto_locus);
1766 set_curr_insn_block (false_edge->goto_block);
1767 false_edge->goto_locus = curr_insn_locator ();
1768 }
1769 false_edge->goto_block = NULL;
1770 emit_jump (label_rtx_for_bb (false_edge->dest));
1771
1772 BB_END (bb) = last;
1773 if (BARRIER_P (BB_END (bb)))
1774 BB_END (bb) = PREV_INSN (BB_END (bb));
1775 update_bb_for_insn (bb);
1776
1777 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1778 dest = false_edge->dest;
1779 redirect_edge_succ (false_edge, new_bb);
1780 false_edge->flags |= EDGE_FALLTHRU;
1781 new_bb->count = false_edge->count;
1782 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1783 new_edge = make_edge (new_bb, dest, 0);
1784 new_edge->probability = REG_BR_PROB_BASE;
1785 new_edge->count = new_bb->count;
1786 if (BARRIER_P (BB_END (new_bb)))
1787 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1788 update_bb_for_insn (new_bb);
1789
1790 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1791
1792 if (true_edge->goto_locus)
1793 {
1794 set_curr_insn_source_location (true_edge->goto_locus);
1795 set_curr_insn_block (true_edge->goto_block);
1796 true_edge->goto_locus = curr_insn_locator ();
1797 }
1798 true_edge->goto_block = NULL;
1799
1800 return new_bb;
1801 }
1802
1803 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1804 statement STMT. */
1805
1806 static void
1807 expand_call_stmt (gimple stmt)
1808 {
1809 tree exp, decl, lhs;
1810 bool builtin_p;
1811 size_t i;
1812
1813 if (gimple_call_internal_p (stmt))
1814 {
1815 expand_internal_call (stmt);
1816 return;
1817 }
1818
1819 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1820
1821 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
1822 decl = gimple_call_fndecl (stmt);
1823 builtin_p = decl && DECL_BUILT_IN (decl);
1824
1825 /* If this is not a builtin function, the function type through which the
1826 call is made may be different from the type of the function. */
1827 if (!builtin_p)
1828 CALL_EXPR_FN (exp)
1829 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
1830 CALL_EXPR_FN (exp));
1831
1832 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1833 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1834
1835 for (i = 0; i < gimple_call_num_args (stmt); i++)
1836 {
1837 tree arg = gimple_call_arg (stmt, i);
1838 gimple def;
1839 /* TER addresses into arguments of builtin functions so we have a
1840 chance to infer more correct alignment information. See PR39954. */
1841 if (builtin_p
1842 && TREE_CODE (arg) == SSA_NAME
1843 && (def = get_gimple_for_ssa_name (arg))
1844 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1845 arg = gimple_assign_rhs1 (def);
1846 CALL_EXPR_ARG (exp, i) = arg;
1847 }
1848
1849 if (gimple_has_side_effects (stmt))
1850 TREE_SIDE_EFFECTS (exp) = 1;
1851
1852 if (gimple_call_nothrow_p (stmt))
1853 TREE_NOTHROW (exp) = 1;
1854
1855 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1856 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1857 if (decl
1858 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1859 && DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA)
1860 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
1861 else
1862 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1863 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1864 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1865 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1866 TREE_BLOCK (exp) = gimple_block (stmt);
1867
1868 /* Ensure RTL is created for debug args. */
1869 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
1870 {
1871 VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
1872 unsigned int ix;
1873 tree dtemp;
1874
1875 if (debug_args)
1876 for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
1877 {
1878 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
1879 expand_debug_expr (dtemp);
1880 }
1881 }
1882
1883 lhs = gimple_call_lhs (stmt);
1884 if (lhs)
1885 expand_assignment (lhs, exp, false);
1886 else
1887 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
1888 }
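/* For instance, a GIMPLE call "lhs = foo (a, b)" is rebuilt above as a
   GENERIC CALL_EXPR and handed to expand_assignment; a call whose value
   is unused goes through expand_expr_real_1 with a const0_rtx target
   instead.  */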
1889
1890 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
1891 STMT that doesn't require special handling for outgoing edges. That
1892 is, no tailcalls and no GIMPLE_COND. */
1893
1894 static void
1895 expand_gimple_stmt_1 (gimple stmt)
1896 {
1897 tree op0;
1898
1899 set_curr_insn_source_location (gimple_location (stmt));
1900 set_curr_insn_block (gimple_block (stmt));
1901
1902 switch (gimple_code (stmt))
1903 {
1904 case GIMPLE_GOTO:
1905 op0 = gimple_goto_dest (stmt);
1906 if (TREE_CODE (op0) == LABEL_DECL)
1907 expand_goto (op0);
1908 else
1909 expand_computed_goto (op0);
1910 break;
1911 case GIMPLE_LABEL:
1912 expand_label (gimple_label_label (stmt));
1913 break;
1914 case GIMPLE_NOP:
1915 case GIMPLE_PREDICT:
1916 break;
1917 case GIMPLE_SWITCH:
1918 expand_case (stmt);
1919 break;
1920 case GIMPLE_ASM:
1921 expand_asm_stmt (stmt);
1922 break;
1923 case GIMPLE_CALL:
1924 expand_call_stmt (stmt);
1925 break;
1926
1927 case GIMPLE_RETURN:
1928 op0 = gimple_return_retval (stmt);
1929
1930 if (op0 && op0 != error_mark_node)
1931 {
1932 tree result = DECL_RESULT (current_function_decl);
1933
1934 /* If we are not returning the current function's RESULT_DECL,
1935 build an assignment to it. */
1936 if (op0 != result)
1937 {
1938 /* I believe that a function's RESULT_DECL is unique. */
1939 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1940
1941 /* ??? We'd like to use simply expand_assignment here,
1942 but this fails if the value is of BLKmode but the return
1943 decl is a register. expand_return has special handling
1944 for this combination, which eventually should move
1945 to common code. See comments there. Until then, let's
1946 build a modify expression :-/ */
1947 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1948 result, op0);
1949 }
1950 }
1951 if (!op0)
1952 expand_null_return ();
1953 else
1954 expand_return (op0);
1955 break;
1956
1957 case GIMPLE_ASSIGN:
1958 {
1959 tree lhs = gimple_assign_lhs (stmt);
1960
1961 /* Tree expand used to fiddle with |= and &= of two bitfield
1962 COMPONENT_REFs here. This can't happen with gimple, the LHS
1963 of binary assigns must be a gimple reg. */
1964
1965 if (TREE_CODE (lhs) != SSA_NAME
1966 || get_gimple_rhs_class (gimple_expr_code (stmt))
1967 == GIMPLE_SINGLE_RHS)
1968 {
1969 tree rhs = gimple_assign_rhs1 (stmt);
1970 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
1971 == GIMPLE_SINGLE_RHS);
1972 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
1973 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
1974 expand_assignment (lhs, rhs,
1975 gimple_assign_nontemporal_move_p (stmt));
1976 }
1977 else
1978 {
1979 rtx target, temp;
1980 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
1981 struct separate_ops ops;
1982 bool promoted = false;
1983
1984 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
1985 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
1986 promoted = true;
1987
1988 ops.code = gimple_assign_rhs_code (stmt);
1989 ops.type = TREE_TYPE (lhs);
1990 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
1991 {
1992 case GIMPLE_TERNARY_RHS:
1993 ops.op2 = gimple_assign_rhs3 (stmt);
1994 /* Fallthru */
1995 case GIMPLE_BINARY_RHS:
1996 ops.op1 = gimple_assign_rhs2 (stmt);
1997 /* Fallthru */
1998 case GIMPLE_UNARY_RHS:
1999 ops.op0 = gimple_assign_rhs1 (stmt);
2000 break;
2001 default:
2002 gcc_unreachable ();
2003 }
2004 ops.location = gimple_location (stmt);
2005
2006 /* If we want to use a nontemporal store, force the value to
2007 register first. If we store into a promoted register,
2008 don't directly expand to target. */
2009 temp = nontemporal || promoted ? NULL_RTX : target;
2010 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2011 EXPAND_NORMAL);
2012
2013 if (temp == target)
2014 ;
2015 else if (promoted)
2016 {
2017 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2018 /* If TEMP is a VOIDmode constant, use convert_modes to make
2019 sure that we properly convert it. */
2020 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2021 {
2022 temp = convert_modes (GET_MODE (target),
2023 TYPE_MODE (ops.type),
2024 temp, unsignedp);
2025 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2026 GET_MODE (target), temp, unsignedp);
2027 }
2028
2029 convert_move (SUBREG_REG (target), temp, unsignedp);
2030 }
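/* A promoted SUBREG target arises, for example, on machines whose ABI
   keeps narrow values sign- or zero-extended in wider registers; the
   convert_move above re-extends the freshly computed value so that the
   containing register stays in canonical form.  */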
2031 else if (nontemporal && emit_storent_insn (target, temp))
2032 ;
2033 else
2034 {
2035 temp = force_operand (temp, target);
2036 if (temp != target)
2037 emit_move_insn (target, temp);
2038 }
2039 }
2040 }
2041 break;
2042
2043 default:
2044 gcc_unreachable ();
2045 }
2046 }
2047
2048 /* Expand one gimple statement STMT and return the last RTL instruction
2049 before any of the newly generated ones.
2050
2051 In addition to generating the necessary RTL instructions this also
2052 sets REG_EH_REGION notes if necessary and sets the current source
2053 location for diagnostics. */
2054
2055 static rtx
2056 expand_gimple_stmt (gimple stmt)
2057 {
2058 location_t saved_location = input_location;
2059 rtx last = get_last_insn ();
2060 int lp_nr;
2061
2062 gcc_assert (cfun);
2063
2064 /* We need to save and restore the current source location so that errors
2065 discovered during expansion are emitted with the right location. But
2066 it would be better if the diagnostic routines used the source location
2067 embedded in the tree nodes rather than globals. */
2068 if (gimple_has_location (stmt))
2069 input_location = gimple_location (stmt);
2070
2071 expand_gimple_stmt_1 (stmt);
2072
2073 /* Free any temporaries used to evaluate this statement. */
2074 free_temp_slots ();
2075
2076 input_location = saved_location;
2077
2078 /* Mark all insns that may trap. */
2079 lp_nr = lookup_stmt_eh_lp (stmt);
2080 if (lp_nr)
2081 {
2082 rtx insn;
2083 for (insn = next_real_insn (last); insn;
2084 insn = next_real_insn (insn))
2085 {
2086 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2087 /* If we want exceptions for non-call insns, any
2088 may_trap_p instruction may throw. */
2089 && GET_CODE (PATTERN (insn)) != CLOBBER
2090 && GET_CODE (PATTERN (insn)) != USE
2091 && insn_could_throw_p (insn))
2092 make_reg_eh_region_note (insn, 0, lp_nr);
2093 }
2094 }
2095
2096 return last;
2097 }
2098
2099 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2100 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2101 generated a tail call (something that might be denied by the ABI
2102 rules governing the call; see calls.c).
2103
2104 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2105 can still reach the rest of BB. The case here is __builtin_sqrt,
2106 where the NaN result goes through the external function (with a
2107 tailcall) and the normal result happens via a sqrt instruction. */
2108
2109 static basic_block
2110 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2111 {
2112 rtx last2, last;
2113 edge e;
2114 edge_iterator ei;
2115 int probability;
2116 gcov_type count;
2117
2118 last2 = last = expand_gimple_stmt (stmt);
2119
2120 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2121 if (CALL_P (last) && SIBLING_CALL_P (last))
2122 goto found;
2123
2124 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2125
2126 *can_fallthru = true;
2127 return NULL;
2128
2129 found:
2130 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2131 Any instructions emitted here are about to be deleted. */
2132 do_pending_stack_adjust ();
2133
2134 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2135 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2136 EH or abnormal edges, we shouldn't have created a tail call in
2137 the first place. So it seems to me we should just be removing
2138 all edges here, or redirecting the existing fallthru edge to
2139 the exit block. */
2140
2141 probability = 0;
2142 count = 0;
2143
2144 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2145 {
2146 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2147 {
2148 if (e->dest != EXIT_BLOCK_PTR)
2149 {
2150 e->dest->count -= e->count;
2151 e->dest->frequency -= EDGE_FREQUENCY (e);
2152 if (e->dest->count < 0)
2153 e->dest->count = 0;
2154 if (e->dest->frequency < 0)
2155 e->dest->frequency = 0;
2156 }
2157 count += e->count;
2158 probability += e->probability;
2159 remove_edge (e);
2160 }
2161 else
2162 ei_next (&ei);
2163 }
2164
2165 /* This is somewhat ugly: the call_expr expander often emits instructions
2166 after the sibcall (to perform the function return). These confuse the
2167 find_many_sub_basic_blocks code, so we need to get rid of them. */
2168 last = NEXT_INSN (last);
2169 gcc_assert (BARRIER_P (last));
2170
2171 *can_fallthru = false;
2172 while (NEXT_INSN (last))
2173 {
2174 /* For instance, a sqrt builtin expander expands the if with a
2175 sibcall in the then-branch and a label for the else-branch. */
2176 if (LABEL_P (NEXT_INSN (last)))
2177 {
2178 *can_fallthru = true;
2179 break;
2180 }
2181 delete_insn (NEXT_INSN (last));
2182 }
2183
2184 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2185 e->probability += probability;
2186 e->count += count;
2187 BB_END (bb) = last;
2188 update_bb_for_insn (bb);
2189
2190 if (NEXT_INSN (last))
2191 {
2192 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2193
2194 last = BB_END (bb);
2195 if (BARRIER_P (last))
2196 BB_END (bb) = PREV_INSN (last);
2197 }
2198
2199 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2200
2201 return bb;
2202 }
2203
2204 /* Return the difference between the floor and the truncated result of
2205 a signed division by OP1 with remainder MOD. */
2206 static rtx
2207 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2208 {
2209 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2210 return gen_rtx_IF_THEN_ELSE
2211 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2212 gen_rtx_IF_THEN_ELSE
2213 (mode, gen_rtx_LT (BImode,
2214 gen_rtx_DIV (mode, op1, mod),
2215 const0_rtx),
2216 constm1_rtx, const0_rtx),
2217 const0_rtx);
2218 }
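/* Worked example: for -7 / 2, truncation gives -3 with MOD = -1 while
   the floor result is -4; MOD != 0 and OP1 / MOD = 2 / -1 < 0, so the
   expression above yields the required adjustment of -1.  For an exact
   division MOD == 0 and the adjustment is 0.  */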
2219
2220 /* Return the difference between the ceil and the truncated result of
2221 a signed division by OP1 with remainder MOD. */
2222 static rtx
2223 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2224 {
2225 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2226 return gen_rtx_IF_THEN_ELSE
2227 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2228 gen_rtx_IF_THEN_ELSE
2229 (mode, gen_rtx_GT (BImode,
2230 gen_rtx_DIV (mode, op1, mod),
2231 const0_rtx),
2232 const1_rtx, const0_rtx),
2233 const0_rtx);
2234 }
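/* Worked example: for 7 / 2, truncation gives 3 with MOD = 1 while the
   ceiling result is 4; MOD != 0 and OP1 / MOD = 2 / 1 > 0, so the
   adjustment is +1.  */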
2235
2236 /* Return the difference between the ceil and the truncated result of
2237 an unsigned division by OP1 with remainder MOD. */
2238 static rtx
2239 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2240 {
2241 /* (mod != 0 ? 1 : 0) */
2242 return gen_rtx_IF_THEN_ELSE
2243 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2244 const1_rtx, const0_rtx);
2245 }
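/* An unsigned quotient never needs lowering, so any nonzero remainder
   means the ceiling result is exactly one above the truncated one:
   e.g. 7u / 2 truncates to 3 and rounds up to 4.  */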
2246
2247 /* Return the difference between the rounded and the truncated result
2248 of a signed division by OP1 with remainder MOD. Halfway cases are
2249 rounded away from zero, rather than to the nearest even number. */
2250 static rtx
2251 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2252 {
2253 /* (abs (mod) >= abs (op1) - abs (mod)
2254 ? (op1 / mod > 0 ? 1 : -1)
2255 : 0) */
2256 return gen_rtx_IF_THEN_ELSE
2257 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2258 gen_rtx_MINUS (mode,
2259 gen_rtx_ABS (mode, op1),
2260 gen_rtx_ABS (mode, mod))),
2261 gen_rtx_IF_THEN_ELSE
2262 (mode, gen_rtx_GT (BImode,
2263 gen_rtx_DIV (mode, op1, mod),
2264 const0_rtx),
2265 const1_rtx, constm1_rtx),
2266 const0_rtx);
2267 }
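/* Worked example: for 7 / 2 we have abs (MOD) = 1 and
   abs (OP1) - abs (MOD) = 1, so the halfway test holds; OP1 / MOD = 2
   is positive, so the adjustment is +1 and 3.5 rounds away from zero
   to 4.  */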
2268
2269 /* Return the difference between the rounded and the truncated result
2270 of an unsigned division by OP1 with remainder MOD. Halfway cases
2271 are rounded away from zero, rather than to the nearest even
2272 number. */
2273 static rtx
2274 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2275 {
2276 /* (mod >= op1 - mod ? 1 : 0) */
2277 return gen_rtx_IF_THEN_ELSE
2278 (mode, gen_rtx_GE (BImode, mod,
2279 gen_rtx_MINUS (mode, op1, mod)),
2280 const1_rtx, const0_rtx);
2281 }
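/* Worked example: for 7u / 2, MOD = 1 and OP1 - MOD = 1, so
   MOD >= OP1 - MOD holds and the adjustment is +1, again rounding 3.5
   up to 4.  */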
2282
2283 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2284 any rtl. */
2285
2286 static rtx
2287 convert_debug_memory_address (enum machine_mode mode, rtx x,
2288 addr_space_t as)
2289 {
2290 enum machine_mode xmode = GET_MODE (x);
2291
2292#ifndef POINTERS_EXTEND_UNSIGNED
2293 gcc_assert (mode == Pmode
2294 || mode == targetm.addr_space.address_mode (as));
2295 gcc_assert (xmode == mode || xmode == VOIDmode);
2296#else
2297 rtx temp;
2298 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2299 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2300
2301 gcc_assert (mode == address_mode || mode == pointer_mode);
2302
2303 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2304 return x;
2305
2306 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2307 x = simplify_gen_subreg (mode, x, xmode,
2308 subreg_lowpart_offset
2309 (mode, xmode));
2310 else if (POINTERS_EXTEND_UNSIGNED > 0)
2311 x = gen_rtx_ZERO_EXTEND (mode, x);
2312 else if (!POINTERS_EXTEND_UNSIGNED)
2313 x = gen_rtx_SIGN_EXTEND (mode, x);
2314 else
2315 {
2316 switch (GET_CODE (x))
2317 {
2318 case SUBREG:
2319 if ((SUBREG_PROMOTED_VAR_P (x)
2320 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2321 || (GET_CODE (SUBREG_REG (x)) == PLUS
2322 && REG_P (XEXP (SUBREG_REG (x), 0))
2323 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2324 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2325 && GET_MODE (SUBREG_REG (x)) == mode)
2326 return SUBREG_REG (x);
2327 break;
2328 case LABEL_REF:
2329 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2330 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2331 return temp;
2332 case SYMBOL_REF:
2333 temp = shallow_copy_rtx (x);
2334 PUT_MODE (temp, mode);
2335 return temp;
2336 case CONST:
2337 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2338 if (temp)
2339 temp = gen_rtx_CONST (mode, temp);
2340 return temp;
2341 case PLUS:
2342 case MINUS:
2343 if (CONST_INT_P (XEXP (x, 1)))
2344 {
2345 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2346 if (temp)
2347 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2348 }
2349 break;
2350 default:
2351 break;
2352 }
2353 /* Don't know how to express ptr_extend as operation in debug info. */
2354 return NULL;
2355 }
2356#endif /* POINTERS_EXTEND_UNSIGNED */
2357
2358 return x;
2359 }
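/* On targets where pointer extension is not expressible as a plain
   ZERO_EXTEND or SIGN_EXTEND, note that e.g. a SYMBOL_REF is widened
   above simply by shallow-copying the rtx and overwriting its mode; no
   instructions are emitted, which is all a debug expression needs.  */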
2360
2361 /* Return an RTX equivalent to the value of the parameter DECL. */
2362
2363 static rtx
2364 expand_debug_parm_decl (tree decl)
2365 {
2366 rtx incoming = DECL_INCOMING_RTL (decl);
2367
2368 if (incoming
2369 && GET_MODE (incoming) != BLKmode
2370 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2371 || (MEM_P (incoming)
2372 && REG_P (XEXP (incoming, 0))
2373 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2374 {
2375 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2376
2377#ifdef HAVE_window_save
2378 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2379 If the target machine has an explicit window save instruction, the
2380 actual entry value is the corresponding OUTGOING_REGNO instead. */
2381 if (REG_P (incoming)
2382 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2383 incoming
2384 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2385 OUTGOING_REGNO (REGNO (incoming)), 0);
2386 else if (MEM_P (incoming))
2387 {
2388 rtx reg = XEXP (incoming, 0);
2389 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2390 {
2391 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2392 incoming = replace_equiv_address_nv (incoming, reg);
2393 }
2394 }
2395#endif
2396
2397 ENTRY_VALUE_EXP (rtl) = incoming;
2398 return rtl;
2399 }
2400
2401 if (incoming
2402 && GET_MODE (incoming) != BLKmode
2403 && !TREE_ADDRESSABLE (decl)
2404 && MEM_P (incoming)
2405 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2406 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2407 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2408 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2409 return incoming;
2410
2411 return NULL_RTX;
2412 }
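/* E.g. for an argument that arrives in a hard register and is never
   written back to memory, this yields an ENTRY_VALUE rtx wrapping that
   register, so the debug info can refer to the value the register held
   on entry to the function.  */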
2413
2414 /* Return an RTX equivalent to the value of the tree expression EXP. */
2415
2416 static rtx
2417 expand_debug_expr (tree exp)
2418 {
2419 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2420 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2421 enum machine_mode inner_mode = VOIDmode;
2422 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2423 addr_space_t as;
2424
2425 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2426 {
2427 case tcc_expression:
2428 switch (TREE_CODE (exp))
2429 {
2430 case COND_EXPR:
7ece48b1 2431 case DOT_PROD_EXPR:
0354c0c7
BS
2432 case WIDEN_MULT_PLUS_EXPR:
2433 case WIDEN_MULT_MINUS_EXPR:
2434 case FMA_EXPR:
2435 goto ternary;
2436
2437 case TRUTH_ANDIF_EXPR:
2438 case TRUTH_ORIF_EXPR:
2439 case TRUTH_AND_EXPR:
2440 case TRUTH_OR_EXPR:
2441 case TRUTH_XOR_EXPR:
2442 goto binary;
2443
2444 case TRUTH_NOT_EXPR:
2445 goto unary;
2446
2447 default:
2448 break;
2449 }
2450 break;
2451
2452 ternary:
2453 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2454 if (!op2)
2455 return NULL_RTX;
2456 /* Fall through. */
2457
2458 binary:
2459 case tcc_binary:
2460 case tcc_comparison:
2461 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2462 if (!op1)
2463 return NULL_RTX;
2464 /* Fall through. */
2465
2466 unary:
2467 case tcc_unary:
2468 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2469 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2470 if (!op0)
2471 return NULL_RTX;
2472 break;
2473
2474 case tcc_type:
2475 case tcc_statement:
2476 gcc_unreachable ();
2477
2478 case tcc_constant:
2479 case tcc_exceptional:
2480 case tcc_declaration:
2481 case tcc_reference:
2482 case tcc_vl_exp:
2483 break;
2484 }
2485
2486 switch (TREE_CODE (exp))
2487 {
2488 case STRING_CST:
2489 if (!lookup_constant_def (exp))
2490 {
2491 if (strlen (TREE_STRING_POINTER (exp)) + 1
2492 != (size_t) TREE_STRING_LENGTH (exp))
2493 return NULL_RTX;
2494 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2495 op0 = gen_rtx_MEM (BLKmode, op0);
2496 set_mem_attributes (op0, exp, 0);
2497 return op0;
2498 }
2499 /* Fall through... */
2500
2501 case INTEGER_CST:
2502 case REAL_CST:
2503 case FIXED_CST:
2504 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2505 return op0;
2506
2507 case COMPLEX_CST:
2508 gcc_assert (COMPLEX_MODE_P (mode));
2509 op0 = expand_debug_expr (TREE_REALPART (exp));
2510 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2511 return gen_rtx_CONCAT (mode, op0, op1);
2512
2513 case DEBUG_EXPR_DECL:
2514 op0 = DECL_RTL_IF_SET (exp);
2515
2516 if (op0)
2517 return op0;
2518
2519 op0 = gen_rtx_DEBUG_EXPR (mode);
2520 DEBUG_EXPR_TREE_DECL (op0) = exp;
2521 SET_DECL_RTL (exp, op0);
2522
2523 return op0;
2524
2525 case VAR_DECL:
2526 case PARM_DECL:
2527 case FUNCTION_DECL:
2528 case LABEL_DECL:
2529 case CONST_DECL:
2530 case RESULT_DECL:
2531 op0 = DECL_RTL_IF_SET (exp);
2532
2533 /* This decl was probably optimized away. */
2534 if (!op0)
2535 {
2536 if (TREE_CODE (exp) != VAR_DECL
2537 || DECL_EXTERNAL (exp)
2538 || !TREE_STATIC (exp)
2539 || !DECL_NAME (exp)
2540 || DECL_HARD_REGISTER (exp)
2541 || DECL_IN_CONSTANT_POOL (exp)
2542 || mode == VOIDmode)
2543 return NULL;
2544
2545 op0 = make_decl_rtl_for_debug (exp);
2546 if (!MEM_P (op0)
2547 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2548 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2549 return NULL;
2550 }
2551 else
2552 op0 = copy_rtx (op0);
2553
2554 if (GET_MODE (op0) == BLKmode
2555 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2556 below would ICE. While it is likely a FE bug,
2557 try to be robust here. See PR43166. */
2558 || mode == BLKmode
2559 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2560 {
2561 gcc_assert (MEM_P (op0));
2562 op0 = adjust_address_nv (op0, mode, 0);
2563 return op0;
2564 }
2565
2566 /* Fall through. */
2567
2568 adjust_mode:
2569 case PAREN_EXPR:
2570 case NOP_EXPR:
2571 case CONVERT_EXPR:
2572 {
2573 inner_mode = GET_MODE (op0);
2574
2575 if (mode == inner_mode)
2576 return op0;
2577
2578 if (inner_mode == VOIDmode)
2579 {
2580 if (TREE_CODE (exp) == SSA_NAME)
2581 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2582 else
2583 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2584 if (mode == inner_mode)
2585 return op0;
2586 }
2587
2588 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2589 {
2590 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2591 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2592 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2593 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2594 else
2595 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2596 }
2597 else if (FLOAT_MODE_P (mode))
2598 {
2599 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2600 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2601 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2602 else
2603 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2604 }
2605 else if (FLOAT_MODE_P (inner_mode))
2606 {
2607 if (unsignedp)
2608 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2609 else
2610 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2611 }
2612 else if (CONSTANT_P (op0)
2613 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2614 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2615 subreg_lowpart_offset (mode,
2616 inner_mode));
2617 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2618 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2619 : unsignedp)
2620 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2621 else
2622 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2623
2624 return op0;
2625 }
2626
2627 case MEM_REF:
2628 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2629 {
2630 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2631 TREE_OPERAND (exp, 0),
2632 TREE_OPERAND (exp, 1));
2633 if (newexp)
2634 return expand_debug_expr (newexp);
2635 }
2636 /* FALLTHROUGH */
2637 case INDIRECT_REF:
2638 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2639 if (!op0)
2640 return NULL;
2641
2642 if (TREE_CODE (exp) == MEM_REF)
2643 {
2644 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2645 || (GET_CODE (op0) == PLUS
2646 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2647 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2648 Instead just use get_inner_reference. */
2649 goto component_ref;
2650
2651 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2652 if (!op1 || !CONST_INT_P (op1))
2653 return NULL;
2654
2655 op0 = plus_constant (op0, INTVAL (op1));
2656 }
2657
2658 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2659 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2660 else
2661 as = ADDR_SPACE_GENERIC;
2662
2663 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2664 op0, as);
2665 if (op0 == NULL_RTX)
2666 return NULL;
b5b8b0ac 2667
f61c6f34 2668 op0 = gen_rtx_MEM (mode, op0);
b5b8b0ac 2669 set_mem_attributes (op0, exp, 0);
71f3a3f5
JJ
2670 if (TREE_CODE (exp) == MEM_REF
2671 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2672 set_mem_expr (op0, NULL_TREE);
09e881c9 2673 set_mem_addr_space (op0, as);
b5b8b0ac
AO
2674
2675 return op0;
2676
2677 case TARGET_MEM_REF:
2678 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2679 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2680 return NULL;
2681
2682 op0 = expand_debug_expr
2683 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2684 if (!op0)
2685 return NULL;
2686
2687 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2688 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2689 else
2690 as = ADDR_SPACE_GENERIC;
2691
2692 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2693 op0, as);
2694 if (op0 == NULL_RTX)
2695 return NULL;
2696
2697 op0 = gen_rtx_MEM (mode, op0);
2698
2699 set_mem_attributes (op0, exp, 0);
2700 set_mem_addr_space (op0, as);
2701
2702 return op0;
2703
2704 component_ref:
2705 case ARRAY_REF:
2706 case ARRAY_RANGE_REF:
2707 case COMPONENT_REF:
2708 case BIT_FIELD_REF:
2709 case REALPART_EXPR:
2710 case IMAGPART_EXPR:
2711 case VIEW_CONVERT_EXPR:
2712 {
2713 enum machine_mode mode1;
2714 HOST_WIDE_INT bitsize, bitpos;
2715 tree offset;
2716 int volatilep = 0;
2717 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2718 &mode1, &unsignedp, &volatilep, false);
2719 rtx orig_op0;
2720
2721 if (bitsize == 0)
2722 return NULL;
2723
2724 orig_op0 = op0 = expand_debug_expr (tem);
2725
2726 if (!op0)
2727 return NULL;
2728
2729 if (offset)
2730 {
dda2da58
AO
2731 enum machine_mode addrmode, offmode;
2732
2733 if (!MEM_P (op0))
2734 return NULL;
2735
2736 op0 = XEXP (op0, 0);
2737 addrmode = GET_MODE (op0);
2738 if (addrmode == VOIDmode)
2739 addrmode = Pmode;
2740
2741 op1 = expand_debug_expr (offset);
2742 if (!op1)
2743 return NULL;
2744
2745 offmode = GET_MODE (op1);
2746 if (offmode == VOIDmode)
2747 offmode = TYPE_MODE (TREE_TYPE (offset));
2748
2749 if (addrmode != offmode)
2750 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2751 subreg_lowpart_offset (addrmode,
2752 offmode));
2753
2754 /* Don't use offset_address here, we don't need a
2755 recognizable address, and we don't want to generate
2756 code. */
2757 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2758 op0, op1));
2759 }
2760
2761 if (MEM_P (op0))
2762 {
2763 if (mode1 == VOIDmode)
2764 /* Bitfield. */
2765 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2766 if (bitpos >= BITS_PER_UNIT)
2767 {
2768 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2769 bitpos %= BITS_PER_UNIT;
2770 }
2771 else if (bitpos < 0)
2772 {
2773 HOST_WIDE_INT units
2774 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2775 op0 = adjust_address_nv (op0, mode1, units);
2776 bitpos += units * BITS_PER_UNIT;
2777 }
2778 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2779 op0 = adjust_address_nv (op0, mode, 0);
2780 else if (GET_MODE (op0) != mode1)
2781 op0 = adjust_address_nv (op0, mode1, 0);
2782 else
2783 op0 = copy_rtx (op0);
2784 if (op0 == orig_op0)
2785 op0 = shallow_copy_rtx (op0);
2786 set_mem_attributes (op0, exp, 0);
2787 }
2788
2789 if (bitpos == 0 && mode == GET_MODE (op0))
2790 return op0;
2791
2792 if (bitpos < 0)
2793 return NULL;
2794
2795 if (GET_MODE (op0) == BLKmode)
2796 return NULL;
2797
2798 if ((bitpos % BITS_PER_UNIT) == 0
2799 && bitsize == GET_MODE_BITSIZE (mode1))
2800 {
2801 enum machine_mode opmode = GET_MODE (op0);
2802
2803 if (opmode == VOIDmode)
2804 opmode = TYPE_MODE (TREE_TYPE (tem));
2805
2806 /* This condition may hold if we're expanding the address
2807 right past the end of an array that turned out not to
2808 be addressable (i.e., the address was only computed in
2809 debug stmts). The gen_subreg below would rightfully
2810 crash, and the address doesn't really exist, so just
2811 drop it. */
2812 if (bitpos >= GET_MODE_BITSIZE (opmode))
2813 return NULL;
2814
2815 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2816 return simplify_gen_subreg (mode, op0, opmode,
2817 bitpos / BITS_PER_UNIT);
2818 }
2819
2820 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2821 && TYPE_UNSIGNED (TREE_TYPE (exp))
2822 ? SIGN_EXTRACT
2823 : ZERO_EXTRACT, mode,
2824 GET_MODE (op0) != VOIDmode
2825 ? GET_MODE (op0)
2826 : TYPE_MODE (TREE_TYPE (tem)),
2827 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2828 }
2829
2830 case ABS_EXPR:
2831 return simplify_gen_unary (ABS, mode, op0, mode);
2832
2833 case NEGATE_EXPR:
2834 return simplify_gen_unary (NEG, mode, op0, mode);
2835
2836 case BIT_NOT_EXPR:
2837 return simplify_gen_unary (NOT, mode, op0, mode);
2838
2839 case FLOAT_EXPR:
2840 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
2841 0)))
2842 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
2843 inner_mode);
2844
2845 case FIX_TRUNC_EXPR:
2846 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
2847 inner_mode);
2848
2849 case POINTER_PLUS_EXPR:
2850 /* For the rare target where pointers are not the same size as
2851 size_t, we need to check for mis-matched modes and correct
2852 the addend. */
2853 if (op0 && op1
2854 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2855 && GET_MODE (op0) != GET_MODE (op1))
2856 {
2857 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2858 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
2859 GET_MODE (op1));
2860 else
2861 /* We always sign-extend, regardless of the signedness of
2862 the operand, because the operand is always unsigned
2863 here even if the original C expression is signed. */
2864 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
2865 GET_MODE (op1));
2866 }
2867 /* Fall through. */
2868 case PLUS_EXPR:
2869 return simplify_gen_binary (PLUS, mode, op0, op1);
2870
2871 case MINUS_EXPR:
2872 return simplify_gen_binary (MINUS, mode, op0, op1);
2873
2874 case MULT_EXPR:
2875 return simplify_gen_binary (MULT, mode, op0, op1);
2876
2877 case RDIV_EXPR:
2878 case TRUNC_DIV_EXPR:
2879 case EXACT_DIV_EXPR:
2880 if (unsignedp)
2881 return simplify_gen_binary (UDIV, mode, op0, op1);
2882 else
2883 return simplify_gen_binary (DIV, mode, op0, op1);
2884
2885 case TRUNC_MOD_EXPR:
2886 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
2887
2888 case FLOOR_DIV_EXPR:
2889 if (unsignedp)
2890 return simplify_gen_binary (UDIV, mode, op0, op1);
2891 else
2892 {
2893 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
2894 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2895 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2896 return simplify_gen_binary (PLUS, mode, div, adj);
2897 }
2898
2899 case FLOOR_MOD_EXPR:
2900 if (unsignedp)
2901 return simplify_gen_binary (UMOD, mode, op0, op1);
2902 else
2903 {
2904 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2905 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2906 adj = simplify_gen_unary (NEG, mode,
2907 simplify_gen_binary (MULT, mode, adj, op1),
2908 mode);
2909 return simplify_gen_binary (PLUS, mode, mod, adj);
2910 }
2911
2912 case CEIL_DIV_EXPR:
2913 if (unsignedp)
2914 {
2915 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
2916 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
2917 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2918 return simplify_gen_binary (PLUS, mode, div, adj);
2919 }
2920 else
2921 {
2922 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
2923 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2924 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2925 return simplify_gen_binary (PLUS, mode, div, adj);
2926 }
2927
2928 case CEIL_MOD_EXPR:
2929 if (unsignedp)
2930 {
2931 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
2932 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2933 adj = simplify_gen_unary (NEG, mode,
2934 simplify_gen_binary (MULT, mode, adj, op1),
2935 mode);
2936 return simplify_gen_binary (PLUS, mode, mod, adj);
2937 }
2938 else
2939 {
2940 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2941 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2942 adj = simplify_gen_unary (NEG, mode,
2943 simplify_gen_binary (MULT, mode, adj, op1),
2944 mode);
2945 return simplify_gen_binary (PLUS, mode, mod, adj);
2946 }
2947
2948 case ROUND_DIV_EXPR:
2949 if (unsignedp)
2950 {
2951 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
2952 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
2953 rtx adj = round_udiv_adjust (mode, mod, op1);
2954 return simplify_gen_binary (PLUS, mode, div, adj);
2955 }
2956 else
2957 {
2958 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
2959 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2960 rtx adj = round_sdiv_adjust (mode, mod, op1);
2961 return simplify_gen_binary (PLUS, mode, div, adj);
2962 }
2963
2964 case ROUND_MOD_EXPR:
2965 if (unsignedp)
2966 {
2967 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
2968 rtx adj = round_udiv_adjust (mode, mod, op1);
2969 adj = simplify_gen_unary (NEG, mode,
2970 simplify_gen_binary (MULT, mode, adj, op1),
2971 mode);
2972 return simplify_gen_binary (PLUS, mode, mod, adj);
2973 }
2974 else
2975 {
2976 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2977 rtx adj = round_sdiv_adjust (mode, mod, op1);
2978 adj = simplify_gen_unary (NEG, mode,
2979 simplify_gen_binary (MULT, mode, adj, op1),
2980 mode);
2981 return simplify_gen_binary (PLUS, mode, mod, adj);
2982 }
2983
2984 case LSHIFT_EXPR:
2985 return simplify_gen_binary (ASHIFT, mode, op0, op1);
2986
2987 case RSHIFT_EXPR:
2988 if (unsignedp)
2989 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
2990 else
2991 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
2992
2993 case LROTATE_EXPR:
2994 return simplify_gen_binary (ROTATE, mode, op0, op1);
2995
2996 case RROTATE_EXPR:
2997 return simplify_gen_binary (ROTATERT, mode, op0, op1);
2998
2999 case MIN_EXPR:
3000 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3001
3002 case MAX_EXPR:
3003 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3004
3005 case BIT_AND_EXPR:
3006 case TRUTH_AND_EXPR:
3007 return simplify_gen_binary (AND, mode, op0, op1);
3008
3009 case BIT_IOR_EXPR:
3010 case TRUTH_OR_EXPR:
3011 return simplify_gen_binary (IOR, mode, op0, op1);
3012
3013 case BIT_XOR_EXPR:
3014 case TRUTH_XOR_EXPR:
3015 return simplify_gen_binary (XOR, mode, op0, op1);
3016
3017 case TRUTH_ANDIF_EXPR:
3018 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3019
3020 case TRUTH_ORIF_EXPR:
3021 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3022
3023 case TRUTH_NOT_EXPR:
3024 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3025
3026 case LT_EXPR:
3027 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3028 op0, op1);
3029
3030 case LE_EXPR:
3031 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3032 op0, op1);
3033
3034 case GT_EXPR:
3035 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3036 op0, op1);
3037
3038 case GE_EXPR:
3039 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3040 op0, op1);
3041
3042 case EQ_EXPR:
3043 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3044
3045 case NE_EXPR:
3046 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3047
3048 case UNORDERED_EXPR:
3049 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3050
3051 case ORDERED_EXPR:
3052 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3053
3054 case UNLT_EXPR:
3055 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3056
3057 case UNLE_EXPR:
3058 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3059
3060 case UNGT_EXPR:
3061 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3062
3063 case UNGE_EXPR:
3064 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3065
3066 case UNEQ_EXPR:
3067 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3068
3069 case LTGT_EXPR:
3070 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3071
3072 case COND_EXPR:
3073 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3074
3075 case COMPLEX_EXPR:
3076 gcc_assert (COMPLEX_MODE_P (mode));
3077 if (GET_MODE (op0) == VOIDmode)
3078 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3079 if (GET_MODE (op1) == VOIDmode)
3080 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3081 return gen_rtx_CONCAT (mode, op0, op1);
3082
3083 case CONJ_EXPR:
3084 if (GET_CODE (op0) == CONCAT)
3085 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3086 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3087 XEXP (op0, 1),
3088 GET_MODE_INNER (mode)));
3089 else
3090 {
3091 enum machine_mode imode = GET_MODE_INNER (mode);
3092 rtx re, im;
3093
3094 if (MEM_P (op0))
3095 {
3096 re = adjust_address_nv (op0, imode, 0);
3097 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3098 }
3099 else
3100 {
3101 enum machine_mode ifmode = int_mode_for_mode (mode);
3102 enum machine_mode ihmode = int_mode_for_mode (imode);
3103 rtx halfsize;
3104 if (ifmode == BLKmode || ihmode == BLKmode)
3105 return NULL;
3106 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3107 re = op0;
3108 if (mode != ifmode)
3109 re = gen_rtx_SUBREG (ifmode, re, 0);
3110 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3111 if (imode != ihmode)
3112 re = gen_rtx_SUBREG (imode, re, 0);
3113 im = copy_rtx (op0);
3114 if (mode != ifmode)
3115 im = gen_rtx_SUBREG (ifmode, im, 0);
3116 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3117 if (imode != ihmode)
3118 im = gen_rtx_SUBREG (imode, im, 0);
3119 }
3120 im = gen_rtx_NEG (imode, im);
3121 return gen_rtx_CONCAT (mode, re, im);
3122 }
3123
3124 case ADDR_EXPR:
3125 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3126 if (!op0 || !MEM_P (op0))
3127 {
3128 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3129 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3130 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3131 && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)))
3132 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3133
3134 if (handled_component_p (TREE_OPERAND (exp, 0)))
3135 {
3136 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3137 tree decl
3138 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3139 &bitoffset, &bitsize, &maxsize);
3140 if ((TREE_CODE (decl) == VAR_DECL
3141 || TREE_CODE (decl) == PARM_DECL
3142 || TREE_CODE (decl) == RESULT_DECL)
3143 && !TREE_ADDRESSABLE (decl)
3144 && (bitoffset % BITS_PER_UNIT) == 0
3145 && bitsize > 0
3146 && bitsize == maxsize)
3147 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3148 bitoffset / BITS_PER_UNIT);
3149 }
3150
3151 return NULL;
3152 }
3153
3154 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3155 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3156
3157 return op0;
3158
3159 case VECTOR_CST:
3160 exp = build_constructor_from_list (TREE_TYPE (exp),
3161 TREE_VECTOR_CST_ELTS (exp));
3162 /* Fall through. */
3163
3164 case CONSTRUCTOR:
3165 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3166 {
3167 unsigned i;
3168 tree val;
3169
3170 op0 = gen_rtx_CONCATN
3171 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3172
3173 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3174 {
3175 op1 = expand_debug_expr (val);
3176 if (!op1)
3177 return NULL;
3178 XVECEXP (op0, 0, i) = op1;
3179 }
3180
3181 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3182 {
3183 op1 = expand_debug_expr
3184 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3185
3186 if (!op1)
3187 return NULL;
3188
3189 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3190 XVECEXP (op0, 0, i) = op1;
3191 }
3192
3193 return op0;
3194 }
3195 else
3196 goto flag_unsupported;
3197
3198 case CALL_EXPR:
3199 /* ??? Maybe handle some builtins? */
3200 return NULL;
3201
3202 case SSA_NAME:
3203 {
3204 gimple g = get_gimple_for_ssa_name (exp);
3205 if (g)
3206 {
3207 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3208 if (!op0)
3209 return NULL;
3210 }
3211 else
3212 {
3213 int part = var_to_partition (SA.map, exp);
3214
3215 if (part == NO_PARTITION)
3216 {
3217 /* If this is a reference to an incoming value of a parameter
3218 that is never used in the code, or where the incoming
3219 value is never used in the code, use the PARM_DECL's
3220 DECL_RTL if set. */
3221 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3222 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3223 {
3224 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3225 if (op0)
3226 goto adjust_mode;
3227 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3228 if (op0)
3229 goto adjust_mode;
3230 }
3231 return NULL;
3232 }
3233
3234 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3235
3236 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3237 }
3238 goto adjust_mode;
3239 }
3240
3241 case ERROR_MARK:
3242 return NULL;
3243
3244 /* Vector stuff. For most of the codes we don't have rtl codes. */
3245 case REALIGN_LOAD_EXPR:
3246 case REDUC_MAX_EXPR:
3247 case REDUC_MIN_EXPR:
3248 case REDUC_PLUS_EXPR:
3249 case VEC_COND_EXPR:
3250 case VEC_EXTRACT_EVEN_EXPR:
3251 case VEC_EXTRACT_ODD_EXPR:
3252 case VEC_INTERLEAVE_HIGH_EXPR:
3253 case VEC_INTERLEAVE_LOW_EXPR:
3254 case VEC_LSHIFT_EXPR:
3255 case VEC_PACK_FIX_TRUNC_EXPR:
3256 case VEC_PACK_SAT_EXPR:
3257 case VEC_PACK_TRUNC_EXPR:
3258 case VEC_RSHIFT_EXPR:
3259 case VEC_UNPACK_FLOAT_HI_EXPR:
3260 case VEC_UNPACK_FLOAT_LO_EXPR:
3261 case VEC_UNPACK_HI_EXPR:
3262 case VEC_UNPACK_LO_EXPR:
3263 case VEC_WIDEN_MULT_HI_EXPR:
3264 case VEC_WIDEN_MULT_LO_EXPR:
3265 return NULL;
3266
3267 /* Misc codes. */
3268 case ADDR_SPACE_CONVERT_EXPR:
3269 case FIXED_CONVERT_EXPR:
3270 case OBJ_TYPE_REF:
3271 case WITH_SIZE_EXPR:
3272 return NULL;
3273
3274 case DOT_PROD_EXPR:
3275 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3276 && SCALAR_INT_MODE_P (mode))
3277 {
3278 op0
3279 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3280 0)))
3281 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3282 inner_mode);
3283 op1
3284 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3285 1)))
3286 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3287 inner_mode);
3288 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3289 return simplify_gen_binary (PLUS, mode, op0, op2);
3290 }
3291 return NULL;
3292
3293 case WIDEN_MULT_EXPR:
3294 case WIDEN_MULT_PLUS_EXPR:
3295 case WIDEN_MULT_MINUS_EXPR:
3296 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3297 && SCALAR_INT_MODE_P (mode))
3298 {
3299 inner_mode = GET_MODE (op0);
3300 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3301 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3302 else
3303 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3304 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3305 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3306 else
3307 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3308 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3309 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3310 return op0;
3311 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3312 return simplify_gen_binary (PLUS, mode, op0, op2);
3313 else
3314 return simplify_gen_binary (MINUS, mode, op2, op0);
3315 }
3316 return NULL;
3317
3318 case WIDEN_SUM_EXPR:
3319 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3320 && SCALAR_INT_MODE_P (mode))
3321 {
3322 op0
3323 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3324 0)))
3325 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3326 inner_mode);
3327 return simplify_gen_binary (PLUS, mode, op0, op1);
3328 }
3329 return NULL;
3330
3331 case FMA_EXPR:
3332 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3333
3334 default:
3335 flag_unsupported:
3336#ifdef ENABLE_CHECKING
3337 debug_tree (exp);
3338 gcc_unreachable ();
3339#else
3340 return NULL;
3341#endif
3342 }
3343 }
3344
3345 /* Return an RTX equivalent to the source bind value of the tree expression
3346 EXP. */
3347
3348 static rtx
3349 expand_debug_source_expr (tree exp)
3350 {
3351 rtx op0 = NULL_RTX;
3352 enum machine_mode mode = VOIDmode, inner_mode;
3353
3354 switch (TREE_CODE (exp))
3355 {
3356 case PARM_DECL:
3357 {
3358 mode = DECL_MODE (exp);
3359 op0 = expand_debug_parm_decl (exp);
3360 if (op0)
3361 break;
3362 /* See if this isn't an argument that has been completely
3363 optimized out. */
3364 if (!DECL_RTL_SET_P (exp)
3365 && !DECL_INCOMING_RTL (exp)
3366 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3367 {
3368 tree aexp = exp;
3369 if (DECL_ABSTRACT_ORIGIN (exp))
3370 aexp = DECL_ABSTRACT_ORIGIN (exp);
3371 if (DECL_CONTEXT (aexp)
3372 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3373 {
3374 VEC(tree, gc) **debug_args;
3375 unsigned int ix;
3376 tree ddecl;
3377#ifdef ENABLE_CHECKING
3378 tree parm;
3379 for (parm = DECL_ARGUMENTS (current_function_decl);
3380 parm; parm = DECL_CHAIN (parm))
3381 gcc_assert (parm != exp
3382 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3383#endif
3384 debug_args = decl_debug_args_lookup (current_function_decl);
3385 if (debug_args != NULL)
3386 {
3387 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3388 ix += 2)
3389 if (ddecl == aexp)
3390 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3391 }
3392 }
3393 }
3394 break;
3395 }
3396 default:
3397 break;
3398 }
3399
3400 if (op0 == NULL_RTX)
3401 return NULL_RTX;
3402
3403 inner_mode = GET_MODE (op0);
3404 if (mode == inner_mode)
3405 return op0;
3406
3407 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3408 {
3409 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3410 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3411 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3412 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3413 else
3414 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3415 }
3416 else if (FLOAT_MODE_P (mode))
3417 gcc_unreachable ();
3418 else if (FLOAT_MODE_P (inner_mode))
3419 {
3420 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3421 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3422 else
3423 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3424 }
3425 else if (CONSTANT_P (op0)
3426 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3427 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3428 subreg_lowpart_offset (mode, inner_mode));
3429 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3430 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3431 else
3432 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3433
3434 return op0;
3435}
3436
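/* A sketch of an assumed typical scenario (not original text): when an
   inlined callee's parameter has been completely optimized out, e.g.

     static inline int callee (int parm) { ... }   // parm has no RTL

   the PARM_DECL case above may find the parameter's abstract origin
   among the debug args of the current function and return

     (debug_parameter_ref:SI parm)

   which later lets var-tracking describe the parameter's value from
   the call site instead of dropping it entirely.  */
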
/* Expand the _LOCs in debug insns.  We run this after expanding all
   regular insns, so that any variables referenced in the function
   will have their DECL_RTLs set.  */

static void
expand_debug_locations (void)
{
  rtx insn;
  rtx last = get_last_insn ();
  int save_strict_alias = flag_strict_aliasing;

  /* New alias sets while setting up memory attributes cause
     -fcompare-debug failures, even though they don't bring about any
     codegen changes.  */
  flag_strict_aliasing = 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (DEBUG_INSN_P (insn))
      {
	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
	rtx val;
	enum machine_mode mode;

	if (value == NULL_TREE)
	  val = NULL_RTX;
	else
	  {
	    if (INSN_VAR_LOCATION_STATUS (insn)
		== VAR_INIT_STATUS_UNINITIALIZED)
	      val = expand_debug_source_expr (value);
	    else
	      val = expand_debug_expr (value);
	    gcc_assert (last == get_last_insn ());
	  }

	if (!val)
	  val = gen_rtx_UNKNOWN_VAR_LOC ();
	else
	  {
	    mode = GET_MODE (INSN_VAR_LOCATION (insn));

	    gcc_assert (mode == GET_MODE (val)
			|| (GET_MODE (val) == VOIDmode
			    && (CONST_INT_P (val)
				|| GET_CODE (val) == CONST_FIXED
				|| GET_CODE (val) == CONST_DOUBLE
				|| GET_CODE (val) == LABEL_REF)));
	  }

	INSN_VAR_LOCATION_LOC (insn) = val;
      }

  flag_strict_aliasing = save_strict_alias;
}

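/* An illustration of the transformation above (assumed dump syntax,
   not from the original source): before this pass a debug insn still
   holds a tree as its location, e.g.

     debug_insn (var_location:SI x (tree: a_1 + 7))

   and afterwards the location has been turned into RTL,

     (var_location:SI x (plus:SI (reg:SI 58 [ a ]) (const_int 7)))

   or into the result of gen_rtx_UNKNOWN_VAR_LOC () when the value
   cannot be represented.  */
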
/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb)
{
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple stmt = NULL;
  rtx note, last;
  edge e;
  edge_iterator ei;
  void **elt;

  if (dump_file)
    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
	     bb->index);

  /* Note that since we are now transitioning from GIMPLE to RTL, we
     cannot use the gsi_*_bb() routines because they expect the basic
     block to be in GIMPLE, instead of RTL.  Therefore, we need to
     access the BB sequence directly.  */
  stmts = bb_seq (bb);
  bb->il.gimple = NULL;
  rtl_profile_for_bb (bb);
  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
  gsi = gsi_last (stmts);
  if (!gsi_end_p (gsi)
      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    {
      gimple ret_stmt = gsi_stmt (gsi);

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);

      if (bb->next_bb == EXIT_BLOCK_PTR
	  && !gimple_return_retval (ret_stmt))
	{
	  gsi_remove (&gsi, false);
	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
	}
    }

  gsi = gsi_start (stmts);
  if (!gsi_end_p (gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	stmt = NULL;
    }

  elt = pointer_map_contains (lab_rtx_for_bb, bb);

  if (stmt || elt)
    {
      last = get_last_insn ();

      if (stmt)
	{
	  expand_gimple_stmt (stmt);
	  gsi_next (&gsi);
	}

      if (elt)
	emit_label ((rtx) *elt);

      /* Java emits line number notes at the top of labels.
	 ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_gimple_stmt (stmt, last);
    }
  else
    note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    {
      basic_block new_bb;

      stmt = gsi_stmt (gsi);

      /* If this statement is a non-debug one, and we generate debug
	 insns, then this one might be the last real use of a TERed
	 SSA_NAME, but where there are still some debug uses further
	 down.  Expanding the current SSA name in such further debug
	 uses by their RHS might lead to wrong debug info, as coalescing
	 might make the operands of such RHS be placed into the same
	 pseudo as something else.  Like so:
	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
	   use(a_1);
	   a_2 = ...
	   #DEBUG ... => a_1
	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
	 If we now were to expand a_1 by its RHS (a_0 + 1) in the debug use,
	 the write to a_2 would actually have clobbered the place which
	 formerly held a_0.

	 So, instead of that, we recognize the situation, and generate
	 debug temporaries at the last real use of TERed SSA names:
	   a_1 = a_0 + 1;
	   #DEBUG #D1 => a_1
	   use(a_1);
	   a_2 = ...
	   #DEBUG ... => #D1  */
      if (MAY_HAVE_DEBUG_INSNS
	  && SA.values
	  && !is_gimple_debug (stmt))
	{
	  ssa_op_iter iter;
	  tree op;
	  gimple def;

	  location_t sloc = get_curr_insn_source_location ();
	  tree sblock = get_curr_insn_block ();

	  /* Look for SSA names that have their last use here (TERed
	     names always have only one real use).  */
	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	    if ((def = get_gimple_for_ssa_name (op)))
	      {
		imm_use_iterator imm_iter;
		use_operand_p use_p;
		bool have_debug_uses = false;

		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
		  {
		    if (gimple_debug_bind_p (USE_STMT (use_p)))
		      {
			have_debug_uses = true;
			break;
		      }
		  }

		if (have_debug_uses)
		  {
		    /* OP is a TERed SSA name, with DEF its defining
		       statement, and where OP is used in further debug
		       instructions.  Generate a debug temporary, and
		       replace all uses of OP in debug insns with that
		       temporary.  */
		    gimple debugstmt;
		    tree value = gimple_assign_rhs_to_tree (def);
		    tree vexpr = make_node (DEBUG_EXPR_DECL);
		    rtx val;
		    enum machine_mode mode;

		    set_curr_insn_source_location (gimple_location (def));
		    set_curr_insn_block (gimple_block (def));

		    DECL_ARTIFICIAL (vexpr) = 1;
		    TREE_TYPE (vexpr) = TREE_TYPE (value);
		    if (DECL_P (value))
		      mode = DECL_MODE (value);
		    else
		      mode = TYPE_MODE (TREE_TYPE (value));
		    DECL_MODE (vexpr) = mode;

		    val = gen_rtx_VAR_LOCATION
			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);

		    emit_debug_insn (val);

		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
		      {
			if (!gimple_debug_bind_p (debugstmt))
			  continue;

			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
			  SET_USE (use_p, vexpr);

			update_stmt (debugstmt);
		      }
		  }
	      }
	  set_curr_insn_source_location (sloc);
	  set_curr_insn_block (sblock);
	}

      currently_expanding_gimple_stmt = stmt;

      /* Expand this statement, then evaluate the resulting RTL and
	 fixup the CFG accordingly.  */
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  new_bb = expand_gimple_cond (bb, stmt);
	  if (new_bb)
	    return new_bb;
	}
      else if (gimple_debug_bind_p (stmt))
	{
	  location_t sloc = get_curr_insn_source_location ();
	  tree sblock = get_curr_insn_block ();
	  gimple_stmt_iterator nsi = gsi;

	  for (;;)
	    {
	      tree var = gimple_debug_bind_get_var (stmt);
	      tree value;
	      rtx val;
	      enum machine_mode mode;

	      if (gimple_debug_bind_has_value_p (stmt))
		value = gimple_debug_bind_get_value (stmt);
	      else
		value = NULL_TREE;

	      last = get_last_insn ();

	      set_curr_insn_source_location (gimple_location (stmt));
	      set_curr_insn_block (gimple_block (stmt));

	      if (DECL_P (var))
		mode = DECL_MODE (var);
	      else
		mode = TYPE_MODE (TREE_TYPE (var));

	      val = gen_rtx_VAR_LOCATION
		(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);

	      emit_debug_insn (val);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  /* We can't dump the insn with a TREE where an RTX
		     is expected.  */
		  PAT_VAR_LOCATION_LOC (val) = const0_rtx;
		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
		  PAT_VAR_LOCATION_LOC (val) = (rtx)value;
		}

	      /* In order not to generate too many debug temporaries,
		 we delink all uses of debug statements we already expanded.
		 Therefore debug statements between definition and real
		 use of TERed SSA names will continue to use the SSA name,
		 and not be replaced with debug temps.  */
	      delink_stmt_imm_use (stmt);

	      gsi = nsi;
	      gsi_next (&nsi);
	      if (gsi_end_p (nsi))
		break;
	      stmt = gsi_stmt (nsi);
	      if (!gimple_debug_bind_p (stmt))
		break;
	    }

	  set_curr_insn_source_location (sloc);
	  set_curr_insn_block (sblock);
	}
      else if (gimple_debug_source_bind_p (stmt))
	{
	  location_t sloc = get_curr_insn_source_location ();
	  tree sblock = get_curr_insn_block ();
	  tree var = gimple_debug_source_bind_get_var (stmt);
	  tree value = gimple_debug_source_bind_get_value (stmt);
	  rtx val;
	  enum machine_mode mode;

	  last = get_last_insn ();

	  set_curr_insn_source_location (gimple_location (stmt));
	  set_curr_insn_block (gimple_block (stmt));

	  mode = DECL_MODE (var);

	  val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
				      VAR_INIT_STATUS_UNINITIALIZED);

	  emit_debug_insn (val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      /* We can't dump the insn with a TREE where an RTX
		 is expected.  */
	      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
	      PAT_VAR_LOCATION_LOC (val) = (rtx)value;
	    }

	  set_curr_insn_source_location (sloc);
	  set_curr_insn_block (sblock);
	}
      else
	{
	  if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
	    {
	      bool can_fallthru;
	      new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
	      if (new_bb)
		{
		  if (can_fallthru)
		    bb = new_bb;
		  else
		    return new_bb;
		}
	    }
	  else
	    {
	      def_operand_p def_p;
	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);

	      if (def_p != NULL)
		{
		  /* Ignore this stmt if it is in the list of
		     replaceable expressions.  */
		  if (SA.values
		      && bitmap_bit_p (SA.values,
				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
		    continue;
		}
	      last = expand_gimple_stmt (stmt);
	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
	    }
	}
    }

  currently_expanding_gimple_stmt = NULL;

  /* Expand implicit goto and convert goto_locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->goto_locus && e->goto_block)
	{
	  set_curr_insn_source_location (e->goto_locus);
	  set_curr_insn_block (e->goto_block);
	  e->goto_locus = curr_insn_locator ();
	}
      e->goto_block = NULL;
      if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
	{
	  emit_jump (label_rtx_for_bb (e->dest));
	  e->flags &= ~EDGE_FALLTHRU;
	}
    }

  /* Expanded RTL can create a jump in the last instruction of the block.
     This might later be assumed to be a jump to the successor and break
     edge insertion.  We need to insert a dummy move to prevent this.
     PR41440.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
      && (last = get_last_insn ())
      && JUMP_P (last))
    {
      rtx dummy = gen_reg_rtx (SImode);
      emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}

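/* A sketch of the insn stream this function builds for one block
   (illustrative layout only, not original source text):

     NOTE_INSN_BASIC_BLOCK    <- BB_HEAD (bb), NOTE_BASIC_BLOCK == bb
     insn ...                 <- expanded GIMPLE statements
     debug_insn ...           <- VAR_LOCATIONs for debug binds
     jump_insn ...            <- an eventual jump; a BARRIER and/or
				 jump table data may follow, and
				 BB_END (bb) is placed before those

   The dummy self-move for PR41440 would be emitted after a trailing
   jump so that edge insertion does not mistake the jump for an
   ordinary fallthru block end.  */
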
/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* When the entry edge points to the first basic block, we don't need
     a jump; otherwise we have to jump to the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
    {
      tree label = gimple_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
				   get_last_insn (),
				   ENTRY_BLOCK_PTR);
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);
  return init_block;
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}

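/* An example of the numbering set_block_levels produces (illustration
   only, not original source text): for a block tree

     DECL_INITIAL (fndecl)       BLOCK_NUMBER 0
       outer scope               BLOCK_NUMBER 1
	 inner scope             BLOCK_NUMBER 2
       sibling scope             BLOCK_NUMBER 1

   siblings share a level while nesting adds one; these depths let
   change_scope find the common parent of two blocks quickly.  */
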
3928
3929/* Create a block containing landing pads and similar stuff. */
3930
3931static void
3932construct_exit_block (void)
3933{
3934 rtx head = get_last_insn ();
3935 rtx end;
3936 basic_block exit_block;
628f6a4e
BE
3937 edge e, e2;
3938 unsigned ix;
3939 edge_iterator ei;
071a42f9 3940 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
242229bb 3941
bf08ebeb
JH
3942 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3943
caf93cb0 3944 /* Make sure the locus is set to the end of the function, so that
242229bb 3945 epilogue line numbers and warnings are set properly. */
6773e15f 3946 if (cfun->function_end_locus != UNKNOWN_LOCATION)
242229bb
JH
3947 input_location = cfun->function_end_locus;
3948
3949 /* The following insns belong to the top scope. */
55e092c4 3950 set_curr_insn_block (DECL_INITIAL (current_function_decl));
242229bb 3951
242229bb
JH
3952 /* Generate rtl for function exit. */
3953 expand_function_end ();
3954
3955 end = get_last_insn ();
3956 if (head == end)
3957 return;
071a42f9
JH
3958 /* While emitting the function end we could move end of the last basic block.
3959 */
3960 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4b4bf941 3961 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 3962 head = NEXT_INSN (head);
80c7a9eb
RH
3963 exit_block = create_basic_block (NEXT_INSN (head), end,
3964 EXIT_BLOCK_PTR->prev_bb);
242229bb
JH
3965 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3966 exit_block->count = EXIT_BLOCK_PTR->count;
628f6a4e
BE
3967
3968 ix = 0;
3969 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
242229bb 3970 {
8fb790fd 3971 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
242229bb 3972 if (!(e->flags & EDGE_ABNORMAL))
628f6a4e
BE
3973 redirect_edge_succ (e, exit_block);
3974 else
3975 ix++;
242229bb 3976 }
628f6a4e 3977
242229bb
JH
3978 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3979 e->probability = REG_BR_PROB_BASE;
3980 e->count = EXIT_BLOCK_PTR->count;
628f6a4e 3981 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
242229bb
JH
3982 if (e2 != e)
3983 {
c22cacf3 3984 e->count -= e2->count;
242229bb
JH
3985 exit_block->count -= e2->count;
3986 exit_block->frequency -= EDGE_FREQUENCY (e2);
3987 }
3988 if (e->count < 0)
3989 e->count = 0;
3990 if (exit_block->count < 0)
3991 exit_block->count = 0;
3992 if (exit_block->frequency < 0)
3993 exit_block->frequency = 0;
3994 update_bb_for_insn (exit_block);
3995}
3996
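/* A worked example of the profile arithmetic above (made-up numbers,
   for illustration only): if EXIT_BLOCK_PTR has count 100 and one
   abnormal predecessor with count 30 stays attached to it, the loop
   subtracts 30 from both the new fallthru edge and exit_block, leaving
   count 70; the clamps afterwards keep inconsistent profiles from
   going negative.  */
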
/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
				   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
	      && (!TREE_OPERAND (t, 2)
		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || (TREE_CODE (t) == COMPONENT_REF
		 && (!TREE_OPERAND (t, 2)
		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || TREE_CODE (t) == BIT_FIELD_REF
	     || TREE_CODE (t) == REALPART_EXPR
	     || TREE_CODE (t) == IMAGPART_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || CONVERT_EXPR_P (t))
	t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  t = get_base_address (t);
	  if (t && DECL_P (t)
	      && DECL_MODE (t) != BLKmode)
	    TREE_ADDRESSABLE (t) = 1;
	}

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple stmt = gsi_stmt (gsi);
	if (!is_gimple_debug (stmt))
	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}

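/* An illustration of the input this catches (assumed example, not
   original text): for

     int v[2];
     ...
     return v[i];    // i not a compile-time constant

   the walk marks `v' as TREE_ADDRESSABLE so it is given a stack slot
   instead of (conceivably) a single register, whereas v[1] with a
   constant index would be left alone.  */
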
/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  The local register allocator will
   replace virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (cfun->can_throw_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
	      <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
	 needed.  */
      fixup_tail_calls ();
    }
}

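/* A hedged sketch of the DRAP case (illustration only; the register
   choice is target-specific): on a target whose incoming stack is only
   guaranteed 16-byte aligned while a local demands 32 bytes,
   stack_realign_needed becomes true; if the prologue must realign the
   stack, targetm.calls.get_drap_rtx () returns a register that then
   stands in for the argument pointer via
   crtl->args.internal_arg_pointer, and fixup_tail_calls () cleans up
   REG_EQUIV notes that would otherwise reference the old argument
   area.  */
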
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful not to manipulate the CFG during
   the expansion.  */

static unsigned int
gimple_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;
  rtx var_seq;
  unsigned i;

  timevar_push (TV_OUT_OF_SSA);
  rewrite_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);
  SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
					   sizeof (rtx));

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  insn_locators_alloc ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (cfun->function_start_locus == UNKNOWN_LOCATION)
	set_curr_insn_source_location
	  (DECL_SOURCE_LOCATION (current_function_decl));
      else
	set_curr_insn_source_location (cfun->function_start_locus);
    }
  else
    set_curr_insn_source_location (UNKNOWN_LOCATION);
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

#ifdef INSN_SCHEDULING
  init_sched_attrs ();
#endif

  /* Make sure the first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = 0;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Resolve the function section.  Some targets, like ARM EABI, rely on
     knowledge of the function section at expansion time to predict
     distance of calls.  */
  resolve_unique_section (current_function_decl, 0, flag_function_sections);

  /* Expand the variables recorded during gimple lowering.  */
  timevar_push (TV_VAR_EXPAND);
  start_sequence ();

  expand_used_vars ();

  var_seq = get_insns ();
  end_sequence ();
  timevar_pop (TV_VAR_EXPAND);

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting local variables: "
		 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting function: "
		 "all local arrays are less than %d bytes long",
		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
  if (var_seq)
    {
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
	 before parm_birth_insn.  We've just inserted an alloca call.
	 Adjust the pointer to match.  */
      parm_birth_insn = var_seq;
    }

  /* Now that we also have the parameter RTXs, copy them over to our
     partitions.  */
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));

      if (TREE_CODE (var) != VAR_DECL
	  && !SA.partition_to_pseudo[i])
	SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
      gcc_assert (SA.partition_to_pseudo[i]);

      /* If this decl was marked as living in multiple places, reset
	 this now to NULL.  */
      if (DECL_RTL_IF_SET (var) == pc_rtx)
	SET_DECL_RTL (var, NULL);

      /* Some RTL parts really want to look at DECL_RTL(x) when x
	 was a decl marked in REG_ATTR or MEM_ATTR.  We could use
	 SET_DECL_RTL here making this available, but that would mean
	 to select one of the potentially many RTLs for one DECL.  Instead
	 of doing that we simply reset the MEM_EXPR of the RTL in question,
	 then nobody can get at it and hence nobody can call DECL_RTL on it.  */
      if (!DECL_RTL_SET_P (var))
	{
	  if (MEM_P (SA.partition_to_pseudo[i]))
	    set_mem_expr (SA.partition_to_pseudo[i], NULL);
	}
    }

  /* If we have a class containing differently aligned pointers
     we need to merge those into the corresponding RTL pointer
     alignment.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      int part;
      rtx r;

      if (!name
	  || !POINTER_TYPE_P (TREE_TYPE (name))
	  /* We might have generated new SSA names in
	     update_alias_info_with_stack_vars.  They will have a NULL
	     defining statement, and won't be part of the partitioning,
	     so ignore those.  */
	  || !SSA_NAME_DEF_STMT (name))
	continue;
      part = var_to_partition (SA.map, name);
      if (part == NO_PARTITION)
	continue;
      r = SA.partition_to_pseudo[part];
      if (REG_P (r))
	mark_reg_pointer (r, get_pointer_alignment (name));
    }

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();

  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  execute_free_datastructures ();
  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  cfun->gimple_df->in_ssa_p = false;

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (cfun, NULL);

  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
     split edges which edge insertions might do.  */
  rebuild_jump_labels (get_insns ());

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->insns.r)
	    {
	      rebuild_jump_labels_chain (e->insns.r);
	      /* Avoid putting insns before parm_birth_insn.  */
	      if (e->src == ENTRY_BLOCK_PTR
		  && single_succ_p (ENTRY_BLOCK_PTR)
		  && parm_birth_insn)
		{
		  rtx insns = e->insns.r;
		  e->insns.r = NULL_RTX;
		  emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
		}
	      else
		commit_one_edge_insertion (e);
	    }
	  else
	    ei_next (&ei);
	}
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
	  e->flags &= ~EDGE_EXECUTABLE;

	  /* At the moment not all abnormal edges match the RTL
	     representation.  It is safe to remove them here as
	     find_many_sub_basic_blocks will rediscover them.
	     In the future we should get this fixed properly.  */
	  if ((e->flags & EDGE_ABNORMAL)
	      && !(e->flags & EDGE_SIBCALL))
	    remove_edge (e);
	  else
	    ei_next (&ei);
	}
    }

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  compact_blocks ();

  expand_stack_alignment ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();
  timevar_pop (TV_POST_EXPAND);
  return 0;
}

struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",				/* name */
  NULL,					/* gate */
  gimple_expand_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_EXPAND,				/* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx,			/* properties_required */
  PROP_rtl,				/* properties_provided */
  PROP_ssa | PROP_trees,		/* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,		/* todo_flags_start */
  TODO_ggc_collect			/* todo_flags_finish */
 }
};