/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
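
/* For example (hypothetical names): if SSA name i_3 of variable i lives
   in partition 5, set_rtl (i_3, x) stores X in SA.partition_to_pseudo[5].
   At -O0 it also records X in DECL_RTL (i) for the debugger's benefit,
   degrading to the pc_rtx "multiple places" marker as soon as two
   partitions of i are given different locations.  */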

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = LOCAL_DECL_ALIGNMENT (decl);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < align)
        {
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
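
/* A worked example, assuming FRAME_GROWS_DOWNWARD and frame_phase == 0:
   starting from frame_offset == -20, a request for size 8 at alignment 8
   computes -20 - 8 = -28, rounds down via "&= -align" to -32, and both
   the new frame_offset and the returned offset are -32.  Rounding toward
   more negative values is what keeps a downward-growing slot aligned.  */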

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* All variables initially conflict with no other.  */
  stack_vars[stack_vars_num].conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with LUIDs X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with LUIDs X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (See PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size and type of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  tree decla, declb;
  unsigned int uida, uidb;

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  decla = stack_vars[*(const size_t *)a].decl;
  declb = stack_vars[*(const size_t *)b].decl;
  /* For stack variables of the same size use the uid of the decls
     to make the sort stable.  Two SSA names are compared by their
     version, SSA names come before non-SSA names, and two normal
     decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced.  */
          gcc_assert (DECL_P (decl)
                      && referenced_var_lookup (DECL_UID (decl)));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block easy to lay out within
   the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/
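
/* A hypothetical run, ignoring conflicts and alignment: for objects of
   sizes 32, 16, 8 and 4, the loop starts with A = the 32-byte object
   (S = 32), then unions in the 16-byte object at offset 0, the 8-byte
   object at offset 16 and the 4-byte object at offset 24.  All four
   share 32 bytes of frame space where separate slots would need 60;
   the union is only legal because the members' lifetimes (and hence
   their conflict bitmaps) are disjoint.  */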

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;
        }
    }

  if (optimize)
    update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  /* Alignment is unsigned.  */
  unsigned HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0)
        align = STACK_BOUNDARY;
      else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = MAX_SUPPORTED_STACK_ALIGNMENT;

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
           : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   stack_vars[j].offset + offset);
        }
    }
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  align = get_decl_align_unit (SSAVAR (var));
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  tree origvar = var;
  var = SSAVAR (var);

  if (SUPPORTS_STACK_ALIGNMENT
      && TREE_TYPE (var) != error_mark_node
      && TREE_CODE (var) == VAR_DECL)
    {
      unsigned int align;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      if (crtl->stack_alignment_estimated < align)
        {
          /* stack_alignment_estimated shouldn't change after stack
             realign decision made.  */
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
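
/* Classification examples under -fstack-protector-all
   (flag_stack_protect == 2), assuming the default ssp-buffer-size of 8:
   "char buf[64]" is a large character array and gets phase 1;
   "int a[10]" is a non-char array and gets phase 2; a scalar "int x"
   stays in phase 0.  With plain -fstack-protector, only large character
   arrays are segregated.  */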

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* A subroutine of estimated_stack_frame_size.  Walk down through the
   BLOCK tree totaling the sizes of the variables that would be expanded,
   without actually expanding them.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  tree t;
  HOST_WIDE_INT size = 0;

  /* Account all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  /* Account all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  return size;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  /* Set TREE_USED on all variables in the local_decls.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}

/* Make a fair guess for the size of the stack frame of the current
   function.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (void)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }

  return size;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, next, outer_block = DECL_INITIAL (current_function_decl);
  tree maybe_local_decls = NULL_TREE;
  unsigned i;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  t = cfun->local_decls;
  cfun->local_decls = NULL_TREE;
  for (; t; t = next)
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      next = TREE_CHAIN (t);

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            {
              TREE_CHAIN (t) = cfun->local_decls;
              cfun->local_decls = t;
              continue;
            }
          else if (rtl == NULL_RTX)
            {
              /* If rtl isn't set yet, which can happen e.g. with
                 -fstack-protector, retry before returning from this
                 function.  */
              TREE_CHAIN (t) = maybe_local_decls;
              maybe_local_decls = t;
              continue;
            }
        }

      ggc_free (t);
    }

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  for (t = maybe_local_decls; t; t = next)
    {
      tree var = TREE_VALUE (t);
      rtx rtl = DECL_RTL_IF_SET (var);

      next = TREE_CHAIN (t);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        {
          TREE_CHAIN (t) = cfun->local_decls;
          cfun->local_decls = t;
          continue;
        }

      ggc_free (t);
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}

/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}

/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            delete_insn (NEXT_INSN (insn));
        }
    }
}

726a989a 1580/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
80c7a9eb
RH
1581 Returns a new basic block if we've terminated the current basic
1582 block and created a new one. */
1583
1584static basic_block
726a989a 1585expand_gimple_cond (basic_block bb, gimple stmt)
80c7a9eb
RH
1586{
1587 basic_block new_bb, dest;
1588 edge new_edge;
1589 edge true_edge;
1590 edge false_edge;
b7211528 1591 rtx last2, last;
28ed065e
MM
1592 enum tree_code code;
1593 tree op0, op1;
1594
1595 code = gimple_cond_code (stmt);
1596 op0 = gimple_cond_lhs (stmt);
1597 op1 = gimple_cond_rhs (stmt);
1598 /* We're sometimes presented with such code:
1599 D.123_1 = x < y;
1600 if (D.123_1 != 0)
1601 ...
1602 This would expand to two comparisons which then later might
1603 be cleaned up by combine. But some pattern matchers like if-conversion
1604 work better when there's only one compare, so make up for this
1605 here as special exception if TER would have made the same change. */
1606 if (gimple_cond_single_var_p (stmt)
1607 && SA.values
1608 && TREE_CODE (op0) == SSA_NAME
1609 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1610 {
1611 gimple second = SSA_NAME_DEF_STMT (op0);
e83f4b68 1612 if (gimple_code (second) == GIMPLE_ASSIGN)
28ed065e 1613 {
e83f4b68
MM
1614 enum tree_code code2 = gimple_assign_rhs_code (second);
1615 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1616 {
1617 code = code2;
1618 op0 = gimple_assign_rhs1 (second);
1619 op1 = gimple_assign_rhs2 (second);
1620 }
1621 /* If jumps are cheap turn some more codes into
1622 jumpy sequences. */
1623 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1624 {
1625 if ((code2 == BIT_AND_EXPR
1626 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1627 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1628 || code2 == TRUTH_AND_EXPR)
1629 {
1630 code = TRUTH_ANDIF_EXPR;
1631 op0 = gimple_assign_rhs1 (second);
1632 op1 = gimple_assign_rhs2 (second);
1633 }
1634 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1635 {
1636 code = TRUTH_ORIF_EXPR;
1637 op0 = gimple_assign_rhs1 (second);
1638 op1 = gimple_assign_rhs2 (second);
1639 }
1640 }
28ed065e
MM
1641 }
1642 }
b7211528
SB
1643
1644 last2 = last = get_last_insn ();
80c7a9eb
RH
1645
1646 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
726a989a 1647 if (gimple_has_location (stmt))
80c7a9eb 1648 {
726a989a
RB
1649 set_curr_insn_source_location (gimple_location (stmt));
1650 set_curr_insn_block (gimple_block (stmt));
80c7a9eb
RH
1651 }
1652
1653 /* These flags have no purpose in RTL land. */
1654 true_edge->flags &= ~EDGE_TRUE_VALUE;
1655 false_edge->flags &= ~EDGE_FALSE_VALUE;
1656
1657 /* We can either have a pure conditional jump with one fallthru edge or
1658 two-way jump that needs to be decomposed into two basic blocks. */
a9b77cd1 1659 if (false_edge->dest == bb->next_bb)
80c7a9eb 1660 {
40e90eac
JJ
1661 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1662 true_edge->probability);
726a989a 1663 maybe_dump_rtl_for_gimple_stmt (stmt, last);
a9b77cd1 1664 if (true_edge->goto_locus)
7241571e
JJ
1665 {
1666 set_curr_insn_source_location (true_edge->goto_locus);
1667 set_curr_insn_block (true_edge->goto_block);
1668 true_edge->goto_locus = curr_insn_locator ();
1669 }
1670 true_edge->goto_block = NULL;
a9b77cd1 1671 false_edge->flags |= EDGE_FALLTHRU;
315adeda 1672 maybe_cleanup_end_of_block (false_edge, last);
80c7a9eb
RH
1673 return NULL;
1674 }
a9b77cd1 1675 if (true_edge->dest == bb->next_bb)
80c7a9eb 1676 {
40e90eac
JJ
1677 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1678 false_edge->probability);
726a989a 1679 maybe_dump_rtl_for_gimple_stmt (stmt, last);
a9b77cd1 1680 if (false_edge->goto_locus)
7241571e
JJ
1681 {
1682 set_curr_insn_source_location (false_edge->goto_locus);
1683 set_curr_insn_block (false_edge->goto_block);
1684 false_edge->goto_locus = curr_insn_locator ();
1685 }
1686 false_edge->goto_block = NULL;
a9b77cd1 1687 true_edge->flags |= EDGE_FALLTHRU;
315adeda 1688 maybe_cleanup_end_of_block (true_edge, last);
80c7a9eb
RH
1689 return NULL;
1690 }
80c7a9eb 1691
40e90eac
JJ
1692 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1693 true_edge->probability);
80c7a9eb 1694 last = get_last_insn ();
1695 if (false_edge->goto_locus)
1696 {
1697 set_curr_insn_source_location (false_edge->goto_locus);
1698 set_curr_insn_block (false_edge->goto_block);
1699 false_edge->goto_locus = curr_insn_locator ();
1700 }
1701 false_edge->goto_block = NULL;
a9b77cd1 1702 emit_jump (label_rtx_for_bb (false_edge->dest));
1703
1704 BB_END (bb) = last;
1705 if (BARRIER_P (BB_END (bb)))
1706 BB_END (bb) = PREV_INSN (BB_END (bb));
1707 update_bb_for_insn (bb);
1708
1709 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1710 dest = false_edge->dest;
1711 redirect_edge_succ (false_edge, new_bb);
1712 false_edge->flags |= EDGE_FALLTHRU;
1713 new_bb->count = false_edge->count;
1714 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1715 new_edge = make_edge (new_bb, dest, 0);
1716 new_edge->probability = REG_BR_PROB_BASE;
1717 new_edge->count = new_bb->count;
1718 if (BARRIER_P (BB_END (new_bb)))
1719 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1720 update_bb_for_insn (new_bb);
1721
726a989a 1722 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
c22cacf3 1723
1724 if (true_edge->goto_locus)
1725 {
1726 set_curr_insn_source_location (true_edge->goto_locus);
1727 set_curr_insn_block (true_edge->goto_block);
1728 true_edge->goto_locus = curr_insn_locator ();
1729 }
1730 true_edge->goto_block = NULL;
1731
1732 return new_bb;
1733}
1734
1735/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1736 statement STMT. */
1737
1738static void
1739expand_call_stmt (gimple stmt)
1740{
1741 tree exp;
1742 tree lhs = gimple_call_lhs (stmt);
28ed065e 1743 size_t i;
1744 bool builtin_p;
1745 tree decl;
1746
1747 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1748
1749 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
1750 decl = gimple_call_fndecl (stmt);
1751 builtin_p = decl && DECL_BUILT_IN (decl);
1752
1753 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1754 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1755
1756 for (i = 0; i < gimple_call_num_args (stmt); i++)
1757 {
1758 tree arg = gimple_call_arg (stmt, i);
1759 gimple def;
1760 /* TER addresses into arguments of builtin functions so we have a
1761 chance to infer more correct alignment information. See PR39954. */
1762 if (builtin_p
1763 && TREE_CODE (arg) == SSA_NAME
1764 && (def = get_gimple_for_ssa_name (arg))
1765 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1766 arg = gimple_assign_rhs1 (def);
1767 CALL_EXPR_ARG (exp, i) = arg;
1768 }
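  /* For instance, if argument p_1 of __builtin_memset was defined by the
     TERed statement p_1 = &a.b, the substitution above passes the
     ADDR_EXPR &a.b itself, so the expander can derive the alignment of
     a.b instead of that of an opaque SSA pointer.  */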
28ed065e 1769
93f28ca7 1770 if (gimple_has_side_effects (stmt))
1771 TREE_SIDE_EFFECTS (exp) = 1;
1772
93f28ca7 1773 if (gimple_call_nothrow_p (stmt))
1774 TREE_NOTHROW (exp) = 1;
1775
1776 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1777 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1778 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1779 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1780 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1781 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1782 TREE_BLOCK (exp) = gimple_block (stmt);
1783
1784 if (lhs)
1785 expand_assignment (lhs, exp, false);
1786 else
1787 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
1788}
1789
1790/* A subroutine of expand_gimple_stmt, expanding one gimple statement
1791 STMT that doesn't require special handling for outgoing edges. That
1792 is no tailcalls and no GIMPLE_COND. */
1793
1794static void
1795expand_gimple_stmt_1 (gimple stmt)
1796{
1797 tree op0;
1798 switch (gimple_code (stmt))
1799 {
1800 case GIMPLE_GOTO:
1801 op0 = gimple_goto_dest (stmt);
1802 if (TREE_CODE (op0) == LABEL_DECL)
1803 expand_goto (op0);
1804 else
1805 expand_computed_goto (op0);
1806 break;
1807 case GIMPLE_LABEL:
1808 expand_label (gimple_label_label (stmt));
1809 break;
1810 case GIMPLE_NOP:
1811 case GIMPLE_PREDICT:
1812 break;
1813 case GIMPLE_SWITCH:
1814 expand_case (stmt);
1815 break;
1816 case GIMPLE_ASM:
1817 expand_asm_stmt (stmt);
1818 break;
1819 case GIMPLE_CALL:
1820 expand_call_stmt (stmt);
1821 break;
1822
1823 case GIMPLE_RETURN:
1824 op0 = gimple_return_retval (stmt);
1825
1826 if (op0 && op0 != error_mark_node)
1827 {
1828 tree result = DECL_RESULT (current_function_decl);
1829
1830 /* If we are not returning the current function's RESULT_DECL,
1831 build an assignment to it. */
1832 if (op0 != result)
1833 {
1834 /* I believe that a function's RESULT_DECL is unique. */
1835 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1836
1837 /* ??? We'd like to use simply expand_assignment here,
1838 but this fails if the value is of BLKmode but the return
1839 decl is a register. expand_return has special handling
1840 for this combination, which eventually should move
1841 to common code. See comments there. Until then, let's
1842 build a modify expression :-/ */
1843 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1844 result, op0);
1845 }
1846 }
1847 if (!op0)
1848 expand_null_return ();
1849 else
1850 expand_return (op0);
1851 break;
1852
1853 case GIMPLE_ASSIGN:
1854 {
1855 tree lhs = gimple_assign_lhs (stmt);
1856
1857 /* Tree expand used to fiddle with |= and &= of two bitfield
1858 COMPONENT_REFs here. This can't happen with gimple, the LHS
1859 of binary assigns must be a gimple reg. */
1860
1861 if (TREE_CODE (lhs) != SSA_NAME
1862 || get_gimple_rhs_class (gimple_expr_code (stmt))
1863 == GIMPLE_SINGLE_RHS)
1864 {
1865 tree rhs = gimple_assign_rhs1 (stmt);
1866 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
1867 == GIMPLE_SINGLE_RHS);
1868 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
1869 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
1870 expand_assignment (lhs, rhs,
1871 gimple_assign_nontemporal_move_p (stmt));
1872 }
1873 else
1874 {
1875 rtx target, temp;
1876 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
1877 struct separate_ops ops;
1878 bool promoted = false;
1879
1880 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
1881 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
1882 promoted = true;
1883
1884 ops.code = gimple_assign_rhs_code (stmt);
1885 ops.type = TREE_TYPE (lhs);
1886 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
1887 {
1888 case GIMPLE_BINARY_RHS:
1889 ops.op1 = gimple_assign_rhs2 (stmt);
1890 /* Fallthru */
1891 case GIMPLE_UNARY_RHS:
1892 ops.op0 = gimple_assign_rhs1 (stmt);
1893 break;
1894 default:
1895 gcc_unreachable ();
1896 }
1897 ops.location = gimple_location (stmt);
1898
1899 /* If we want to use a nontemporal store, force the value to
1900 register first. If we store into a promoted register,
1901 don't directly expand to target. */
1902 temp = nontemporal || promoted ? NULL_RTX : target;
1903 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
1904 EXPAND_NORMAL);
1905
1906 if (temp == target)
1907 ;
1908 else if (promoted)
1909 {
4e18a7d4 1910 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
1911 /* If TEMP is a VOIDmode constant, use convert_modes to make
1912 sure that we properly convert it. */
1913 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1914 {
1915 temp = convert_modes (GET_MODE (target),
1916 TYPE_MODE (ops.type),
4e18a7d4 1917 temp, unsignedp);
28ed065e 1918 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4e18a7d4 1919 GET_MODE (target), temp, unsignedp);
1920 }
1921
4e18a7d4 1922 convert_move (SUBREG_REG (target), temp, unsignedp);
1923 }
1924 else if (nontemporal && emit_storent_insn (target, temp))
1925 ;
1926 else
1927 {
1928 temp = force_operand (temp, target);
1929 if (temp != target)
1930 emit_move_insn (target, temp);
1931 }
1932 }
1933 }
1934 break;
1935
1936 default:
1937 gcc_unreachable ();
1938 }
1939}
1940
1941/* Expand one gimple statement STMT and return the last RTL instruction
1942 before any of the newly generated ones.
1943
1944 In addition to generating the necessary RTL instructions this also
1945 sets REG_EH_REGION notes if necessary and sets the current source
1946 location for diagnostics. */
1947
1948static rtx
1949expand_gimple_stmt (gimple stmt)
1950{
1d65f45c 1951 int lp_nr = 0;
1952 rtx last = NULL;
1953 location_t saved_location = input_location;
1954
1955 last = get_last_insn ();
1956
1957 /* If this is an expression of some kind and it has an associated line
1958 number, then emit the line number before expanding the expression.
1959
1960 We need to save and restore the file and line information so that
1961 errors discovered during expansion are emitted with the right
1962 information. It would be better if the diagnostic routines
1963 used the file/line information embedded in the tree nodes rather
1964 than globals. */
1965 gcc_assert (cfun);
1966
1967 if (gimple_has_location (stmt))
1968 {
1969 input_location = gimple_location (stmt);
1970 set_curr_insn_source_location (input_location);
1971
1972 /* Record where the insns produced belong. */
1973 set_curr_insn_block (gimple_block (stmt));
1974 }
1975
1976 expand_gimple_stmt_1 (stmt);
1977 /* Free any temporaries used to evaluate this statement. */
1978 free_temp_slots ();
1979
1980 input_location = saved_location;
1981
1982 /* Mark all insns that may trap. */
1983 lp_nr = lookup_stmt_eh_lp (stmt);
1984 if (lp_nr)
1985 {
1986 rtx insn;
1987 for (insn = next_real_insn (last); insn;
1988 insn = next_real_insn (insn))
1989 {
1990 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1991 /* If we want exceptions for non-call insns, any
1992 may_trap_p instruction may throw. */
1993 && GET_CODE (PATTERN (insn)) != CLOBBER
1994 && GET_CODE (PATTERN (insn)) != USE
1995 && insn_could_throw_p (insn))
1996 make_reg_eh_region_note (insn, 0, lp_nr);
1997 }
1998 }
1999
2000 return last;
2001}
2002
726a989a 2003/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2004 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2005 generated a tail call (something that might be denied by the ABI
2006 rules governing the call; see calls.c).
2007
2008 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2009 can still reach the rest of BB. The case here is __builtin_sqrt,
2010 where the NaN result goes through the external function (with a
2011 tailcall) and the normal result happens via a sqrt instruction. */
2012
2013static basic_block
726a989a 2014expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
80c7a9eb 2015{
b7211528 2016 rtx last2, last;
224e770b 2017 edge e;
628f6a4e 2018 edge_iterator ei;
2019 int probability;
2020 gcov_type count;
80c7a9eb 2021
28ed065e 2022 last2 = last = expand_gimple_stmt (stmt);
2023
2024 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2025 if (CALL_P (last) && SIBLING_CALL_P (last))
2026 goto found;
80c7a9eb 2027
726a989a 2028 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2029
cea49550 2030 *can_fallthru = true;
224e770b 2031 return NULL;
80c7a9eb 2032
2033 found:
2034 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2035 Any instructions emitted here are about to be deleted. */
2036 do_pending_stack_adjust ();
2037
2038 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2039 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2040 EH or abnormal edges, we shouldn't have created a tail call in
2041 the first place. So it seems to me we should just be removing
2042 all edges here, or redirecting the existing fallthru edge to
2043 the exit block. */
2044
2045 probability = 0;
2046 count = 0;
224e770b 2047
2048 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2049 {
2050 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2051 {
2052 if (e->dest != EXIT_BLOCK_PTR)
80c7a9eb 2053 {
2054 e->dest->count -= e->count;
2055 e->dest->frequency -= EDGE_FREQUENCY (e);
2056 if (e->dest->count < 0)
c22cacf3 2057 e->dest->count = 0;
224e770b 2058 if (e->dest->frequency < 0)
c22cacf3 2059 e->dest->frequency = 0;
80c7a9eb 2060 }
2061 count += e->count;
2062 probability += e->probability;
2063 remove_edge (e);
80c7a9eb 2064 }
2065 else
2066 ei_next (&ei);
2067 }
2068
2069 /* This is somewhat ugly: the call_expr expander often emits instructions
2070 after the sibcall (to perform the function return). These confuse the
12eff7b7 2071 find_many_sub_basic_blocks code, so we need to get rid of them. */
224e770b 2072 last = NEXT_INSN (last);
341c100f 2073 gcc_assert (BARRIER_P (last));
2074
2075 *can_fallthru = false;
2076 while (NEXT_INSN (last))
2077 {
2078 /* For instance the sqrt builtin expander expands an if with a
2079 sibcall in the then-arm and a label for the else-arm. */
2080 if (LABEL_P (NEXT_INSN (last)))
2081 {
2082 *can_fallthru = true;
2083 break;
2084 }
2085 delete_insn (NEXT_INSN (last));
2086 }
2087
2088 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2089 e->probability += probability;
2090 e->count += count;
2091 BB_END (bb) = last;
2092 update_bb_for_insn (bb);
2093
2094 if (NEXT_INSN (last))
2095 {
2096 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2097
2098 last = BB_END (bb);
2099 if (BARRIER_P (last))
2100 BB_END (bb) = PREV_INSN (last);
2101 }
2102
726a989a 2103 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2104
224e770b 2105 return bb;
2106}
2107
2108/* Return the difference between the floor and the truncated result of
2109 a signed division by OP1 with remainder MOD. */
2110static rtx
2111floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2112{
2113 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2114 return gen_rtx_IF_THEN_ELSE
2115 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2116 gen_rtx_IF_THEN_ELSE
2117 (mode, gen_rtx_LT (BImode,
2118 gen_rtx_DIV (mode, op1, mod),
2119 const0_rtx),
2120 constm1_rtx, const0_rtx),
2121 const0_rtx);
2122}
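/* Worked example: for -7 / 2 the truncated quotient is -3 with
   remainder mod = -1.  Since mod != 0 and op1 / mod = 2 / -1 = -2 < 0,
   the adjustment is -1, yielding the floor quotient -4.  */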
2123
2124/* Return the difference between the ceil and the truncated result of
2125 a signed division by OP1 with remainder MOD. */
2126static rtx
2127ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2128{
2129 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2130 return gen_rtx_IF_THEN_ELSE
2131 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2132 gen_rtx_IF_THEN_ELSE
2133 (mode, gen_rtx_GT (BImode,
2134 gen_rtx_DIV (mode, op1, mod),
2135 const0_rtx),
2136 const1_rtx, const0_rtx),
2137 const0_rtx);
2138}
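/* Worked example: for 7 / 2 the truncated quotient is 3 with mod = 1;
   op1 / mod = 2 > 0, so the adjustment is 1 and the ceiling quotient
   is 4.  For -7 / 2, op1 / mod = 2 / -1 = -2 is not > 0, so truncation
   and ceiling agree at -3.  */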
2139
2140/* Return the difference between the ceil and the truncated result of
2141 an unsigned division by OP1 with remainder MOD. */
2142static rtx
2143ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2144{
2145 /* (mod != 0 ? 1 : 0) */
2146 return gen_rtx_IF_THEN_ELSE
2147 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2148 const1_rtx, const0_rtx);
2149}
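/* Worked example: for 7u / 2u the truncated quotient is 3 with
   mod = 1 != 0, so the adjustment is 1 and the ceiling quotient
   is 4.  */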
2150
2151/* Return the difference between the rounded and the truncated result
2152 of a signed division by OP1 with remainder MOD. Halfway cases are
2153 rounded away from zero, rather than to the nearest even number. */
2154static rtx
2155round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2156{
2157 /* (abs (mod) >= abs (op1) - abs (mod)
2158 ? (op1 / mod > 0 ? 1 : -1)
2159 : 0) */
2160 return gen_rtx_IF_THEN_ELSE
2161 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2162 gen_rtx_MINUS (mode,
2163 gen_rtx_ABS (mode, op1),
2164 gen_rtx_ABS (mode, mod))),
2165 gen_rtx_IF_THEN_ELSE
2166 (mode, gen_rtx_GT (BImode,
2167 gen_rtx_DIV (mode, op1, mod),
2168 const0_rtx),
2169 const1_rtx, constm1_rtx),
2170 const0_rtx);
2171}
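/* Worked example: for 7 / 2, abs (mod) = 1 equals
   abs (op1) - abs (mod) = 1 and op1 / mod = 2 > 0, so the adjustment
   is 1 and the rounded quotient is 4, i.e. 3.5 rounded away from
   zero.  For -7 / 2 the adjustment is -1, giving -4.  */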
2172
2173/* Return the difference between the rounded and the truncated result
2174 of an unsigned division by OP1 with remainder MOD. Halfway cases
2175 are rounded away from zero, rather than to the nearest even
2176 number. */
2177static rtx
2178round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2179{
2180 /* (mod >= op1 - mod ? 1 : 0) */
2181 return gen_rtx_IF_THEN_ELSE
2182 (mode, gen_rtx_GE (BImode, mod,
2183 gen_rtx_MINUS (mode, op1, mod)),
2184 const1_rtx, const0_rtx);
2185}
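/* Worked example: for 7u / 2u, mod = 1 >= op1 - mod = 1, so the
   adjustment is 1 and the rounded quotient is 4.  */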
2186
2187/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2188 any rtl. */
2189
2190static rtx
2191convert_debug_memory_address (enum machine_mode mode, rtx x)
2192{
2193 enum machine_mode xmode = GET_MODE (x);
2194
2195#ifndef POINTERS_EXTEND_UNSIGNED
2196 gcc_assert (mode == Pmode);
2197 gcc_assert (xmode == mode || xmode == VOIDmode);
2198#else
2199 gcc_assert (mode == Pmode || mode == ptr_mode);
2200
2201 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2202 return x;
2203
2204 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2205 x = simplify_gen_subreg (mode, x, xmode,
2206 subreg_lowpart_offset
2207 (mode, xmode));
2208 else if (POINTERS_EXTEND_UNSIGNED > 0)
2209 x = gen_rtx_ZERO_EXTEND (mode, x);
2210 else if (!POINTERS_EXTEND_UNSIGNED)
2211 x = gen_rtx_SIGN_EXTEND (mode, x);
2212 else
2213 gcc_unreachable ();
2214#endif /* POINTERS_EXTEND_UNSIGNED */
2215
2216 return x;
2217}
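/* For instance, on a target whose ptr_mode is narrower than Pmode,
   narrowing an address takes the lowpart subreg, while widening wraps
   it in ZERO_EXTEND when POINTERS_EXTEND_UNSIGNED > 0 (or SIGN_EXTEND
   when it is zero); no insns are emitted either way.  */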
2218
2219/* Return an RTX equivalent to the value of the tree expression
2220 EXP. */
2221
2222static rtx
2223expand_debug_expr (tree exp)
2224{
2225 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2226 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2227 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
09e881c9 2228 addr_space_t as;
d4ebfa65 2229 enum machine_mode address_mode;
2230
2231 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2232 {
2233 case tcc_expression:
2234 switch (TREE_CODE (exp))
2235 {
2236 case COND_EXPR:
7ece48b1 2237 case DOT_PROD_EXPR:
b5b8b0ac
AO
2238 goto ternary;
2239
2240 case TRUTH_ANDIF_EXPR:
2241 case TRUTH_ORIF_EXPR:
2242 case TRUTH_AND_EXPR:
2243 case TRUTH_OR_EXPR:
2244 case TRUTH_XOR_EXPR:
2245 goto binary;
2246
2247 case TRUTH_NOT_EXPR:
2248 goto unary;
2249
2250 default:
2251 break;
2252 }
2253 break;
2254
2255 ternary:
2256 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2257 if (!op2)
2258 return NULL_RTX;
2259 /* Fall through. */
2260
2261 binary:
2262 case tcc_binary:
2263 case tcc_comparison:
2264 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2265 if (!op1)
2266 return NULL_RTX;
2267 /* Fall through. */
2268
2269 unary:
2270 case tcc_unary:
2271 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2272 if (!op0)
2273 return NULL_RTX;
2274 break;
2275
2276 case tcc_type:
2277 case tcc_statement:
2278 gcc_unreachable ();
2279
2280 case tcc_constant:
2281 case tcc_exceptional:
2282 case tcc_declaration:
2283 case tcc_reference:
2284 case tcc_vl_exp:
2285 break;
2286 }
2287
2288 switch (TREE_CODE (exp))
2289 {
2290 case STRING_CST:
2291 if (!lookup_constant_def (exp))
2292 {
2293 if (strlen (TREE_STRING_POINTER (exp)) + 1
2294 != (size_t) TREE_STRING_LENGTH (exp))
2295 return NULL_RTX;
2296 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2297 op0 = gen_rtx_MEM (BLKmode, op0);
2298 set_mem_attributes (op0, exp, 0);
2299 return op0;
2300 }
2301 /* Fall through... */
2302
2303 case INTEGER_CST:
2304 case REAL_CST:
2305 case FIXED_CST:
2306 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2307 return op0;
2308
2309 case COMPLEX_CST:
2310 gcc_assert (COMPLEX_MODE_P (mode));
2311 op0 = expand_debug_expr (TREE_REALPART (exp));
b5b8b0ac 2312 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2313 return gen_rtx_CONCAT (mode, op0, op1);
2314
2315 case DEBUG_EXPR_DECL:
2316 op0 = DECL_RTL_IF_SET (exp);
2317
2318 if (op0)
2319 return op0;
2320
2321 op0 = gen_rtx_DEBUG_EXPR (mode);
e4fb38bd 2322 DEBUG_EXPR_TREE_DECL (op0) = exp;
2323 SET_DECL_RTL (exp, op0);
2324
2325 return op0;
2326
2327 case VAR_DECL:
2328 case PARM_DECL:
2329 case FUNCTION_DECL:
2330 case LABEL_DECL:
2331 case CONST_DECL:
2332 case RESULT_DECL:
2333 op0 = DECL_RTL_IF_SET (exp);
2334
2335 /* This decl was probably optimized away. */
2336 if (!op0)
2337 {
2338 if (TREE_CODE (exp) != VAR_DECL
2339 || DECL_EXTERNAL (exp)
2340 || !TREE_STATIC (exp)
2341 || !DECL_NAME (exp)
2342 || DECL_HARD_REGISTER (exp)
2343 || mode == VOIDmode)
2344 return NULL;
2345
b1aa0655 2346 op0 = make_decl_rtl_for_debug (exp);
2347 if (!MEM_P (op0)
2348 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2349 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2350 return NULL;
2351 }
2352 else
2353 op0 = copy_rtx (op0);
b5b8b0ac 2354
2355 if (GET_MODE (op0) == BLKmode
2356 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2357 below would ICE. While it is likely a FE bug,
2358 try to be robust here. See PR43166. */
2359 || mode == BLKmode
2360 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2361 {
2362 gcc_assert (MEM_P (op0));
2363 op0 = adjust_address_nv (op0, mode, 0);
2364 return op0;
2365 }
2366
2367 /* Fall through. */
2368
2369 adjust_mode:
2370 case PAREN_EXPR:
2371 case NOP_EXPR:
2372 case CONVERT_EXPR:
2373 {
2374 enum machine_mode inner_mode = GET_MODE (op0);
2375
2376 if (mode == inner_mode)
2377 return op0;
2378
2379 if (inner_mode == VOIDmode)
2380 {
2381 if (TREE_CODE (exp) == SSA_NAME)
2382 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2383 else
2384 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2385 if (mode == inner_mode)
2386 return op0;
2387 }
2388
2389 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2390 {
2391 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2392 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2393 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2394 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2395 else
2396 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2397 }
2398 else if (FLOAT_MODE_P (mode))
2399 {
2a8e30fb 2400 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2401 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2402 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2403 else
2404 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2405 }
2406 else if (FLOAT_MODE_P (inner_mode))
2407 {
2408 if (unsignedp)
2409 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2410 else
2411 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2412 }
2413 else if (CONSTANT_P (op0)
2414 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2415 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2416 subreg_lowpart_offset (mode,
2417 inner_mode));
2418 else if (unsignedp)
2419 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2420 else
2421 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2422
2423 return op0;
2424 }
2425
2426 case INDIRECT_REF:
2427 case ALIGN_INDIRECT_REF:
2428 case MISALIGNED_INDIRECT_REF:
2429 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2430 if (!op0)
2431 return NULL;
2432
09e881c9 2433 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2434 {
2435 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2436 address_mode = targetm.addr_space.address_mode (as);
2437 }
09e881c9 2438 else
2439 {
2440 as = ADDR_SPACE_GENERIC;
2441 address_mode = Pmode;
2442 }
2443
2444 if (TREE_CODE (exp) == ALIGN_INDIRECT_REF)
2445 {
2446 int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp));
d4ebfa65 2447 op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align));
2448 }
2449
2450 op0 = gen_rtx_MEM (mode, op0);
2451
2452 set_mem_attributes (op0, exp, 0);
09e881c9 2453 set_mem_addr_space (op0, as);
2454
2455 return op0;
2456
2457 case TARGET_MEM_REF:
2458 if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp)))
2459 return NULL;
2460
2461 op0 = expand_debug_expr
4e25ca6b 2462 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2463 if (!op0)
2464 return NULL;
2465
09e881c9 2466 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
2467
2468 op0 = gen_rtx_MEM (mode, op0);
2469
2470 set_mem_attributes (op0, exp, 0);
09e881c9 2471 set_mem_addr_space (op0, as);
2472
2473 return op0;
2474
2475 case ARRAY_REF:
2476 case ARRAY_RANGE_REF:
2477 case COMPONENT_REF:
2478 case BIT_FIELD_REF:
2479 case REALPART_EXPR:
2480 case IMAGPART_EXPR:
2481 case VIEW_CONVERT_EXPR:
2482 {
2483 enum machine_mode mode1;
2484 HOST_WIDE_INT bitsize, bitpos;
2485 tree offset;
2486 int volatilep = 0;
2487 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2488 &mode1, &unsignedp, &volatilep, false);
2489 rtx orig_op0;
2490
2491 if (bitsize == 0)
2492 return NULL;
2493
2494 orig_op0 = op0 = expand_debug_expr (tem);
2495
2496 if (!op0)
2497 return NULL;
2498
2499 if (offset)
2500 {
2501 enum machine_mode addrmode, offmode;
2502
2503 if (!MEM_P (op0))
2504 return NULL;
b5b8b0ac 2505
2506 op0 = XEXP (op0, 0);
2507 addrmode = GET_MODE (op0);
2508 if (addrmode == VOIDmode)
2509 addrmode = Pmode;
2510
2511 op1 = expand_debug_expr (offset);
2512 if (!op1)
2513 return NULL;
2514
2515 offmode = GET_MODE (op1);
2516 if (offmode == VOIDmode)
2517 offmode = TYPE_MODE (TREE_TYPE (offset));
2518
2519 if (addrmode != offmode)
2520 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2521 subreg_lowpart_offset (addrmode,
2522 offmode));
2523
2524 /* Don't use offset_address here, we don't need a
2525 recognizable address, and we don't want to generate
2526 code. */
2527 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
2528 }
2529
2530 if (MEM_P (op0))
2531 {
2532 if (mode1 == VOIDmode)
2533 /* Bitfield. */
2534 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2535 if (bitpos >= BITS_PER_UNIT)
2536 {
2537 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2538 bitpos %= BITS_PER_UNIT;
2539 }
2540 else if (bitpos < 0)
2541 {
2542 HOST_WIDE_INT units
2543 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2544 op0 = adjust_address_nv (op0, mode1, units);
2545 bitpos += units * BITS_PER_UNIT;
2546 }
2547 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2548 op0 = adjust_address_nv (op0, mode, 0);
2549 else if (GET_MODE (op0) != mode1)
2550 op0 = adjust_address_nv (op0, mode1, 0);
2551 else
2552 op0 = copy_rtx (op0);
2553 if (op0 == orig_op0)
2554 op0 = shallow_copy_rtx (op0);
2555 set_mem_attributes (op0, exp, 0);
2556 }
2557
2558 if (bitpos == 0 && mode == GET_MODE (op0))
2559 return op0;
2560
2561 if (bitpos < 0)
2562 return NULL;
2563
2564 if (GET_MODE (op0) == BLKmode)
2565 return NULL;
2566
2567 if ((bitpos % BITS_PER_UNIT) == 0
2568 && bitsize == GET_MODE_BITSIZE (mode1))
2569 {
2570 enum machine_mode opmode = GET_MODE (op0);
2571
2572 if (opmode == VOIDmode)
2573 opmode = mode1;
2574
2575 /* This condition may hold if we're expanding the address
2576 right past the end of an array that turned out not to
2577 be addressable (i.e., the address was only computed in
2578 debug stmts). The gen_subreg below would rightfully
2579 crash, and the address doesn't really exist, so just
2580 drop it. */
2581 if (bitpos >= GET_MODE_BITSIZE (opmode))
2582 return NULL;
2583
2584 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2585 return simplify_gen_subreg (mode, op0, opmode,
2586 bitpos / BITS_PER_UNIT);
2587 }
2588
2589 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2590 && TYPE_UNSIGNED (TREE_TYPE (exp))
2591 ? SIGN_EXTRACT
2592 : ZERO_EXTRACT, mode,
2593 GET_MODE (op0) != VOIDmode
2594 ? GET_MODE (op0) : mode1,
2595 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2596 }
2597
2598 case ABS_EXPR:
2599 return gen_rtx_ABS (mode, op0);
2600
2601 case NEGATE_EXPR:
2602 return gen_rtx_NEG (mode, op0);
2603
2604 case BIT_NOT_EXPR:
2605 return gen_rtx_NOT (mode, op0);
2606
2607 case FLOAT_EXPR:
2608 if (unsignedp)
2609 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2610 else
2611 return gen_rtx_FLOAT (mode, op0);
2612
2613 case FIX_TRUNC_EXPR:
2614 if (unsignedp)
2615 return gen_rtx_UNSIGNED_FIX (mode, op0);
2616 else
2617 return gen_rtx_FIX (mode, op0);
2618
2619 case POINTER_PLUS_EXPR:
2620 /* For the rare target where pointers are not the same size as
2621 size_t, we need to check for mis-matched modes and correct
2622 the addend. */
2623 if (op0 && op1
2624 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2625 && GET_MODE (op0) != GET_MODE (op1))
2626 {
2627 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2628 op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
2629 else
2630 /* We always sign-extend, regardless of the signedness of
2631 the operand, because the operand is always unsigned
2632 here even if the original C expression is signed. */
2633 op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
2634 }
2635 /* Fall through. */
2636 case PLUS_EXPR:
2637 return gen_rtx_PLUS (mode, op0, op1);
2638
2639 case MINUS_EXPR:
2640 return gen_rtx_MINUS (mode, op0, op1);
2641
2642 case MULT_EXPR:
2643 return gen_rtx_MULT (mode, op0, op1);
2644
2645 case RDIV_EXPR:
2646 case TRUNC_DIV_EXPR:
2647 case EXACT_DIV_EXPR:
2648 if (unsignedp)
2649 return gen_rtx_UDIV (mode, op0, op1);
2650 else
2651 return gen_rtx_DIV (mode, op0, op1);
2652
2653 case TRUNC_MOD_EXPR:
2654 if (unsignedp)
2655 return gen_rtx_UMOD (mode, op0, op1);
2656 else
2657 return gen_rtx_MOD (mode, op0, op1);
2658
2659 case FLOOR_DIV_EXPR:
2660 if (unsignedp)
2661 return gen_rtx_UDIV (mode, op0, op1);
2662 else
2663 {
2664 rtx div = gen_rtx_DIV (mode, op0, op1);
2665 rtx mod = gen_rtx_MOD (mode, op0, op1);
2666 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2667 return gen_rtx_PLUS (mode, div, adj);
2668 }
2669
2670 case FLOOR_MOD_EXPR:
2671 if (unsignedp)
2672 return gen_rtx_UMOD (mode, op0, op1);
2673 else
2674 {
2675 rtx mod = gen_rtx_MOD (mode, op0, op1);
2676 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2677 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2678 return gen_rtx_PLUS (mode, mod, adj);
2679 }
2680
2681 case CEIL_DIV_EXPR:
2682 if (unsignedp)
2683 {
2684 rtx div = gen_rtx_UDIV (mode, op0, op1);
2685 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2686 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2687 return gen_rtx_PLUS (mode, div, adj);
2688 }
2689 else
2690 {
2691 rtx div = gen_rtx_DIV (mode, op0, op1);
2692 rtx mod = gen_rtx_MOD (mode, op0, op1);
2693 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2694 return gen_rtx_PLUS (mode, div, adj);
2695 }
2696
2697 case CEIL_MOD_EXPR:
2698 if (unsignedp)
2699 {
2700 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2701 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2702 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2703 return gen_rtx_PLUS (mode, mod, adj);
2704 }
2705 else
2706 {
2707 rtx mod = gen_rtx_MOD (mode, op0, op1);
2708 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2709 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2710 return gen_rtx_PLUS (mode, mod, adj);
2711 }
2712
2713 case ROUND_DIV_EXPR:
2714 if (unsignedp)
2715 {
2716 rtx div = gen_rtx_UDIV (mode, op0, op1);
2717 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2718 rtx adj = round_udiv_adjust (mode, mod, op1);
2719 return gen_rtx_PLUS (mode, div, adj);
2720 }
2721 else
2722 {
2723 rtx div = gen_rtx_DIV (mode, op0, op1);
2724 rtx mod = gen_rtx_MOD (mode, op0, op1);
2725 rtx adj = round_sdiv_adjust (mode, mod, op1);
2726 return gen_rtx_PLUS (mode, div, adj);
2727 }
2728
2729 case ROUND_MOD_EXPR:
2730 if (unsignedp)
2731 {
2732 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2733 rtx adj = round_udiv_adjust (mode, mod, op1);
2734 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2735 return gen_rtx_PLUS (mode, mod, adj);
2736 }
2737 else
2738 {
2739 rtx mod = gen_rtx_MOD (mode, op0, op1);
2740 rtx adj = round_sdiv_adjust (mode, mod, op1);
2741 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2742 return gen_rtx_PLUS (mode, mod, adj);
2743 }
2744
2745 case LSHIFT_EXPR:
2746 return gen_rtx_ASHIFT (mode, op0, op1);
2747
2748 case RSHIFT_EXPR:
2749 if (unsignedp)
2750 return gen_rtx_LSHIFTRT (mode, op0, op1);
2751 else
2752 return gen_rtx_ASHIFTRT (mode, op0, op1);
2753
2754 case LROTATE_EXPR:
2755 return gen_rtx_ROTATE (mode, op0, op1);
2756
2757 case RROTATE_EXPR:
2758 return gen_rtx_ROTATERT (mode, op0, op1);
2759
2760 case MIN_EXPR:
2761 if (unsignedp)
2762 return gen_rtx_UMIN (mode, op0, op1);
2763 else
2764 return gen_rtx_SMIN (mode, op0, op1);
2765
2766 case MAX_EXPR:
2767 if (unsignedp)
2768 return gen_rtx_UMAX (mode, op0, op1);
2769 else
2770 return gen_rtx_SMAX (mode, op0, op1);
2771
2772 case BIT_AND_EXPR:
2773 case TRUTH_AND_EXPR:
2774 return gen_rtx_AND (mode, op0, op1);
2775
2776 case BIT_IOR_EXPR:
2777 case TRUTH_OR_EXPR:
2778 return gen_rtx_IOR (mode, op0, op1);
2779
2780 case BIT_XOR_EXPR:
2781 case TRUTH_XOR_EXPR:
2782 return gen_rtx_XOR (mode, op0, op1);
2783
2784 case TRUTH_ANDIF_EXPR:
2785 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2786
2787 case TRUTH_ORIF_EXPR:
2788 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2789
2790 case TRUTH_NOT_EXPR:
2791 return gen_rtx_EQ (mode, op0, const0_rtx);
2792
2793 case LT_EXPR:
2794 if (unsignedp)
2795 return gen_rtx_LTU (mode, op0, op1);
2796 else
2797 return gen_rtx_LT (mode, op0, op1);
2798
2799 case LE_EXPR:
2800 if (unsignedp)
2801 return gen_rtx_LEU (mode, op0, op1);
2802 else
2803 return gen_rtx_LE (mode, op0, op1);
2804
2805 case GT_EXPR:
2806 if (unsignedp)
2807 return gen_rtx_GTU (mode, op0, op1);
2808 else
2809 return gen_rtx_GT (mode, op0, op1);
2810
2811 case GE_EXPR:
2812 if (unsignedp)
2813 return gen_rtx_GEU (mode, op0, op1);
2814 else
2815 return gen_rtx_GE (mode, op0, op1);
2816
2817 case EQ_EXPR:
2818 return gen_rtx_EQ (mode, op0, op1);
2819
2820 case NE_EXPR:
2821 return gen_rtx_NE (mode, op0, op1);
2822
2823 case UNORDERED_EXPR:
2824 return gen_rtx_UNORDERED (mode, op0, op1);
2825
2826 case ORDERED_EXPR:
2827 return gen_rtx_ORDERED (mode, op0, op1);
2828
2829 case UNLT_EXPR:
2830 return gen_rtx_UNLT (mode, op0, op1);
2831
2832 case UNLE_EXPR:
2833 return gen_rtx_UNLE (mode, op0, op1);
2834
2835 case UNGT_EXPR:
2836 return gen_rtx_UNGT (mode, op0, op1);
2837
2838 case UNGE_EXPR:
2839 return gen_rtx_UNGE (mode, op0, op1);
2840
2841 case UNEQ_EXPR:
2842 return gen_rtx_UNEQ (mode, op0, op1);
2843
2844 case LTGT_EXPR:
2845 return gen_rtx_LTGT (mode, op0, op1);
2846
2847 case COND_EXPR:
2848 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
2849
2850 case COMPLEX_EXPR:
2851 gcc_assert (COMPLEX_MODE_P (mode));
2852 if (GET_MODE (op0) == VOIDmode)
2853 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
2854 if (GET_MODE (op1) == VOIDmode)
2855 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
2856 return gen_rtx_CONCAT (mode, op0, op1);
2857
2858 case CONJ_EXPR:
2859 if (GET_CODE (op0) == CONCAT)
2860 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2861 gen_rtx_NEG (GET_MODE_INNER (mode),
2862 XEXP (op0, 1)));
2863 else
2864 {
2865 enum machine_mode imode = GET_MODE_INNER (mode);
2866 rtx re, im;
2867
2868 if (MEM_P (op0))
2869 {
2870 re = adjust_address_nv (op0, imode, 0);
2871 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
2872 }
2873 else
2874 {
2875 enum machine_mode ifmode = int_mode_for_mode (mode);
2876 enum machine_mode ihmode = int_mode_for_mode (imode);
2877 rtx halfsize;
2878 if (ifmode == BLKmode || ihmode == BLKmode)
2879 return NULL;
2880 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
2881 re = op0;
2882 if (mode != ifmode)
2883 re = gen_rtx_SUBREG (ifmode, re, 0);
2884 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
2885 if (imode != ihmode)
2886 re = gen_rtx_SUBREG (imode, re, 0);
2887 im = copy_rtx (op0);
2888 if (mode != ifmode)
2889 im = gen_rtx_SUBREG (ifmode, im, 0);
2890 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
2891 if (imode != ihmode)
2892 im = gen_rtx_SUBREG (imode, im, 0);
2893 }
2894 im = gen_rtx_NEG (imode, im);
2895 return gen_rtx_CONCAT (mode, re, im);
2896 }
2897
2898 case ADDR_EXPR:
2899 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2900 if (!op0 || !MEM_P (op0))
2901 return NULL;
2902
2903 op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
2904
2905 return op0;
2906
2907 case VECTOR_CST:
2908 exp = build_constructor_from_list (TREE_TYPE (exp),
2909 TREE_VECTOR_CST_ELTS (exp));
2910 /* Fall through. */
2911
2912 case CONSTRUCTOR:
2913 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
2914 {
2915 unsigned i;
2916 tree val;
2917
2918 op0 = gen_rtx_CONCATN
2919 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
2920
2921 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
2922 {
2923 op1 = expand_debug_expr (val);
2924 if (!op1)
2925 return NULL;
2926 XVECEXP (op0, 0, i) = op1;
2927 }
2928
2929 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
2930 {
2931 op1 = expand_debug_expr
2932 (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node));
2933
2934 if (!op1)
2935 return NULL;
2936
2937 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
2938 XVECEXP (op0, 0, i) = op1;
2939 }
2940
2941 return op0;
2942 }
2943 else
2944 goto flag_unsupported;
2945
2946 case CALL_EXPR:
2947 /* ??? Maybe handle some builtins? */
2948 return NULL;
2949
2950 case SSA_NAME:
2951 {
2952 gimple g = get_gimple_for_ssa_name (exp);
2953 if (g)
2954 {
2955 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
2956 if (!op0)
2957 return NULL;
2958 }
2959 else
2960 {
2961 int part = var_to_partition (SA.map, exp);
b5b8b0ac 2962
2963 if (part == NO_PARTITION)
2964 return NULL;
b5b8b0ac 2965
2a8e30fb 2966 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
b5b8b0ac 2967
2968 op0 = SA.partition_to_pseudo[part];
2969 }
2970 goto adjust_mode;
2971 }
2972
2973 case ERROR_MARK:
2974 return NULL;
2975
2976 /* Vector stuff. For most of the codes we don't have rtl codes. */
2977 case REALIGN_LOAD_EXPR:
2978 case REDUC_MAX_EXPR:
2979 case REDUC_MIN_EXPR:
2980 case REDUC_PLUS_EXPR:
2981 case VEC_COND_EXPR:
2982 case VEC_EXTRACT_EVEN_EXPR:
2983 case VEC_EXTRACT_ODD_EXPR:
2984 case VEC_INTERLEAVE_HIGH_EXPR:
2985 case VEC_INTERLEAVE_LOW_EXPR:
2986 case VEC_LSHIFT_EXPR:
2987 case VEC_PACK_FIX_TRUNC_EXPR:
2988 case VEC_PACK_SAT_EXPR:
2989 case VEC_PACK_TRUNC_EXPR:
2990 case VEC_RSHIFT_EXPR:
2991 case VEC_UNPACK_FLOAT_HI_EXPR:
2992 case VEC_UNPACK_FLOAT_LO_EXPR:
2993 case VEC_UNPACK_HI_EXPR:
2994 case VEC_UNPACK_LO_EXPR:
2995 case VEC_WIDEN_MULT_HI_EXPR:
2996 case VEC_WIDEN_MULT_LO_EXPR:
2997 return NULL;
2998
2999 /* Misc codes. */
3000 case ADDR_SPACE_CONVERT_EXPR:
3001 case FIXED_CONVERT_EXPR:
3002 case OBJ_TYPE_REF:
3003 case WITH_SIZE_EXPR:
3004 return NULL;
3005
3006 case DOT_PROD_EXPR:
3007 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3008 && SCALAR_INT_MODE_P (mode))
3009 {
3010 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3011 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3012 else
3013 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3014 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3015 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3016 else
3017 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3018 op0 = gen_rtx_MULT (mode, op0, op1);
3019 return gen_rtx_PLUS (mode, op0, op2);
3020 }
3021 return NULL;
3022
3023 case WIDEN_MULT_EXPR:
3024 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3025 && SCALAR_INT_MODE_P (mode))
3026 {
5b58b39b 3027 enum machine_mode inner_mode = GET_MODE (op0);
7ece48b1 3028 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5b58b39b 3029 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
7ece48b1 3030 else
5b58b39b 3031 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
7ece48b1 3032 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5b58b39b 3033 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
7ece48b1 3034 else
5b58b39b 3035 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3036 return gen_rtx_MULT (mode, op0, op1);
3037 }
3038 return NULL;
3039
3040 case WIDEN_SUM_EXPR:
3041 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3042 && SCALAR_INT_MODE_P (mode))
3043 {
3044 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3045 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3046 else
3047 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3048 return gen_rtx_PLUS (mode, op0, op1);
3049 }
3050 return NULL;
3051
3052 default:
3053 flag_unsupported:
3054#ifdef ENABLE_CHECKING
3055 debug_tree (exp);
3056 gcc_unreachable ();
3057#else
3058 return NULL;
3059#endif
3060 }
3061}
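/* As an illustration, for the expression b_2 + 1 in SImode this builds
   the bare rtx (plus:SI (reg:SI ...) (const_int 1)) and emits no
   instructions, which is what makes it safe to call while materializing
   debug insn locations.  */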
3062
3063/* Expand the _LOCs in debug insns. We run this after expanding all
3064 regular insns, so that any variables referenced in the function
3065 will have their DECL_RTLs set. */
3066
3067static void
3068expand_debug_locations (void)
3069{
3070 rtx insn;
3071 rtx last = get_last_insn ();
3072 int save_strict_alias = flag_strict_aliasing;
3073
3074 /* New alias sets while setting up memory attributes cause
3075 -fcompare-debug failures, even though they don't bring about any
3076 codegen changes. */
3077 flag_strict_aliasing = 0;
3078
3079 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3080 if (DEBUG_INSN_P (insn))
3081 {
3082 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3083 rtx val;
3084 enum machine_mode mode;
3085
3086 if (value == NULL_TREE)
3087 val = NULL_RTX;
3088 else
3089 {
3090 val = expand_debug_expr (value);
3091 gcc_assert (last == get_last_insn ());
3092 }
3093
3094 if (!val)
3095 val = gen_rtx_UNKNOWN_VAR_LOC ();
3096 else
3097 {
3098 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3099
3100 gcc_assert (mode == GET_MODE (val)
3101 || (GET_MODE (val) == VOIDmode
3102 && (CONST_INT_P (val)
3103 || GET_CODE (val) == CONST_FIXED
3104 || GET_CODE (val) == CONST_DOUBLE
3105 || GET_CODE (val) == LABEL_REF)));
3106 }
3107
3108 INSN_VAR_LOCATION_LOC (insn) = val;
3109 }
3110
3111 flag_strict_aliasing = save_strict_alias;
3112}
3113
3114/* Expand basic block BB from GIMPLE trees to RTL. */
3115
3116static basic_block
10d22567 3117expand_gimple_basic_block (basic_block bb)
242229bb 3118{
3119 gimple_stmt_iterator gsi;
3120 gimple_seq stmts;
3121 gimple stmt = NULL;
242229bb
JH
3122 rtx note, last;
3123 edge e;
628f6a4e 3124 edge_iterator ei;
8b11009b 3125 void **elt;
242229bb
JH
3126
3127 if (dump_file)
3128 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3129 bb->index);
3130
3131 /* Note that since we are now transitioning from GIMPLE to RTL, we
3132 cannot use the gsi_*_bb() routines because they expect the basic
3133 block to be in GIMPLE, instead of RTL. Therefore, we need to
3134 access the BB sequence directly. */
3135 stmts = bb_seq (bb);
3136 bb->il.gimple = NULL;
bf08ebeb 3137 rtl_profile_for_bb (bb);
3138 init_rtl_bb_info (bb);
3139 bb->flags |= BB_RTL;
3140
3141 /* Remove the RETURN_EXPR if we may fall through to the exit
3142 instead. */
3143 gsi = gsi_last (stmts);
3144 if (!gsi_end_p (gsi)
3145 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
a9b77cd1 3146 {
726a989a 3147 gimple ret_stmt = gsi_stmt (gsi);
3148
3149 gcc_assert (single_succ_p (bb));
3150 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3151
3152 if (bb->next_bb == EXIT_BLOCK_PTR
726a989a 3153 && !gimple_return_retval (ret_stmt))
a9b77cd1 3154 {
726a989a 3155 gsi_remove (&gsi, false);
3156 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3157 }
3158 }
3159
3160 gsi = gsi_start (stmts);
3161 if (!gsi_end_p (gsi))
8b11009b 3162 {
3163 stmt = gsi_stmt (gsi);
3164 if (gimple_code (stmt) != GIMPLE_LABEL)
3165 stmt = NULL;
8b11009b 3166 }
242229bb 3167
3168 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3169
3170 if (stmt || elt)
3171 {
3172 last = get_last_insn ();
3173
3174 if (stmt)
3175 {
28ed065e 3176 expand_gimple_stmt (stmt);
726a989a 3177 gsi_next (&gsi);
3178 }
3179
3180 if (elt)
ae50c0cb 3181 emit_label ((rtx) *elt);
242229bb 3182
caf93cb0 3183 /* Java emits line number notes at the top of labels.
c22cacf3 3184 ??? Make this go away once line number notes are obsoleted. */
242229bb 3185 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 3186 if (NOTE_P (BB_HEAD (bb)))
242229bb 3187 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
242229bb 3188 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528 3189
726a989a 3190 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3191 }
3192 else
3193 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3194
3195 NOTE_BASIC_BLOCK (note) = bb;
3196
726a989a 3197 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 3198 {
cea49550 3199 basic_block new_bb;
242229bb 3200
b5b8b0ac 3201 stmt = gsi_stmt (gsi);
3202
3203 /* If this statement is a non-debug one, and we generate debug
3204 insns, then this one might be the last real use of a TERed
3205 SSA_NAME, but where there are still some debug uses further
3206 down. Expanding the current SSA name in such further debug
3207 uses by their RHS might lead to wrong debug info, as coalescing
3208 might make the operands of such RHS be placed into the same
3209 pseudo as something else. Like so:
3210 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3211 use(a_1);
3212 a_2 = ...
3213 #DEBUG ... => a_1
3214 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3215 If we now were to expand a_1 by its RHS (a_0 + 1) in the debug use,
3216 the write to a_2 would actually have clobbered the place which
3217 formerly held a_0.
3218
3219 So, instead of that, we recognize the situation, and generate
3220 debug temporaries at the last real use of TERed SSA names:
3221 a_1 = a_0 + 1;
3222 #DEBUG #D1 => a_1
3223 use(a_1);
3224 a_2 = ...
3225 #DEBUG ... => #D1
3226 */
3227 if (MAY_HAVE_DEBUG_INSNS
3228 && SA.values
3229 && !is_gimple_debug (stmt))
3230 {
3231 ssa_op_iter iter;
3232 tree op;
3233 gimple def;
3234
3235 location_t sloc = get_curr_insn_source_location ();
3236 tree sblock = get_curr_insn_block ();
3237
3238 /* Look for SSA names that have their last use here (TERed
3239 names always have only one real use). */
3240 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3241 if ((def = get_gimple_for_ssa_name (op)))
3242 {
3243 imm_use_iterator imm_iter;
3244 use_operand_p use_p;
3245 bool have_debug_uses = false;
3246
3247 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3248 {
3249 if (gimple_debug_bind_p (USE_STMT (use_p)))
3250 {
3251 have_debug_uses = true;
3252 break;
3253 }
3254 }
3255
3256 if (have_debug_uses)
3257 {
3258 /* OP is a TERed SSA name, with DEF its defining
3259 statement, and where OP is used in further debug
3260 instructions. Generate a debug temporary, and
3261 replace all uses of OP in debug insns with that
3262 temporary. */
3263 gimple debugstmt;
3264 tree value = gimple_assign_rhs_to_tree (def);
3265 tree vexpr = make_node (DEBUG_EXPR_DECL);
3266 rtx val;
3267 enum machine_mode mode;
3268
3269 set_curr_insn_source_location (gimple_location (def));
3270 set_curr_insn_block (gimple_block (def));
3271
3272 DECL_ARTIFICIAL (vexpr) = 1;
3273 TREE_TYPE (vexpr) = TREE_TYPE (value);
3274 if (DECL_P (value))
3275 mode = DECL_MODE (value);
3276 else
3277 mode = TYPE_MODE (TREE_TYPE (value));
3278 DECL_MODE (vexpr) = mode;
3279
3280 val = gen_rtx_VAR_LOCATION
3281 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3282
3283 val = emit_debug_insn (val);
3284
3285 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3286 {
3287 if (!gimple_debug_bind_p (debugstmt))
3288 continue;
3289
3290 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3291 SET_USE (use_p, vexpr);
3292
3293 update_stmt (debugstmt);
3294 }
3295 }
3296 }
3297 set_curr_insn_source_location (sloc);
3298 set_curr_insn_block (sblock);
3299 }
3300
a5883ba0 3301 currently_expanding_gimple_stmt = stmt;
b5b8b0ac 3302
3303 /* Expand this statement, then evaluate the resulting RTL and
3304 fixup the CFG accordingly. */
726a989a 3305 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 3306 {
726a989a 3307 new_bb = expand_gimple_cond (bb, stmt);
3308 if (new_bb)
3309 return new_bb;
3310 }
3311 else if (gimple_debug_bind_p (stmt))
3312 {
3313 location_t sloc = get_curr_insn_source_location ();
3314 tree sblock = get_curr_insn_block ();
3315 gimple_stmt_iterator nsi = gsi;
3316
3317 for (;;)
3318 {
3319 tree var = gimple_debug_bind_get_var (stmt);
3320 tree value;
3321 rtx val;
3322 enum machine_mode mode;
3323
3324 if (gimple_debug_bind_has_value_p (stmt))
3325 value = gimple_debug_bind_get_value (stmt);
3326 else
3327 value = NULL_TREE;
3328
3329 last = get_last_insn ();
3330
3331 set_curr_insn_source_location (gimple_location (stmt));
3332 set_curr_insn_block (gimple_block (stmt));
3333
3334 if (DECL_P (var))
3335 mode = DECL_MODE (var);
3336 else
3337 mode = TYPE_MODE (TREE_TYPE (var));
3338
3339 val = gen_rtx_VAR_LOCATION
3340 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3341
3342 val = emit_debug_insn (val);
3343
3344 if (dump_file && (dump_flags & TDF_DETAILS))
3345 {
3346 /* We can't dump the insn with a TREE where an RTX
3347 is expected. */
3348 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3349 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3350 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3351 }
3352
3353 /* In order not to generate too many debug temporaries,
3354 we delink all uses of debug statements we already expanded.
3355 Therefore debug statements between definition and real
3356 use of TERed SSA names will continue to use the SSA name,
3357 and not be replaced with debug temps. */
3358 delink_stmt_imm_use (stmt);
3359
b5b8b0ac
AO
3360 gsi = nsi;
3361 gsi_next (&nsi);
3362 if (gsi_end_p (nsi))
3363 break;
3364 stmt = gsi_stmt (nsi);
3365 if (!gimple_debug_bind_p (stmt))
3366 break;
3367 }
3368
3369 set_curr_insn_source_location (sloc);
3370 set_curr_insn_block (sblock);
3371 }
80c7a9eb 3372 else
242229bb 3373 {
726a989a 3374 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3375 {
3376 bool can_fallthru;
3377 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3378 if (new_bb)
3379 {
3380 if (can_fallthru)
3381 bb = new_bb;
3382 else
3383 return new_bb;
3384 }
3385 }
4d7a65ea 3386 else
b7211528 3387 {
4e3825db 3388 def_operand_p def_p;
3389 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3390
3391 if (def_p != NULL)
3392 {
3393 /* Ignore this stmt if it is in the list of
3394 replaceable expressions. */
3395 if (SA.values
b8698a0f 3396 && bitmap_bit_p (SA.values,
e97809c6 3397 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3398 continue;
3399 }
28ed065e 3400 last = expand_gimple_stmt (stmt);
726a989a 3401 maybe_dump_rtl_for_gimple_stmt (stmt, last);
b7211528 3402 }
3403 }
3404 }
3405
3406 currently_expanding_gimple_stmt = NULL;
3407
7241571e 3408 /* Expand implicit goto and convert goto_locus. */
3409 FOR_EACH_EDGE (e, ei, bb->succs)
3410 {
3411 if (e->goto_locus && e->goto_block)
3412 {
3413 set_curr_insn_source_location (e->goto_locus);
3414 set_curr_insn_block (e->goto_block);
3415 e->goto_locus = curr_insn_locator ();
3416 }
3417 e->goto_block = NULL;
3418 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3419 {
3420 emit_jump (label_rtx_for_bb (e->dest));
3421 e->flags &= ~EDGE_FALLTHRU;
3422 }
3423 }
3424
3425 /* Expanded RTL can create a jump in the last instruction of the block.
3426 This might later be assumed to be a jump to the successor and break edge insertion.
3427 We need to insert a dummy move to prevent this. See PR41440. */
3428 if (single_succ_p (bb)
3429 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3430 && (last = get_last_insn ())
3431 && JUMP_P (last))
3432 {
3433 rtx dummy = gen_reg_rtx (SImode);
3434 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3435 }
3436
3437 do_pending_stack_adjust ();
3438
3f117656 3439 /* Find the block tail. The last insn in the block is the insn
3440 before a barrier and/or table jump insn. */
3441 last = get_last_insn ();
4b4bf941 3442 if (BARRIER_P (last))
242229bb
JH
3443 last = PREV_INSN (last);
3444 if (JUMP_TABLE_DATA_P (last))
3445 last = PREV_INSN (PREV_INSN (last));
3446 BB_END (bb) = last;
caf93cb0 3447
242229bb 3448 update_bb_for_insn (bb);
80c7a9eb 3449
3450 return bb;
3451}
3452
3453
3454/* Create a basic block for initialization code. */
3455
3456static basic_block
3457construct_init_block (void)
3458{
3459 basic_block init_block, first_block;
3460 edge e = NULL;
3461 int flags;
275a4187 3462
3463 /* Multiple entry points not supported yet. */
3464 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3465 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3466 init_rtl_bb_info (EXIT_BLOCK_PTR);
3467 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3468 EXIT_BLOCK_PTR->flags |= BB_RTL;
242229bb 3469
fd44f634 3470 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
275a4187 3471
3472 /* When the entry edge points to the first basic block, we don't need a jump;
3473 otherwise we have to jump to the proper target. */
3474 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3475 {
726a989a 3476 tree label = gimple_block_label (e->dest);
fd44f634
JH
3477
3478 emit_jump (label_rtx (label));
3479 flags = 0;
275a4187 3480 }
3481 else
3482 flags = EDGE_FALLTHRU;
3483
3484 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3485 get_last_insn (),
3486 ENTRY_BLOCK_PTR);
3487 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3488 init_block->count = ENTRY_BLOCK_PTR->count;
3489 if (e)
3490 {
3491 first_block = e->dest;
3492 redirect_edge_succ (e, init_block);
fd44f634 3493 e = make_edge (init_block, first_block, flags);
3494 }
3495 else
3496 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3497 e->probability = REG_BR_PROB_BASE;
3498 e->count = ENTRY_BLOCK_PTR->count;
3499
3500 update_bb_for_insn (init_block);
3501 return init_block;
3502}
3503
3504/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3505 found in the block tree. */
3506
3507static void
3508set_block_levels (tree block, int level)
3509{
3510 while (block)
3511 {
3512 BLOCK_NUMBER (block) = level;
3513 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3514 block = BLOCK_CHAIN (block);
3515 }
3516}
3517
3518/* Create a block containing landing pads and similar stuff. */
3519
3520static void
3521construct_exit_block (void)
3522{
3523 rtx head = get_last_insn ();
3524 rtx end;
3525 basic_block exit_block;
3526 edge e, e2;
3527 unsigned ix;
3528 edge_iterator ei;
071a42f9 3529 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
242229bb 3530
3531 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3532
caf93cb0 3533 /* Make sure the locus is set to the end of the function, so that
242229bb 3534 epilogue line numbers and warnings are set properly. */
6773e15f 3535 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3536 input_location = cfun->function_end_locus;
3537
3538 /* The following insns belong to the top scope. */
55e092c4 3539 set_curr_insn_block (DECL_INITIAL (current_function_decl));
242229bb 3540
3541 /* Generate rtl for function exit. */
3542 expand_function_end ();
3543
3544 end = get_last_insn ();
3545 if (head == end)
3546 return;
3547 /* While emitting the function end we could have moved the end of the
3548 last basic block. */
3549 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4b4bf941 3550 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 3551 head = NEXT_INSN (head);
3552 exit_block = create_basic_block (NEXT_INSN (head), end,
3553 EXIT_BLOCK_PTR->prev_bb);
3554 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3555 exit_block->count = EXIT_BLOCK_PTR->count;
3556
3557 ix = 0;
3558 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
242229bb 3559 {
8fb790fd 3560 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
242229bb 3561 if (!(e->flags & EDGE_ABNORMAL))
3562 redirect_edge_succ (e, exit_block);
3563 else
3564 ix++;
242229bb 3565 }
628f6a4e 3566
3567 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3568 e->probability = REG_BR_PROB_BASE;
3569 e->count = EXIT_BLOCK_PTR->count;
628f6a4e 3570 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3571 if (e2 != e)
3572 {
c22cacf3 3573 e->count -= e2->count;
3574 exit_block->count -= e2->count;
3575 exit_block->frequency -= EDGE_FREQUENCY (e2);
3576 }
3577 if (e->count < 0)
3578 e->count = 0;
3579 if (exit_block->count < 0)
3580 exit_block->count = 0;
3581 if (exit_block->frequency < 0)
3582 exit_block->frequency = 0;
3583 update_bb_for_insn (exit_block);
3584}
3585
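/* A worked example for the count bookkeeping in construct_exit_block
   (illustrative only, not from the original sources): if the function
   exits with profile count 100 and EXIT_BLOCK_PTR keeps one abnormal
   predecessor of count 30, the new fallthru edge starts at 100 and has
   30 subtracted, leaving both e and exit_block with count 70.  The
   final comparisons clamp any negative result of this arithmetic to
   zero, guarding against inconsistent profile data.  */
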
/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree *tp, int *walk_subtrees,
                                   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
              && is_gimple_min_invariant (TREE_OPERAND (t, 1))
              && (!TREE_OPERAND (t, 2)
                  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || (TREE_CODE (t) == COMPONENT_REF
                 && (!TREE_OPERAND (t, 2)
                     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || TREE_CODE (t) == BIT_FIELD_REF
             || TREE_CODE (t) == REALPART_EXPR
             || TREE_CODE (t) == IMAGPART_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || CONVERT_EXPR_P (t))
        t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          t = get_base_address (t);
          if (t && DECL_P (t)
              && DECL_MODE (t) != BLKmode)
            TREE_ADDRESSABLE (t) = 1;
        }

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple stmt = gsi_stmt (gsi);
        if (!is_gimple_debug (stmt))
          walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}
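
/* An illustrative source-level example of the construct handled above
   (not part of the original sources):

     int f (int i)
     {
       int a[2] = { 1, 2 };
       return a[i];
     }

   Here 'a' is small enough that it could otherwise be promoted to a
   single (non-BLKmode) register, but the non-constant index 'i' cannot
   be expanded against a register, so 'a' is marked TREE_ADDRESSABLE and
   given addressable storage instead.  */
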
/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  The local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update the incoming stack
     boundary.  It may set the incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization, since sibcall optimization will
     only align the outgoing stack to the incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     the stack.  We check PREFERRED_STACK_BOUNDARY if there may be
     non-call exceptions, since the callgraph doesn't collect the
     incoming stack alignment in this case.  */
  if (flag_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
              <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* The target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up the REG_EQUIV note if DRAP
         is needed.  */
      fixup_tail_calls ();
    }
}

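/* A concrete illustration of the realignment decision above (the
   numbers are hypothetical, not from the original sources): on a
   target whose INCOMING_STACK_BOUNDARY is 32 bits, a local variable
   requiring 128-bit alignment raises crtl->stack_alignment_estimated
   to 128.  Since 32 < 128, stack_realign_needed becomes true, and the
   target may return a DRAP register from get_drap_rtx so that incoming
   arguments remain addressable after the stack pointer is realigned.  */
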
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful not to manipulate the CFG during
   the expansion.  */

static unsigned int
gimple_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;
  unsigned i;

  rewrite_out_of_ssa (&SA);
  SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
                                           sizeof (rtx));

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  insn_locators_alloc ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (cfun->function_start_locus == UNKNOWN_LOCATION)
        set_curr_insn_source_location
          (DECL_SOURCE_LOCATION (current_function_decl));
      else
        set_curr_insn_source_location (cfun->function_start_locus);
    }
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

  /* Make sure the first insn is a note even if we don't want line numbers.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = 0;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting local variables: "
                 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting function: "
                 "all local arrays are less than %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* Now that we also have the parameter RTXs, copy them over to our
     partitions.  */
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));

      if (TREE_CODE (var) != VAR_DECL
          && !SA.partition_to_pseudo[i])
        SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
      gcc_assert (SA.partition_to_pseudo[i]);

      /* If this decl was marked as living in multiple places, reset
         this now to NULL.  */
      if (DECL_RTL_IF_SET (var) == pc_rtx)
        SET_DECL_RTL (var, NULL);

      /* Some RTL parts really want to look at DECL_RTL(x) when x
         was a decl marked in REG_ATTR or MEM_ATTR.  We could use
         SET_DECL_RTL here making this available, but that would mean
         to select one of the potentially many RTLs for one DECL.  Instead
         of doing that we simply reset the MEM_EXPR of the RTL in question,
         then nobody can get at it and hence nobody can call DECL_RTL on it.  */
      if (!DECL_RTL_SET_P (var))
        {
          if (MEM_P (SA.partition_to_pseudo[i]))
            set_mem_expr (SA.partition_to_pseudo[i], NULL);
        }
    }

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();

  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  execute_free_datastructures ();
  finish_out_of_ssa (&SA);

  /* We are no longer in SSA form.  */
  cfun->gimple_df->in_ssa_p = false;

  /* Expansion is used by optimization passes too; set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (cfun, NULL);

  rebuild_jump_labels (get_insns ());

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->insns.r)
            commit_one_edge_insertion (e);
          else
            ei_next (&ei);
        }
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
          e->flags &= ~EDGE_EXECUTABLE;

          /* At the moment not all abnormal edges match the RTL
             representation.  It is safe to remove them here as
             find_many_sub_basic_blocks will rediscover them.
             In the future we should get this fixed properly.  */
          if ((e->flags & EDGE_ABNORMAL)
              && !(e->flags & EDGE_SIBCALL))
            remove_edge (e);
          else
            ei_next (&ei);
        }
    }

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  compact_blocks ();

  expand_stack_alignment ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();
  return 0;
}

struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",                             /* name */
  NULL,                                 /* gate */
  gimple_expand_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,                            /* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx,                  /* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_ssa | PROP_trees,                /* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,                /* todo_flags_start */
  TODO_dump_func
  | TODO_ggc_collect                    /* todo_flags_finish */
 }
};