[thirdparty/gcc.git] / gcc / cfgexpand.c
242229bb 1/* A pass for lowering trees to RTL.
d1e082c2 2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
242229bb
JH
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
9dcd6f09 8the Free Software Foundation; either version 3, or (at your option)
242229bb
JH
9any later version.
10
11GCC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
242229bb
JH
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tm.h"
242229bb 24#include "rtl.h"
862d0b35
DN
25#include "hard-reg-set.h"
26#include "tree.h"
242229bb
JH
27#include "tm_p.h"
28#include "basic-block.h"
29#include "function.h"
30#include "expr.h"
31#include "langhooks.h"
442b4905
AM
32#include "bitmap.h"
33#include "gimple.h"
5be5c238
AM
34#include "gimple-iterator.h"
35#include "gimple-walk.h"
442b4905
AM
36#include "gimple-ssa.h"
37#include "cgraph.h"
38#include "tree-cfg.h"
39#include "tree-phinodes.h"
40#include "ssa-iterators.h"
41#include "tree-ssanames.h"
42#include "tree-dfa.h"
7a300452 43#include "tree-ssa.h"
242229bb
JH
44#include "tree-pass.h"
45#include "except.h"
46#include "flags.h"
1f6d3a08 47#include "diagnostic.h"
cf835838 48#include "gimple-pretty-print.h"
1f6d3a08 49#include "toplev.h"
ef330312 50#include "debug.h"
7d69de61 51#include "params.h"
ff28a94d 52#include "tree-inline.h"
6946b3f7 53#include "value-prof.h"
e41b2a33 54#include "target.h"
8e9055ae 55#include "tree-ssa-live.h"
78bca40d 56#include "tree-outof-ssa.h"
7a8cba34 57#include "sbitmap.h"
7d776ee2 58#include "cfgloop.h"
be147e84 59#include "regs.h" /* For reg_renumber. */
2b21299c 60#include "insn-attr.h" /* For INSN_SCHEDULING. */
f3ddd692 61#include "asan.h"
4484a35a 62#include "tree-ssa-address.h"
862d0b35
DN
63#include "recog.h"
64#include "output.h"
726a989a 65
8a6ce562
JBG
66/* Some systems use __main in a way incompatible with its use in gcc, in these
67 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
68 give the same symbol without quotes for an alternative entry point. You
69 must define both, or neither. */
70#ifndef NAME__MAIN
71#define NAME__MAIN "__main"
72#endif
73
4e3825db
MM
74/* This variable holds information helping the rewriting of SSA trees
75 into RTL. */
76struct ssaexpand SA;
77
a5883ba0
MM
78/* This variable holds the currently expanded gimple statement for purposes
79 of communicating the profile info to the builtin expanders. */
80gimple currently_expanding_gimple_stmt;
81
ddb555ed
JJ
82static rtx expand_debug_expr (tree);
83
726a989a
RB
84/* Return an expression tree corresponding to the RHS of GIMPLE
85 statement STMT. */
86
87tree
88gimple_assign_rhs_to_tree (gimple stmt)
89{
90 tree t;
82d6e6fc 91 enum gimple_rhs_class grhs_class;
b8698a0f 92
82d6e6fc 93 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
726a989a 94
0354c0c7
BS
95 if (grhs_class == GIMPLE_TERNARY_RHS)
96 t = build3 (gimple_assign_rhs_code (stmt),
97 TREE_TYPE (gimple_assign_lhs (stmt)),
98 gimple_assign_rhs1 (stmt),
99 gimple_assign_rhs2 (stmt),
100 gimple_assign_rhs3 (stmt));
101 else if (grhs_class == GIMPLE_BINARY_RHS)
726a989a
RB
102 t = build2 (gimple_assign_rhs_code (stmt),
103 TREE_TYPE (gimple_assign_lhs (stmt)),
104 gimple_assign_rhs1 (stmt),
105 gimple_assign_rhs2 (stmt));
82d6e6fc 106 else if (grhs_class == GIMPLE_UNARY_RHS)
726a989a
RB
107 t = build1 (gimple_assign_rhs_code (stmt),
108 TREE_TYPE (gimple_assign_lhs (stmt)),
109 gimple_assign_rhs1 (stmt));
82d6e6fc 110 else if (grhs_class == GIMPLE_SINGLE_RHS)
b5b8b0ac
AO
111 {
112 t = gimple_assign_rhs1 (stmt);
113 /* Avoid modifying this tree in place below. */
d0ed412a
JJ
114 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
115 && gimple_location (stmt) != EXPR_LOCATION (t))
116 || (gimple_block (stmt)
117 && currently_expanding_to_rtl
5368224f 118 && EXPR_P (t)))
b5b8b0ac
AO
119 t = copy_node (t);
120 }
726a989a
RB
121 else
122 gcc_unreachable ();
123
f5045c96
AM
124 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
125 SET_EXPR_LOCATION (t, gimple_location (stmt));
126
726a989a
RB
127 return t;
128}
129
726a989a 130
1f6d3a08
RH
131#ifndef STACK_ALIGNMENT_NEEDED
132#define STACK_ALIGNMENT_NEEDED 1
133#endif
134
4e3825db
MM
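/* Strip an SSA_NAME down to the declaration it is based on, if any;
   plain decls are returned unchanged. */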
135#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
136
137/* Associate declaration T with storage space X. If T is not an
138 SSA name this is exactly SET_DECL_RTL; otherwise make the
139 partition of T associated with X. */
140static inline void
141set_rtl (tree t, rtx x)
142{
143 if (TREE_CODE (t) == SSA_NAME)
144 {
145 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
146 if (x && !MEM_P (x))
147 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
eb7adebc
MM
148 /* For the benefit of debug information at -O0 (where vartracking
149 doesn't run) record the place also in the base DECL if it's
150 a normal variable (not a parameter). */
151 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
152 {
153 tree var = SSA_NAME_VAR (t);
154 /* If we don't yet have something recorded, just record it now. */
155 if (!DECL_RTL_SET_P (var))
156 SET_DECL_RTL (var, x);
47598145 157 /* If we have it set already to "multiple places" don't
eb7adebc
MM
158 change this. */
159 else if (DECL_RTL (var) == pc_rtx)
160 ;
161 /* If we have something recorded and it's not the same place
162 as we want to record now, we have multiple partitions for the
163 same base variable, with different places. We can't just
164 randomly choose one, hence we have to say that we don't know.
165 This only happens with optimization, and there var-tracking
166 will figure out the right thing. */
167 else if (DECL_RTL (var) != x)
168 SET_DECL_RTL (var, pc_rtx);
169 }
4e3825db
MM
170 }
171 else
172 SET_DECL_RTL (t, x);
173}
1f6d3a08
RH
174
175/* This structure holds data relevant to one variable that will be
176 placed in a stack slot. */
177struct stack_var
178{
179 /* The variable. */
180 tree decl;
181
1f6d3a08
RH
182 /* Initially, the size of the variable. Later, the size of the partition,
183 if this variable becomes its partition's representative. */
184 HOST_WIDE_INT size;
185
186 /* The *byte* alignment required for this variable. Or, as with the
187 size, the alignment for this partition. */
188 unsigned int alignb;
189
190 /* The partition representative. */
191 size_t representative;
192
193 /* The next stack variable in the partition, or EOC. */
194 size_t next;
2bdbbe94
MM
195
196 /* The numbers of conflicting stack variables. */
197 bitmap conflicts;
1f6d3a08
RH
198};
199
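/* End-of-chain marker for the NEXT links in struct stack_var above. */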
200#define EOC ((size_t)-1)
201
202/* We have an array of such objects while deciding allocation. */
203static struct stack_var *stack_vars;
204static size_t stack_vars_alloc;
205static size_t stack_vars_num;
47598145 206static struct pointer_map_t *decl_to_stack_part;
1f6d3a08 207
3f9b14ff
SB
208/* Conflict bitmaps go on this obstack. This allows us to destroy
209 all of them in one big sweep. */
210static bitmap_obstack stack_var_bitmap_obstack;
211
fa10beec 212/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
1f6d3a08
RH
213 is non-decreasing. */
214static size_t *stack_vars_sorted;
215
1f6d3a08
RH
216/* The phase of the stack frame. This is the known misalignment of
217 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
218 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
219static int frame_phase;
220
7d69de61
RH
221/* Used during expand_used_vars to remember if we saw any decls for
222 which we'd like to enable stack smashing protection. */
223static bool has_protected_decls;
224
225/* Used during expand_used_vars. Remember if we saw a character buffer
226 smaller than our cutoff threshold. Used for -Wstack-protector. */
227static bool has_short_buffer;
1f6d3a08 228
6f197850 229/* Compute the byte alignment to use for DECL. Ignore alignment
765c3e8f
L
230 we can't honor given the expected alignment of the stack boundary. */
231
232static unsigned int
6f197850 233align_local_variable (tree decl)
765c3e8f 234{
3a42502d 235 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
6f197850 236 DECL_ALIGN (decl) = align;
1f6d3a08
RH
237 return align / BITS_PER_UNIT;
238}
239
240/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
241 Return the frame offset. */
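/* For illustration, assuming FRAME_GROWS_DOWNWARD and frame_phase == 0:
   with frame_offset == -4, SIZE == 8 and ALIGN == 8, new_frame_offset is
   first moved to -12 and then rounded down to -16, so the new variable is
   placed at offset -16 and the frame grows by 12 bytes. */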
242
243static HOST_WIDE_INT
3a42502d 244alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
1f6d3a08
RH
245{
246 HOST_WIDE_INT offset, new_frame_offset;
247
248 new_frame_offset = frame_offset;
249 if (FRAME_GROWS_DOWNWARD)
250 {
251 new_frame_offset -= size + frame_phase;
252 new_frame_offset &= -align;
253 new_frame_offset += frame_phase;
254 offset = new_frame_offset;
255 }
256 else
257 {
258 new_frame_offset -= frame_phase;
259 new_frame_offset += align - 1;
260 new_frame_offset &= -align;
261 new_frame_offset += frame_phase;
262 offset = new_frame_offset;
263 new_frame_offset += size;
264 }
265 frame_offset = new_frame_offset;
266
9fb798d7
EB
267 if (frame_offset_overflow (frame_offset, cfun->decl))
268 frame_offset = offset = 0;
269
1f6d3a08
RH
270 return offset;
271}
272
273/* Accumulate DECL into STACK_VARS. */
274
275static void
276add_stack_var (tree decl)
277{
533f611a
RH
278 struct stack_var *v;
279
1f6d3a08
RH
280 if (stack_vars_num >= stack_vars_alloc)
281 {
282 if (stack_vars_alloc)
283 stack_vars_alloc = stack_vars_alloc * 3 / 2;
284 else
285 stack_vars_alloc = 32;
286 stack_vars
287 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
288 }
47598145
MM
289 if (!decl_to_stack_part)
290 decl_to_stack_part = pointer_map_create ();
291
533f611a 292 v = &stack_vars[stack_vars_num];
47598145 293 * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
533f611a
RH
294
295 v->decl = decl;
533f611a
RH
296 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
297 /* Ensure that all variables have size, so that &a != &b for any two
298 variables that are simultaneously live. */
299 if (v->size == 0)
300 v->size = 1;
6f197850 301 v->alignb = align_local_variable (SSAVAR (decl));
13868f40
EB
302 /* An alignment of zero can mightily confuse us later. */
303 gcc_assert (v->alignb != 0);
1f6d3a08
RH
304
305 /* All variables are initially in their own partition. */
533f611a
RH
306 v->representative = stack_vars_num;
307 v->next = EOC;
1f6d3a08 308
2bdbbe94 309 /* All variables initially conflict with no other. */
533f611a 310 v->conflicts = NULL;
2bdbbe94 311
1f6d3a08 312 /* Ensure that this decl doesn't get put onto the list twice. */
4e3825db 313 set_rtl (decl, pc_rtx);
1f6d3a08
RH
314
315 stack_vars_num++;
316}
317
1f6d3a08
RH
318/* Make the decls associated with luid's X and Y conflict. */
319
320static void
321add_stack_var_conflict (size_t x, size_t y)
322{
2bdbbe94
MM
323 struct stack_var *a = &stack_vars[x];
324 struct stack_var *b = &stack_vars[y];
325 if (!a->conflicts)
3f9b14ff 326 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
2bdbbe94 327 if (!b->conflicts)
3f9b14ff 328 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
2bdbbe94
MM
329 bitmap_set_bit (a->conflicts, y);
330 bitmap_set_bit (b->conflicts, x);
1f6d3a08
RH
331}
332
333/* Check whether the decls associated with luid's X and Y conflict. */
334
335static bool
336stack_var_conflict_p (size_t x, size_t y)
337{
2bdbbe94
MM
338 struct stack_var *a = &stack_vars[x];
339 struct stack_var *b = &stack_vars[y];
47598145
MM
340 if (x == y)
341 return false;
342 /* Partitions containing an SSA name result from gimple registers
343 with things like unsupported modes. They are top-level and
344 hence conflict with everything else. */
345 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
346 return true;
347
2bdbbe94
MM
348 if (!a->conflicts || !b->conflicts)
349 return false;
350 return bitmap_bit_p (a->conflicts, y);
1f6d3a08 351}
b8698a0f 352
47598145
MM
353/* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
354 enter its partition number into bitmap DATA. */
355
356static bool
357visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
358{
359 bitmap active = (bitmap)data;
360 op = get_base_address (op);
361 if (op
362 && DECL_P (op)
363 && DECL_RTL_IF_SET (op) == pc_rtx)
364 {
365 size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
366 if (v)
367 bitmap_set_bit (active, *v);
368 }
369 return false;
370}
371
372/* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
373 record conflicts between it and all currently active other partitions
374 from bitmap DATA. */
375
376static bool
377visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
378{
379 bitmap active = (bitmap)data;
380 op = get_base_address (op);
381 if (op
382 && DECL_P (op)
383 && DECL_RTL_IF_SET (op) == pc_rtx)
384 {
385 size_t *v =
386 (size_t *) pointer_map_contains (decl_to_stack_part, op);
387 if (v && bitmap_set_bit (active, *v))
388 {
389 size_t num = *v;
390 bitmap_iterator bi;
391 unsigned i;
392 gcc_assert (num < stack_vars_num);
393 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
394 add_stack_var_conflict (num, i);
395 }
396 }
397 return false;
398}
399
400/* Helper routine for add_scope_conflicts, calculating the active partitions
401 at the end of BB, leaving the result in WORK. We're called to generate
81bfd197
MM
402 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
403 liveness. */
47598145
MM
404
405static void
81bfd197 406add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
47598145
MM
407{
408 edge e;
409 edge_iterator ei;
410 gimple_stmt_iterator gsi;
411 bool (*visit)(gimple, tree, void *);
412
413 bitmap_clear (work);
414 FOR_EACH_EDGE (e, ei, bb->preds)
415 bitmap_ior_into (work, (bitmap)e->src->aux);
416
ea85edfe 417 visit = visit_op;
47598145
MM
418
419 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
420 {
421 gimple stmt = gsi_stmt (gsi);
ea85edfe 422 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
47598145 423 }
ea85edfe 424 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
47598145
MM
425 {
426 gimple stmt = gsi_stmt (gsi);
427
428 if (gimple_clobber_p (stmt))
429 {
430 tree lhs = gimple_assign_lhs (stmt);
431 size_t *v;
432 /* Nested function lowering might introduce LHSs
433 that are COMPONENT_REFs. */
434 if (TREE_CODE (lhs) != VAR_DECL)
435 continue;
436 if (DECL_RTL_IF_SET (lhs) == pc_rtx
437 && (v = (size_t *)
438 pointer_map_contains (decl_to_stack_part, lhs)))
439 bitmap_clear_bit (work, *v);
440 }
441 else if (!is_gimple_debug (stmt))
ea85edfe 442 {
81bfd197 443 if (for_conflict
ea85edfe
JJ
444 && visit == visit_op)
445 {
446 /* If this is the first real instruction in this BB we need
88d599dc
MM
447 to add conflicts for everything live at this point now.
448 Unlike classical liveness for named objects we can't
ea85edfe
JJ
449 rely on seeing a def/use of the names we're interested in.
450 There might merely be indirect loads/stores. We'd not add any
81bfd197 451 conflicts for such partitions. */
ea85edfe
JJ
452 bitmap_iterator bi;
453 unsigned i;
81bfd197 454 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
ea85edfe 455 {
9b44f5d9
MM
456 struct stack_var *a = &stack_vars[i];
457 if (!a->conflicts)
3f9b14ff 458 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
9b44f5d9 459 bitmap_ior_into (a->conflicts, work);
ea85edfe
JJ
460 }
461 visit = visit_conflict;
462 }
463 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
464 }
47598145
MM
465 }
466}
467
468/* Generate stack partition conflicts between all partitions that are
469 simultaneously live. */
470
471static void
472add_scope_conflicts (void)
473{
474 basic_block bb;
475 bool changed;
476 bitmap work = BITMAP_ALLOC (NULL);
9b44f5d9
MM
477 int *rpo;
478 int n_bbs;
47598145 479
88d599dc 480 /* We approximate the live range of a stack variable by taking the first
47598145
MM
481 mention of its name as starting point(s), and by the end-of-scope
482 death clobber added by gimplify as ending point(s) of the range.
483 This overapproximates in the case where we, for instance, moved an address-taken
484 operation upward without also moving a dereference to it upwards.
485 But it's conservatively correct, as a variable can never hold values
486 before its name is mentioned at least once.
487
88d599dc 488 We then do a mostly classical bitmap liveness algorithm. */
47598145
MM
489
490 FOR_ALL_BB (bb)
3f9b14ff 491 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
47598145 492
9b44f5d9
MM
493 rpo = XNEWVEC (int, last_basic_block);
494 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
495
47598145
MM
496 changed = true;
497 while (changed)
498 {
9b44f5d9 499 int i;
47598145 500 changed = false;
9b44f5d9 501 for (i = 0; i < n_bbs; i++)
47598145 502 {
9b44f5d9
MM
503 bitmap active;
504 bb = BASIC_BLOCK (rpo[i]);
505 active = (bitmap)bb->aux;
81bfd197 506 add_scope_conflicts_1 (bb, work, false);
47598145
MM
507 if (bitmap_ior_into (active, work))
508 changed = true;
509 }
510 }
511
512 FOR_EACH_BB (bb)
81bfd197 513 add_scope_conflicts_1 (bb, work, true);
47598145 514
9b44f5d9 515 free (rpo);
47598145
MM
516 BITMAP_FREE (work);
517 FOR_ALL_BB (bb)
518 BITMAP_FREE (bb->aux);
519}
520
1f6d3a08 521/* A subroutine of partition_stack_vars. A comparison function for qsort,
3a42502d 522 sorting an array of indices by the properties of the object. */
1f6d3a08
RH
523
524static int
3a42502d 525stack_var_cmp (const void *a, const void *b)
1f6d3a08 526{
3a42502d
RH
527 size_t ia = *(const size_t *)a;
528 size_t ib = *(const size_t *)b;
529 unsigned int aligna = stack_vars[ia].alignb;
530 unsigned int alignb = stack_vars[ib].alignb;
531 HOST_WIDE_INT sizea = stack_vars[ia].size;
532 HOST_WIDE_INT sizeb = stack_vars[ib].size;
533 tree decla = stack_vars[ia].decl;
534 tree declb = stack_vars[ib].decl;
535 bool largea, largeb;
4e3825db 536 unsigned int uida, uidb;
1f6d3a08 537
3a42502d
RH
538 /* Primary compare on "large" alignment. Large comes first. */
539 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
540 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
541 if (largea != largeb)
542 return (int)largeb - (int)largea;
543
544 /* Secondary compare on size, decreasing. */
3a42502d 545 if (sizea > sizeb)
6ddfda8a
ER
546 return -1;
547 if (sizea < sizeb)
1f6d3a08 548 return 1;
3a42502d
RH
549
550 /* Tertiary compare on true alignment, decreasing. */
551 if (aligna < alignb)
552 return -1;
553 if (aligna > alignb)
554 return 1;
555
556 /* Final compare on ID for sort stability, increasing.
557 Two SSA names are compared by their version, SSA names come before
558 non-SSA names, and two normal decls are compared by their DECL_UID. */
4e3825db
MM
559 if (TREE_CODE (decla) == SSA_NAME)
560 {
561 if (TREE_CODE (declb) == SSA_NAME)
562 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
563 else
564 return -1;
565 }
566 else if (TREE_CODE (declb) == SSA_NAME)
567 return 1;
568 else
569 uida = DECL_UID (decla), uidb = DECL_UID (declb);
79f802f5 570 if (uida < uidb)
79f802f5 571 return 1;
3a42502d
RH
572 if (uida > uidb)
573 return -1;
1f6d3a08
RH
574 return 0;
575}
576
55b34b5f
RG
577
578/* If the points-to solution *PT points to variables that are in a partition
579 together with other variables add all partition members to the pointed-to
580 variables bitmap. */
581
582static void
583add_partitioned_vars_to_ptset (struct pt_solution *pt,
584 struct pointer_map_t *decls_to_partitions,
585 struct pointer_set_t *visited, bitmap temp)
586{
587 bitmap_iterator bi;
588 unsigned i;
589 bitmap *part;
590
591 if (pt->anything
592 || pt->vars == NULL
593 /* The pointed-to vars bitmap is shared, it is enough to
594 visit it once. */
c3284718 595 || pointer_set_insert (visited, pt->vars))
55b34b5f
RG
596 return;
597
598 bitmap_clear (temp);
599
600 /* By using a temporary bitmap to store all members of the partitions
601 we have to add we make sure to visit each of the partitions only
602 once. */
603 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
604 if ((!temp
605 || !bitmap_bit_p (temp, i))
606 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
607 (void *)(size_t) i)))
608 bitmap_ior_into (temp, *part);
609 if (!bitmap_empty_p (temp))
610 bitmap_ior_into (pt->vars, temp);
611}
612
613/* Update points-to sets based on partition info, so we can use them on RTL.
614 The bitmaps representing stack partitions will be saved until expand,
615 where partitioned decls used as bases in memory expressions will be
616 rewritten. */
617
618static void
619update_alias_info_with_stack_vars (void)
620{
621 struct pointer_map_t *decls_to_partitions = NULL;
622 size_t i, j;
623 tree var = NULL_TREE;
624
625 for (i = 0; i < stack_vars_num; i++)
626 {
627 bitmap part = NULL;
628 tree name;
629 struct ptr_info_def *pi;
630
631 /* Not interested in partitions with a single variable. */
632 if (stack_vars[i].representative != i
633 || stack_vars[i].next == EOC)
634 continue;
635
636 if (!decls_to_partitions)
637 {
638 decls_to_partitions = pointer_map_create ();
639 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
640 }
641
642 /* Create an SSA_NAME that points to the partition for use
643 as base during alias-oracle queries on RTL for bases that
644 have been partitioned. */
645 if (var == NULL_TREE)
646 var = create_tmp_var (ptr_type_node, NULL);
647 name = make_ssa_name (var, NULL);
648
649 /* Create bitmaps representing partitions. They will be used for
650 points-to sets later, so use GGC alloc. */
651 part = BITMAP_GGC_ALLOC ();
652 for (j = i; j != EOC; j = stack_vars[j].next)
653 {
654 tree decl = stack_vars[j].decl;
25a6a873 655 unsigned int uid = DECL_PT_UID (decl);
55b34b5f
RG
656 bitmap_set_bit (part, uid);
657 *((bitmap *) pointer_map_insert (decls_to_partitions,
658 (void *)(size_t) uid)) = part;
659 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
660 decl)) = name;
88d8330d
EB
661 if (TREE_ADDRESSABLE (decl))
662 TREE_ADDRESSABLE (name) = 1;
55b34b5f
RG
663 }
664
665 /* Make the SSA name point to all partition members. */
666 pi = get_ptr_info (name);
d3553615 667 pt_solution_set (&pi->pt, part, false);
55b34b5f
RG
668 }
669
670 /* Make all points-to sets that contain one member of a partition
671 contain all members of the partition. */
672 if (decls_to_partitions)
673 {
674 unsigned i;
675 struct pointer_set_t *visited = pointer_set_create ();
3f9b14ff 676 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
55b34b5f
RG
677
678 for (i = 1; i < num_ssa_names; i++)
679 {
680 tree name = ssa_name (i);
681 struct ptr_info_def *pi;
682
683 if (name
684 && POINTER_TYPE_P (TREE_TYPE (name))
685 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
686 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
687 visited, temp);
688 }
689
690 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
691 decls_to_partitions, visited, temp);
55b34b5f
RG
692
693 pointer_set_destroy (visited);
694 pointer_map_destroy (decls_to_partitions);
695 BITMAP_FREE (temp);
696 }
697}
698
1f6d3a08
RH
699/* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
700 partitioning algorithm. Partitions A and B are known to be non-conflicting.
6ddfda8a 701 Merge them into a single partition A. */
1f6d3a08
RH
702
703static void
6ddfda8a 704union_stack_vars (size_t a, size_t b)
1f6d3a08 705{
2bdbbe94
MM
706 struct stack_var *vb = &stack_vars[b];
707 bitmap_iterator bi;
708 unsigned u;
1f6d3a08 709
6ddfda8a
ER
710 gcc_assert (stack_vars[b].next == EOC);
711 /* Add B to A's partition. */
712 stack_vars[b].next = stack_vars[a].next;
713 stack_vars[b].representative = a;
1f6d3a08
RH
714 stack_vars[a].next = b;
715
716 /* Update the required alignment of partition A to account for B. */
717 if (stack_vars[a].alignb < stack_vars[b].alignb)
718 stack_vars[a].alignb = stack_vars[b].alignb;
719
720 /* Update the interference graph and merge the conflicts. */
2bdbbe94
MM
721 if (vb->conflicts)
722 {
723 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
724 add_stack_var_conflict (a, stack_vars[u].representative);
725 BITMAP_FREE (vb->conflicts);
726 }
1f6d3a08
RH
727}
728
729/* A subroutine of expand_used_vars. Binpack the variables into
730 partitions constrained by the interference graph. The overall
731 algorithm used is as follows:
732
6ddfda8a 733 Sort the objects by size in descending order.
1f6d3a08
RH
734 For each object A {
735 S = size(A)
736 O = 0
737 loop {
738 Look for the largest non-conflicting object B with size <= S.
739 UNION (A, B)
1f6d3a08
RH
740 }
741 }
742*/
743
744static void
745partition_stack_vars (void)
746{
747 size_t si, sj, n = stack_vars_num;
748
749 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
750 for (si = 0; si < n; ++si)
751 stack_vars_sorted[si] = si;
752
753 if (n == 1)
754 return;
755
3a42502d 756 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
1f6d3a08 757
1f6d3a08
RH
758 for (si = 0; si < n; ++si)
759 {
760 size_t i = stack_vars_sorted[si];
3a42502d 761 unsigned int ialign = stack_vars[i].alignb;
f3ddd692 762 HOST_WIDE_INT isize = stack_vars[i].size;
1f6d3a08 763
6ddfda8a
ER
764 /* Ignore objects that aren't partition representatives. If we
765 see a var that is not a partition representative, it must
766 have been merged earlier. */
767 if (stack_vars[i].representative != i)
768 continue;
769
770 for (sj = si + 1; sj < n; ++sj)
1f6d3a08
RH
771 {
772 size_t j = stack_vars_sorted[sj];
1f6d3a08 773 unsigned int jalign = stack_vars[j].alignb;
f3ddd692 774 HOST_WIDE_INT jsize = stack_vars[j].size;
1f6d3a08
RH
775
776 /* Ignore objects that aren't partition representatives. */
777 if (stack_vars[j].representative != j)
778 continue;
779
3a42502d
RH
780 /* Do not mix objects of "small" (supported) alignment
781 and "large" (unsupported) alignment. */
782 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
783 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
f3ddd692
JJ
784 break;
785
786 /* For Address Sanitizer do not mix objects with different
787 sizes, as the shorter vars wouldn't be adequately protected.
788 Don't do that for "large" (unsupported) alignment objects,
789 those aren't protected anyway. */
de5a5fa1 790 if ((flag_sanitize & SANITIZE_ADDRESS) && isize != jsize
f3ddd692
JJ
791 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
792 break;
793
794 /* Ignore conflicting objects. */
795 if (stack_var_conflict_p (i, j))
3a42502d
RH
796 continue;
797
1f6d3a08 798 /* UNION the objects, placing J at OFFSET. */
6ddfda8a 799 union_stack_vars (i, j);
1f6d3a08
RH
800 }
801 }
55b34b5f 802
9b999dc5 803 update_alias_info_with_stack_vars ();
1f6d3a08
RH
804}
805
806/* A debugging aid for expand_used_vars. Dump the generated partitions. */
807
808static void
809dump_stack_var_partition (void)
810{
811 size_t si, i, j, n = stack_vars_num;
812
813 for (si = 0; si < n; ++si)
814 {
815 i = stack_vars_sorted[si];
816
817 /* Skip variables that aren't partition representatives, for now. */
818 if (stack_vars[i].representative != i)
819 continue;
820
821 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
822 " align %u\n", (unsigned long) i, stack_vars[i].size,
823 stack_vars[i].alignb);
824
825 for (j = i; j != EOC; j = stack_vars[j].next)
826 {
827 fputc ('\t', dump_file);
828 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
1f6d3a08 829 }
6ddfda8a 830 fputc ('\n', dump_file);
1f6d3a08
RH
831 }
832}
833
3a42502d 834/* Assign rtl to DECL at BASE + OFFSET. */
1f6d3a08
RH
835
836static void
3a42502d
RH
837expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
838 HOST_WIDE_INT offset)
1f6d3a08 839{
3a42502d 840 unsigned align;
1f6d3a08 841 rtx x;
c22cacf3 842
1f6d3a08
RH
843 /* If this fails, we've overflowed the stack frame. Error nicely? */
844 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
845
0a81f074 846 x = plus_constant (Pmode, base, offset);
4e3825db 847 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
1f6d3a08 848
4e3825db
MM
849 if (TREE_CODE (decl) != SSA_NAME)
850 {
851 /* Set the alignment we actually gave this decl if it isn't an SSA name.
852 If it is, we generate stack slots only accidentally, so it isn't as
853 important; we'll simply use the alignment that is already set. */
3a42502d
RH
854 if (base == virtual_stack_vars_rtx)
855 offset -= frame_phase;
4e3825db
MM
856 align = offset & -offset;
857 align *= BITS_PER_UNIT;
3a42502d
RH
858 if (align == 0 || align > base_align)
859 align = base_align;
860
861 /* One would think that we could assert that we're not decreasing
862 alignment here, but (at least) the i386 port does exactly this
863 via the MINIMUM_ALIGNMENT hook. */
4e3825db
MM
864
865 DECL_ALIGN (decl) = align;
866 DECL_USER_ALIGN (decl) = 0;
867 }
868
869 set_mem_attributes (x, SSAVAR (decl), true);
870 set_rtl (decl, x);
1f6d3a08
RH
871}
872
f3ddd692
JJ
873struct stack_vars_data
874{
875 /* Vector of offset pairs, always end of some padding followed
876 by start of the padding that needs Address Sanitizer protection.
877 The vector is in reversed order; highest offset pairs come first. */
9771b263 878 vec<HOST_WIDE_INT> asan_vec;
f3ddd692
JJ
879
880 /* Vector of partition representative decls in between the paddings. */
9771b263 881 vec<tree> asan_decl_vec;
f3ddd692
JJ
882};
883
1f6d3a08
RH
884/* A subroutine of expand_used_vars. Give each partition representative
885 a unique location within the stack frame. Update each partition member
886 with that location. */
887
888static void
f3ddd692 889expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
1f6d3a08
RH
890{
891 size_t si, i, j, n = stack_vars_num;
3a42502d
RH
892 HOST_WIDE_INT large_size = 0, large_alloc = 0;
893 rtx large_base = NULL;
894 unsigned large_align = 0;
895 tree decl;
896
897 /* Determine if there are any variables requiring "large" alignment.
898 Since these are dynamically allocated, we only process these if
899 no predicate is involved. */
900 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
901 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
902 {
903 /* Find the total size of these variables. */
904 for (si = 0; si < n; ++si)
905 {
906 unsigned alignb;
907
908 i = stack_vars_sorted[si];
909 alignb = stack_vars[i].alignb;
910
911 /* Stop when we get to the first decl with "small" alignment. */
912 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
913 break;
914
915 /* Skip variables that aren't partition representatives. */
916 if (stack_vars[i].representative != i)
917 continue;
918
919 /* Skip variables that have already had rtl assigned. See also
920 add_stack_var where we perpetrate this pc_rtx hack. */
921 decl = stack_vars[i].decl;
922 if ((TREE_CODE (decl) == SSA_NAME
923 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
924 : DECL_RTL (decl)) != pc_rtx)
925 continue;
926
927 large_size += alignb - 1;
928 large_size &= -(HOST_WIDE_INT)alignb;
929 large_size += stack_vars[i].size;
930 }
931
932 /* If there were any, allocate space. */
933 if (large_size > 0)
934 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
935 large_align, true);
936 }
1f6d3a08
RH
937
938 for (si = 0; si < n; ++si)
939 {
3a42502d
RH
940 rtx base;
941 unsigned base_align, alignb;
1f6d3a08
RH
942 HOST_WIDE_INT offset;
943
944 i = stack_vars_sorted[si];
945
946 /* Skip variables that aren't partition representatives, for now. */
947 if (stack_vars[i].representative != i)
948 continue;
949
7d69de61
RH
950 /* Skip variables that have already had rtl assigned. See also
951 add_stack_var where we perpetrate this pc_rtx hack. */
3a42502d
RH
952 decl = stack_vars[i].decl;
953 if ((TREE_CODE (decl) == SSA_NAME
954 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
955 : DECL_RTL (decl)) != pc_rtx)
7d69de61
RH
956 continue;
957
c22cacf3 958 /* Check the predicate to see whether this variable should be
7d69de61 959 allocated in this pass. */
f3ddd692 960 if (pred && !pred (i))
7d69de61
RH
961 continue;
962
3a42502d
RH
963 alignb = stack_vars[i].alignb;
964 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
965 {
de5a5fa1 966 if ((flag_sanitize & SANITIZE_ADDRESS) && pred)
f3ddd692
JJ
967 {
968 HOST_WIDE_INT prev_offset = frame_offset;
969 tree repr_decl = NULL_TREE;
970
971 offset
972 = alloc_stack_frame_space (stack_vars[i].size
973 + ASAN_RED_ZONE_SIZE,
974 MAX (alignb, ASAN_RED_ZONE_SIZE));
9771b263
DN
975 data->asan_vec.safe_push (prev_offset);
976 data->asan_vec.safe_push (offset + stack_vars[i].size);
f3ddd692
JJ
977 /* Find best representative of the partition.
978 Prefer those with DECL_NAME, even better
979 satisfying asan_protect_stack_decl predicate. */
980 for (j = i; j != EOC; j = stack_vars[j].next)
981 if (asan_protect_stack_decl (stack_vars[j].decl)
982 && DECL_NAME (stack_vars[j].decl))
983 {
984 repr_decl = stack_vars[j].decl;
985 break;
986 }
987 else if (repr_decl == NULL_TREE
988 && DECL_P (stack_vars[j].decl)
989 && DECL_NAME (stack_vars[j].decl))
990 repr_decl = stack_vars[j].decl;
991 if (repr_decl == NULL_TREE)
992 repr_decl = stack_vars[i].decl;
9771b263 993 data->asan_decl_vec.safe_push (repr_decl);
f3ddd692
JJ
994 }
995 else
996 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
3a42502d
RH
997 base = virtual_stack_vars_rtx;
998 base_align = crtl->max_used_stack_slot_alignment;
999 }
1000 else
1001 {
1002 /* Large alignment is only processed in the last pass. */
1003 if (pred)
1004 continue;
533f611a 1005 gcc_assert (large_base != NULL);
3a42502d
RH
1006
1007 large_alloc += alignb - 1;
1008 large_alloc &= -(HOST_WIDE_INT)alignb;
1009 offset = large_alloc;
1010 large_alloc += stack_vars[i].size;
1011
1012 base = large_base;
1013 base_align = large_align;
1014 }
1f6d3a08
RH
1015
1016 /* Create rtl for each variable based on their location within the
1017 partition. */
1018 for (j = i; j != EOC; j = stack_vars[j].next)
f8da8190 1019 {
f8da8190 1020 expand_one_stack_var_at (stack_vars[j].decl,
3a42502d 1021 base, base_align,
6ddfda8a 1022 offset);
f8da8190 1023 }
1f6d3a08 1024 }
3a42502d
RH
1025
1026 gcc_assert (large_alloc == large_size);
1f6d3a08
RH
1027}
1028
ff28a94d
JH
1029/* Take into account all sizes of partitions and reset DECL_RTLs. */
1030static HOST_WIDE_INT
1031account_stack_vars (void)
1032{
1033 size_t si, j, i, n = stack_vars_num;
1034 HOST_WIDE_INT size = 0;
1035
1036 for (si = 0; si < n; ++si)
1037 {
1038 i = stack_vars_sorted[si];
1039
1040 /* Skip variables that aren't partition representatives, for now. */
1041 if (stack_vars[i].representative != i)
1042 continue;
1043
1044 size += stack_vars[i].size;
1045 for (j = i; j != EOC; j = stack_vars[j].next)
4e3825db 1046 set_rtl (stack_vars[j].decl, NULL);
ff28a94d
JH
1047 }
1048 return size;
1049}
1050
1f6d3a08
RH
1051/* A subroutine of expand_one_var. Called to immediately assign rtl
1052 to a variable to be allocated in the stack frame. */
1053
1054static void
1055expand_one_stack_var (tree var)
1056{
3a42502d
RH
1057 HOST_WIDE_INT size, offset;
1058 unsigned byte_align;
1f6d3a08 1059
4e3825db 1060 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
6f197850 1061 byte_align = align_local_variable (SSAVAR (var));
3a42502d
RH
1062
1063 /* We handle highly aligned variables in expand_stack_vars. */
1064 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1f6d3a08 1065
3a42502d
RH
1066 offset = alloc_stack_frame_space (size, byte_align);
1067
1068 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1069 crtl->max_used_stack_slot_alignment, offset);
1f6d3a08
RH
1070}
1071
1f6d3a08
RH
1072/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1073 that will reside in a hard register. */
1074
1075static void
1076expand_one_hard_reg_var (tree var)
1077{
1078 rest_of_decl_compilation (var, 0, 0);
1079}
1080
1081/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1082 that will reside in a pseudo register. */
1083
1084static void
1085expand_one_register_var (tree var)
1086{
4e3825db
MM
1087 tree decl = SSAVAR (var);
1088 tree type = TREE_TYPE (decl);
cde0f3fd 1089 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1f6d3a08
RH
1090 rtx x = gen_reg_rtx (reg_mode);
1091
4e3825db 1092 set_rtl (var, x);
1f6d3a08
RH
1093
1094 /* Note if the object is a user variable. */
4e3825db
MM
1095 if (!DECL_ARTIFICIAL (decl))
1096 mark_user_reg (x);
1f6d3a08 1097
61021c2c 1098 if (POINTER_TYPE_P (type))
d466b407 1099 mark_reg_pointer (x, get_pointer_alignment (var));
1f6d3a08
RH
1100}
1101
1102/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
128a79fb 1103 has some associated error, e.g. its type is error-mark. We just need
1f6d3a08
RH
1104 to pick something that won't crash the rest of the compiler. */
1105
1106static void
1107expand_one_error_var (tree var)
1108{
1109 enum machine_mode mode = DECL_MODE (var);
1110 rtx x;
1111
1112 if (mode == BLKmode)
1113 x = gen_rtx_MEM (BLKmode, const0_rtx);
1114 else if (mode == VOIDmode)
1115 x = const0_rtx;
1116 else
1117 x = gen_reg_rtx (mode);
1118
1119 SET_DECL_RTL (var, x);
1120}
1121
c22cacf3 1122/* A subroutine of expand_one_var. VAR is a variable that will be
1f6d3a08
RH
1123 allocated to the local stack frame. Return true if we wish to
1124 add VAR to STACK_VARS so that it will be coalesced with other
1125 variables. Return false to allocate VAR immediately.
1126
1127 This function is used to reduce the number of variables considered
1128 for coalescing, which reduces the size of the quadratic problem. */
1129
1130static bool
1131defer_stack_allocation (tree var, bool toplevel)
1132{
ee2e8462
EB
1133 /* Whether the variable is small enough for immediate allocation not to be
1134 a problem with regard to the frame size. */
1135 bool smallish
1136 = (tree_low_cst (DECL_SIZE_UNIT (var), 1)
1137 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));
1138
7d69de61 1139 /* If stack protection is enabled, *all* stack variables must be deferred,
f3ddd692
JJ
1140 so that we can re-order the strings to the top of the frame.
1141 Similarly for Address Sanitizer. */
de5a5fa1 1142 if (flag_stack_protect || (flag_sanitize & SANITIZE_ADDRESS))
7d69de61
RH
1143 return true;
1144
3a42502d
RH
1145 /* We handle "large" alignment via dynamic allocation. We want to handle
1146 this extra complication in only one place, so defer them. */
1147 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1148 return true;
1149
ee2e8462
EB
1150 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1151 might be detached from their block and appear at toplevel when we reach
1152 here. We want to coalesce them with variables from other blocks when
1153 the immediate contribution to the frame size would be noticeable. */
1154 if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
1155 return true;
1156
1157 /* Variables declared in the outermost scope automatically conflict
1158 with every other variable. The only reason to want to defer them
1f6d3a08
RH
1159 at all is that, after sorting, we can more efficiently pack
1160 small variables in the stack frame. Continue to defer at -O2. */
1161 if (toplevel && optimize < 2)
1162 return false;
1163
1164 /* Without optimization, *most* variables are allocated from the
1165 stack, which makes the quadratic problem large exactly when we
c22cacf3 1166 want compilation to proceed as quickly as possible. On the
1f6d3a08
RH
1167 other hand, we don't want the function's stack frame size to
1168 get completely out of hand. So we avoid adding scalars and
1169 "small" aggregates to the list at all. */
ee2e8462 1170 if (optimize == 0 && smallish)
1f6d3a08
RH
1171 return false;
1172
1173 return true;
1174}
1175
1176/* A subroutine of expand_used_vars. Expand one variable according to
2a7e31df 1177 its flavor. Variables to be placed on the stack are not actually
b8698a0f 1178 expanded yet, merely recorded.
ff28a94d
JH
1179 When REALLY_EXPAND is false, only add stack values to be allocated.
1180 Return stack usage this variable is supposed to take.
1181*/
1f6d3a08 1182
ff28a94d
JH
1183static HOST_WIDE_INT
1184expand_one_var (tree var, bool toplevel, bool really_expand)
1f6d3a08 1185{
3a42502d 1186 unsigned int align = BITS_PER_UNIT;
4e3825db 1187 tree origvar = var;
3a42502d 1188
4e3825db
MM
1189 var = SSAVAR (var);
1190
3a42502d 1191 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
2e3f842f 1192 {
2e3f842f
L
1193 /* Because we don't know if VAR will be in a register or on the stack,
1194 we conservatively assume it will be on the stack even if VAR is
1195 eventually put into a register after the RA pass. For non-automatic
1196 variables, which won't be on stack, we collect alignment of
1197 type and ignore user specified alignment. */
1198 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
ae58e548
JJ
1199 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1200 TYPE_MODE (TREE_TYPE (var)),
1201 TYPE_ALIGN (TREE_TYPE (var)));
f3184b4c
JJ
1202 else if (DECL_HAS_VALUE_EXPR_P (var)
1203 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1204 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1205 or variables which were assigned a stack slot already by
1206 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1207 changed according to the offset chosen for it. */
1208 align = crtl->stack_alignment_estimated;
2e3f842f 1209 else
ae58e548 1210 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
2e3f842f 1211
3a42502d
RH
1212 /* If the variable alignment is very large we'll dynamically allocate
1213 it, which means that the in-frame portion is just a pointer. */
1214 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1215 align = POINTER_SIZE;
1216 }
1217
1218 if (SUPPORTS_STACK_ALIGNMENT
1219 && crtl->stack_alignment_estimated < align)
1220 {
1221 /* stack_alignment_estimated shouldn't change after stack
1222 realign decision is made. */
c3284718 1223 gcc_assert (!crtl->stack_realign_processed);
3a42502d 1224 crtl->stack_alignment_estimated = align;
2e3f842f
L
1225 }
1226
3a42502d
RH
1227 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1228 So here we only make sure stack_alignment_needed >= align. */
1229 if (crtl->stack_alignment_needed < align)
1230 crtl->stack_alignment_needed = align;
1231 if (crtl->max_used_stack_slot_alignment < align)
1232 crtl->max_used_stack_slot_alignment = align;
1233
4e3825db
MM
1234 if (TREE_CODE (origvar) == SSA_NAME)
1235 {
1236 gcc_assert (TREE_CODE (var) != VAR_DECL
1237 || (!DECL_EXTERNAL (var)
1238 && !DECL_HAS_VALUE_EXPR_P (var)
1239 && !TREE_STATIC (var)
4e3825db
MM
1240 && TREE_TYPE (var) != error_mark_node
1241 && !DECL_HARD_REGISTER (var)
1242 && really_expand));
1243 }
1244 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
4846b435 1245 ;
1f6d3a08
RH
1246 else if (DECL_EXTERNAL (var))
1247 ;
833b3afe 1248 else if (DECL_HAS_VALUE_EXPR_P (var))
1f6d3a08
RH
1249 ;
1250 else if (TREE_STATIC (var))
7e8b322a 1251 ;
eb7adebc 1252 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1f6d3a08
RH
1253 ;
1254 else if (TREE_TYPE (var) == error_mark_node)
ff28a94d
JH
1255 {
1256 if (really_expand)
1257 expand_one_error_var (var);
1258 }
4e3825db 1259 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
ff28a94d
JH
1260 {
1261 if (really_expand)
1262 expand_one_hard_reg_var (var);
1263 }
1f6d3a08 1264 else if (use_register_for_decl (var))
ff28a94d
JH
1265 {
1266 if (really_expand)
4e3825db 1267 expand_one_register_var (origvar);
ff28a94d 1268 }
56099f00 1269 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
7604eb4e 1270 {
56099f00 1271 /* Reject variables which cover more than half of the address-space. */
7604eb4e
JJ
1272 if (really_expand)
1273 {
1274 error ("size of variable %q+D is too large", var);
1275 expand_one_error_var (var);
1276 }
1277 }
1f6d3a08 1278 else if (defer_stack_allocation (var, toplevel))
4e3825db 1279 add_stack_var (origvar);
1f6d3a08 1280 else
ff28a94d 1281 {
bd9f1b4b 1282 if (really_expand)
4e3825db 1283 expand_one_stack_var (origvar);
ff28a94d
JH
1284 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1285 }
1286 return 0;
1f6d3a08
RH
1287}
1288
1289/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1290 expanding variables. Those variables that can be put into registers
1291 are allocated pseudos; those that can't are put on the stack.
1292
1293 TOPLEVEL is true if this is the outermost BLOCK. */
1294
1295static void
1296expand_used_vars_for_block (tree block, bool toplevel)
1297{
1f6d3a08
RH
1298 tree t;
1299
1f6d3a08 1300 /* Expand all variables at this level. */
910ad8de 1301 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1ace6185
JJ
1302 if (TREE_USED (t)
1303 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1304 || !DECL_NONSHAREABLE (t)))
ff28a94d 1305 expand_one_var (t, toplevel, true);
1f6d3a08 1306
1f6d3a08
RH
1307 /* Expand all variables at containing levels. */
1308 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1309 expand_used_vars_for_block (t, false);
1f6d3a08
RH
1310}
1311
1312/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1313 and clear TREE_USED on all local variables. */
1314
1315static void
1316clear_tree_used (tree block)
1317{
1318 tree t;
1319
910ad8de 1320 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1f6d3a08 1321 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1ace6185
JJ
1322 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1323 || !DECL_NONSHAREABLE (t))
1f6d3a08
RH
1324 TREE_USED (t) = 0;
1325
1326 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1327 clear_tree_used (t);
1328}
1329
f6bc1c4a
HS
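/* Stack protection levels recorded in flag_stack_protect (these presumably
   correspond to -fstack-protector, -fstack-protector-all and
   -fstack-protector-strong). */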
1330enum {
1331 SPCT_FLAG_DEFAULT = 1,
1332 SPCT_FLAG_ALL = 2,
1333 SPCT_FLAG_STRONG = 3
1334};
1335
7d69de61
RH
1336/* Examine TYPE and determine a bit mask of the following features. */
1337
1338#define SPCT_HAS_LARGE_CHAR_ARRAY 1
1339#define SPCT_HAS_SMALL_CHAR_ARRAY 2
1340#define SPCT_HAS_ARRAY 4
1341#define SPCT_HAS_AGGREGATE 8
1342
1343static unsigned int
1344stack_protect_classify_type (tree type)
1345{
1346 unsigned int ret = 0;
1347 tree t;
1348
1349 switch (TREE_CODE (type))
1350 {
1351 case ARRAY_TYPE:
1352 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1353 if (t == char_type_node
1354 || t == signed_char_type_node
1355 || t == unsigned_char_type_node)
1356 {
15362b89
JJ
1357 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1358 unsigned HOST_WIDE_INT len;
7d69de61 1359
15362b89
JJ
1360 if (!TYPE_SIZE_UNIT (type)
1361 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1362 len = max;
7d69de61 1363 else
15362b89 1364 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
7d69de61
RH
1365
1366 if (len < max)
1367 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1368 else
1369 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1370 }
1371 else
1372 ret = SPCT_HAS_ARRAY;
1373 break;
1374
1375 case UNION_TYPE:
1376 case QUAL_UNION_TYPE:
1377 case RECORD_TYPE:
1378 ret = SPCT_HAS_AGGREGATE;
1379 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1380 if (TREE_CODE (t) == FIELD_DECL)
1381 ret |= stack_protect_classify_type (TREE_TYPE (t));
1382 break;
1383
1384 default:
1385 break;
1386 }
1387
1388 return ret;
1389}
1390
a4d05547
KH
1391/* Return nonzero if DECL should be segregated into the "vulnerable" upper
1392 part of the local stack frame. Remember if we ever return nonzero for
7d69de61
RH
1393 any variable in this function. The return value is the phase number in
1394 which the variable should be allocated. */
1395
1396static int
1397stack_protect_decl_phase (tree decl)
1398{
1399 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1400 int ret = 0;
1401
1402 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1403 has_short_buffer = true;
1404
f6bc1c4a
HS
1405 if (flag_stack_protect == SPCT_FLAG_ALL
1406 || flag_stack_protect == SPCT_FLAG_STRONG)
7d69de61
RH
1407 {
1408 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1409 && !(bits & SPCT_HAS_AGGREGATE))
1410 ret = 1;
1411 else if (bits & SPCT_HAS_ARRAY)
1412 ret = 2;
1413 }
1414 else
1415 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1416
1417 if (ret)
1418 has_protected_decls = true;
1419
1420 return ret;
1421}
1422
1423/* Two helper routines that check for phase 1 and phase 2. These are used
1424 as callbacks for expand_stack_vars. */
1425
1426static bool
f3ddd692
JJ
1427stack_protect_decl_phase_1 (size_t i)
1428{
1429 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1430}
1431
1432static bool
1433stack_protect_decl_phase_2 (size_t i)
7d69de61 1434{
f3ddd692 1435 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
7d69de61
RH
1436}
1437
f3ddd692
JJ
1438/* And a helper function that checks for the asan phase (with stack protector
1439 it is phase 3). This is used as a callback for expand_stack_vars.
1440 Returns true if any of the vars in the partition need to be protected. */
1441
7d69de61 1442static bool
f3ddd692 1443asan_decl_phase_3 (size_t i)
7d69de61 1444{
f3ddd692
JJ
1445 while (i != EOC)
1446 {
1447 if (asan_protect_stack_decl (stack_vars[i].decl))
1448 return true;
1449 i = stack_vars[i].next;
1450 }
1451 return false;
7d69de61
RH
1452}
1453
1454/* Ensure that variables in different stack protection phases conflict
1455 so that they are not merged and share the same stack slot. */
1456
1457static void
1458add_stack_protection_conflicts (void)
1459{
1460 size_t i, j, n = stack_vars_num;
1461 unsigned char *phase;
1462
1463 phase = XNEWVEC (unsigned char, n);
1464 for (i = 0; i < n; ++i)
1465 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1466
1467 for (i = 0; i < n; ++i)
1468 {
1469 unsigned char ph_i = phase[i];
9b44f5d9 1470 for (j = i + 1; j < n; ++j)
7d69de61
RH
1471 if (ph_i != phase[j])
1472 add_stack_var_conflict (i, j);
1473 }
1474
1475 XDELETEVEC (phase);
1476}
1477
1478/* Create a decl for the guard at the top of the stack frame. */
1479
1480static void
1481create_stack_guard (void)
1482{
c2255bc4
AH
1483 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1484 VAR_DECL, NULL, ptr_type_node);
7d69de61
RH
1485 TREE_THIS_VOLATILE (guard) = 1;
1486 TREE_USED (guard) = 1;
1487 expand_one_stack_var (guard);
cb91fab0 1488 crtl->stack_protect_guard = guard;
7d69de61
RH
1489}
1490
ff28a94d 1491/* Prepare for expanding variables. */
b8698a0f 1492static void
ff28a94d
JH
1493init_vars_expansion (void)
1494{
3f9b14ff
SB
1495 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1496 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
ff28a94d 1497
3f9b14ff
SB
1498 /* A map from decl to stack partition. */
1499 decl_to_stack_part = pointer_map_create ();
ff28a94d
JH
1500
1501 /* Initialize local stack smashing state. */
1502 has_protected_decls = false;
1503 has_short_buffer = false;
1504}
1505
1506/* Free up stack variable graph data. */
1507static void
1508fini_vars_expansion (void)
1509{
3f9b14ff
SB
1510 bitmap_obstack_release (&stack_var_bitmap_obstack);
1511 if (stack_vars)
1512 XDELETEVEC (stack_vars);
1513 if (stack_vars_sorted)
1514 XDELETEVEC (stack_vars_sorted);
ff28a94d 1515 stack_vars = NULL;
9b44f5d9 1516 stack_vars_sorted = NULL;
ff28a94d 1517 stack_vars_alloc = stack_vars_num = 0;
47598145
MM
1518 pointer_map_destroy (decl_to_stack_part);
1519 decl_to_stack_part = NULL;
ff28a94d
JH
1520}
1521
30925d94
AO
1522/* Make a fair guess for the size of the stack frame of the function
1523 in NODE. This doesn't have to be exact, the result is only used in
1524 the inline heuristics. So we don't want to run the full stack var
1525 packing algorithm (which is quadratic in the number of stack vars).
1526 Instead, we calculate the total size of all stack vars. This turns
1527 out to be a pretty fair estimate -- packing of stack vars doesn't
1528 happen very often. */
b5a430f3 1529
ff28a94d 1530HOST_WIDE_INT
30925d94 1531estimated_stack_frame_size (struct cgraph_node *node)
ff28a94d
JH
1532{
1533 HOST_WIDE_INT size = 0;
b5a430f3 1534 size_t i;
bb7e6d55 1535 tree var;
67348ccc 1536 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
30925d94 1537
bb7e6d55 1538 push_cfun (fn);
ff28a94d 1539
3f9b14ff
SB
1540 init_vars_expansion ();
1541
824f71b9
RG
1542 FOR_EACH_LOCAL_DECL (fn, i, var)
1543 if (auto_var_in_fn_p (var, fn->decl))
1544 size += expand_one_var (var, true, false);
b5a430f3 1545
ff28a94d
JH
1546 if (stack_vars_num > 0)
1547 {
b5a430f3
SB
1548 /* Fake sorting the stack vars for account_stack_vars (). */
1549 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1550 for (i = 0; i < stack_vars_num; ++i)
1551 stack_vars_sorted[i] = i;
ff28a94d 1552 size += account_stack_vars ();
ff28a94d 1553 }
3f9b14ff
SB
1554
1555 fini_vars_expansion ();
2e1ec94f 1556 pop_cfun ();
ff28a94d
JH
1557 return size;
1558}
1559
f6bc1c4a
HS
1560/* Helper routine to check if a record or union contains an array field. */
1561
1562static int
1563record_or_union_type_has_array_p (const_tree tree_type)
1564{
1565 tree fields = TYPE_FIELDS (tree_type);
1566 tree f;
1567
1568 for (f = fields; f; f = DECL_CHAIN (f))
1569 if (TREE_CODE (f) == FIELD_DECL)
1570 {
1571 tree field_type = TREE_TYPE (f);
1572 if (RECORD_OR_UNION_TYPE_P (field_type)
1573 && record_or_union_type_has_array_p (field_type))
1574 return 1;
1575 if (TREE_CODE (field_type) == ARRAY_TYPE)
1576 return 1;
1577 }
1578 return 0;
1579}
1580
1f6d3a08 1581/* Expand all variables used in the function. */
727a31fa 1582
f3ddd692 1583static rtx
727a31fa
RH
1584expand_used_vars (void)
1585{
c021f10b 1586 tree var, outer_block = DECL_INITIAL (current_function_decl);
6e1aa848 1587 vec<tree> maybe_local_decls = vNULL;
f3ddd692 1588 rtx var_end_seq = NULL_RTX;
70b5e7dc 1589 struct pointer_map_t *ssa_name_decls;
4e3825db 1590 unsigned i;
c021f10b 1591 unsigned len;
f6bc1c4a 1592 bool gen_stack_protect_signal = false;
727a31fa 1593
1f6d3a08
RH
1594 /* Compute the phase of the stack frame for this function. */
1595 {
1596 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1597 int off = STARTING_FRAME_OFFSET % align;
1598 frame_phase = off ? align - off : 0;
1599 }
727a31fa 1600
3f9b14ff
SB
1601 /* Set TREE_USED on all variables in the local_decls. */
1602 FOR_EACH_LOCAL_DECL (cfun, i, var)
1603 TREE_USED (var) = 1;
1604 /* Clear TREE_USED on all variables associated with a block scope. */
1605 clear_tree_used (DECL_INITIAL (current_function_decl));
1606
ff28a94d 1607 init_vars_expansion ();
7d69de61 1608
70b5e7dc 1609 ssa_name_decls = pointer_map_create ();
4e3825db
MM
1610 for (i = 0; i < SA.map->num_partitions; i++)
1611 {
1612 tree var = partition_to_var (SA.map, i);
1613
ea057359 1614 gcc_assert (!virtual_operand_p (var));
70b5e7dc
RG
1615
1616 /* Assign decls to each SSA name partition, share decls for partitions
1617 we could have coalesced (those with the same type). */
1618 if (SSA_NAME_VAR (var) == NULL_TREE)
1619 {
1620 void **slot = pointer_map_insert (ssa_name_decls, TREE_TYPE (var));
1621 if (!*slot)
1622 *slot = (void *) create_tmp_reg (TREE_TYPE (var), NULL);
1623 replace_ssa_name_symbol (var, (tree) *slot);
1624 }
1625
cfb9edba
EB
1626 /* Always allocate space for partitions based on VAR_DECLs. But for
1627 those based on PARM_DECLs or RESULT_DECLs and which matter for the
1628 debug info, there is no need to do so if optimization is disabled
1629 because all the SSA_NAMEs based on these DECLs have been coalesced
1630 into a single partition, which is thus assigned the canonical RTL
1631 location of the DECLs. */
4e3825db
MM
1632 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1633 expand_one_var (var, true, true);
cfb9edba 1634 else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize)
4e3825db
MM
1635 {
1636 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1637 contain the default def (representing the parm or result itself)
1638 we don't do anything here. But those which don't contain the
1639 default def (representing a temporary based on the parm/result)
1640 we need to allocate space just like for normal VAR_DECLs. */
1641 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1642 {
1643 expand_one_var (var, true, true);
1644 gcc_assert (SA.partition_to_pseudo[i]);
1645 }
1646 }
1647 }
70b5e7dc 1648 pointer_map_destroy (ssa_name_decls);
4e3825db 1649
f6bc1c4a
HS
1650 if (flag_stack_protect == SPCT_FLAG_STRONG)
1651 FOR_EACH_LOCAL_DECL (cfun, i, var)
1652 if (!is_global_var (var))
1653 {
1654 tree var_type = TREE_TYPE (var);
 1655	/* Examine referenced local variables that have their addresses taken,
1656 contain an array, or are arrays. */
1657 if (TREE_CODE (var) == VAR_DECL
1658 && (TREE_CODE (var_type) == ARRAY_TYPE
1659 || TREE_ADDRESSABLE (var)
1660 || (RECORD_OR_UNION_TYPE_P (var_type)
1661 && record_or_union_type_has_array_p (var_type))))
1662 {
1663 gen_stack_protect_signal = true;
1664 break;
1665 }
1666 }
1667
cb91fab0 1668 /* At this point all variables on the local_decls with TREE_USED
1f6d3a08 1669 set are not associated with any block scope. Lay them out. */
c021f10b 1670
9771b263 1671 len = vec_safe_length (cfun->local_decls);
c021f10b 1672 FOR_EACH_LOCAL_DECL (cfun, i, var)
1f6d3a08 1673 {
1f6d3a08
RH
1674 bool expand_now = false;
1675
4e3825db
MM
1676 /* Expanded above already. */
1677 if (is_gimple_reg (var))
eb7adebc
MM
1678 {
1679 TREE_USED (var) = 0;
3adcf52c 1680 goto next;
eb7adebc 1681 }
1f6d3a08
RH
1682 /* We didn't set a block for static or extern because it's hard
1683 to tell the difference between a global variable (re)declared
1684 in a local scope, and one that's really declared there to
1685 begin with. And it doesn't really matter much, since we're
1686 not giving them stack space. Expand them now. */
4e3825db 1687 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1f6d3a08
RH
1688 expand_now = true;
1689
ee2e8462
EB
1690 /* Expand variables not associated with any block now. Those created by
1691 the optimizers could be live anywhere in the function. Those that
1692 could possibly have been scoped originally and detached from their
1693 block will have their allocation deferred so we coalesce them with
1694 others when optimization is enabled. */
1f6d3a08
RH
1695 else if (TREE_USED (var))
1696 expand_now = true;
1697
1698 /* Finally, mark all variables on the list as used. We'll use
1699 this in a moment when we expand those associated with scopes. */
1700 TREE_USED (var) = 1;
1701
1702 if (expand_now)
3adcf52c
JM
1703 expand_one_var (var, true, true);
1704
1705 next:
1706 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
802e9f8e 1707 {
3adcf52c
JM
1708 rtx rtl = DECL_RTL_IF_SET (var);
1709
1710 /* Keep artificial non-ignored vars in cfun->local_decls
1711 chain until instantiate_decls. */
1712 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 1713 add_local_decl (cfun, var);
6c6366f6 1714 else if (rtl == NULL_RTX)
c021f10b
NF
1715 /* If rtl isn't set yet, which can happen e.g. with
1716 -fstack-protector, retry before returning from this
1717 function. */
9771b263 1718 maybe_local_decls.safe_push (var);
802e9f8e 1719 }
1f6d3a08 1720 }
1f6d3a08 1721
c021f10b
NF
1722 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1723
1724 +-----------------+-----------------+
1725 | ...processed... | ...duplicates...|
1726 +-----------------+-----------------+
1727 ^
1728 +-- LEN points here.
1729
1730 We just want the duplicates, as those are the artificial
1731 non-ignored vars that we want to keep until instantiate_decls.
1732 Move them down and truncate the array. */
9771b263
DN
1733 if (!vec_safe_is_empty (cfun->local_decls))
1734 cfun->local_decls->block_remove (0, len);
c021f10b 1735
1f6d3a08
RH
1736 /* At this point, all variables within the block tree with TREE_USED
1737 set are actually used by the optimized function. Lay them out. */
1738 expand_used_vars_for_block (outer_block, true);
1739
1740 if (stack_vars_num > 0)
1741 {
47598145 1742 add_scope_conflicts ();
1f6d3a08 1743
c22cacf3 1744 /* If stack protection is enabled, we don't share space between
7d69de61
RH
1745 vulnerable data and non-vulnerable data. */
1746 if (flag_stack_protect)
1747 add_stack_protection_conflicts ();
1748
c22cacf3 1749 /* Now that we have collected all stack variables, and have computed a
1f6d3a08
RH
1750 minimal interference graph, attempt to save some stack space. */
1751 partition_stack_vars ();
1752 if (dump_file)
1753 dump_stack_var_partition ();
7d69de61
RH
1754 }
1755
f6bc1c4a
HS
1756 switch (flag_stack_protect)
1757 {
1758 case SPCT_FLAG_ALL:
1759 create_stack_guard ();
1760 break;
1761
1762 case SPCT_FLAG_STRONG:
1763 if (gen_stack_protect_signal
1764 || cfun->calls_alloca || has_protected_decls)
1765 create_stack_guard ();
1766 break;
1767
1768 case SPCT_FLAG_DEFAULT:
1769 if (cfun->calls_alloca || has_protected_decls)
c3284718 1770 create_stack_guard ();
f6bc1c4a
HS
1771 break;
1772
1773 default:
1774 ;
1775 }
1f6d3a08 1776
7d69de61
RH
1777 /* Assign rtl to each variable based on these partitions. */
1778 if (stack_vars_num > 0)
1779 {
f3ddd692
JJ
1780 struct stack_vars_data data;
1781
6e1aa848
DN
1782 data.asan_vec = vNULL;
1783 data.asan_decl_vec = vNULL;
f3ddd692 1784
7d69de61
RH
1785 /* Reorder decls to be protected by iterating over the variables
1786 array multiple times, and allocating out of each phase in turn. */
c22cacf3 1787 /* ??? We could probably integrate this into the qsort we did
7d69de61
RH
1788 earlier, such that we naturally see these variables first,
1789 and thus naturally allocate things in the right order. */
1790 if (has_protected_decls)
1791 {
1792 /* Phase 1 contains only character arrays. */
f3ddd692 1793 expand_stack_vars (stack_protect_decl_phase_1, &data);
7d69de61
RH
1794
1795 /* Phase 2 contains other kinds of arrays. */
1796 if (flag_stack_protect == 2)
f3ddd692 1797 expand_stack_vars (stack_protect_decl_phase_2, &data);
7d69de61
RH
1798 }
1799
de5a5fa1 1800 if (flag_sanitize & SANITIZE_ADDRESS)
f3ddd692
JJ
1801 /* Phase 3, any partitions that need asan protection
1802 in addition to phase 1 and 2. */
1803 expand_stack_vars (asan_decl_phase_3, &data);
1804
9771b263 1805 if (!data.asan_vec.is_empty ())
f3ddd692
JJ
1806 {
1807 HOST_WIDE_INT prev_offset = frame_offset;
1808 HOST_WIDE_INT offset
1809 = alloc_stack_frame_space (ASAN_RED_ZONE_SIZE,
1810 ASAN_RED_ZONE_SIZE);
9771b263
DN
1811 data.asan_vec.safe_push (prev_offset);
1812 data.asan_vec.safe_push (offset);
f3ddd692
JJ
1813
1814 var_end_seq
1815 = asan_emit_stack_protection (virtual_stack_vars_rtx,
9771b263 1816 data.asan_vec.address (),
c3284718 1817					    data.asan_decl_vec.address (),
9771b263 1818 data.asan_vec.length ());
f3ddd692
JJ
1819 }
1820
1821 expand_stack_vars (NULL, &data);
1822
9771b263
DN
1823 data.asan_vec.release ();
1824 data.asan_decl_vec.release ();
1f6d3a08
RH
1825 }
1826
3f9b14ff
SB
1827 fini_vars_expansion ();
1828
6c6366f6
JJ
1829 /* If there were any artificial non-ignored vars without rtl
1830 found earlier, see if deferred stack allocation hasn't assigned
1831 rtl to them. */
9771b263 1832 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
6c6366f6 1833 {
6c6366f6
JJ
1834 rtx rtl = DECL_RTL_IF_SET (var);
1835
6c6366f6
JJ
1836 /* Keep artificial non-ignored vars in cfun->local_decls
1837 chain until instantiate_decls. */
1838 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 1839 add_local_decl (cfun, var);
6c6366f6 1840 }
9771b263 1841 maybe_local_decls.release ();
6c6366f6 1842
1f6d3a08
RH
1843 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1844 if (STACK_ALIGNMENT_NEEDED)
1845 {
1846 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1847 if (!FRAME_GROWS_DOWNWARD)
1848 frame_offset += align - 1;
1849 frame_offset &= -align;
1850 }
f3ddd692
JJ
1851
1852 return var_end_seq;
727a31fa
RH
1853}
1854
1855
b7211528
SB
1856/* If we need to produce a detailed dump, print the tree representation
1857 for STMT to the dump file. SINCE is the last RTX after which the RTL
1858 generated for STMT should have been appended. */
1859
1860static void
726a989a 1861maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
b7211528
SB
1862{
1863 if (dump_file && (dump_flags & TDF_DETAILS))
1864 {
1865 fprintf (dump_file, "\n;; ");
b5b8b0ac
AO
1866 print_gimple_stmt (dump_file, stmt, 0,
1867 TDF_SLIM | (dump_flags & TDF_LINENO));
b7211528
SB
1868 fprintf (dump_file, "\n");
1869
1870 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1871 }
1872}
1873
8b11009b
ZD
1874/* Maps the blocks that do not contain tree labels to rtx labels. */
1875
1876static struct pointer_map_t *lab_rtx_for_bb;
1877
a9b77cd1
ZD
1878/* Returns the label_rtx expression for a label starting basic block BB. */
1879
1880static rtx
726a989a 1881label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
a9b77cd1 1882{
726a989a
RB
1883 gimple_stmt_iterator gsi;
1884 tree lab;
1885 gimple lab_stmt;
8b11009b 1886 void **elt;
a9b77cd1
ZD
1887
1888 if (bb->flags & BB_RTL)
1889 return block_label (bb);
1890
8b11009b
ZD
1891 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1892 if (elt)
ae50c0cb 1893 return (rtx) *elt;
8b11009b
ZD
1894
1895 /* Find the tree label if it is present. */
b8698a0f 1896
726a989a 1897 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
a9b77cd1 1898 {
726a989a
RB
1899 lab_stmt = gsi_stmt (gsi);
1900 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
a9b77cd1
ZD
1901 break;
1902
726a989a 1903 lab = gimple_label_label (lab_stmt);
a9b77cd1
ZD
1904 if (DECL_NONLOCAL (lab))
1905 break;
1906
1907 return label_rtx (lab);
1908 }
1909
8b11009b
ZD
1910 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1911 *elt = gen_label_rtx ();
ae50c0cb 1912 return (rtx) *elt;
a9b77cd1
ZD
1913}
1914
726a989a 1915
529ff441
MM
1916/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1917 of a basic block where we just expanded the conditional at the end,
315adeda
MM
1918 possibly clean up the CFG and instruction sequence. LAST is the
1919 last instruction before the just emitted jump sequence. */
529ff441
MM
1920
1921static void
315adeda 1922maybe_cleanup_end_of_block (edge e, rtx last)
529ff441
MM
1923{
1924 /* Special case: when jumpif decides that the condition is
1925 trivial it emits an unconditional jump (and the necessary
1926 barrier). But we still have two edges, the fallthru one is
1927 wrong. purge_dead_edges would clean this up later. Unfortunately
1928 we have to insert insns (and split edges) before
1929 find_many_sub_basic_blocks and hence before purge_dead_edges.
1930 But splitting edges might create new blocks which depend on the
1931 fact that if there are two edges there's no barrier. So the
1932 barrier would get lost and verify_flow_info would ICE. Instead
1933 of auditing all edge splitters to care for the barrier (which
1934 normally isn't there in a cleaned CFG), fix it here. */
1935 if (BARRIER_P (get_last_insn ()))
1936 {
529ff441
MM
1937 rtx insn;
1938 remove_edge (e);
1939 /* Now, we have a single successor block, if we have insns to
1940 insert on the remaining edge we potentially will insert
1941 it at the end of this block (if the dest block isn't feasible)
1942 in order to avoid splitting the edge. This insertion will take
1943 place in front of the last jump. But we might have emitted
1944 multiple jumps (conditional and one unconditional) to the
1945 same destination. Inserting in front of the last one then
1946 is a problem. See PR 40021. We fix this by deleting all
1947 jumps except the last unconditional one. */
1948 insn = PREV_INSN (get_last_insn ());
1949 /* Make sure we have an unconditional jump. Otherwise we're
1950 confused. */
1951 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
315adeda 1952 for (insn = PREV_INSN (insn); insn != last;)
529ff441
MM
1953 {
1954 insn = PREV_INSN (insn);
1955 if (JUMP_P (NEXT_INSN (insn)))
90eb3e33 1956 {
8a269cb7 1957 if (!any_condjump_p (NEXT_INSN (insn)))
90eb3e33
JJ
1958 {
1959 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1960 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1961 }
1962 delete_insn (NEXT_INSN (insn));
1963 }
529ff441
MM
1964 }
1965 }
1966}
1967
726a989a 1968/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
80c7a9eb
RH
1969 Returns a new basic block if we've terminated the current basic
1970 block and created a new one. */
1971
1972static basic_block
726a989a 1973expand_gimple_cond (basic_block bb, gimple stmt)
80c7a9eb
RH
1974{
1975 basic_block new_bb, dest;
1976 edge new_edge;
1977 edge true_edge;
1978 edge false_edge;
b7211528 1979 rtx last2, last;
28ed065e
MM
1980 enum tree_code code;
1981 tree op0, op1;
1982
1983 code = gimple_cond_code (stmt);
1984 op0 = gimple_cond_lhs (stmt);
1985 op1 = gimple_cond_rhs (stmt);
1986 /* We're sometimes presented with such code:
1987 D.123_1 = x < y;
1988 if (D.123_1 != 0)
1989 ...
1990 This would expand to two comparisons which then later might
1991 be cleaned up by combine. But some pattern matchers like if-conversion
1992 work better when there's only one compare, so make up for this
 1993     here as a special exception if TER would have made the same change.  */
31348d52 1994 if (SA.values
28ed065e 1995 && TREE_CODE (op0) == SSA_NAME
31348d52
RB
1996 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
1997 && TREE_CODE (op1) == INTEGER_CST
1998 && ((gimple_cond_code (stmt) == NE_EXPR
1999 && integer_zerop (op1))
2000 || (gimple_cond_code (stmt) == EQ_EXPR
2001 && integer_onep (op1)))
28ed065e
MM
2002 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2003 {
2004 gimple second = SSA_NAME_DEF_STMT (op0);
e83f4b68 2005 if (gimple_code (second) == GIMPLE_ASSIGN)
28ed065e 2006 {
e83f4b68
MM
2007 enum tree_code code2 = gimple_assign_rhs_code (second);
2008 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2009 {
2010 code = code2;
2011 op0 = gimple_assign_rhs1 (second);
2012 op1 = gimple_assign_rhs2 (second);
2013 }
2014 /* If jumps are cheap turn some more codes into
2015 jumpy sequences. */
2016 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
2017 {
2018 if ((code2 == BIT_AND_EXPR
2019 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2020 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2021 || code2 == TRUTH_AND_EXPR)
2022 {
2023 code = TRUTH_ANDIF_EXPR;
2024 op0 = gimple_assign_rhs1 (second);
2025 op1 = gimple_assign_rhs2 (second);
2026 }
2027 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2028 {
2029 code = TRUTH_ORIF_EXPR;
2030 op0 = gimple_assign_rhs1 (second);
2031 op1 = gimple_assign_rhs2 (second);
2032 }
2033 }
28ed065e
MM
2034 }
2035 }
b7211528
SB
2036
2037 last2 = last = get_last_insn ();
80c7a9eb
RH
2038
2039 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5368224f 2040 set_curr_insn_location (gimple_location (stmt));
80c7a9eb
RH
2041
2042 /* These flags have no purpose in RTL land. */
2043 true_edge->flags &= ~EDGE_TRUE_VALUE;
2044 false_edge->flags &= ~EDGE_FALSE_VALUE;
2045
2046 /* We can either have a pure conditional jump with one fallthru edge or
2047 two-way jump that needs to be decomposed into two basic blocks. */
a9b77cd1 2048 if (false_edge->dest == bb->next_bb)
80c7a9eb 2049 {
40e90eac
JJ
2050 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2051 true_edge->probability);
726a989a 2052 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2f13f2de 2053 if (true_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2054 set_curr_insn_location (true_edge->goto_locus);
a9b77cd1 2055 false_edge->flags |= EDGE_FALLTHRU;
315adeda 2056 maybe_cleanup_end_of_block (false_edge, last);
80c7a9eb
RH
2057 return NULL;
2058 }
a9b77cd1 2059 if (true_edge->dest == bb->next_bb)
80c7a9eb 2060 {
40e90eac
JJ
2061 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2062 false_edge->probability);
726a989a 2063 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2f13f2de 2064 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2065 set_curr_insn_location (false_edge->goto_locus);
a9b77cd1 2066 true_edge->flags |= EDGE_FALLTHRU;
315adeda 2067 maybe_cleanup_end_of_block (true_edge, last);
80c7a9eb
RH
2068 return NULL;
2069 }
80c7a9eb 2070
40e90eac
JJ
2071 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2072 true_edge->probability);
80c7a9eb 2073 last = get_last_insn ();
2f13f2de 2074 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2075 set_curr_insn_location (false_edge->goto_locus);
a9b77cd1 2076 emit_jump (label_rtx_for_bb (false_edge->dest));
80c7a9eb
RH
2077
2078 BB_END (bb) = last;
2079 if (BARRIER_P (BB_END (bb)))
2080 BB_END (bb) = PREV_INSN (BB_END (bb));
2081 update_bb_for_insn (bb);
2082
2083 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2084 dest = false_edge->dest;
2085 redirect_edge_succ (false_edge, new_bb);
2086 false_edge->flags |= EDGE_FALLTHRU;
2087 new_bb->count = false_edge->count;
2088 new_bb->frequency = EDGE_FREQUENCY (false_edge);
7d776ee2
RG
2089 if (current_loops && bb->loop_father)
2090 add_bb_to_loop (new_bb, bb->loop_father);
80c7a9eb
RH
2091 new_edge = make_edge (new_bb, dest, 0);
2092 new_edge->probability = REG_BR_PROB_BASE;
2093 new_edge->count = new_bb->count;
2094 if (BARRIER_P (BB_END (new_bb)))
2095 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2096 update_bb_for_insn (new_bb);
2097
726a989a 2098 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
c22cacf3 2099
2f13f2de 2100 if (true_edge->goto_locus != UNKNOWN_LOCATION)
7787b4aa 2101 {
5368224f
DC
2102 set_curr_insn_location (true_edge->goto_locus);
2103 true_edge->goto_locus = curr_insn_location ();
7787b4aa 2104 }
7787b4aa 2105
80c7a9eb
RH
2106 return new_bb;
2107}
2108
0a35513e
AH
2109/* Mark all calls that can have a transaction restart. */
2110
2111static void
2112mark_transaction_restart_calls (gimple stmt)
2113{
2114 struct tm_restart_node dummy;
2115 void **slot;
2116
2117 if (!cfun->gimple_df->tm_restart)
2118 return;
2119
2120 dummy.stmt = stmt;
2121 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
2122 if (slot)
2123 {
2124 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
2125 tree list = n->label_or_list;
2126 rtx insn;
2127
2128 for (insn = next_real_insn (get_last_insn ());
2129 !CALL_P (insn);
2130 insn = next_real_insn (insn))
2131 continue;
2132
2133 if (TREE_CODE (list) == LABEL_DECL)
2134 add_reg_note (insn, REG_TM, label_rtx (list));
2135 else
2136 for (; list ; list = TREE_CHAIN (list))
2137 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2138 }
2139}
2140
28ed065e
MM
2141/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2142 statement STMT. */
2143
2144static void
2145expand_call_stmt (gimple stmt)
2146{
25583c4f 2147 tree exp, decl, lhs;
e23817b3 2148 bool builtin_p;
e7925582 2149 size_t i;
28ed065e 2150
25583c4f
RS
2151 if (gimple_call_internal_p (stmt))
2152 {
2153 expand_internal_call (stmt);
2154 return;
2155 }
2156
e23817b3
RG
2157 decl = gimple_call_fndecl (stmt);
2158 builtin_p = decl && DECL_BUILT_IN (decl);
2159
01156003
IE
2160 /* Bind bounds call is expanded as assignment. */
2161 if (builtin_p
2162 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2163 && DECL_FUNCTION_CODE (decl) == BUILT_IN_CHKP_BIND_BOUNDS)
2164 {
2165 expand_assignment (gimple_call_lhs (stmt),
2166 gimple_call_arg (stmt, 0), false);
2167 return;
2168 }
2169
2170 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2171 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2172
e7925582
EB
2173 /* If this is not a builtin function, the function type through which the
2174 call is made may be different from the type of the function. */
2175 if (!builtin_p)
2176 CALL_EXPR_FN (exp)
b25aa0e8
EB
2177 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2178 CALL_EXPR_FN (exp));
e7925582 2179
28ed065e
MM
2180 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2181 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2182
2183 for (i = 0; i < gimple_call_num_args (stmt); i++)
e23817b3
RG
2184 {
2185 tree arg = gimple_call_arg (stmt, i);
2186 gimple def;
2187 /* TER addresses into arguments of builtin functions so we have a
2188 chance to infer more correct alignment information. See PR39954. */
2189 if (builtin_p
2190 && TREE_CODE (arg) == SSA_NAME
2191 && (def = get_gimple_for_ssa_name (arg))
2192 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2193 arg = gimple_assign_rhs1 (def);
2194 CALL_EXPR_ARG (exp, i) = arg;
2195 }
28ed065e 2196
93f28ca7 2197 if (gimple_has_side_effects (stmt))
28ed065e
MM
2198 TREE_SIDE_EFFECTS (exp) = 1;
2199
93f28ca7 2200 if (gimple_call_nothrow_p (stmt))
28ed065e
MM
2201 TREE_NOTHROW (exp) = 1;
2202
2203 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2204 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
63d2a353
MM
2205 if (decl
2206 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13e49da9
TV
2207 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2208 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
63d2a353
MM
2209 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2210 else
2211 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
28ed065e
MM
2212 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2213 SET_EXPR_LOCATION (exp, gimple_location (stmt));
28ed065e 2214
ddb555ed
JJ
2215 /* Ensure RTL is created for debug args. */
2216 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2217 {
9771b263 2218 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
ddb555ed
JJ
2219 unsigned int ix;
2220 tree dtemp;
2221
2222 if (debug_args)
9771b263 2223 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
ddb555ed
JJ
2224 {
2225 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2226 expand_debug_expr (dtemp);
2227 }
2228 }
2229
25583c4f 2230 lhs = gimple_call_lhs (stmt);
28ed065e
MM
2231 if (lhs)
2232 expand_assignment (lhs, exp, false);
2233 else
2234 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
0a35513e
AH
2235
2236 mark_transaction_restart_calls (stmt);
28ed065e
MM
2237}
2238
862d0b35
DN
2239
2240/* Generate RTL for an asm statement (explicit assembler code).
2241 STRING is a STRING_CST node containing the assembler code text,
2242 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2243 insn is volatile; don't optimize it. */
2244
2245static void
2246expand_asm_loc (tree string, int vol, location_t locus)
2247{
2248 rtx body;
2249
2250 if (TREE_CODE (string) == ADDR_EXPR)
2251 string = TREE_OPERAND (string, 0);
2252
2253 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2254 ggc_strdup (TREE_STRING_POINTER (string)),
2255 locus);
2256
2257 MEM_VOLATILE_P (body) = vol;
2258
2259 emit_insn (body);
2260}
2261
2262/* Return the number of times character C occurs in string S. */
2263static int
2264n_occurrences (int c, const char *s)
2265{
2266 int n = 0;
2267 while (*s)
2268 n += (*s++ == c);
2269 return n;
2270}
2271
2272/* A subroutine of expand_asm_operands. Check that all operands have
2273 the same number of alternatives. Return true if so. */
2274
2275static bool
2276check_operand_nalternatives (tree outputs, tree inputs)
2277{
2278 if (outputs || inputs)
2279 {
2280 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
2281 int nalternatives
2282 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
2283 tree next = inputs;
2284
2285 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2286 {
2287 error ("too many alternatives in %<asm%>");
2288 return false;
2289 }
2290
2291 tmp = outputs;
2292 while (tmp)
2293 {
2294 const char *constraint
2295 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2296
2297 if (n_occurrences (',', constraint) != nalternatives)
2298 {
2299 error ("operand constraints for %<asm%> differ "
2300 "in number of alternatives");
2301 return false;
2302 }
2303
2304 if (TREE_CHAIN (tmp))
2305 tmp = TREE_CHAIN (tmp);
2306 else
2307 tmp = next, next = 0;
2308 }
2309 }
2310
2311 return true;
2312}
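/* For instance, hypothetical constraints "=r,m" for the output and "ri,g"
   for the input each contain one comma, i.e. two alternatives, so the
   check above succeeds; pairing "=r,m" with a plain "r" input would
   trigger the "differ in number of alternatives" error instead.  */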
2313
2314/* Check for overlap between registers marked in CLOBBERED_REGS and
 2315   anything inappropriate in T.  Emit an error and return true if an
 2316   overlap is found, false if everything is OK.  */
2317
2318static bool
2319tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2320{
2321 /* Conflicts between asm-declared register variables and the clobber
2322 list are not allowed. */
2323 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2324
2325 if (overlap)
2326 {
2327 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2328 DECL_NAME (overlap));
2329
2330 /* Reset registerness to stop multiple errors emitted for a single
2331 variable. */
2332 DECL_REGISTER (overlap) = 0;
2333 return true;
2334 }
2335
2336 return false;
2337}
2338
2339/* Generate RTL for an asm statement with arguments.
2340 STRING is the instruction template.
2341 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2342 Each output or input has an expression in the TREE_VALUE and
2343 a tree list in TREE_PURPOSE which in turn contains a constraint
2344 name in TREE_VALUE (or NULL_TREE) and a constraint string
2345 in TREE_PURPOSE.
2346 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2347 that is clobbered by this insn.
2348
2349 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2350 should be the fallthru basic block of the asm goto.
2351
2352 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2353 Some elements of OUTPUTS may be replaced with trees representing temporary
2354 values. The caller should copy those temporary values to the originally
2355 specified lvalues.
2356
2357 VOL nonzero means the insn is volatile; don't optimize it. */
2358
2359static void
2360expand_asm_operands (tree string, tree outputs, tree inputs,
2361 tree clobbers, tree labels, basic_block fallthru_bb,
2362 int vol, location_t locus)
2363{
2364 rtvec argvec, constraintvec, labelvec;
2365 rtx body;
2366 int ninputs = list_length (inputs);
2367 int noutputs = list_length (outputs);
2368 int nlabels = list_length (labels);
2369 int ninout;
2370 int nclobbers;
2371 HARD_REG_SET clobbered_regs;
2372 int clobber_conflict_found = 0;
2373 tree tail;
2374 tree t;
2375 int i;
2376 /* Vector of RTX's of evaluated output operands. */
2377 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
2378 int *inout_opnum = XALLOCAVEC (int, noutputs);
2379 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
2380 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
2381 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
2382 int old_generating_concat_p = generating_concat_p;
2383 rtx fallthru_label = NULL_RTX;
2384
2385 /* An ASM with no outputs needs to be treated as volatile, for now. */
2386 if (noutputs == 0)
2387 vol = 1;
2388
2389 if (! check_operand_nalternatives (outputs, inputs))
2390 return;
2391
2392 string = resolve_asm_operand_names (string, outputs, inputs, labels);
2393
2394 /* Collect constraints. */
2395 i = 0;
2396 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
2397 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2398 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
2399 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2400
2401 /* Sometimes we wish to automatically clobber registers across an asm.
2402 Case in point is when the i386 backend moved from cc0 to a hard reg --
2403 maintaining source-level compatibility means automatically clobbering
2404 the flags register. */
2405 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
2406
2407 /* Count the number of meaningful clobbered registers, ignoring what
2408 we would ignore later. */
2409 nclobbers = 0;
2410 CLEAR_HARD_REG_SET (clobbered_regs);
2411 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2412 {
2413 const char *regname;
2414 int nregs;
2415
2416 if (TREE_VALUE (tail) == error_mark_node)
2417 return;
2418 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2419
2420 i = decode_reg_name_and_count (regname, &nregs);
2421 if (i == -4)
2422 ++nclobbers;
2423 else if (i == -2)
2424 error ("unknown register name %qs in %<asm%>", regname);
2425
2426 /* Mark clobbered registers. */
2427 if (i >= 0)
2428 {
2429 int reg;
2430
2431 for (reg = i; reg < i + nregs; reg++)
2432 {
2433 ++nclobbers;
2434
2435 /* Clobbering the PIC register is an error. */
2436 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2437 {
2438 error ("PIC register clobbered by %qs in %<asm%>", regname);
2439 return;
2440 }
2441
2442 SET_HARD_REG_BIT (clobbered_regs, reg);
2443 }
2444 }
2445 }
2446
2447 /* First pass over inputs and outputs checks validity and sets
2448 mark_addressable if needed. */
2449
2450 ninout = 0;
2451 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2452 {
2453 tree val = TREE_VALUE (tail);
2454 tree type = TREE_TYPE (val);
2455 const char *constraint;
2456 bool is_inout;
2457 bool allows_reg;
2458 bool allows_mem;
2459
2460 /* If there's an erroneous arg, emit no insn. */
2461 if (type == error_mark_node)
2462 return;
2463
2464 /* Try to parse the output constraint. If that fails, there's
2465 no point in going further. */
2466 constraint = constraints[i];
2467 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2468 &allows_mem, &allows_reg, &is_inout))
2469 return;
2470
2471 if (! allows_reg
2472 && (allows_mem
2473 || is_inout
2474 || (DECL_P (val)
2475 && REG_P (DECL_RTL (val))
2476 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2477 mark_addressable (val);
2478
2479 if (is_inout)
2480 ninout++;
2481 }
2482
2483 ninputs += ninout;
2484 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
2485 {
2486 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2487 return;
2488 }
2489
2490 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
2491 {
2492 bool allows_reg, allows_mem;
2493 const char *constraint;
2494
2495 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
2496 would get VOIDmode and that could cause a crash in reload. */
2497 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
2498 return;
2499
2500 constraint = constraints[i + noutputs];
2501 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2502 constraints, &allows_mem, &allows_reg))
2503 return;
2504
2505 if (! allows_reg && allows_mem)
2506 mark_addressable (TREE_VALUE (tail));
2507 }
2508
2509 /* Second pass evaluates arguments. */
2510
2511 /* Make sure stack is consistent for asm goto. */
2512 if (nlabels > 0)
2513 do_pending_stack_adjust ();
2514
2515 ninout = 0;
2516 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2517 {
2518 tree val = TREE_VALUE (tail);
2519 tree type = TREE_TYPE (val);
2520 bool is_inout;
2521 bool allows_reg;
2522 bool allows_mem;
2523 rtx op;
2524 bool ok;
2525
2526 ok = parse_output_constraint (&constraints[i], i, ninputs,
2527 noutputs, &allows_mem, &allows_reg,
2528 &is_inout);
2529 gcc_assert (ok);
2530
2531 /* If an output operand is not a decl or indirect ref and our constraint
2532 allows a register, make a temporary to act as an intermediate.
2533 Make the asm insn write into that, then our caller will copy it to
2534 the real output operand. Likewise for promoted variables. */
2535
2536 generating_concat_p = 0;
2537
2538 real_output_rtx[i] = NULL_RTX;
2539 if ((TREE_CODE (val) == INDIRECT_REF
2540 && allows_mem)
2541 || (DECL_P (val)
2542 && (allows_mem || REG_P (DECL_RTL (val)))
2543 && ! (REG_P (DECL_RTL (val))
2544 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2545 || ! allows_reg
2546 || is_inout)
2547 {
2548 op = expand_expr (val, NULL_RTX, VOIDmode,
2549 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2550 if (MEM_P (op))
2551 op = validize_mem (op);
2552
2553 if (! allows_reg && !MEM_P (op))
2554 error ("output number %d not directly addressable", i);
2555 if ((! allows_mem && MEM_P (op))
2556 || GET_CODE (op) == CONCAT)
2557 {
2558 real_output_rtx[i] = op;
2559 op = gen_reg_rtx (GET_MODE (op));
2560 if (is_inout)
2561 emit_move_insn (op, real_output_rtx[i]);
2562 }
2563 }
2564 else
2565 {
2566 op = assign_temp (type, 0, 1);
2567 op = validize_mem (op);
2568 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
2569 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
2570 TREE_VALUE (tail) = make_tree (type, op);
2571 }
2572 output_rtx[i] = op;
2573
2574 generating_concat_p = old_generating_concat_p;
2575
2576 if (is_inout)
2577 {
2578 inout_mode[ninout] = TYPE_MODE (type);
2579 inout_opnum[ninout++] = i;
2580 }
2581
2582 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2583 clobber_conflict_found = 1;
2584 }
2585
2586 /* Make vectors for the expression-rtx, constraint strings,
2587 and named operands. */
2588
2589 argvec = rtvec_alloc (ninputs);
2590 constraintvec = rtvec_alloc (ninputs);
2591 labelvec = rtvec_alloc (nlabels);
2592
2593 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
2594 : GET_MODE (output_rtx[0])),
2595 ggc_strdup (TREE_STRING_POINTER (string)),
2596 empty_string, 0, argvec, constraintvec,
2597 labelvec, locus);
2598
2599 MEM_VOLATILE_P (body) = vol;
2600
2601 /* Eval the inputs and put them into ARGVEC.
2602 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
2603
2604 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
2605 {
2606 bool allows_reg, allows_mem;
2607 const char *constraint;
2608 tree val, type;
2609 rtx op;
2610 bool ok;
2611
2612 constraint = constraints[i + noutputs];
2613 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2614 constraints, &allows_mem, &allows_reg);
2615 gcc_assert (ok);
2616
2617 generating_concat_p = 0;
2618
2619 val = TREE_VALUE (tail);
2620 type = TREE_TYPE (val);
2621 /* EXPAND_INITIALIZER will not generate code for valid initializer
2622 constants, but will still generate code for other types of operand.
2623 This is the behavior we want for constant constraints. */
2624 op = expand_expr (val, NULL_RTX, VOIDmode,
2625 allows_reg ? EXPAND_NORMAL
2626 : allows_mem ? EXPAND_MEMORY
2627 : EXPAND_INITIALIZER);
2628
2629 /* Never pass a CONCAT to an ASM. */
2630 if (GET_CODE (op) == CONCAT)
2631 op = force_reg (GET_MODE (op), op);
2632 else if (MEM_P (op))
2633 op = validize_mem (op);
2634
2635 if (asm_operand_ok (op, constraint, NULL) <= 0)
2636 {
2637 if (allows_reg && TYPE_MODE (type) != BLKmode)
2638 op = force_reg (TYPE_MODE (type), op);
2639 else if (!allows_mem)
2640 warning (0, "asm operand %d probably doesn%'t match constraints",
2641 i + noutputs);
2642 else if (MEM_P (op))
2643 {
2644 /* We won't recognize either volatile memory or memory
 2645	     with a queued address as a valid memory_operand
2646 at this point. Ignore it: clearly this *is* a memory. */
2647 }
2648 else
2649 gcc_unreachable ();
2650 }
2651
2652 generating_concat_p = old_generating_concat_p;
2653 ASM_OPERANDS_INPUT (body, i) = op;
2654
2655 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
2656 = gen_rtx_ASM_INPUT (TYPE_MODE (type),
2657 ggc_strdup (constraints[i + noutputs]));
2658
2659 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2660 clobber_conflict_found = 1;
2661 }
2662
2663 /* Protect all the operands from the queue now that they have all been
2664 evaluated. */
2665
2666 generating_concat_p = 0;
2667
2668 /* For in-out operands, copy output rtx to input rtx. */
2669 for (i = 0; i < ninout; i++)
2670 {
2671 int j = inout_opnum[i];
2672 char buffer[16];
2673
2674 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
2675 = output_rtx[j];
2676
2677 sprintf (buffer, "%d", j);
2678 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
2679 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
2680 }
2681
2682 /* Copy labels to the vector. */
2683 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
2684 {
2685 rtx r;
2686 /* If asm goto has any labels in the fallthru basic block, use
2687 a label that we emit immediately after the asm goto. Expansion
2688 may insert further instructions into the same basic block after
2689 asm goto and if we don't do this, insertion of instructions on
2690 the fallthru edge might misbehave. See PR58670. */
2691 if (fallthru_bb
2692 && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
2693 {
2694 if (fallthru_label == NULL_RTX)
2695 fallthru_label = gen_label_rtx ();
2696 r = fallthru_label;
2697 }
2698 else
2699 r = label_rtx (TREE_VALUE (tail));
2700 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
2701 }
2702
2703 generating_concat_p = old_generating_concat_p;
2704
2705 /* Now, for each output, construct an rtx
2706 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
2707 ARGVEC CONSTRAINTS OPNAMES))
2708 If there is more than one, put them inside a PARALLEL. */
2709
2710 if (nlabels > 0 && nclobbers == 0)
2711 {
2712 gcc_assert (noutputs == 0);
2713 emit_jump_insn (body);
2714 }
2715 else if (noutputs == 0 && nclobbers == 0)
2716 {
2717 /* No output operands: put in a raw ASM_OPERANDS rtx. */
2718 emit_insn (body);
2719 }
2720 else if (noutputs == 1 && nclobbers == 0)
2721 {
2722 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
2723 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
2724 }
2725 else
2726 {
2727 rtx obody = body;
2728 int num = noutputs;
2729
2730 if (num == 0)
2731 num = 1;
2732
2733 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
2734
2735 /* For each output operand, store a SET. */
2736 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2737 {
2738 XVECEXP (body, 0, i)
2739 = gen_rtx_SET (VOIDmode,
2740 output_rtx[i],
2741 gen_rtx_ASM_OPERANDS
2742 (GET_MODE (output_rtx[i]),
2743 ggc_strdup (TREE_STRING_POINTER (string)),
2744 ggc_strdup (constraints[i]),
2745 i, argvec, constraintvec, labelvec, locus));
2746
2747 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
2748 }
2749
2750 /* If there are no outputs (but there are some clobbers)
2751 store the bare ASM_OPERANDS into the PARALLEL. */
2752
2753 if (i == 0)
2754 XVECEXP (body, 0, i++) = obody;
2755
2756 /* Store (clobber REG) for each clobbered register specified. */
2757
2758 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2759 {
2760 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2761 int reg, nregs;
2762 int j = decode_reg_name_and_count (regname, &nregs);
2763 rtx clobbered_reg;
2764
2765 if (j < 0)
2766 {
2767 if (j == -3) /* `cc', which is not a register */
2768 continue;
2769
2770 if (j == -4) /* `memory', don't cache memory across asm */
2771 {
2772 XVECEXP (body, 0, i++)
2773 = gen_rtx_CLOBBER (VOIDmode,
2774 gen_rtx_MEM
2775 (BLKmode,
2776 gen_rtx_SCRATCH (VOIDmode)));
2777 continue;
2778 }
2779
2780 /* Ignore unknown register, error already signaled. */
2781 continue;
2782 }
2783
2784 for (reg = j; reg < j + nregs; reg++)
2785 {
2786 /* Use QImode since that's guaranteed to clobber just
 2787	     one reg.  */
2788 clobbered_reg = gen_rtx_REG (QImode, reg);
2789
2790 /* Do sanity check for overlap between clobbers and
2791 respectively input and outputs that hasn't been
2792 handled. Such overlap should have been detected and
2793 reported above. */
2794 if (!clobber_conflict_found)
2795 {
2796 int opno;
2797
2798 /* We test the old body (obody) contents to avoid
2799 tripping over the under-construction body. */
2800 for (opno = 0; opno < noutputs; opno++)
2801 if (reg_overlap_mentioned_p (clobbered_reg,
2802 output_rtx[opno]))
2803 internal_error
2804 ("asm clobber conflict with output operand");
2805
2806 for (opno = 0; opno < ninputs - ninout; opno++)
2807 if (reg_overlap_mentioned_p (clobbered_reg,
2808 ASM_OPERANDS_INPUT (obody,
2809 opno)))
2810 internal_error
2811 ("asm clobber conflict with input operand");
2812 }
2813
2814 XVECEXP (body, 0, i++)
2815 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
2816 }
2817 }
2818
2819 if (nlabels > 0)
2820 emit_jump_insn (body);
2821 else
2822 emit_insn (body);
2823 }
2824
2825 if (fallthru_label)
2826 emit_label (fallthru_label);
2827
2828 /* For any outputs that needed reloading into registers, spill them
2829 back to where they belong. */
2830 for (i = 0; i < noutputs; ++i)
2831 if (real_output_rtx[i])
2832 emit_move_insn (real_output_rtx[i], output_rtx[i]);
2833
2834 crtl->has_asm_statement = 1;
2835 free_temp_slots ();
2836}
2837
2838
2839static void
2840expand_asm_stmt (gimple stmt)
2841{
2842 int noutputs;
2843 tree outputs, tail, t;
2844 tree *o;
2845 size_t i, n;
2846 const char *s;
2847 tree str, out, in, cl, labels;
2848 location_t locus = gimple_location (stmt);
2849 basic_block fallthru_bb = NULL;
2850
2851 /* Meh... convert the gimple asm operands into real tree lists.
2852 Eventually we should make all routines work on the vectors instead
2853 of relying on TREE_CHAIN. */
2854 out = NULL_TREE;
2855 n = gimple_asm_noutputs (stmt);
2856 if (n > 0)
2857 {
2858 t = out = gimple_asm_output_op (stmt, 0);
2859 for (i = 1; i < n; i++)
2860 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
2861 }
2862
2863 in = NULL_TREE;
2864 n = gimple_asm_ninputs (stmt);
2865 if (n > 0)
2866 {
2867 t = in = gimple_asm_input_op (stmt, 0);
2868 for (i = 1; i < n; i++)
2869 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
2870 }
2871
2872 cl = NULL_TREE;
2873 n = gimple_asm_nclobbers (stmt);
2874 if (n > 0)
2875 {
2876 t = cl = gimple_asm_clobber_op (stmt, 0);
2877 for (i = 1; i < n; i++)
2878 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
2879 }
2880
2881 labels = NULL_TREE;
2882 n = gimple_asm_nlabels (stmt);
2883 if (n > 0)
2884 {
2885 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
2886 if (fallthru)
2887 fallthru_bb = fallthru->dest;
2888 t = labels = gimple_asm_label_op (stmt, 0);
2889 for (i = 1; i < n; i++)
2890 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
2891 }
2892
2893 s = gimple_asm_string (stmt);
2894 str = build_string (strlen (s), s);
2895
2896 if (gimple_asm_input_p (stmt))
2897 {
2898 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
2899 return;
2900 }
2901
2902 outputs = out;
2903 noutputs = gimple_asm_noutputs (stmt);
2904 /* o[I] is the place that output number I should be written. */
2905 o = (tree *) alloca (noutputs * sizeof (tree));
2906
2907 /* Record the contents of OUTPUTS before it is modified. */
2908 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2909 o[i] = TREE_VALUE (tail);
2910
2911 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
2912 OUTPUTS some trees for where the values were actually stored. */
2913 expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
2914 gimple_asm_volatile_p (stmt), locus);
2915
2916 /* Copy all the intermediate outputs into the specified outputs. */
2917 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2918 {
2919 if (o[i] != TREE_VALUE (tail))
2920 {
2921 expand_assignment (o[i], TREE_VALUE (tail), false);
2922 free_temp_slots ();
2923
2924 /* Restore the original value so that it's correct the next
2925 time we expand this function. */
2926 TREE_VALUE (tail) = o[i];
2927 }
2928 }
2929}
2930
2931/* Emit code to jump to the address
2932 specified by the pointer expression EXP. */
2933
2934static void
2935expand_computed_goto (tree exp)
2936{
2937 rtx x = expand_normal (exp);
2938
2939 x = convert_memory_address (Pmode, x);
2940
2941 do_pending_stack_adjust ();
2942 emit_indirect_jump (x);
2943}
2944
2945/* Generate RTL code for a `goto' statement with target label LABEL.
2946 LABEL should be a LABEL_DECL tree node that was or will later be
2947 defined with `expand_label'. */
2948
2949static void
2950expand_goto (tree label)
2951{
2952#ifdef ENABLE_CHECKING
2953 /* Check for a nonlocal goto to a containing function. Should have
2954 gotten translated to __builtin_nonlocal_goto. */
2955 tree context = decl_function_context (label);
2956 gcc_assert (!context || context == current_function_decl);
2957#endif
2958
2959 emit_jump (label_rtx (label));
2960}
2961
2962/* Output a return with no value. */
2963
2964static void
2965expand_null_return_1 (void)
2966{
2967 clear_pending_stack_adjust ();
2968 do_pending_stack_adjust ();
2969 emit_jump (return_label);
2970}
2971
2972/* Generate RTL to return from the current function, with no value.
2973 (That is, we do not do anything about returning any value.) */
2974
2975void
2976expand_null_return (void)
2977{
2978 /* If this function was declared to return a value, but we
2979 didn't, clobber the return registers so that they are not
2980 propagated live to the rest of the function. */
2981 clobber_return_register ();
2982
2983 expand_null_return_1 ();
2984}
2985
2986/* Generate RTL to return from the current function, with value VAL. */
2987
2988static void
2989expand_value_return (rtx val)
2990{
2991 /* Copy the value to the return location unless it's already there. */
2992
2993 tree decl = DECL_RESULT (current_function_decl);
2994 rtx return_reg = DECL_RTL (decl);
2995 if (return_reg != val)
2996 {
2997 tree funtype = TREE_TYPE (current_function_decl);
2998 tree type = TREE_TYPE (decl);
2999 int unsignedp = TYPE_UNSIGNED (type);
3000 enum machine_mode old_mode = DECL_MODE (decl);
3001 enum machine_mode mode;
3002 if (DECL_BY_REFERENCE (decl))
3003 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3004 else
3005 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3006
3007 if (mode != old_mode)
3008 val = convert_modes (mode, old_mode, val, unsignedp);
3009
3010 if (GET_CODE (return_reg) == PARALLEL)
3011 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3012 else
3013 emit_move_insn (return_reg, val);
3014 }
3015
3016 expand_null_return_1 ();
3017}
3018
3019/* Generate RTL to evaluate the expression RETVAL and return it
3020 from the current function. */
3021
3022static void
3023expand_return (tree retval)
3024{
3025 rtx result_rtl;
3026 rtx val = 0;
3027 tree retval_rhs;
3028
3029 /* If function wants no value, give it none. */
3030 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3031 {
3032 expand_normal (retval);
3033 expand_null_return ();
3034 return;
3035 }
3036
3037 if (retval == error_mark_node)
3038 {
3039 /* Treat this like a return of no value from a function that
3040 returns a value. */
3041 expand_null_return ();
3042 return;
3043 }
3044 else if ((TREE_CODE (retval) == MODIFY_EXPR
3045 || TREE_CODE (retval) == INIT_EXPR)
3046 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3047 retval_rhs = TREE_OPERAND (retval, 1);
3048 else
3049 retval_rhs = retval;
3050
3051 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3052
3053 /* If we are returning the RESULT_DECL, then the value has already
3054 been stored into it, so we don't have to do anything special. */
3055 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3056 expand_value_return (result_rtl);
3057
3058 /* If the result is an aggregate that is being returned in one (or more)
3059 registers, load the registers here. */
3060
3061 else if (retval_rhs != 0
3062 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3063 && REG_P (result_rtl))
3064 {
3065 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3066 if (val)
3067 {
3068 /* Use the mode of the result value on the return register. */
3069 PUT_MODE (result_rtl, GET_MODE (val));
3070 expand_value_return (val);
3071 }
3072 else
3073 expand_null_return ();
3074 }
3075 else if (retval_rhs != 0
3076 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3077 && (REG_P (result_rtl)
3078 || (GET_CODE (result_rtl) == PARALLEL)))
3079 {
3080 /* Calculate the return value into a temporary (usually a pseudo
3081 reg). */
3082 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3083 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3084
3085 val = assign_temp (nt, 0, 1);
3086 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3087 val = force_not_mem (val);
3088 /* Return the calculated value. */
3089 expand_value_return (val);
3090 }
3091 else
3092 {
3093 /* No hard reg used; calculate value into hard return reg. */
3094 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3095 expand_value_return (result_rtl);
3096 }
3097}
3098
28ed065e
MM
3099/* A subroutine of expand_gimple_stmt, expanding one gimple statement
3100 STMT that doesn't require special handling for outgoing edges. That
3101 is no tailcalls and no GIMPLE_COND. */
3102
3103static void
3104expand_gimple_stmt_1 (gimple stmt)
3105{
3106 tree op0;
c82fee88 3107
5368224f 3108 set_curr_insn_location (gimple_location (stmt));
c82fee88 3109
28ed065e
MM
3110 switch (gimple_code (stmt))
3111 {
3112 case GIMPLE_GOTO:
3113 op0 = gimple_goto_dest (stmt);
3114 if (TREE_CODE (op0) == LABEL_DECL)
3115 expand_goto (op0);
3116 else
3117 expand_computed_goto (op0);
3118 break;
3119 case GIMPLE_LABEL:
3120 expand_label (gimple_label_label (stmt));
3121 break;
3122 case GIMPLE_NOP:
3123 case GIMPLE_PREDICT:
3124 break;
28ed065e
MM
3125 case GIMPLE_SWITCH:
3126 expand_case (stmt);
3127 break;
3128 case GIMPLE_ASM:
3129 expand_asm_stmt (stmt);
3130 break;
3131 case GIMPLE_CALL:
3132 expand_call_stmt (stmt);
3133 break;
3134
3135 case GIMPLE_RETURN:
3136 op0 = gimple_return_retval (stmt);
3137
3138 if (op0 && op0 != error_mark_node)
3139 {
3140 tree result = DECL_RESULT (current_function_decl);
3141
3142 /* If we are not returning the current function's RESULT_DECL,
3143 build an assignment to it. */
3144 if (op0 != result)
3145 {
3146 /* I believe that a function's RESULT_DECL is unique. */
3147 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3148
3149 /* ??? We'd like to use simply expand_assignment here,
3150 but this fails if the value is of BLKmode but the return
3151 decl is a register. expand_return has special handling
3152 for this combination, which eventually should move
3153 to common code. See comments there. Until then, let's
3154 build a modify expression :-/ */
3155 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3156 result, op0);
3157 }
3158 }
3159 if (!op0)
3160 expand_null_return ();
3161 else
3162 expand_return (op0);
3163 break;
3164
3165 case GIMPLE_ASSIGN:
3166 {
3167 tree lhs = gimple_assign_lhs (stmt);
3168
3169 /* Tree expand used to fiddle with |= and &= of two bitfield
3170 COMPONENT_REFs here. This can't happen with gimple, the LHS
3171 of binary assigns must be a gimple reg. */
3172
3173 if (TREE_CODE (lhs) != SSA_NAME
3174 || get_gimple_rhs_class (gimple_expr_code (stmt))
3175 == GIMPLE_SINGLE_RHS)
3176 {
3177 tree rhs = gimple_assign_rhs1 (stmt);
3178 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3179 == GIMPLE_SINGLE_RHS);
3180 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
3181 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
47598145
MM
3182 if (TREE_CLOBBER_P (rhs))
 3183	    /* This is a clobber marking that this LHS is going
 3184	       out of scope.  */
3185 ;
3186 else
3187 expand_assignment (lhs, rhs,
3188 gimple_assign_nontemporal_move_p (stmt));
28ed065e
MM
3189 }
3190 else
3191 {
3192 rtx target, temp;
3193 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
3194 struct separate_ops ops;
3195 bool promoted = false;
3196
3197 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3198 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3199 promoted = true;
3200
3201 ops.code = gimple_assign_rhs_code (stmt);
3202 ops.type = TREE_TYPE (lhs);
3203 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
3204 {
0354c0c7
BS
3205 case GIMPLE_TERNARY_RHS:
3206 ops.op2 = gimple_assign_rhs3 (stmt);
3207 /* Fallthru */
28ed065e
MM
3208 case GIMPLE_BINARY_RHS:
3209 ops.op1 = gimple_assign_rhs2 (stmt);
3210 /* Fallthru */
3211 case GIMPLE_UNARY_RHS:
3212 ops.op0 = gimple_assign_rhs1 (stmt);
3213 break;
3214 default:
3215 gcc_unreachable ();
3216 }
3217 ops.location = gimple_location (stmt);
3218
3219 /* If we want to use a nontemporal store, force the value to
3220 register first. If we store into a promoted register,
3221 don't directly expand to target. */
3222 temp = nontemporal || promoted ? NULL_RTX : target;
3223 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3224 EXPAND_NORMAL);
3225
3226 if (temp == target)
3227 ;
3228 else if (promoted)
3229 {
4e18a7d4 3230 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
28ed065e
MM
3231 /* If TEMP is a VOIDmode constant, use convert_modes to make
3232 sure that we properly convert it. */
3233 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3234 {
3235 temp = convert_modes (GET_MODE (target),
3236 TYPE_MODE (ops.type),
4e18a7d4 3237 temp, unsignedp);
28ed065e 3238 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4e18a7d4 3239 GET_MODE (target), temp, unsignedp);
28ed065e
MM
3240 }
3241
4e18a7d4 3242 convert_move (SUBREG_REG (target), temp, unsignedp);
28ed065e
MM
3243 }
3244 else if (nontemporal && emit_storent_insn (target, temp))
3245 ;
3246 else
3247 {
3248 temp = force_operand (temp, target);
3249 if (temp != target)
3250 emit_move_insn (target, temp);
3251 }
3252 }
3253 }
3254 break;
3255
3256 default:
3257 gcc_unreachable ();
3258 }
3259}
3260
3261/* Expand one gimple statement STMT and return the last RTL instruction
3262 before any of the newly generated ones.
3263
3264 In addition to generating the necessary RTL instructions this also
3265 sets REG_EH_REGION notes if necessary and sets the current source
3266 location for diagnostics. */
3267
3268static rtx
3269expand_gimple_stmt (gimple stmt)
3270{
28ed065e 3271 location_t saved_location = input_location;
c82fee88
EB
3272 rtx last = get_last_insn ();
3273 int lp_nr;
28ed065e 3274
28ed065e
MM
3275 gcc_assert (cfun);
3276
c82fee88
EB
3277 /* We need to save and restore the current source location so that errors
3278 discovered during expansion are emitted with the right location. But
3279 it would be better if the diagnostic routines used the source location
3280 embedded in the tree nodes rather than globals. */
28ed065e 3281 if (gimple_has_location (stmt))
c82fee88 3282 input_location = gimple_location (stmt);
28ed065e
MM
3283
3284 expand_gimple_stmt_1 (stmt);
c82fee88 3285
28ed065e
MM
3286 /* Free any temporaries used to evaluate this statement. */
3287 free_temp_slots ();
3288
3289 input_location = saved_location;
3290
3291 /* Mark all insns that may trap. */
1d65f45c
RH
3292 lp_nr = lookup_stmt_eh_lp (stmt);
3293 if (lp_nr)
28ed065e
MM
3294 {
3295 rtx insn;
3296 for (insn = next_real_insn (last); insn;
3297 insn = next_real_insn (insn))
3298 {
3299 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3300 /* If we want exceptions for non-call insns, any
3301 may_trap_p instruction may throw. */
3302 && GET_CODE (PATTERN (insn)) != CLOBBER
3303 && GET_CODE (PATTERN (insn)) != USE
1d65f45c
RH
3304 && insn_could_throw_p (insn))
3305 make_reg_eh_region_note (insn, 0, lp_nr);
28ed065e
MM
3306 }
3307 }
3308
3309 return last;
3310}
3311
726a989a 3312/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
224e770b
RH
3313 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3314 generated a tail call (something that might be denied by the ABI
cea49550
RH
3315 rules governing the call; see calls.c).
3316
3317 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3318 can still reach the rest of BB. The case here is __builtin_sqrt,
3319 where the NaN result goes through the external function (with a
3320 tailcall) and the normal result happens via a sqrt instruction. */
80c7a9eb
RH
3321
3322static basic_block
726a989a 3323expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
80c7a9eb 3324{
b7211528 3325 rtx last2, last;
224e770b 3326 edge e;
628f6a4e 3327 edge_iterator ei;
224e770b
RH
3328 int probability;
3329 gcov_type count;
80c7a9eb 3330
28ed065e 3331 last2 = last = expand_gimple_stmt (stmt);
80c7a9eb
RH
3332
3333 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
3334 if (CALL_P (last) && SIBLING_CALL_P (last))
3335 goto found;
80c7a9eb 3336
726a989a 3337 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 3338
cea49550 3339 *can_fallthru = true;
224e770b 3340 return NULL;
80c7a9eb 3341
224e770b
RH
3342 found:
3343 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3344 Any instructions emitted here are about to be deleted. */
3345 do_pending_stack_adjust ();
3346
3347 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3348 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3349 EH or abnormal edges, we shouldn't have created a tail call in
3350 the first place. So it seems to me we should just be removing
3351 all edges here, or redirecting the existing fallthru edge to
3352 the exit block. */
3353
224e770b
RH
3354 probability = 0;
3355 count = 0;
224e770b 3356
628f6a4e
BE
3357 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3358 {
224e770b
RH
3359 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3360 {
3361 if (e->dest != EXIT_BLOCK_PTR)
80c7a9eb 3362 {
224e770b
RH
3363 e->dest->count -= e->count;
3364 e->dest->frequency -= EDGE_FREQUENCY (e);
3365 if (e->dest->count < 0)
c22cacf3 3366 e->dest->count = 0;
224e770b 3367 if (e->dest->frequency < 0)
c22cacf3 3368 e->dest->frequency = 0;
80c7a9eb 3369 }
224e770b
RH
3370 count += e->count;
3371 probability += e->probability;
3372 remove_edge (e);
80c7a9eb 3373 }
628f6a4e
BE
3374 else
3375 ei_next (&ei);
80c7a9eb
RH
3376 }
3377
224e770b
RH
3378 /* This is somewhat ugly: the call_expr expander often emits instructions
3379 after the sibcall (to perform the function return). These confuse the
12eff7b7 3380 find_many_sub_basic_blocks code, so we need to get rid of these. */
224e770b 3381 last = NEXT_INSN (last);
341c100f 3382 gcc_assert (BARRIER_P (last));
cea49550
RH
3383
3384 *can_fallthru = false;
224e770b
RH
3385 while (NEXT_INSN (last))
3386 {
3387	  /* For instance, an sqrt builtin expander expands an if with a
3388	     sibcall in the then arm and a label for the else arm.  */
3389 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
3390 {
3391 *can_fallthru = true;
3392 break;
3393 }
224e770b
RH
3394 delete_insn (NEXT_INSN (last));
3395 }
3396
3397 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
3398 e->probability += probability;
3399 e->count += count;
3400 BB_END (bb) = last;
3401 update_bb_for_insn (bb);
3402
3403 if (NEXT_INSN (last))
3404 {
3405 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3406
3407 last = BB_END (bb);
3408 if (BARRIER_P (last))
3409 BB_END (bb) = PREV_INSN (last);
3410 }
3411
726a989a 3412 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 3413
224e770b 3414 return bb;
80c7a9eb
RH
3415}
3416
b5b8b0ac
AO
3417/* Return the difference between the floor and the truncated result of
3418 a signed division by OP1 with remainder MOD. */
3419static rtx
3420floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3421{
3422 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3423 return gen_rtx_IF_THEN_ELSE
3424 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3425 gen_rtx_IF_THEN_ELSE
3426 (mode, gen_rtx_LT (BImode,
3427 gen_rtx_DIV (mode, op1, mod),
3428 const0_rtx),
3429 constm1_rtx, const0_rtx),
3430 const0_rtx);
3431}
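
/* A worked example (illustrative only, assuming truncating signed
   division as in C): for -7 / 2 the truncated quotient is -3 with
   remainder MOD = -1, while the floor quotient is -4.  Since MOD != 0
   and OP1 / MOD = 2 / -1 < 0, the expression above yields -1, and
   -3 + -1 = -4.  When MOD is zero or the operands have the same sign,
   the adjustment is 0.  */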
3432
3433/* Return the difference between the ceil and the truncated result of
3434 a signed division by OP1 with remainder MOD. */
3435static rtx
3436ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3437{
3438 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3439 return gen_rtx_IF_THEN_ELSE
3440 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3441 gen_rtx_IF_THEN_ELSE
3442 (mode, gen_rtx_GT (BImode,
3443 gen_rtx_DIV (mode, op1, mod),
3444 const0_rtx),
3445 const1_rtx, const0_rtx),
3446 const0_rtx);
3447}
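
/* Illustrative example: for 7 / 2 the truncated quotient is 3 with
   remainder MOD = 1, while the ceiling quotient is 4.  MOD != 0 and
   OP1 / MOD = 2 / 1 > 0, so the adjustment is +1 and 3 + 1 = 4.  For
   operands of opposite sign (e.g. -7 / 2) truncation already rounds
   upwards, and the adjustment is 0.  */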
3448
3449/* Return the difference between the ceil and the truncated result of
3450 an unsigned division by OP1 with remainder MOD. */
3451static rtx
3452ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3453{
3454 /* (mod != 0 ? 1 : 0) */
3455 return gen_rtx_IF_THEN_ELSE
3456 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3457 const1_rtx, const0_rtx);
3458}
3459
3460/* Return the difference between the rounded and the truncated result
3461 of a signed division by OP1 with remainder MOD. Halfway cases are
3462 rounded away from zero, rather than to the nearest even number. */
3463static rtx
3464round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3465{
3466 /* (abs (mod) >= abs (op1) - abs (mod)
3467 ? (op1 / mod > 0 ? 1 : -1)
3468 : 0) */
3469 return gen_rtx_IF_THEN_ELSE
3470 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3471 gen_rtx_MINUS (mode,
3472 gen_rtx_ABS (mode, op1),
3473 gen_rtx_ABS (mode, mod))),
3474 gen_rtx_IF_THEN_ELSE
3475 (mode, gen_rtx_GT (BImode,
3476 gen_rtx_DIV (mode, op1, mod),
3477 const0_rtx),
3478 const1_rtx, constm1_rtx),
3479 const0_rtx);
3480}
3481
3482/* Return the difference between the rounded and the truncated result
3483 of an unsigned division by OP1 with remainder MOD. Halfway cases
3484 are rounded away from zero, rather than to the nearest even
3485 number. */
3486static rtx
3487round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3488{
3489 /* (mod >= op1 - mod ? 1 : 0) */
3490 return gen_rtx_IF_THEN_ELSE
3491 (mode, gen_rtx_GE (BImode, mod,
3492 gen_rtx_MINUS (mode, op1, mod)),
3493 const1_rtx, const0_rtx);
3494}
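
/* Illustrative example: for unsigned 7 / 2 the truncated quotient is 3
   with remainder MOD = 1; MOD >= OP1 - MOD (1 >= 1), so the adjustment
   is +1 and the rounded result is 4, i.e. the halfway case rounds away
   from zero.  For 9 / 4, MOD = 1 is less than OP1 - MOD = 3, so the
   adjustment is 0 and the result stays 2.  */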
3495
dda2da58
AO
3496/* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
3497 any rtl. */
3498
3499static rtx
f61c6f34
JJ
3500convert_debug_memory_address (enum machine_mode mode, rtx x,
3501 addr_space_t as)
dda2da58
AO
3502{
3503 enum machine_mode xmode = GET_MODE (x);
3504
3505#ifndef POINTERS_EXTEND_UNSIGNED
f61c6f34
JJ
3506 gcc_assert (mode == Pmode
3507 || mode == targetm.addr_space.address_mode (as));
dda2da58
AO
3508 gcc_assert (xmode == mode || xmode == VOIDmode);
3509#else
f61c6f34 3510 rtx temp;
f61c6f34 3511
639d4bb8 3512 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
dda2da58
AO
3513
3514 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3515 return x;
3516
69660a70 3517 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
dda2da58
AO
3518 x = simplify_gen_subreg (mode, x, xmode,
3519 subreg_lowpart_offset
3520 (mode, xmode));
3521 else if (POINTERS_EXTEND_UNSIGNED > 0)
3522 x = gen_rtx_ZERO_EXTEND (mode, x);
3523 else if (!POINTERS_EXTEND_UNSIGNED)
3524 x = gen_rtx_SIGN_EXTEND (mode, x);
3525 else
f61c6f34
JJ
3526 {
3527 switch (GET_CODE (x))
3528 {
3529 case SUBREG:
3530 if ((SUBREG_PROMOTED_VAR_P (x)
3531 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3532 || (GET_CODE (SUBREG_REG (x)) == PLUS
3533 && REG_P (XEXP (SUBREG_REG (x), 0))
3534 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3535 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3536 && GET_MODE (SUBREG_REG (x)) == mode)
3537 return SUBREG_REG (x);
3538 break;
3539 case LABEL_REF:
3540 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
3541 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3542 return temp;
3543 case SYMBOL_REF:
3544 temp = shallow_copy_rtx (x);
3545 PUT_MODE (temp, mode);
3546 return temp;
3547 case CONST:
3548 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3549 if (temp)
3550 temp = gen_rtx_CONST (mode, temp);
3551 return temp;
3552 case PLUS:
3553 case MINUS:
3554 if (CONST_INT_P (XEXP (x, 1)))
3555 {
3556 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3557 if (temp)
3558 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3559 }
3560 break;
3561 default:
3562 break;
3563 }
3564 /* Don't know how to express ptr_extend as an operation in debug info. */
3565 return NULL;
3566 }
dda2da58
AO
3567#endif /* POINTERS_EXTEND_UNSIGNED */
3568
3569 return x;
3570}
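
/* A minimal sketch of the extension path above (assuming a target where
   Pmode is wider than ptr_mode and POINTERS_EXTEND_UNSIGNED is
   positive): converting a ptr_mode value X to Pmode simply wraps it as
   (zero_extend:Pmode X), built without emitting any instructions, which
   is all that debug info needs.  */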
3571
12c5ffe5
EB
3572/* Return an RTX equivalent to the value of the parameter DECL. */
3573
3574static rtx
3575expand_debug_parm_decl (tree decl)
3576{
3577 rtx incoming = DECL_INCOMING_RTL (decl);
3578
3579 if (incoming
3580 && GET_MODE (incoming) != BLKmode
3581 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3582 || (MEM_P (incoming)
3583 && REG_P (XEXP (incoming, 0))
3584 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3585 {
3586 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3587
3588#ifdef HAVE_window_save
3589 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3590 If the target machine has an explicit window save instruction, the
3591 actual entry value is the corresponding OUTGOING_REGNO instead. */
3592 if (REG_P (incoming)
3593 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
3594 incoming
3595 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
3596 OUTGOING_REGNO (REGNO (incoming)), 0);
3597 else if (MEM_P (incoming))
3598 {
3599 rtx reg = XEXP (incoming, 0);
3600 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
3601 {
3602 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
3603 incoming = replace_equiv_address_nv (incoming, reg);
3604 }
6cfa417f
JJ
3605 else
3606 incoming = copy_rtx (incoming);
12c5ffe5
EB
3607 }
3608#endif
3609
3610 ENTRY_VALUE_EXP (rtl) = incoming;
3611 return rtl;
3612 }
3613
3614 if (incoming
3615 && GET_MODE (incoming) != BLKmode
3616 && !TREE_ADDRESSABLE (decl)
3617 && MEM_P (incoming)
3618 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3619 || (GET_CODE (XEXP (incoming, 0)) == PLUS
3620 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
3621 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
6cfa417f 3622 return copy_rtx (incoming);
12c5ffe5
EB
3623
3624 return NULL_RTX;
3625}
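
/* A concrete instance of the window-save case above, assuming a
   SPARC-like target with register windows: an argument whose
   DECL_INCOMING_RTL is the callee's %i0 was passed by the caller in
   %o0, so the ENTRY_VALUE recorded for debug info names the
   corresponding OUTGOING_REGNO register rather than the incoming
   one.  */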
3626
3627/* Return an RTX equivalent to the value of the tree expression EXP. */
b5b8b0ac
AO
3628
3629static rtx
3630expand_debug_expr (tree exp)
3631{
3632 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3633 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2ba172e0 3634 enum machine_mode inner_mode = VOIDmode;
b5b8b0ac 3635 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
09e881c9 3636 addr_space_t as;
b5b8b0ac
AO
3637
3638 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3639 {
3640 case tcc_expression:
3641 switch (TREE_CODE (exp))
3642 {
3643 case COND_EXPR:
7ece48b1 3644 case DOT_PROD_EXPR:
0354c0c7
BS
3645 case WIDEN_MULT_PLUS_EXPR:
3646 case WIDEN_MULT_MINUS_EXPR:
0f59b812 3647 case FMA_EXPR:
b5b8b0ac
AO
3648 goto ternary;
3649
3650 case TRUTH_ANDIF_EXPR:
3651 case TRUTH_ORIF_EXPR:
3652 case TRUTH_AND_EXPR:
3653 case TRUTH_OR_EXPR:
3654 case TRUTH_XOR_EXPR:
3655 goto binary;
3656
3657 case TRUTH_NOT_EXPR:
3658 goto unary;
3659
3660 default:
3661 break;
3662 }
3663 break;
3664
3665 ternary:
3666 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
3667 if (!op2)
3668 return NULL_RTX;
3669 /* Fall through. */
3670
3671 binary:
3672 case tcc_binary:
3673 case tcc_comparison:
3674 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3675 if (!op1)
3676 return NULL_RTX;
3677 /* Fall through. */
3678
3679 unary:
3680 case tcc_unary:
2ba172e0 3681 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
3682 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3683 if (!op0)
3684 return NULL_RTX;
3685 break;
3686
3687 case tcc_type:
3688 case tcc_statement:
3689 gcc_unreachable ();
3690
3691 case tcc_constant:
3692 case tcc_exceptional:
3693 case tcc_declaration:
3694 case tcc_reference:
3695 case tcc_vl_exp:
3696 break;
3697 }
3698
3699 switch (TREE_CODE (exp))
3700 {
3701 case STRING_CST:
3702 if (!lookup_constant_def (exp))
3703 {
e1b243a8
JJ
3704 if (strlen (TREE_STRING_POINTER (exp)) + 1
3705 != (size_t) TREE_STRING_LENGTH (exp))
3706 return NULL_RTX;
b5b8b0ac
AO
3707 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
3708 op0 = gen_rtx_MEM (BLKmode, op0);
3709 set_mem_attributes (op0, exp, 0);
3710 return op0;
3711 }
3712 /* Fall through... */
3713
3714 case INTEGER_CST:
3715 case REAL_CST:
3716 case FIXED_CST:
3717 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
3718 return op0;
3719
3720 case COMPLEX_CST:
3721 gcc_assert (COMPLEX_MODE_P (mode));
3722 op0 = expand_debug_expr (TREE_REALPART (exp));
b5b8b0ac 3723 op1 = expand_debug_expr (TREE_IMAGPART (exp));
b5b8b0ac
AO
3724 return gen_rtx_CONCAT (mode, op0, op1);
3725
0ca5af51
AO
3726 case DEBUG_EXPR_DECL:
3727 op0 = DECL_RTL_IF_SET (exp);
3728
3729 if (op0)
3730 return op0;
3731
3732 op0 = gen_rtx_DEBUG_EXPR (mode);
e4fb38bd 3733 DEBUG_EXPR_TREE_DECL (op0) = exp;
0ca5af51
AO
3734 SET_DECL_RTL (exp, op0);
3735
3736 return op0;
3737
b5b8b0ac
AO
3738 case VAR_DECL:
3739 case PARM_DECL:
3740 case FUNCTION_DECL:
3741 case LABEL_DECL:
3742 case CONST_DECL:
3743 case RESULT_DECL:
3744 op0 = DECL_RTL_IF_SET (exp);
3745
3746 /* This decl was probably optimized away. */
3747 if (!op0)
e1b243a8
JJ
3748 {
3749 if (TREE_CODE (exp) != VAR_DECL
3750 || DECL_EXTERNAL (exp)
3751 || !TREE_STATIC (exp)
3752 || !DECL_NAME (exp)
0fba566c 3753 || DECL_HARD_REGISTER (exp)
7d5fc814 3754 || DECL_IN_CONSTANT_POOL (exp)
0fba566c 3755 || mode == VOIDmode)
e1b243a8
JJ
3756 return NULL;
3757
b1aa0655 3758 op0 = make_decl_rtl_for_debug (exp);
e1b243a8
JJ
3759 if (!MEM_P (op0)
3760 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
3761 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
3762 return NULL;
3763 }
3764 else
3765 op0 = copy_rtx (op0);
b5b8b0ac 3766
06796564
JJ
3767 if (GET_MODE (op0) == BLKmode
3768 /* If op0 is not BLKmode, but mode is BLKmode, adjust_mode
3769 below would ICE. While it is likely a FE bug,
3770 try to be robust here. See PR43166. */
132b4e82
JJ
3771 || mode == BLKmode
3772 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
b5b8b0ac
AO
3773 {
3774 gcc_assert (MEM_P (op0));
3775 op0 = adjust_address_nv (op0, mode, 0);
3776 return op0;
3777 }
3778
3779 /* Fall through. */
3780
3781 adjust_mode:
3782 case PAREN_EXPR:
3783 case NOP_EXPR:
3784 case CONVERT_EXPR:
3785 {
2ba172e0 3786 inner_mode = GET_MODE (op0);
b5b8b0ac
AO
3787
3788 if (mode == inner_mode)
3789 return op0;
3790
3791 if (inner_mode == VOIDmode)
3792 {
2a8e30fb
MM
3793 if (TREE_CODE (exp) == SSA_NAME)
3794 inner_mode = TYPE_MODE (TREE_TYPE (exp));
3795 else
3796 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
3797 if (mode == inner_mode)
3798 return op0;
3799 }
3800
3801 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3802 {
3803 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3804 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3805 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3806 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3807 else
3808 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3809 }
3810 else if (FLOAT_MODE_P (mode))
3811 {
2a8e30fb 3812 gcc_assert (TREE_CODE (exp) != SSA_NAME);
b5b8b0ac
AO
3813 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3814 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
3815 else
3816 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
3817 }
3818 else if (FLOAT_MODE_P (inner_mode))
3819 {
3820 if (unsignedp)
3821 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3822 else
3823 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3824 }
3825 else if (CONSTANT_P (op0)
69660a70 3826 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
b5b8b0ac
AO
3827 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3828 subreg_lowpart_offset (mode,
3829 inner_mode));
1b47fe3f
JJ
3830 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
3831 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
3832 : unsignedp)
2ba172e0 3833 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
b5b8b0ac 3834 else
2ba172e0 3835 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
b5b8b0ac
AO
3836
3837 return op0;
3838 }
3839
70f34814 3840 case MEM_REF:
71f3a3f5
JJ
3841 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
3842 {
3843 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
3844 TREE_OPERAND (exp, 0),
3845 TREE_OPERAND (exp, 1));
3846 if (newexp)
3847 return expand_debug_expr (newexp);
3848 }
3849 /* FALLTHROUGH */
b5b8b0ac 3850 case INDIRECT_REF:
0a81f074 3851 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
3852 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3853 if (!op0)
3854 return NULL;
3855
cb115041
JJ
3856 if (TREE_CODE (exp) == MEM_REF)
3857 {
583ac69c
JJ
3858 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
3859 || (GET_CODE (op0) == PLUS
3860 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
3861 /* (mem (debug_implicit_ptr)) might confuse aliasing.
3862 Instead just use get_inner_reference. */
3863 goto component_ref;
3864
cb115041
JJ
3865 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3866 if (!op1 || !CONST_INT_P (op1))
3867 return NULL;
3868
0a81f074 3869 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
cb115041
JJ
3870 }
3871
09e881c9 3872 if (POINTER_TYPE_P (TREE_TYPE (exp)))
75421dcd 3873 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
09e881c9 3874 else
75421dcd 3875 as = ADDR_SPACE_GENERIC;
b5b8b0ac 3876
f61c6f34
JJ
3877 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
3878 op0, as);
3879 if (op0 == NULL_RTX)
3880 return NULL;
b5b8b0ac 3881
f61c6f34 3882 op0 = gen_rtx_MEM (mode, op0);
b5b8b0ac 3883 set_mem_attributes (op0, exp, 0);
71f3a3f5
JJ
3884 if (TREE_CODE (exp) == MEM_REF
3885 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
3886 set_mem_expr (op0, NULL_TREE);
09e881c9 3887 set_mem_addr_space (op0, as);
b5b8b0ac
AO
3888
3889 return op0;
3890
3891 case TARGET_MEM_REF:
4d948885
RG
3892 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
3893 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
b5b8b0ac
AO
3894 return NULL;
3895
3896 op0 = expand_debug_expr
4e25ca6b 3897 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
b5b8b0ac
AO
3898 if (!op0)
3899 return NULL;
3900
f61c6f34
JJ
3901 if (POINTER_TYPE_P (TREE_TYPE (exp)))
3902 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
3903 else
3904 as = ADDR_SPACE_GENERIC;
3905
3906 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
3907 op0, as);
3908 if (op0 == NULL_RTX)
3909 return NULL;
b5b8b0ac
AO
3910
3911 op0 = gen_rtx_MEM (mode, op0);
3912
3913 set_mem_attributes (op0, exp, 0);
09e881c9 3914 set_mem_addr_space (op0, as);
b5b8b0ac
AO
3915
3916 return op0;
3917
583ac69c 3918 component_ref:
b5b8b0ac
AO
3919 case ARRAY_REF:
3920 case ARRAY_RANGE_REF:
3921 case COMPONENT_REF:
3922 case BIT_FIELD_REF:
3923 case REALPART_EXPR:
3924 case IMAGPART_EXPR:
3925 case VIEW_CONVERT_EXPR:
3926 {
3927 enum machine_mode mode1;
3928 HOST_WIDE_INT bitsize, bitpos;
3929 tree offset;
3930 int volatilep = 0;
3931 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3932 &mode1, &unsignedp, &volatilep, false);
3933 rtx orig_op0;
3934
4f2a9af8
JJ
3935 if (bitsize == 0)
3936 return NULL;
3937
b5b8b0ac
AO
3938 orig_op0 = op0 = expand_debug_expr (tem);
3939
3940 if (!op0)
3941 return NULL;
3942
3943 if (offset)
3944 {
dda2da58
AO
3945 enum machine_mode addrmode, offmode;
3946
aa847cc8
JJ
3947 if (!MEM_P (op0))
3948 return NULL;
b5b8b0ac 3949
dda2da58
AO
3950 op0 = XEXP (op0, 0);
3951 addrmode = GET_MODE (op0);
3952 if (addrmode == VOIDmode)
3953 addrmode = Pmode;
3954
b5b8b0ac
AO
3955 op1 = expand_debug_expr (offset);
3956 if (!op1)
3957 return NULL;
3958
dda2da58
AO
3959 offmode = GET_MODE (op1);
3960 if (offmode == VOIDmode)
3961 offmode = TYPE_MODE (TREE_TYPE (offset));
3962
3963 if (addrmode != offmode)
3964 op1 = simplify_gen_subreg (addrmode, op1, offmode,
3965 subreg_lowpart_offset (addrmode,
3966 offmode));
3967
3968 /* Don't use offset_address here, we don't need a
3969 recognizable address, and we don't want to generate
3970 code. */
2ba172e0
JJ
3971 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
3972 op0, op1));
b5b8b0ac
AO
3973 }
3974
3975 if (MEM_P (op0))
3976 {
4f2a9af8
JJ
3977 if (mode1 == VOIDmode)
3978 /* Bitfield. */
3979 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
b5b8b0ac
AO
3980 if (bitpos >= BITS_PER_UNIT)
3981 {
3982 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
3983 bitpos %= BITS_PER_UNIT;
3984 }
3985 else if (bitpos < 0)
3986 {
4f2a9af8
JJ
3987 HOST_WIDE_INT units
3988 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
b5b8b0ac
AO
3989 op0 = adjust_address_nv (op0, mode1, units);
3990 bitpos += units * BITS_PER_UNIT;
3991 }
3992 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
3993 op0 = adjust_address_nv (op0, mode, 0);
3994 else if (GET_MODE (op0) != mode1)
3995 op0 = adjust_address_nv (op0, mode1, 0);
3996 else
3997 op0 = copy_rtx (op0);
3998 if (op0 == orig_op0)
3999 op0 = shallow_copy_rtx (op0);
4000 set_mem_attributes (op0, exp, 0);
4001 }
4002
4003 if (bitpos == 0 && mode == GET_MODE (op0))
4004 return op0;
4005
2d3fc6aa
JJ
4006 if (bitpos < 0)
4007 return NULL;
4008
88c04a5d
JJ
4009 if (GET_MODE (op0) == BLKmode)
4010 return NULL;
4011
b5b8b0ac
AO
4012 if ((bitpos % BITS_PER_UNIT) == 0
4013 && bitsize == GET_MODE_BITSIZE (mode1))
4014 {
4015 enum machine_mode opmode = GET_MODE (op0);
4016
b5b8b0ac 4017 if (opmode == VOIDmode)
9712cba0 4018 opmode = TYPE_MODE (TREE_TYPE (tem));
b5b8b0ac
AO
4019
4020 /* This condition may hold if we're expanding the address
4021 right past the end of an array that turned out not to
4022 be addressable (i.e., the address was only computed in
4023 debug stmts). The gen_subreg below would rightfully
4024 crash, and the address doesn't really exist, so just
4025 drop it. */
4026 if (bitpos >= GET_MODE_BITSIZE (opmode))
4027 return NULL;
4028
7d5d39bb
JJ
4029 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4030 return simplify_gen_subreg (mode, op0, opmode,
4031 bitpos / BITS_PER_UNIT);
b5b8b0ac
AO
4032 }
4033
4034 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4035 && TYPE_UNSIGNED (TREE_TYPE (exp))
4036 ? SIGN_EXTRACT
4037 : ZERO_EXTRACT, mode,
4038 GET_MODE (op0) != VOIDmode
9712cba0
JJ
4039 ? GET_MODE (op0)
4040 : TYPE_MODE (TREE_TYPE (tem)),
b5b8b0ac
AO
4041 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4042 }
4043
b5b8b0ac 4044 case ABS_EXPR:
2ba172e0 4045 return simplify_gen_unary (ABS, mode, op0, mode);
b5b8b0ac
AO
4046
4047 case NEGATE_EXPR:
2ba172e0 4048 return simplify_gen_unary (NEG, mode, op0, mode);
b5b8b0ac
AO
4049
4050 case BIT_NOT_EXPR:
2ba172e0 4051 return simplify_gen_unary (NOT, mode, op0, mode);
b5b8b0ac
AO
4052
4053 case FLOAT_EXPR:
2ba172e0
JJ
4054 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4055 0)))
4056 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4057 inner_mode);
b5b8b0ac
AO
4058
4059 case FIX_TRUNC_EXPR:
2ba172e0
JJ
4060 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4061 inner_mode);
b5b8b0ac
AO
4062
4063 case POINTER_PLUS_EXPR:
576319a7
DD
4064 /* For the rare target where pointers are not the same size as
4065 size_t, we need to check for mis-matched modes and correct
4066 the addend. */
4067 if (op0 && op1
4068 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4069 && GET_MODE (op0) != GET_MODE (op1))
4070 {
8369f38a
DD
4071 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4072 /* If OP0 is a partial mode, then we must truncate, even if it has
4073 the same bitsize as OP1 as GCC's representation of partial modes
4074 is opaque. */
4075 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4076 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
2ba172e0
JJ
4077 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4078 GET_MODE (op1));
576319a7
DD
4079 else
4080 /* We always sign-extend, regardless of the signedness of
4081 the operand, because the operand is always unsigned
4082 here even if the original C expression is signed. */
2ba172e0
JJ
4083 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4084 GET_MODE (op1));
576319a7
DD
4085 }
4086 /* Fall through. */
b5b8b0ac 4087 case PLUS_EXPR:
2ba172e0 4088 return simplify_gen_binary (PLUS, mode, op0, op1);
b5b8b0ac
AO
4089
4090 case MINUS_EXPR:
2ba172e0 4091 return simplify_gen_binary (MINUS, mode, op0, op1);
b5b8b0ac
AO
4092
4093 case MULT_EXPR:
2ba172e0 4094 return simplify_gen_binary (MULT, mode, op0, op1);
b5b8b0ac
AO
4095
4096 case RDIV_EXPR:
4097 case TRUNC_DIV_EXPR:
4098 case EXACT_DIV_EXPR:
4099 if (unsignedp)
2ba172e0 4100 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac 4101 else
2ba172e0 4102 return simplify_gen_binary (DIV, mode, op0, op1);
b5b8b0ac
AO
4103
4104 case TRUNC_MOD_EXPR:
2ba172e0 4105 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
b5b8b0ac
AO
4106
4107 case FLOOR_DIV_EXPR:
4108 if (unsignedp)
2ba172e0 4109 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac
AO
4110 else
4111 {
2ba172e0
JJ
4112 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4113 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4114 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0 4115 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4116 }
4117
4118 case FLOOR_MOD_EXPR:
4119 if (unsignedp)
2ba172e0 4120 return simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac
AO
4121 else
4122 {
2ba172e0 4123 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4124 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4125 adj = simplify_gen_unary (NEG, mode,
4126 simplify_gen_binary (MULT, mode, adj, op1),
4127 mode);
4128 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4129 }
4130
4131 case CEIL_DIV_EXPR:
4132 if (unsignedp)
4133 {
2ba172e0
JJ
4134 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4135 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4136 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0 4137 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4138 }
4139 else
4140 {
2ba172e0
JJ
4141 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4142 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4143 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0 4144 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4145 }
4146
4147 case CEIL_MOD_EXPR:
4148 if (unsignedp)
4149 {
2ba172e0 4150 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4151 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
4152 adj = simplify_gen_unary (NEG, mode,
4153 simplify_gen_binary (MULT, mode, adj, op1),
4154 mode);
4155 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4156 }
4157 else
4158 {
2ba172e0 4159 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4160 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4161 adj = simplify_gen_unary (NEG, mode,
4162 simplify_gen_binary (MULT, mode, adj, op1),
4163 mode);
4164 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4165 }
4166
4167 case ROUND_DIV_EXPR:
4168 if (unsignedp)
4169 {
2ba172e0
JJ
4170 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4171 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4172 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0 4173 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4174 }
4175 else
4176 {
2ba172e0
JJ
4177 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4178 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4179 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0 4180 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4181 }
4182
4183 case ROUND_MOD_EXPR:
4184 if (unsignedp)
4185 {
2ba172e0 4186 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4187 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
4188 adj = simplify_gen_unary (NEG, mode,
4189 simplify_gen_binary (MULT, mode, adj, op1),
4190 mode);
4191 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4192 }
4193 else
4194 {
2ba172e0 4195 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4196 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4197 adj = simplify_gen_unary (NEG, mode,
4198 simplify_gen_binary (MULT, mode, adj, op1),
4199 mode);
4200 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4201 }
4202
4203 case LSHIFT_EXPR:
2ba172e0 4204 return simplify_gen_binary (ASHIFT, mode, op0, op1);
b5b8b0ac
AO
4205
4206 case RSHIFT_EXPR:
4207 if (unsignedp)
2ba172e0 4208 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
b5b8b0ac 4209 else
2ba172e0 4210 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
b5b8b0ac
AO
4211
4212 case LROTATE_EXPR:
2ba172e0 4213 return simplify_gen_binary (ROTATE, mode, op0, op1);
b5b8b0ac
AO
4214
4215 case RROTATE_EXPR:
2ba172e0 4216 return simplify_gen_binary (ROTATERT, mode, op0, op1);
b5b8b0ac
AO
4217
4218 case MIN_EXPR:
2ba172e0 4219 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
b5b8b0ac
AO
4220
4221 case MAX_EXPR:
2ba172e0 4222 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
b5b8b0ac
AO
4223
4224 case BIT_AND_EXPR:
4225 case TRUTH_AND_EXPR:
2ba172e0 4226 return simplify_gen_binary (AND, mode, op0, op1);
b5b8b0ac
AO
4227
4228 case BIT_IOR_EXPR:
4229 case TRUTH_OR_EXPR:
2ba172e0 4230 return simplify_gen_binary (IOR, mode, op0, op1);
b5b8b0ac
AO
4231
4232 case BIT_XOR_EXPR:
4233 case TRUTH_XOR_EXPR:
2ba172e0 4234 return simplify_gen_binary (XOR, mode, op0, op1);
b5b8b0ac
AO
4235
4236 case TRUTH_ANDIF_EXPR:
4237 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4238
4239 case TRUTH_ORIF_EXPR:
4240 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4241
4242 case TRUTH_NOT_EXPR:
2ba172e0 4243 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
b5b8b0ac
AO
4244
4245 case LT_EXPR:
2ba172e0
JJ
4246 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4247 op0, op1);
b5b8b0ac
AO
4248
4249 case LE_EXPR:
2ba172e0
JJ
4250 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4251 op0, op1);
b5b8b0ac
AO
4252
4253 case GT_EXPR:
2ba172e0
JJ
4254 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4255 op0, op1);
b5b8b0ac
AO
4256
4257 case GE_EXPR:
2ba172e0
JJ
4258 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4259 op0, op1);
b5b8b0ac
AO
4260
4261 case EQ_EXPR:
2ba172e0 4262 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4263
4264 case NE_EXPR:
2ba172e0 4265 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4266
4267 case UNORDERED_EXPR:
2ba172e0 4268 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4269
4270 case ORDERED_EXPR:
2ba172e0 4271 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4272
4273 case UNLT_EXPR:
2ba172e0 4274 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4275
4276 case UNLE_EXPR:
2ba172e0 4277 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4278
4279 case UNGT_EXPR:
2ba172e0 4280 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4281
4282 case UNGE_EXPR:
2ba172e0 4283 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4284
4285 case UNEQ_EXPR:
2ba172e0 4286 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4287
4288 case LTGT_EXPR:
2ba172e0 4289 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4290
4291 case COND_EXPR:
4292 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4293
4294 case COMPLEX_EXPR:
4295 gcc_assert (COMPLEX_MODE_P (mode));
4296 if (GET_MODE (op0) == VOIDmode)
4297 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4298 if (GET_MODE (op1) == VOIDmode)
4299 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4300 return gen_rtx_CONCAT (mode, op0, op1);
4301
d02a5a4b
JJ
4302 case CONJ_EXPR:
4303 if (GET_CODE (op0) == CONCAT)
4304 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2ba172e0
JJ
4305 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4306 XEXP (op0, 1),
4307 GET_MODE_INNER (mode)));
d02a5a4b
JJ
4308 else
4309 {
4310 enum machine_mode imode = GET_MODE_INNER (mode);
4311 rtx re, im;
4312
4313 if (MEM_P (op0))
4314 {
4315 re = adjust_address_nv (op0, imode, 0);
4316 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4317 }
4318 else
4319 {
4320 enum machine_mode ifmode = int_mode_for_mode (mode);
4321 enum machine_mode ihmode = int_mode_for_mode (imode);
4322 rtx halfsize;
4323 if (ifmode == BLKmode || ihmode == BLKmode)
4324 return NULL;
4325 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4326 re = op0;
4327 if (mode != ifmode)
4328 re = gen_rtx_SUBREG (ifmode, re, 0);
4329 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4330 if (imode != ihmode)
4331 re = gen_rtx_SUBREG (imode, re, 0);
4332 im = copy_rtx (op0);
4333 if (mode != ifmode)
4334 im = gen_rtx_SUBREG (ifmode, im, 0);
4335 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4336 if (imode != ihmode)
4337 im = gen_rtx_SUBREG (imode, im, 0);
4338 }
4339 im = gen_rtx_NEG (imode, im);
4340 return gen_rtx_CONCAT (mode, re, im);
4341 }
4342
b5b8b0ac
AO
4343 case ADDR_EXPR:
4344 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4345 if (!op0 || !MEM_P (op0))
c8a27c40
JJ
4346 {
4347 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4348 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4349 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
f8cca67b
JJ
4350 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4351 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
c8a27c40
JJ
4352 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4353
4354 if (handled_component_p (TREE_OPERAND (exp, 0)))
4355 {
4356 HOST_WIDE_INT bitoffset, bitsize, maxsize;
4357 tree decl
4358 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
4359 &bitoffset, &bitsize, &maxsize);
4360 if ((TREE_CODE (decl) == VAR_DECL
4361 || TREE_CODE (decl) == PARM_DECL
4362 || TREE_CODE (decl) == RESULT_DECL)
f8cca67b
JJ
4363 && (!TREE_ADDRESSABLE (decl)
4364 || target_for_debug_bind (decl))
c8a27c40
JJ
4365 && (bitoffset % BITS_PER_UNIT) == 0
4366 && bitsize > 0
4367 && bitsize == maxsize)
0a81f074
RS
4368 {
4369 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4370 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4371 }
c8a27c40
JJ
4372 }
4373
9430b7ba
JJ
4374 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4375 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4376 == ADDR_EXPR)
4377 {
4378 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4379 0));
4380 if (op0 != NULL
4381 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4382 || (GET_CODE (op0) == PLUS
4383 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4384 && CONST_INT_P (XEXP (op0, 1)))))
4385 {
4386 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4387 1));
4388 if (!op1 || !CONST_INT_P (op1))
4389 return NULL;
4390
4391 return plus_constant (mode, op0, INTVAL (op1));
4392 }
4393 }
4394
c8a27c40
JJ
4395 return NULL;
4396 }
b5b8b0ac 4397
f61c6f34
JJ
4398 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
4399 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
dda2da58
AO
4400
4401 return op0;
b5b8b0ac
AO
4402
4403 case VECTOR_CST:
d2a12ae7
RG
4404 {
4405 unsigned i;
4406
4407 op0 = gen_rtx_CONCATN
4408 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4409
4410 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4411 {
4412 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4413 if (!op1)
4414 return NULL;
4415 XVECEXP (op0, 0, i) = op1;
4416 }
4417
4418 return op0;
4419 }
b5b8b0ac
AO
4420
4421 case CONSTRUCTOR:
47598145
MM
4422 if (TREE_CLOBBER_P (exp))
4423 return NULL;
4424 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
b5b8b0ac
AO
4425 {
4426 unsigned i;
4427 tree val;
4428
4429 op0 = gen_rtx_CONCATN
4430 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4431
4432 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4433 {
4434 op1 = expand_debug_expr (val);
4435 if (!op1)
4436 return NULL;
4437 XVECEXP (op0, 0, i) = op1;
4438 }
4439
4440 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4441 {
4442 op1 = expand_debug_expr
e8160c9a 4443 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
b5b8b0ac
AO
4444
4445 if (!op1)
4446 return NULL;
4447
4448 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4449 XVECEXP (op0, 0, i) = op1;
4450 }
4451
4452 return op0;
4453 }
4454 else
4455 goto flag_unsupported;
4456
4457 case CALL_EXPR:
4458 /* ??? Maybe handle some builtins? */
4459 return NULL;
4460
4461 case SSA_NAME:
4462 {
2a8e30fb
MM
4463 gimple g = get_gimple_for_ssa_name (exp);
4464 if (g)
4465 {
4466 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
4467 if (!op0)
4468 return NULL;
4469 }
4470 else
4471 {
4472 int part = var_to_partition (SA.map, exp);
b5b8b0ac 4473
2a8e30fb 4474 if (part == NO_PARTITION)
a58a8e4b
JJ
4475 {
4476	      /* If this is a reference to the incoming value of a parameter
4477		 that is never used in the code, or whose incoming value is
4478		 never used in the code, use the PARM_DECL's DECL_RTL if
4479		 set.  */
4480 if (SSA_NAME_IS_DEFAULT_DEF (exp)
4481 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
4482 {
12c5ffe5
EB
4483 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4484 if (op0)
4485 goto adjust_mode;
a58a8e4b 4486 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
12c5ffe5
EB
4487 if (op0)
4488 goto adjust_mode;
a58a8e4b
JJ
4489 }
4490 return NULL;
4491 }
b5b8b0ac 4492
2a8e30fb 4493 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
b5b8b0ac 4494
abfea58d 4495 op0 = copy_rtx (SA.partition_to_pseudo[part]);
2a8e30fb 4496 }
b5b8b0ac
AO
4497 goto adjust_mode;
4498 }
4499
4500 case ERROR_MARK:
4501 return NULL;
4502
7ece48b1
JJ
4503 /* Vector stuff. For most of the codes we don't have rtl codes. */
4504 case REALIGN_LOAD_EXPR:
4505 case REDUC_MAX_EXPR:
4506 case REDUC_MIN_EXPR:
4507 case REDUC_PLUS_EXPR:
4508 case VEC_COND_EXPR:
7ece48b1
JJ
4509 case VEC_LSHIFT_EXPR:
4510 case VEC_PACK_FIX_TRUNC_EXPR:
4511 case VEC_PACK_SAT_EXPR:
4512 case VEC_PACK_TRUNC_EXPR:
4513 case VEC_RSHIFT_EXPR:
4514 case VEC_UNPACK_FLOAT_HI_EXPR:
4515 case VEC_UNPACK_FLOAT_LO_EXPR:
4516 case VEC_UNPACK_HI_EXPR:
4517 case VEC_UNPACK_LO_EXPR:
4518 case VEC_WIDEN_MULT_HI_EXPR:
4519 case VEC_WIDEN_MULT_LO_EXPR:
3f30a9a6
RH
4520 case VEC_WIDEN_MULT_EVEN_EXPR:
4521 case VEC_WIDEN_MULT_ODD_EXPR:
36ba4aae
IR
4522 case VEC_WIDEN_LSHIFT_HI_EXPR:
4523 case VEC_WIDEN_LSHIFT_LO_EXPR:
3f3af9df 4524 case VEC_PERM_EXPR:
7ece48b1
JJ
4525 return NULL;
4526
98449720 4527 /* Misc codes. */
7ece48b1
JJ
4528 case ADDR_SPACE_CONVERT_EXPR:
4529 case FIXED_CONVERT_EXPR:
4530 case OBJ_TYPE_REF:
4531 case WITH_SIZE_EXPR:
4532 return NULL;
4533
4534 case DOT_PROD_EXPR:
4535 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4536 && SCALAR_INT_MODE_P (mode))
4537 {
2ba172e0
JJ
4538 op0
4539 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4540 0)))
4541 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4542 inner_mode);
4543 op1
4544 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4545 1)))
4546 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
4547 inner_mode);
4548 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4549 return simplify_gen_binary (PLUS, mode, op0, op2);
7ece48b1
JJ
4550 }
4551 return NULL;
4552
4553 case WIDEN_MULT_EXPR:
0354c0c7
BS
4554 case WIDEN_MULT_PLUS_EXPR:
4555 case WIDEN_MULT_MINUS_EXPR:
7ece48b1
JJ
4556 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4557 && SCALAR_INT_MODE_P (mode))
4558 {
2ba172e0 4559 inner_mode = GET_MODE (op0);
7ece48b1 4560 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5b58b39b 4561 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
7ece48b1 4562 else
5b58b39b 4563 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
7ece48b1 4564 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5b58b39b 4565 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
7ece48b1 4566 else
5b58b39b 4567 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
2ba172e0 4568 op0 = simplify_gen_binary (MULT, mode, op0, op1);
0354c0c7
BS
4569 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
4570 return op0;
4571 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
2ba172e0 4572 return simplify_gen_binary (PLUS, mode, op0, op2);
0354c0c7 4573 else
2ba172e0 4574 return simplify_gen_binary (MINUS, mode, op2, op0);
7ece48b1
JJ
4575 }
4576 return NULL;
4577
98449720
RH
4578 case MULT_HIGHPART_EXPR:
4579 /* ??? Similar to the above. */
4580 return NULL;
4581
7ece48b1 4582 case WIDEN_SUM_EXPR:
3f3af9df 4583 case WIDEN_LSHIFT_EXPR:
7ece48b1
JJ
4584 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4585 && SCALAR_INT_MODE_P (mode))
4586 {
2ba172e0
JJ
4587 op0
4588 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4589 0)))
4590 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4591 inner_mode);
3f3af9df
JJ
4592 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
4593 ? ASHIFT : PLUS, mode, op0, op1);
7ece48b1
JJ
4594 }
4595 return NULL;
4596
0f59b812 4597 case FMA_EXPR:
2ba172e0 4598 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
0f59b812 4599
b5b8b0ac
AO
4600 default:
4601 flag_unsupported:
4602#ifdef ENABLE_CHECKING
4603 debug_tree (exp);
4604 gcc_unreachable ();
4605#else
4606 return NULL;
4607#endif
4608 }
4609}
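
/* A minimal sketch of what expand_debug_expr produces, assuming the
   hypothetical SSA name a_1 has been assigned to pseudo register 100:
   for the debug-bound value a_1 + 4 in SImode it returns
   (plus:SI (reg:SI 100) (const_int 4)) without emitting any insns; if
   some operand cannot be represented, the whole expression collapses to
   NULL and the caller falls back to an unknown-location marker.  */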
4610
ddb555ed
JJ
4611/* Return an RTX equivalent to the source bind value of the tree expression
4612 EXP. */
4613
4614static rtx
4615expand_debug_source_expr (tree exp)
4616{
4617 rtx op0 = NULL_RTX;
4618 enum machine_mode mode = VOIDmode, inner_mode;
4619
4620 switch (TREE_CODE (exp))
4621 {
4622 case PARM_DECL:
4623 {
ddb555ed 4624 mode = DECL_MODE (exp);
12c5ffe5
EB
4625 op0 = expand_debug_parm_decl (exp);
4626 if (op0)
4627 break;
ddb555ed
JJ
4628 /* See if this isn't an argument that has been completely
4629 optimized out. */
4630 if (!DECL_RTL_SET_P (exp)
12c5ffe5 4631 && !DECL_INCOMING_RTL (exp)
ddb555ed
JJ
4632 && DECL_ABSTRACT_ORIGIN (current_function_decl))
4633 {
7b575cfa 4634 tree aexp = DECL_ORIGIN (exp);
ddb555ed
JJ
4635 if (DECL_CONTEXT (aexp)
4636 == DECL_ABSTRACT_ORIGIN (current_function_decl))
4637 {
9771b263 4638 vec<tree, va_gc> **debug_args;
ddb555ed
JJ
4639 unsigned int ix;
4640 tree ddecl;
ddb555ed
JJ
4641 debug_args = decl_debug_args_lookup (current_function_decl);
4642 if (debug_args != NULL)
4643 {
9771b263 4644 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
ddb555ed
JJ
4645 ix += 2)
4646 if (ddecl == aexp)
4647 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
4648 }
4649 }
4650 }
4651 break;
4652 }
4653 default:
4654 break;
4655 }
4656
4657 if (op0 == NULL_RTX)
4658 return NULL_RTX;
4659
4660 inner_mode = GET_MODE (op0);
4661 if (mode == inner_mode)
4662 return op0;
4663
4664 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4665 {
4666 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4667 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4668 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4669 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4670 else
4671 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4672 }
4673 else if (FLOAT_MODE_P (mode))
4674 gcc_unreachable ();
4675 else if (FLOAT_MODE_P (inner_mode))
4676 {
4677 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4678 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4679 else
4680 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4681 }
4682 else if (CONSTANT_P (op0)
4683 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
4684 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4685 subreg_lowpart_offset (mode, inner_mode));
4686 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4687 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4688 else
4689 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4690
4691 return op0;
4692}
4693
6cfa417f
JJ
4694/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
4695 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
4696 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
4697
4698static void
4699avoid_complex_debug_insns (rtx insn, rtx *exp_p, int depth)
4700{
4701 rtx exp = *exp_p;
4702
4703 if (exp == NULL_RTX)
4704 return;
4705
4706 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
4707 return;
4708
4709 if (depth == 4)
4710 {
4711 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
4712 rtx dval = make_debug_expr_from_rtl (exp);
4713
4714 /* Emit a debug bind insn before INSN. */
4715 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
4716 DEBUG_EXPR_TREE_DECL (dval), exp,
4717 VAR_INIT_STATUS_INITIALIZED);
4718
4719 emit_debug_insn_before (bind, insn);
4720 *exp_p = dval;
4721 return;
4722 }
4723
4724 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
4725 int i, j;
4726 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
4727 switch (*format_ptr++)
4728 {
4729 case 'e':
4730 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
4731 break;
4732
4733 case 'E':
4734 case 'V':
4735 for (j = 0; j < XVECLEN (exp, i); j++)
4736 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
4737 break;
4738
4739 default:
4740 break;
4741 }
4742}
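
/* A rough illustration of the depth limit, with hypothetical leaf
   operands a-f: given a location such as
     (plus (mult (plus (mult (plus a b) c) d) e) f)
   the recursion reaches depth 4 at (plus a b), replaces it with a fresh
   (debug_expr D#1) and emits a debug bind of D#1 to (plus a b) just
   before INSN, so that every remaining location stays at most four
   levels deep.  */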
4743
b5b8b0ac
AO
4744/* Expand the _LOCs in debug insns. We run this after expanding all
4745 regular insns, so that any variables referenced in the function
4746 will have their DECL_RTLs set. */
4747
4748static void
4749expand_debug_locations (void)
4750{
4751 rtx insn;
4752 rtx last = get_last_insn ();
4753 int save_strict_alias = flag_strict_aliasing;
4754
4755 /* New alias sets while setting up memory attributes cause
4756 -fcompare-debug failures, even though they don't bring about any
4757 codegen changes. */
4758 flag_strict_aliasing = 0;
4759
4760 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4761 if (DEBUG_INSN_P (insn))
4762 {
4763 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
6cfa417f 4764 rtx val, prev_insn, insn2;
b5b8b0ac
AO
4765 enum machine_mode mode;
4766
4767 if (value == NULL_TREE)
4768 val = NULL_RTX;
4769 else
4770 {
ddb555ed
JJ
4771 if (INSN_VAR_LOCATION_STATUS (insn)
4772 == VAR_INIT_STATUS_UNINITIALIZED)
4773 val = expand_debug_source_expr (value);
4774 else
4775 val = expand_debug_expr (value);
b5b8b0ac
AO
4776 gcc_assert (last == get_last_insn ());
4777 }
4778
4779 if (!val)
4780 val = gen_rtx_UNKNOWN_VAR_LOC ();
4781 else
4782 {
4783 mode = GET_MODE (INSN_VAR_LOCATION (insn));
4784
4785 gcc_assert (mode == GET_MODE (val)
4786 || (GET_MODE (val) == VOIDmode
33ffb5c5 4787 && (CONST_SCALAR_INT_P (val)
b5b8b0ac 4788 || GET_CODE (val) == CONST_FIXED
b5b8b0ac
AO
4789 || GET_CODE (val) == LABEL_REF)));
4790 }
4791
4792 INSN_VAR_LOCATION_LOC (insn) = val;
6cfa417f
JJ
4793 prev_insn = PREV_INSN (insn);
4794 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
4795 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
b5b8b0ac
AO
4796 }
4797
4798 flag_strict_aliasing = save_strict_alias;
4799}
4800
242229bb
JH
4801/* Expand basic block BB from GIMPLE trees to RTL. */
4802
4803static basic_block
f3ddd692 4804expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
242229bb 4805{
726a989a
RB
4806 gimple_stmt_iterator gsi;
4807 gimple_seq stmts;
4808 gimple stmt = NULL;
242229bb
JH
4809 rtx note, last;
4810 edge e;
628f6a4e 4811 edge_iterator ei;
8b11009b 4812 void **elt;
242229bb
JH
4813
4814 if (dump_file)
726a989a
RB
4815 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
4816 bb->index);
4817
4818 /* Note that since we are now transitioning from GIMPLE to RTL, we
4819 cannot use the gsi_*_bb() routines because they expect the basic
4820 block to be in GIMPLE, instead of RTL. Therefore, we need to
4821 access the BB sequence directly. */
4822 stmts = bb_seq (bb);
3e8b732e
MM
4823 bb->il.gimple.seq = NULL;
4824 bb->il.gimple.phi_nodes = NULL;
bf08ebeb 4825 rtl_profile_for_bb (bb);
5e2d947c
JH
4826 init_rtl_bb_info (bb);
4827 bb->flags |= BB_RTL;
4828
a9b77cd1
ZD
4829 /* Remove the RETURN_EXPR if we may fall though to the exit
4830 instead. */
726a989a
RB
4831 gsi = gsi_last (stmts);
4832 if (!gsi_end_p (gsi)
4833 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
a9b77cd1 4834 {
726a989a 4835 gimple ret_stmt = gsi_stmt (gsi);
a9b77cd1
ZD
4836
4837 gcc_assert (single_succ_p (bb));
4838 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
4839
4840 if (bb->next_bb == EXIT_BLOCK_PTR
726a989a 4841 && !gimple_return_retval (ret_stmt))
a9b77cd1 4842 {
726a989a 4843 gsi_remove (&gsi, false);
a9b77cd1
ZD
4844 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
4845 }
4846 }
4847
726a989a
RB
4848 gsi = gsi_start (stmts);
4849 if (!gsi_end_p (gsi))
8b11009b 4850 {
726a989a
RB
4851 stmt = gsi_stmt (gsi);
4852 if (gimple_code (stmt) != GIMPLE_LABEL)
4853 stmt = NULL;
8b11009b 4854 }
242229bb 4855
8b11009b
ZD
4856 elt = pointer_map_contains (lab_rtx_for_bb, bb);
4857
4858 if (stmt || elt)
242229bb
JH
4859 {
4860 last = get_last_insn ();
4861
8b11009b
ZD
4862 if (stmt)
4863 {
28ed065e 4864 expand_gimple_stmt (stmt);
726a989a 4865 gsi_next (&gsi);
8b11009b
ZD
4866 }
4867
4868 if (elt)
ae50c0cb 4869 emit_label ((rtx) *elt);
242229bb 4870
caf93cb0 4871 /* Java emits line number notes at the top of labels.
c22cacf3 4872 ??? Make this go away once line number notes are obsoleted. */
242229bb 4873 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 4874 if (NOTE_P (BB_HEAD (bb)))
242229bb 4875 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
242229bb 4876 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528 4877
726a989a 4878 maybe_dump_rtl_for_gimple_stmt (stmt, last);
242229bb
JH
4879 }
4880 else
4881 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
4882
4883 NOTE_BASIC_BLOCK (note) = bb;
4884
726a989a 4885 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 4886 {
cea49550 4887 basic_block new_bb;
242229bb 4888
b5b8b0ac 4889 stmt = gsi_stmt (gsi);
2a8e30fb
MM
4890
4891 /* If this statement is a non-debug one, and we generate debug
4892 insns, then this one might be the last real use of a TERed
4893	 SSA_NAME, while there are still some debug uses further
4894	 down.  Expanding the current SSA name in such further debug
4895	 uses by its RHS might lead to wrong debug info, as coalescing
4896 might make the operands of such RHS be placed into the same
4897 pseudo as something else. Like so:
4898 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
4899 use(a_1);
4900 a_2 = ...
4901 #DEBUG ... => a_1
4902 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
4903	 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
4904 the write to a_2 would actually have clobbered the place which
4905 formerly held a_0.
4906
4907 So, instead of that, we recognize the situation, and generate
4908 debug temporaries at the last real use of TERed SSA names:
4909 a_1 = a_0 + 1;
4910 #DEBUG #D1 => a_1
4911 use(a_1);
4912 a_2 = ...
4913 #DEBUG ... => #D1
4914 */
4915 if (MAY_HAVE_DEBUG_INSNS
4916 && SA.values
4917 && !is_gimple_debug (stmt))
4918 {
4919 ssa_op_iter iter;
4920 tree op;
4921 gimple def;
4922
5368224f 4923 location_t sloc = curr_insn_location ();
2a8e30fb
MM
4924
4925 /* Look for SSA names that have their last use here (TERed
4926 names always have only one real use). */
4927 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4928 if ((def = get_gimple_for_ssa_name (op)))
4929 {
4930 imm_use_iterator imm_iter;
4931 use_operand_p use_p;
4932 bool have_debug_uses = false;
4933
4934 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
4935 {
4936 if (gimple_debug_bind_p (USE_STMT (use_p)))
4937 {
4938 have_debug_uses = true;
4939 break;
4940 }
4941 }
4942
4943 if (have_debug_uses)
4944 {
4945 /* OP is a TERed SSA name, with DEF it's defining
4946 statement, and where OP is used in further debug
4947 instructions. Generate a debug temporary, and
4948 replace all uses of OP in debug insns with that
4949 temporary. */
4950 gimple debugstmt;
4951 tree value = gimple_assign_rhs_to_tree (def);
4952 tree vexpr = make_node (DEBUG_EXPR_DECL);
4953 rtx val;
4954 enum machine_mode mode;
4955
5368224f 4956 set_curr_insn_location (gimple_location (def));
2a8e30fb
MM
4957
4958 DECL_ARTIFICIAL (vexpr) = 1;
4959 TREE_TYPE (vexpr) = TREE_TYPE (value);
4960 if (DECL_P (value))
4961 mode = DECL_MODE (value);
4962 else
4963 mode = TYPE_MODE (TREE_TYPE (value));
4964 DECL_MODE (vexpr) = mode;
4965
4966 val = gen_rtx_VAR_LOCATION
4967 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
4968
e8c6bb74 4969 emit_debug_insn (val);
2a8e30fb
MM
4970
4971 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
4972 {
4973 if (!gimple_debug_bind_p (debugstmt))
4974 continue;
4975
4976 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
4977 SET_USE (use_p, vexpr);
4978
4979 update_stmt (debugstmt);
4980 }
4981 }
4982 }
5368224f 4983 set_curr_insn_location (sloc);
2a8e30fb
MM
4984 }
4985
a5883ba0 4986 currently_expanding_gimple_stmt = stmt;
b5b8b0ac 4987
242229bb
JH
4988 /* Expand this statement, then evaluate the resulting RTL and
4989 fixup the CFG accordingly. */
726a989a 4990 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 4991 {
726a989a 4992 new_bb = expand_gimple_cond (bb, stmt);
cea49550
RH
4993 if (new_bb)
4994 return new_bb;
4995 }
b5b8b0ac
AO
4996 else if (gimple_debug_bind_p (stmt))
4997 {
5368224f 4998 location_t sloc = curr_insn_location ();
b5b8b0ac
AO
4999 gimple_stmt_iterator nsi = gsi;
5000
5001 for (;;)
5002 {
5003 tree var = gimple_debug_bind_get_var (stmt);
5004 tree value;
5005 rtx val;
5006 enum machine_mode mode;
5007
ec8c1492
JJ
5008 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5009 && TREE_CODE (var) != LABEL_DECL
5010 && !target_for_debug_bind (var))
5011 goto delink_debug_stmt;
5012
b5b8b0ac
AO
5013 if (gimple_debug_bind_has_value_p (stmt))
5014 value = gimple_debug_bind_get_value (stmt);
5015 else
5016 value = NULL_TREE;
5017
5018 last = get_last_insn ();
5019
5368224f 5020 set_curr_insn_location (gimple_location (stmt));
b5b8b0ac
AO
5021
5022 if (DECL_P (var))
5023 mode = DECL_MODE (var);
5024 else
5025 mode = TYPE_MODE (TREE_TYPE (var));
5026
5027 val = gen_rtx_VAR_LOCATION
5028 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5029
e16b6fd0 5030 emit_debug_insn (val);
b5b8b0ac
AO
5031
5032 if (dump_file && (dump_flags & TDF_DETAILS))
5033 {
5034 /* We can't dump the insn with a TREE where an RTX
5035 is expected. */
e8c6bb74 5036 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
b5b8b0ac 5037 maybe_dump_rtl_for_gimple_stmt (stmt, last);
e8c6bb74 5038 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
b5b8b0ac
AO
5039 }
5040
ec8c1492 5041 delink_debug_stmt:
2a8e30fb
MM
5042 /* In order not to generate too many debug temporaries,
5043 we delink all uses of debug statements we already expanded.
5044 Therefore debug statements between definition and real
5045 use of TERed SSA names will continue to use the SSA name,
5046 and not be replaced with debug temps. */
5047 delink_stmt_imm_use (stmt);
5048
b5b8b0ac
AO
5049 gsi = nsi;
5050 gsi_next (&nsi);
5051 if (gsi_end_p (nsi))
5052 break;
5053 stmt = gsi_stmt (nsi);
5054 if (!gimple_debug_bind_p (stmt))
5055 break;
5056 }
5057
5368224f 5058 set_curr_insn_location (sloc);
ddb555ed
JJ
5059 }
5060 else if (gimple_debug_source_bind_p (stmt))
5061 {
5368224f 5062 location_t sloc = curr_insn_location ();
ddb555ed
JJ
5063 tree var = gimple_debug_source_bind_get_var (stmt);
5064 tree value = gimple_debug_source_bind_get_value (stmt);
5065 rtx val;
5066 enum machine_mode mode;
5067
5068 last = get_last_insn ();
5069
5368224f 5070 set_curr_insn_location (gimple_location (stmt));
ddb555ed
JJ
5071
5072 mode = DECL_MODE (var);
5073
5074 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5075 VAR_INIT_STATUS_UNINITIALIZED);
5076
5077 emit_debug_insn (val);
5078
5079 if (dump_file && (dump_flags & TDF_DETAILS))
5080 {
5081 /* We can't dump the insn with a TREE where an RTX
5082 is expected. */
5083 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5084 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5085 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5086 }
5087
5368224f 5088 set_curr_insn_location (sloc);
b5b8b0ac 5089 }
80c7a9eb 5090 else
242229bb 5091 {
f3ddd692
JJ
5092 if (is_gimple_call (stmt)
5093 && gimple_call_tail_p (stmt)
5094 && disable_tail_calls)
5095 gimple_call_set_tail (stmt, false);
5096
726a989a 5097 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
cea49550
RH
5098 {
5099 bool can_fallthru;
5100 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
5101 if (new_bb)
5102 {
5103 if (can_fallthru)
5104 bb = new_bb;
5105 else
5106 return new_bb;
5107 }
5108 }
4d7a65ea 5109 else
b7211528 5110 {
4e3825db 5111 def_operand_p def_p;
4e3825db
MM
5112 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5113
5114 if (def_p != NULL)
5115 {
5116 /* Ignore this stmt if it is in the list of
5117 replaceable expressions. */
5118 if (SA.values
b8698a0f 5119 && bitmap_bit_p (SA.values,
e97809c6 5120 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4e3825db
MM
5121 continue;
5122 }
28ed065e 5123 last = expand_gimple_stmt (stmt);
726a989a 5124 maybe_dump_rtl_for_gimple_stmt (stmt, last);
b7211528 5125 }
242229bb
JH
5126 }
5127 }
5128
a5883ba0
MM
5129 currently_expanding_gimple_stmt = NULL;
5130
7241571e 5131 /* Expand implicit goto and convert goto_locus. */
a9b77cd1
ZD
5132 FOR_EACH_EDGE (e, ei, bb->succs)
5133 {
2f13f2de 5134 if (e->goto_locus != UNKNOWN_LOCATION)
5368224f 5135 set_curr_insn_location (e->goto_locus);
7241571e
JJ
5136 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5137 {
5138 emit_jump (label_rtx_for_bb (e->dest));
5139 e->flags &= ~EDGE_FALLTHRU;
5140 }
a9b77cd1
ZD
5141 }
5142
ae761c45
AH
5143 /* Expanded RTL can create a jump as the last instruction of a block.
5144 Such a jump might later be assumed to be a jump to the successor and break edge insertion.
5145 We need to insert a dummy move to prevent this. PR41440. */
5146 if (single_succ_p (bb)
5147 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5148 && (last = get_last_insn ())
5149 && JUMP_P (last))
5150 {
5151 rtx dummy = gen_reg_rtx (SImode);
5152 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5153 }
5154
242229bb
JH
5155 do_pending_stack_adjust ();
5156
3f117656 5157 /* Find the block tail. The last insn in the block is the insn
242229bb
JH
5158 before a barrier and/or table jump insn. */
5159 last = get_last_insn ();
4b4bf941 5160 if (BARRIER_P (last))
242229bb
JH
5161 last = PREV_INSN (last);
5162 if (JUMP_TABLE_DATA_P (last))
5163 last = PREV_INSN (PREV_INSN (last));
5164 BB_END (bb) = last;
caf93cb0 5165
242229bb 5166 update_bb_for_insn (bb);
80c7a9eb 5167
242229bb
JH
5168 return bb;
5169}
5170
5171
5172/* Create a basic block for initialization code. */
5173
5174static basic_block
5175construct_init_block (void)
5176{
5177 basic_block init_block, first_block;
fd44f634
JH
5178 edge e = NULL;
5179 int flags;
275a4187 5180
fd44f634
JH
5181 /* Multiple entry points not supported yet. */
5182 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
5e2d947c
JH
5183 init_rtl_bb_info (ENTRY_BLOCK_PTR);
5184 init_rtl_bb_info (EXIT_BLOCK_PTR);
5185 ENTRY_BLOCK_PTR->flags |= BB_RTL;
5186 EXIT_BLOCK_PTR->flags |= BB_RTL;
242229bb 5187
fd44f634 5188 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
275a4187 5189
fd44f634
JH
 5190  /* When the entry edge points to the first basic block, we don't need a jump;
 5191     otherwise we have to jump to the proper target. */
5192 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
5193 {
726a989a 5194 tree label = gimple_block_label (e->dest);
fd44f634
JH
5195
5196 emit_jump (label_rtx (label));
5197 flags = 0;
275a4187 5198 }
fd44f634
JH
5199 else
5200 flags = EDGE_FALLTHRU;
242229bb
JH
5201
5202 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5203 get_last_insn (),
5204 ENTRY_BLOCK_PTR);
5205 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
5206 init_block->count = ENTRY_BLOCK_PTR->count;
7d776ee2
RG
5207 if (current_loops && ENTRY_BLOCK_PTR->loop_father)
5208 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
242229bb
JH
5209 if (e)
5210 {
5211 first_block = e->dest;
5212 redirect_edge_succ (e, init_block);
fd44f634 5213 e = make_edge (init_block, first_block, flags);
242229bb
JH
5214 }
5215 else
5216 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
5217 e->probability = REG_BR_PROB_BASE;
5218 e->count = ENTRY_BLOCK_PTR->count;
5219
5220 update_bb_for_insn (init_block);
5221 return init_block;
5222}
5223
55e092c4
JH
5224/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5225 found in the block tree. */
5226
5227static void
5228set_block_levels (tree block, int level)
5229{
5230 while (block)
5231 {
5232 BLOCK_NUMBER (block) = level;
5233 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5234 block = BLOCK_CHAIN (block);
5235 }
5236}
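/* Illustrative note (editorial addition, not part of the original source):
   given a block tree such as

       DECL_INITIAL (fndecl)              level 0
         BLOCK for { int a; ... }         level 1
           BLOCK for { int b; ... }       level 2
         sibling BLOCK                    level 1

   set_block_levels (DECL_INITIAL (fndecl), 0) walks siblings along
   BLOCK_CHAIN at the same level and recurses into BLOCK_SUBBLOCKS with
   level + 1; change_scope later uses these depth numbers to find the
   common parent of two scopes quickly.  */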
242229bb
JH
5237
5238/* Create a block containing landing pads and similar stuff. */
5239
5240static void
5241construct_exit_block (void)
5242{
5243 rtx head = get_last_insn ();
5244 rtx end;
5245 basic_block exit_block;
628f6a4e
BE
5246 edge e, e2;
5247 unsigned ix;
5248 edge_iterator ei;
071a42f9 5249 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
242229bb 5250
bf08ebeb
JH
5251 rtl_profile_for_bb (EXIT_BLOCK_PTR);
5252
caf93cb0 5253 /* Make sure the locus is set to the end of the function, so that
242229bb 5254 epilogue line numbers and warnings are set properly. */
2f13f2de 5255 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
242229bb
JH
5256 input_location = cfun->function_end_locus;
5257
242229bb
JH
5258 /* Generate rtl for function exit. */
5259 expand_function_end ();
5260
5261 end = get_last_insn ();
5262 if (head == end)
5263 return;
071a42f9
JH
 5264  /* While emitting the function end we may have moved the end of the last
 5265     basic block; restore it.  */
5266 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4b4bf941 5267 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 5268 head = NEXT_INSN (head);
80c7a9eb
RH
5269 exit_block = create_basic_block (NEXT_INSN (head), end,
5270 EXIT_BLOCK_PTR->prev_bb);
242229bb
JH
5271 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
5272 exit_block->count = EXIT_BLOCK_PTR->count;
7d776ee2
RG
5273 if (current_loops && EXIT_BLOCK_PTR->loop_father)
5274 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
628f6a4e
BE
5275
5276 ix = 0;
5277 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
242229bb 5278 {
8fb790fd 5279 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
242229bb 5280 if (!(e->flags & EDGE_ABNORMAL))
628f6a4e
BE
5281 redirect_edge_succ (e, exit_block);
5282 else
5283 ix++;
242229bb 5284 }
628f6a4e 5285
242229bb
JH
5286 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
5287 e->probability = REG_BR_PROB_BASE;
5288 e->count = EXIT_BLOCK_PTR->count;
628f6a4e 5289 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
242229bb
JH
5290 if (e2 != e)
5291 {
c22cacf3 5292 e->count -= e2->count;
242229bb
JH
5293 exit_block->count -= e2->count;
5294 exit_block->frequency -= EDGE_FREQUENCY (e2);
5295 }
5296 if (e->count < 0)
5297 e->count = 0;
5298 if (exit_block->count < 0)
5299 exit_block->count = 0;
5300 if (exit_block->frequency < 0)
5301 exit_block->frequency = 0;
5302 update_bb_for_insn (exit_block);
5303}
5304
c22cacf3 5305/* Helper function for discover_nonconstant_array_refs.
a1b23b2f
UW
5306 Look for ARRAY_REF nodes with non-constant indexes and mark them
5307 addressable. */
5308
5309static tree
5310discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5311 void *data ATTRIBUTE_UNUSED)
5312{
5313 tree t = *tp;
5314
5315 if (IS_TYPE_OR_DECL_P (t))
5316 *walk_subtrees = 0;
5317 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5318 {
5319 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5320 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5321 && (!TREE_OPERAND (t, 2)
5322 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5323 || (TREE_CODE (t) == COMPONENT_REF
5324 && (!TREE_OPERAND (t,2)
5325 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5326 || TREE_CODE (t) == BIT_FIELD_REF
5327 || TREE_CODE (t) == REALPART_EXPR
5328 || TREE_CODE (t) == IMAGPART_EXPR
5329 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1043771b 5330 || CONVERT_EXPR_P (t))
a1b23b2f
UW
5331 t = TREE_OPERAND (t, 0);
5332
5333 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5334 {
5335 t = get_base_address (t);
6f11d690
RG
5336 if (t && DECL_P (t)
5337 && DECL_MODE (t) != BLKmode)
a1b23b2f
UW
5338 TREE_ADDRESSABLE (t) = 1;
5339 }
5340
5341 *walk_subtrees = 0;
5342 }
5343
5344 return NULL_TREE;
5345}
5346
5347/* RTL expansion is not able to compile array references with variable
 5348    offsets for arrays stored in a single register.  Discover such
5349 expressions and mark variables as addressable to avoid this
5350 scenario. */
5351
5352static void
5353discover_nonconstant_array_refs (void)
5354{
5355 basic_block bb;
726a989a 5356 gimple_stmt_iterator gsi;
a1b23b2f
UW
5357
5358 FOR_EACH_BB (bb)
726a989a
RB
5359 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5360 {
5361 gimple stmt = gsi_stmt (gsi);
aa847cc8
JJ
5362 if (!is_gimple_debug (stmt))
5363 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
726a989a 5364 }
a1b23b2f
UW
5365}
5366
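/* Illustrative example (editorial addition, not part of the original
   source): the walk above targets references such as

       int foo (int i)
       {
         int a[2] = { 1, 2 };
         return a[i];
       }

   On a target where A is small enough to get a non-BLKmode DECL_MODE and
   could otherwise live in a single register, the variable index makes the
   access impossible to expand from that register, so A is marked
   TREE_ADDRESSABLE and kept in memory instead.  */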
2e3f842f
L
5367/* This function sets crtl->args.internal_arg_pointer to a virtual
5368 register if DRAP is needed. Local register allocator will replace
5369 virtual_incoming_args_rtx with the virtual register. */
5370
5371static void
5372expand_stack_alignment (void)
5373{
5374 rtx drap_rtx;
e939805b 5375 unsigned int preferred_stack_boundary;
2e3f842f
L
5376
5377 if (! SUPPORTS_STACK_ALIGNMENT)
5378 return;
b8698a0f 5379
2e3f842f
L
5380 if (cfun->calls_alloca
5381 || cfun->has_nonlocal_label
5382 || crtl->has_nonlocal_goto)
5383 crtl->need_drap = true;
5384
890b9b96
L
5385 /* Call update_stack_boundary here again to update incoming stack
5386 boundary. It may set incoming stack alignment to a different
5387 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
5388 use the minimum incoming stack alignment to check if it is OK
5389 to perform sibcall optimization since sibcall optimization will
5390 only align the outgoing stack to incoming stack boundary. */
5391 if (targetm.calls.update_stack_boundary)
5392 targetm.calls.update_stack_boundary ();
5393
5394 /* The incoming stack frame has to be aligned at least at
5395 parm_stack_boundary. */
5396 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
2e3f842f 5397
2e3f842f
L
5398 /* Update crtl->stack_alignment_estimated and use it later to align
5399 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
5400 exceptions since callgraph doesn't collect incoming stack alignment
5401 in this case. */
8f4f502f 5402 if (cfun->can_throw_non_call_exceptions
2e3f842f
L
5403 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
5404 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5405 else
5406 preferred_stack_boundary = crtl->preferred_stack_boundary;
5407 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
5408 crtl->stack_alignment_estimated = preferred_stack_boundary;
5409 if (preferred_stack_boundary > crtl->stack_alignment_needed)
5410 crtl->stack_alignment_needed = preferred_stack_boundary;
5411
890b9b96
L
5412 gcc_assert (crtl->stack_alignment_needed
5413 <= crtl->stack_alignment_estimated);
5414
2e3f842f 5415 crtl->stack_realign_needed
e939805b 5416 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
d2d93c32 5417 crtl->stack_realign_tried = crtl->stack_realign_needed;
2e3f842f
L
5418
5419 crtl->stack_realign_processed = true;
5420
5421 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
5422 alignment. */
5423 gcc_assert (targetm.calls.get_drap_rtx != NULL);
b8698a0f 5424 drap_rtx = targetm.calls.get_drap_rtx ();
2e3f842f 5425
d015f7cc
L
5426 /* stack_realign_drap and drap_rtx must match. */
5427 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
5428
2e3f842f
L
5429 /* Do nothing if NULL is returned, which means DRAP is not needed. */
5430 if (NULL != drap_rtx)
5431 {
5432 crtl->args.internal_arg_pointer = drap_rtx;
5433
5434 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
5435 needed. */
5436 fixup_tail_calls ();
5437 }
5438}
862d0b35
DN
5439\f
5440
5441static void
5442expand_main_function (void)
5443{
5444#if (defined(INVOKE__main) \
5445 || (!defined(HAS_INIT_SECTION) \
5446 && !defined(INIT_SECTION_ASM_OP) \
5447 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
5448 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
5449#endif
5450}
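/* Editorial sketch (not part of the original source): on targets that
   provide neither an init section nor INIT_ARRAY support, the call
   emitted above makes the compiled main behave roughly as if the user
   had written

       int main (void)
       {
         __main ();
         ... rest of main ...
       }

   with __main (from libgcc) running the global constructors.  On targets
   with init sections the conditional above compiles to nothing and the
   startup code handles this instead.  */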
5451\f
5452
5453/* Expand code to initialize the stack_protect_guard. This is invoked at
5454 the beginning of a function to be protected. */
5455
5456#ifndef HAVE_stack_protect_set
5457# define HAVE_stack_protect_set 0
5458# define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
5459#endif
5460
5461static void
5462stack_protect_prologue (void)
5463{
5464 tree guard_decl = targetm.stack_protect_guard ();
5465 rtx x, y;
5466
5467 x = expand_normal (crtl->stack_protect_guard);
5468 y = expand_normal (guard_decl);
5469
5470 /* Allow the target to copy from Y to X without leaking Y into a
5471 register. */
5472 if (HAVE_stack_protect_set)
5473 {
5474 rtx insn = gen_stack_protect_set (x, y);
5475 if (insn)
5476 {
5477 emit_insn (insn);
5478 return;
5479 }
5480 }
5481
5482 /* Otherwise do a straight move. */
5483 emit_move_insn (x, y);
5484}
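/* Conceptual sketch (editorial addition, not part of the original source):
   with -fstack-protector the code above emits, at the start of the
   protected function, roughly the equivalent of

       local_canary_slot = __stack_chk_guard;

   where __stack_chk_guard names the guard value returned by
   targetm.stack_protect_guard on typical targets.  The stack_protect_set
   pattern lets the target do the copy without leaving the guard value
   live in a register; otherwise a plain move is used.  The matching
   comparison against the guard is emitted later, in the function
   epilogue, not here.  */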
2e3f842f 5485
242229bb
JH
5486/* Translate the intermediate representation contained in the CFG
5487 from GIMPLE trees to RTL.
5488
5489 We do conversion per basic block and preserve/update the tree CFG.
5490 This implies we have to do some magic as the CFG can simultaneously
5491 consist of basic blocks containing RTL and GIMPLE trees. This can
 61ada8ae 5492 confuse the CFG hooks, so be careful not to manipulate the CFG during
242229bb
JH
5493 the expansion. */
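/* Schematic illustration (editorial addition, not part of the original
   source): a GIMPLE assignment such as

       a_1 = b_2 + c_3;

   is expanded by expand_gimple_stmt into RTL of roughly the form

       (set (reg:SI 60) (plus:SI (reg:SI 61) (reg:SI 62)))

   with the SSA names mapped to pseudo registers through
   SA.partition_to_pseudo.  */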
5494
c2924966 5495static unsigned int
726a989a 5496gimple_expand_cfg (void)
242229bb
JH
5497{
5498 basic_block bb, init_block;
5499 sbitmap blocks;
0ef90296
ZD
5500 edge_iterator ei;
5501 edge e;
f3ddd692 5502 rtx var_seq, var_ret_seq;
4e3825db
MM
5503 unsigned i;
5504
f029db69 5505 timevar_push (TV_OUT_OF_SSA);
4e3825db 5506 rewrite_out_of_ssa (&SA);
f029db69 5507 timevar_pop (TV_OUT_OF_SSA);
c302207e 5508 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
242229bb 5509
be147e84
RG
5510 /* Make sure all values used by the optimization passes have sane
5511 defaults. */
5512 reg_renumber = 0;
5513
4586b4ca
SB
5514 /* Some backends want to know that we are expanding to RTL. */
5515 currently_expanding_to_rtl = 1;
cd7d9fd7
RG
 5516  /* Dominators are not kept up-to-date as we may create new basic blocks. */
5517 free_dominance_info (CDI_DOMINATORS);
4586b4ca 5518
bf08ebeb
JH
5519 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
5520
5368224f 5521 insn_locations_init ();
fe8a7779 5522 if (!DECL_IS_BUILTIN (current_function_decl))
1751ecd6
AH
5523 {
5524 /* Eventually, all FEs should explicitly set function_start_locus. */
2f13f2de 5525 if (LOCATION_LOCUS (cfun->function_start_locus) == UNKNOWN_LOCATION)
5368224f 5526 set_curr_insn_location
1751ecd6
AH
5527 (DECL_SOURCE_LOCATION (current_function_decl));
5528 else
5368224f 5529 set_curr_insn_location (cfun->function_start_locus);
1751ecd6 5530 }
9ff70652 5531 else
5368224f
DC
5532 set_curr_insn_location (UNKNOWN_LOCATION);
5533 prologue_location = curr_insn_location ();
55e092c4 5534
2b21299c
JJ
5535#ifdef INSN_SCHEDULING
5536 init_sched_attrs ();
5537#endif
5538
55e092c4
JH
 5539  /* Make sure the first insn is a note even if we don't want line numbers.
5540 This makes sure the first insn will never be deleted.
5541 Also, final expects a note to appear there. */
5542 emit_note (NOTE_INSN_DELETED);
6429e3be 5543
a1b23b2f
UW
5544 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
5545 discover_nonconstant_array_refs ();
5546
e41b2a33 5547 targetm.expand_to_rtl_hook ();
cb91fab0 5548 crtl->stack_alignment_needed = STACK_BOUNDARY;
2e3f842f 5549 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
890b9b96 5550 crtl->stack_alignment_estimated = 0;
cb91fab0
JH
5551 crtl->preferred_stack_boundary = STACK_BOUNDARY;
5552 cfun->cfg->max_jumptable_ents = 0;
5553
ae9fd6b7
JH
 5554  /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
 5555     of the function section at expansion time to predict the distance of calls. */
5556 resolve_unique_section (current_function_decl, 0, flag_function_sections);
5557
727a31fa 5558 /* Expand the variables recorded during gimple lowering. */
f029db69 5559 timevar_push (TV_VAR_EXPAND);
3a42502d
RH
5560 start_sequence ();
5561
f3ddd692 5562 var_ret_seq = expand_used_vars ();
3a42502d
RH
5563
5564 var_seq = get_insns ();
5565 end_sequence ();
f029db69 5566 timevar_pop (TV_VAR_EXPAND);
242229bb 5567
7d69de61
RH
5568 /* Honor stack protection warnings. */
5569 if (warn_stack_protect)
5570 {
e3b5732b 5571 if (cfun->calls_alloca)
b8698a0f 5572 warning (OPT_Wstack_protector,
3b123595
SB
5573 "stack protector not protecting local variables: "
5574 "variable length buffer");
cb91fab0 5575 if (has_short_buffer && !crtl->stack_protect_guard)
b8698a0f 5576 warning (OPT_Wstack_protector,
3b123595
SB
5577 "stack protector not protecting function: "
5578 "all local arrays are less than %d bytes long",
7d69de61
RH
5579 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
5580 }
5581
242229bb 5582 /* Set up parameters and prepare for return, for the function. */
b79c5284 5583 expand_function_start (current_function_decl);
242229bb 5584
3a42502d
RH
5585 /* If we emitted any instructions for setting up the variables,
5586 emit them before the FUNCTION_START note. */
5587 if (var_seq)
5588 {
5589 emit_insn_before (var_seq, parm_birth_insn);
5590
5591 /* In expand_function_end we'll insert the alloca save/restore
 5592	 before parm_birth_insn.  We've just inserted an alloca call.
5593 Adjust the pointer to match. */
5594 parm_birth_insn = var_seq;
5595 }
5596
4e3825db
MM
5597 /* Now that we also have the parameter RTXs, copy them over to our
5598 partitions. */
5599 for (i = 0; i < SA.map->num_partitions; i++)
5600 {
5601 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
5602
5603 if (TREE_CODE (var) != VAR_DECL
5604 && !SA.partition_to_pseudo[i])
5605 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
5606 gcc_assert (SA.partition_to_pseudo[i]);
eb7adebc
MM
5607
5608 /* If this decl was marked as living in multiple places, reset
5609 this now to NULL. */
5610 if (DECL_RTL_IF_SET (var) == pc_rtx)
5611 SET_DECL_RTL (var, NULL);
5612
4e3825db
MM
5613 /* Some RTL parts really want to look at DECL_RTL(x) when x
5614 was a decl marked in REG_ATTR or MEM_ATTR. We could use
5615 SET_DECL_RTL here making this available, but that would mean
 5616	 selecting one of the potentially many RTLs for one DECL.  Instead
5617 of doing that we simply reset the MEM_EXPR of the RTL in question,
5618 then nobody can get at it and hence nobody can call DECL_RTL on it. */
5619 if (!DECL_RTL_SET_P (var))
5620 {
5621 if (MEM_P (SA.partition_to_pseudo[i]))
5622 set_mem_expr (SA.partition_to_pseudo[i], NULL);
5623 }
5624 }
5625
d466b407
MM
5626 /* If we have a class containing differently aligned pointers
5627 we need to merge those into the corresponding RTL pointer
5628 alignment. */
5629 for (i = 1; i < num_ssa_names; i++)
5630 {
5631 tree name = ssa_name (i);
5632 int part;
5633 rtx r;
5634
5635 if (!name
d466b407
MM
5636 /* We might have generated new SSA names in
5637 update_alias_info_with_stack_vars. They will have a NULL
 5638	 defining statement and won't be part of the partitioning,
5639 so ignore those. */
5640 || !SSA_NAME_DEF_STMT (name))
5641 continue;
5642 part = var_to_partition (SA.map, name);
5643 if (part == NO_PARTITION)
5644 continue;
70b5e7dc
RG
5645
5646 /* Adjust all partition members to get the underlying decl of
5647 the representative which we might have created in expand_one_var. */
5648 if (SSA_NAME_VAR (name) == NULL_TREE)
5649 {
5650 tree leader = partition_to_var (SA.map, part);
5651 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
5652 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
5653 }
5654 if (!POINTER_TYPE_P (TREE_TYPE (name)))
5655 continue;
5656
d466b407
MM
5657 r = SA.partition_to_pseudo[part];
5658 if (REG_P (r))
5659 mark_reg_pointer (r, get_pointer_alignment (name));
5660 }
5661
242229bb
JH
5662 /* If this function is `main', emit a call to `__main'
5663 to run global initializers, etc. */
5664 if (DECL_NAME (current_function_decl)
5665 && MAIN_NAME_P (DECL_NAME (current_function_decl))
5666 && DECL_FILE_SCOPE_P (current_function_decl))
5667 expand_main_function ();
5668
7d69de61
RH
5669 /* Initialize the stack_protect_guard field. This must happen after the
5670 call to __main (if any) so that the external decl is initialized. */
cb91fab0 5671 if (crtl->stack_protect_guard)
7d69de61
RH
5672 stack_protect_prologue ();
5673
4e3825db
MM
5674 expand_phi_nodes (&SA);
5675
3fbd86b1 5676 /* Register rtl specific functions for cfg. */
242229bb
JH
5677 rtl_register_cfg_hooks ();
5678
5679 init_block = construct_init_block ();
5680
0ef90296 5681 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4e3825db 5682 remaining edges later. */
0ef90296
ZD
5683 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
5684 e->flags &= ~EDGE_EXECUTABLE;
5685
8b11009b 5686 lab_rtx_for_bb = pointer_map_create ();
242229bb 5687 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
f3ddd692 5688 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
bf08ebeb 5689
b5b8b0ac
AO
5690 if (MAY_HAVE_DEBUG_INSNS)
5691 expand_debug_locations ();
5692
452aa9c5
RG
5693 /* Free stuff we no longer need after GIMPLE optimizations. */
5694 free_dominance_info (CDI_DOMINATORS);
5695 free_dominance_info (CDI_POST_DOMINATORS);
5696 delete_tree_cfg_annotations ();
5697
f029db69 5698 timevar_push (TV_OUT_OF_SSA);
4e3825db 5699 finish_out_of_ssa (&SA);
f029db69 5700 timevar_pop (TV_OUT_OF_SSA);
4e3825db 5701
f029db69 5702 timevar_push (TV_POST_EXPAND);
91753e21
RG
5703 /* We are no longer in SSA form. */
5704 cfun->gimple_df->in_ssa_p = false;
7d776ee2
RG
5705 if (current_loops)
5706 loops_state_clear (LOOP_CLOSED_SSA);
91753e21 5707
bf08ebeb
JH
 5708  /* Expansion is used by optimization passes too; set maybe_hot_insn_p
 5709     conservatively to true until they are all profile aware. */
8b11009b 5710 pointer_map_destroy (lab_rtx_for_bb);
cb91fab0 5711 free_histograms ();
242229bb
JH
5712
5713 construct_exit_block ();
5368224f 5714 insn_locations_finalize ();
242229bb 5715
f3ddd692
JJ
5716 if (var_ret_seq)
5717 {
5718 rtx after = return_label;
5719 rtx next = NEXT_INSN (after);
5720 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
5721 after = next;
5722 emit_insn_after (var_ret_seq, after);
5723 }
5724
1d65f45c 5725 /* Zap the tree EH table. */
e8a2a782 5726 set_eh_throw_stmt_table (cfun, NULL);
242229bb 5727
42821aff
MM
 5728  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence to
 5729     split edges, which edge insertions might do. */
242229bb 5730 rebuild_jump_labels (get_insns ());
242229bb 5731
4e3825db
MM
5732 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
5733 {
5734 edge e;
5735 edge_iterator ei;
5736 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5737 {
5738 if (e->insns.r)
bc470c24 5739 {
42821aff 5740 rebuild_jump_labels_chain (e->insns.r);
e40191f1
TV
5741 /* Put insns after parm birth, but before
 5742	       NOTE_INSN_FUNCTION_BEG. */
bc470c24 5743 if (e->src == ENTRY_BLOCK_PTR
e40191f1 5744 && single_succ_p (ENTRY_BLOCK_PTR))
bc470c24
JJ
5745 {
5746 rtx insns = e->insns.r;
5747 e->insns.r = NULL_RTX;
e40191f1
TV
5748 if (NOTE_P (parm_birth_insn)
5749 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
5750 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
5751 else
5752 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
bc470c24
JJ
5753 }
5754 else
5755 commit_one_edge_insertion (e);
5756 }
4e3825db
MM
5757 else
5758 ei_next (&ei);
5759 }
5760 }
5761
5762 /* We're done expanding trees to RTL. */
5763 currently_expanding_to_rtl = 0;
5764
5765 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
5766 {
5767 edge e;
5768 edge_iterator ei;
5769 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5770 {
5771 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
5772 e->flags &= ~EDGE_EXECUTABLE;
5773
5774 /* At the moment not all abnormal edges match the RTL
5775 representation. It is safe to remove them here as
5776 find_many_sub_basic_blocks will rediscover them.
5777 In the future we should get this fixed properly. */
5778 if ((e->flags & EDGE_ABNORMAL)
5779 && !(e->flags & EDGE_SIBCALL))
5780 remove_edge (e);
5781 else
5782 ei_next (&ei);
5783 }
5784 }
5785
242229bb 5786 blocks = sbitmap_alloc (last_basic_block);
f61e445a 5787 bitmap_ones (blocks);
242229bb 5788 find_many_sub_basic_blocks (blocks);
242229bb 5789 sbitmap_free (blocks);
4e3825db 5790 purge_all_dead_edges ();
242229bb 5791
2e3f842f
L
5792 expand_stack_alignment ();
5793
be147e84
RG
5794 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
5795 function. */
5796 if (crtl->tail_call_emit)
5797 fixup_tail_calls ();
5798
dac1fbf8
RG
5799 /* After initial rtl generation, call back to finish generating
5800 exception support code. We need to do this before cleaning up
5801 the CFG as the code does not expect dead landing pads. */
5802 if (cfun->eh->region_tree != NULL)
5803 finish_eh_generation ();
5804
5805 /* Remove unreachable blocks, otherwise we cannot compute dominators
5806 which are needed for loop state verification. As a side-effect
5807 this also compacts blocks.
5808 ??? We cannot remove trivially dead insns here as for example
5809 the DRAP reg on i?86 is not magically live at this point.
5810 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
5811 cleanup_cfg (CLEANUP_NO_INSN_DEL);
5812
242229bb 5813#ifdef ENABLE_CHECKING
62e5bf5d 5814 verify_flow_info ();
242229bb 5815#endif
9f8628ba 5816
be147e84
RG
5817 /* Initialize pseudos allocated for hard registers. */
5818 emit_initial_value_sets ();
5819
5820 /* And finally unshare all RTL. */
5821 unshare_all_rtl ();
5822
9f8628ba
PB
5823 /* There's no need to defer outputting this function any more; we
5824 know we want to output it. */
5825 DECL_DEFER_OUTPUT (current_function_decl) = 0;
5826
5827 /* Now that we're done expanding trees to RTL, we shouldn't have any
5828 more CONCATs anywhere. */
5829 generating_concat_p = 0;
5830
b7211528
SB
5831 if (dump_file)
5832 {
5833 fprintf (dump_file,
5834 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
5835 /* And the pass manager will dump RTL for us. */
5836 }
ef330312
PB
5837
5838 /* If we're emitting a nested function, make sure its parent gets
5839 emitted as well. Doing otherwise confuses debug info. */
c22cacf3 5840 {
ef330312
PB
5841 tree parent;
5842 for (parent = DECL_CONTEXT (current_function_decl);
c22cacf3
MS
5843 parent != NULL_TREE;
5844 parent = get_containing_scope (parent))
ef330312 5845 if (TREE_CODE (parent) == FUNCTION_DECL)
c22cacf3 5846 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
ef330312 5847 }
c22cacf3 5848
ef330312
PB
5849 /* We are now committed to emitting code for this function. Do any
5850 preparation, such as emitting abstract debug info for the inline
5851 before it gets mangled by optimization. */
5852 if (cgraph_function_possibly_inlined_p (current_function_decl))
5853 (*debug_hooks->outlining_inline_function) (current_function_decl);
5854
5855 TREE_ASM_WRITTEN (current_function_decl) = 1;
4bb1e037
AP
5856
5857 /* After expanding, the return labels are no longer needed. */
5858 return_label = NULL;
5859 naked_return_label = NULL;
0a35513e
AH
5860
5861 /* After expanding, the tm_restart map is no longer needed. */
5862 if (cfun->gimple_df->tm_restart)
5863 {
5864 htab_delete (cfun->gimple_df->tm_restart);
5865 cfun->gimple_df->tm_restart = NULL;
5866 }
5867
55e092c4
JH
5868 /* Tag the blocks with a depth number so that change_scope can find
5869 the common parent easily. */
5870 set_block_levels (DECL_INITIAL (cfun->decl), 0);
bf08ebeb 5871 default_rtl_profile ();
be147e84 5872
f029db69 5873 timevar_pop (TV_POST_EXPAND);
be147e84 5874
c2924966 5875 return 0;
242229bb
JH
5876}
5877
27a4cd48
DM
5878namespace {
5879
5880const pass_data pass_data_expand =
242229bb 5881{
27a4cd48
DM
5882 RTL_PASS, /* type */
5883 "expand", /* name */
5884 OPTGROUP_NONE, /* optinfo_flags */
5885 false, /* has_gate */
5886 true, /* has_execute */
5887 TV_EXPAND, /* tv_id */
5888 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6f37411d 5889 | PROP_gimple_lcx
27a4cd48
DM
5890 | PROP_gimple_lvec ), /* properties_required */
5891 PROP_rtl, /* properties_provided */
5892 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
5893 ( TODO_verify_ssa | TODO_verify_flow
5894 | TODO_verify_stmts ), /* todo_flags_start */
5895 0, /* todo_flags_finish */
242229bb 5896};
27a4cd48
DM
5897
5898class pass_expand : public rtl_opt_pass
5899{
5900public:
c3284718
RS
5901 pass_expand (gcc::context *ctxt)
5902 : rtl_opt_pass (pass_data_expand, ctxt)
27a4cd48
DM
5903 {}
5904
5905 /* opt_pass methods: */
5906 unsigned int execute () { return gimple_expand_cfg (); }
5907
5908}; // class pass_expand
5909
5910} // anon namespace
5911
5912rtl_opt_pass *
5913make_pass_expand (gcc::context *ctxt)
5914{
5915 return new pass_expand (ctxt);
5916}