]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cfgexpand.c
Revert the last change
[thirdparty/gcc.git] / gcc / cfgexpand.c
CommitLineData
242229bb 1/* A pass for lowering trees to RTL.
d1e082c2 2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
242229bb
JH
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
9dcd6f09 8the Free Software Foundation; either version 3, or (at your option)
242229bb
JH
9any later version.
10
11GCC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
242229bb
JH
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tm.h"
242229bb 24#include "rtl.h"
862d0b35
DN
25#include "hard-reg-set.h"
26#include "tree.h"
242229bb
JH
27#include "tm_p.h"
28#include "basic-block.h"
29#include "function.h"
30#include "expr.h"
31#include "langhooks.h"
442b4905
AM
32#include "bitmap.h"
33#include "gimple.h"
5be5c238
AM
34#include "gimple-iterator.h"
35#include "gimple-walk.h"
442b4905
AM
36#include "gimple-ssa.h"
37#include "cgraph.h"
38#include "tree-cfg.h"
39#include "tree-phinodes.h"
40#include "ssa-iterators.h"
41#include "tree-ssanames.h"
42#include "tree-dfa.h"
7a300452 43#include "tree-ssa.h"
242229bb
JH
44#include "tree-pass.h"
45#include "except.h"
46#include "flags.h"
1f6d3a08 47#include "diagnostic.h"
cf835838 48#include "gimple-pretty-print.h"
1f6d3a08 49#include "toplev.h"
ef330312 50#include "debug.h"
7d69de61 51#include "params.h"
ff28a94d 52#include "tree-inline.h"
6946b3f7 53#include "value-prof.h"
e41b2a33 54#include "target.h"
8e9055ae 55#include "tree-ssa-live.h"
78bca40d 56#include "tree-outof-ssa.h"
7a8cba34 57#include "sbitmap.h"
7d776ee2 58#include "cfgloop.h"
be147e84 59#include "regs.h" /* For reg_renumber. */
2b21299c 60#include "insn-attr.h" /* For INSN_SCHEDULING. */
f3ddd692 61#include "asan.h"
4484a35a 62#include "tree-ssa-address.h"
862d0b35
DN
63#include "recog.h"
64#include "output.h"
726a989a 65
8a6ce562
JBG
66/* Some systems use __main in a way incompatible with its use in gcc, in these
67 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
68 give the same symbol without quotes for an alternative entry point. You
69 must define both, or neither. */
70#ifndef NAME__MAIN
71#define NAME__MAIN "__main"
72#endif
73
4e3825db
MM
74/* This variable holds information helping the rewriting of SSA trees
75 into RTL. */
76struct ssaexpand SA;
77
a5883ba0
MM
78/* This variable holds the currently expanded gimple statement for purposes
79 of comminucating the profile info to the builtin expanders. */
80gimple currently_expanding_gimple_stmt;
81
ddb555ed
JJ
82static rtx expand_debug_expr (tree);
83
726a989a
RB
84/* Return an expression tree corresponding to the RHS of GIMPLE
85 statement STMT. */
86
87tree
88gimple_assign_rhs_to_tree (gimple stmt)
89{
90 tree t;
82d6e6fc 91 enum gimple_rhs_class grhs_class;
b8698a0f 92
82d6e6fc 93 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
726a989a 94
0354c0c7
BS
95 if (grhs_class == GIMPLE_TERNARY_RHS)
96 t = build3 (gimple_assign_rhs_code (stmt),
97 TREE_TYPE (gimple_assign_lhs (stmt)),
98 gimple_assign_rhs1 (stmt),
99 gimple_assign_rhs2 (stmt),
100 gimple_assign_rhs3 (stmt));
101 else if (grhs_class == GIMPLE_BINARY_RHS)
726a989a
RB
102 t = build2 (gimple_assign_rhs_code (stmt),
103 TREE_TYPE (gimple_assign_lhs (stmt)),
104 gimple_assign_rhs1 (stmt),
105 gimple_assign_rhs2 (stmt));
82d6e6fc 106 else if (grhs_class == GIMPLE_UNARY_RHS)
726a989a
RB
107 t = build1 (gimple_assign_rhs_code (stmt),
108 TREE_TYPE (gimple_assign_lhs (stmt)),
109 gimple_assign_rhs1 (stmt));
82d6e6fc 110 else if (grhs_class == GIMPLE_SINGLE_RHS)
b5b8b0ac
AO
111 {
112 t = gimple_assign_rhs1 (stmt);
113 /* Avoid modifying this tree in place below. */
d0ed412a
JJ
114 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
115 && gimple_location (stmt) != EXPR_LOCATION (t))
116 || (gimple_block (stmt)
117 && currently_expanding_to_rtl
5368224f 118 && EXPR_P (t)))
b5b8b0ac
AO
119 t = copy_node (t);
120 }
726a989a
RB
121 else
122 gcc_unreachable ();
123
f5045c96
AM
124 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
125 SET_EXPR_LOCATION (t, gimple_location (stmt));
126
726a989a
RB
127 return t;
128}
129
726a989a 130
1f6d3a08
RH
131#ifndef STACK_ALIGNMENT_NEEDED
132#define STACK_ALIGNMENT_NEEDED 1
133#endif
134
4e3825db
MM
135#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
136
137/* Associate declaration T with storage space X. If T is no
138 SSA name this is exactly SET_DECL_RTL, otherwise make the
139 partition of T associated with X. */
140static inline void
141set_rtl (tree t, rtx x)
142{
143 if (TREE_CODE (t) == SSA_NAME)
144 {
145 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
146 if (x && !MEM_P (x))
147 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
eb7adebc
MM
148 /* For the benefit of debug information at -O0 (where vartracking
149 doesn't run) record the place also in the base DECL if it's
150 a normal variable (not a parameter). */
151 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
152 {
153 tree var = SSA_NAME_VAR (t);
154 /* If we don't yet have something recorded, just record it now. */
155 if (!DECL_RTL_SET_P (var))
156 SET_DECL_RTL (var, x);
47598145 157 /* If we have it set already to "multiple places" don't
eb7adebc
MM
158 change this. */
159 else if (DECL_RTL (var) == pc_rtx)
160 ;
161 /* If we have something recorded and it's not the same place
162 as we want to record now, we have multiple partitions for the
163 same base variable, with different places. We can't just
164 randomly chose one, hence we have to say that we don't know.
165 This only happens with optimization, and there var-tracking
166 will figure out the right thing. */
167 else if (DECL_RTL (var) != x)
168 SET_DECL_RTL (var, pc_rtx);
169 }
4e3825db
MM
170 }
171 else
172 SET_DECL_RTL (t, x);
173}
1f6d3a08
RH
174
175/* This structure holds data relevant to one variable that will be
176 placed in a stack slot. */
177struct stack_var
178{
179 /* The Variable. */
180 tree decl;
181
1f6d3a08
RH
182 /* Initially, the size of the variable. Later, the size of the partition,
183 if this variable becomes it's partition's representative. */
184 HOST_WIDE_INT size;
185
186 /* The *byte* alignment required for this variable. Or as, with the
187 size, the alignment for this partition. */
188 unsigned int alignb;
189
190 /* The partition representative. */
191 size_t representative;
192
193 /* The next stack variable in the partition, or EOC. */
194 size_t next;
2bdbbe94
MM
195
196 /* The numbers of conflicting stack variables. */
197 bitmap conflicts;
1f6d3a08
RH
198};
199
200#define EOC ((size_t)-1)
201
202/* We have an array of such objects while deciding allocation. */
203static struct stack_var *stack_vars;
204static size_t stack_vars_alloc;
205static size_t stack_vars_num;
47598145 206static struct pointer_map_t *decl_to_stack_part;
1f6d3a08 207
3f9b14ff
SB
208/* Conflict bitmaps go on this obstack. This allows us to destroy
209 all of them in one big sweep. */
210static bitmap_obstack stack_var_bitmap_obstack;
211
fa10beec 212/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
1f6d3a08
RH
213 is non-decreasing. */
214static size_t *stack_vars_sorted;
215
1f6d3a08
RH
216/* The phase of the stack frame. This is the known misalignment of
217 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
218 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
219static int frame_phase;
220
7d69de61
RH
221/* Used during expand_used_vars to remember if we saw any decls for
222 which we'd like to enable stack smashing protection. */
223static bool has_protected_decls;
224
225/* Used during expand_used_vars. Remember if we say a character buffer
226 smaller than our cutoff threshold. Used for -Wstack-protector. */
227static bool has_short_buffer;
1f6d3a08 228
6f197850 229/* Compute the byte alignment to use for DECL. Ignore alignment
765c3e8f
L
230 we can't do with expected alignment of the stack boundary. */
231
232static unsigned int
6f197850 233align_local_variable (tree decl)
765c3e8f 234{
3a42502d 235 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
6f197850 236 DECL_ALIGN (decl) = align;
1f6d3a08
RH
237 return align / BITS_PER_UNIT;
238}
239
240/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
241 Return the frame offset. */
242
243static HOST_WIDE_INT
3a42502d 244alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
1f6d3a08
RH
245{
246 HOST_WIDE_INT offset, new_frame_offset;
247
248 new_frame_offset = frame_offset;
249 if (FRAME_GROWS_DOWNWARD)
250 {
251 new_frame_offset -= size + frame_phase;
252 new_frame_offset &= -align;
253 new_frame_offset += frame_phase;
254 offset = new_frame_offset;
255 }
256 else
257 {
258 new_frame_offset -= frame_phase;
259 new_frame_offset += align - 1;
260 new_frame_offset &= -align;
261 new_frame_offset += frame_phase;
262 offset = new_frame_offset;
263 new_frame_offset += size;
264 }
265 frame_offset = new_frame_offset;
266
9fb798d7
EB
267 if (frame_offset_overflow (frame_offset, cfun->decl))
268 frame_offset = offset = 0;
269
1f6d3a08
RH
270 return offset;
271}
272
273/* Accumulate DECL into STACK_VARS. */
274
275static void
276add_stack_var (tree decl)
277{
533f611a
RH
278 struct stack_var *v;
279
1f6d3a08
RH
280 if (stack_vars_num >= stack_vars_alloc)
281 {
282 if (stack_vars_alloc)
283 stack_vars_alloc = stack_vars_alloc * 3 / 2;
284 else
285 stack_vars_alloc = 32;
286 stack_vars
287 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
288 }
47598145
MM
289 if (!decl_to_stack_part)
290 decl_to_stack_part = pointer_map_create ();
291
533f611a 292 v = &stack_vars[stack_vars_num];
47598145 293 * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
533f611a
RH
294
295 v->decl = decl;
533f611a
RH
296 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
297 /* Ensure that all variables have size, so that &a != &b for any two
298 variables that are simultaneously live. */
299 if (v->size == 0)
300 v->size = 1;
6f197850 301 v->alignb = align_local_variable (SSAVAR (decl));
13868f40
EB
302 /* An alignment of zero can mightily confuse us later. */
303 gcc_assert (v->alignb != 0);
1f6d3a08
RH
304
305 /* All variables are initially in their own partition. */
533f611a
RH
306 v->representative = stack_vars_num;
307 v->next = EOC;
1f6d3a08 308
2bdbbe94 309 /* All variables initially conflict with no other. */
533f611a 310 v->conflicts = NULL;
2bdbbe94 311
1f6d3a08 312 /* Ensure that this decl doesn't get put onto the list twice. */
4e3825db 313 set_rtl (decl, pc_rtx);
1f6d3a08
RH
314
315 stack_vars_num++;
316}
317
1f6d3a08
RH
318/* Make the decls associated with luid's X and Y conflict. */
319
320static void
321add_stack_var_conflict (size_t x, size_t y)
322{
2bdbbe94
MM
323 struct stack_var *a = &stack_vars[x];
324 struct stack_var *b = &stack_vars[y];
325 if (!a->conflicts)
3f9b14ff 326 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
2bdbbe94 327 if (!b->conflicts)
3f9b14ff 328 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
2bdbbe94
MM
329 bitmap_set_bit (a->conflicts, y);
330 bitmap_set_bit (b->conflicts, x);
1f6d3a08
RH
331}
332
333/* Check whether the decls associated with luid's X and Y conflict. */
334
335static bool
336stack_var_conflict_p (size_t x, size_t y)
337{
2bdbbe94
MM
338 struct stack_var *a = &stack_vars[x];
339 struct stack_var *b = &stack_vars[y];
47598145
MM
340 if (x == y)
341 return false;
342 /* Partitions containing an SSA name result from gimple registers
343 with things like unsupported modes. They are top-level and
344 hence conflict with everything else. */
345 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
346 return true;
347
2bdbbe94
MM
348 if (!a->conflicts || !b->conflicts)
349 return false;
350 return bitmap_bit_p (a->conflicts, y);
1f6d3a08 351}
b8698a0f 352
47598145
MM
353/* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
354 enter its partition number into bitmap DATA. */
355
356static bool
357visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
358{
359 bitmap active = (bitmap)data;
360 op = get_base_address (op);
361 if (op
362 && DECL_P (op)
363 && DECL_RTL_IF_SET (op) == pc_rtx)
364 {
365 size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
366 if (v)
367 bitmap_set_bit (active, *v);
368 }
369 return false;
370}
371
372/* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
373 record conflicts between it and all currently active other partitions
374 from bitmap DATA. */
375
376static bool
377visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
378{
379 bitmap active = (bitmap)data;
380 op = get_base_address (op);
381 if (op
382 && DECL_P (op)
383 && DECL_RTL_IF_SET (op) == pc_rtx)
384 {
385 size_t *v =
386 (size_t *) pointer_map_contains (decl_to_stack_part, op);
387 if (v && bitmap_set_bit (active, *v))
388 {
389 size_t num = *v;
390 bitmap_iterator bi;
391 unsigned i;
392 gcc_assert (num < stack_vars_num);
393 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
394 add_stack_var_conflict (num, i);
395 }
396 }
397 return false;
398}
399
400/* Helper routine for add_scope_conflicts, calculating the active partitions
401 at the end of BB, leaving the result in WORK. We're called to generate
81bfd197
MM
402 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
403 liveness. */
47598145
MM
404
405static void
81bfd197 406add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
47598145
MM
407{
408 edge e;
409 edge_iterator ei;
410 gimple_stmt_iterator gsi;
411 bool (*visit)(gimple, tree, void *);
412
413 bitmap_clear (work);
414 FOR_EACH_EDGE (e, ei, bb->preds)
415 bitmap_ior_into (work, (bitmap)e->src->aux);
416
ea85edfe 417 visit = visit_op;
47598145
MM
418
419 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
420 {
421 gimple stmt = gsi_stmt (gsi);
ea85edfe 422 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
47598145 423 }
ea85edfe 424 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
47598145
MM
425 {
426 gimple stmt = gsi_stmt (gsi);
427
428 if (gimple_clobber_p (stmt))
429 {
430 tree lhs = gimple_assign_lhs (stmt);
431 size_t *v;
432 /* Nested function lowering might introduce LHSs
433 that are COMPONENT_REFs. */
434 if (TREE_CODE (lhs) != VAR_DECL)
435 continue;
436 if (DECL_RTL_IF_SET (lhs) == pc_rtx
437 && (v = (size_t *)
438 pointer_map_contains (decl_to_stack_part, lhs)))
439 bitmap_clear_bit (work, *v);
440 }
441 else if (!is_gimple_debug (stmt))
ea85edfe 442 {
81bfd197 443 if (for_conflict
ea85edfe
JJ
444 && visit == visit_op)
445 {
446 /* If this is the first real instruction in this BB we need
88d599dc
MM
447 to add conflicts for everything live at this point now.
448 Unlike classical liveness for named objects we can't
ea85edfe
JJ
449 rely on seeing a def/use of the names we're interested in.
450 There might merely be indirect loads/stores. We'd not add any
81bfd197 451 conflicts for such partitions. */
ea85edfe
JJ
452 bitmap_iterator bi;
453 unsigned i;
81bfd197 454 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
ea85edfe 455 {
9b44f5d9
MM
456 struct stack_var *a = &stack_vars[i];
457 if (!a->conflicts)
3f9b14ff 458 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
9b44f5d9 459 bitmap_ior_into (a->conflicts, work);
ea85edfe
JJ
460 }
461 visit = visit_conflict;
462 }
463 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
464 }
47598145
MM
465 }
466}
467
468/* Generate stack partition conflicts between all partitions that are
469 simultaneously live. */
470
471static void
472add_scope_conflicts (void)
473{
474 basic_block bb;
475 bool changed;
476 bitmap work = BITMAP_ALLOC (NULL);
9b44f5d9
MM
477 int *rpo;
478 int n_bbs;
47598145 479
88d599dc 480 /* We approximate the live range of a stack variable by taking the first
47598145
MM
481 mention of its name as starting point(s), and by the end-of-scope
482 death clobber added by gimplify as ending point(s) of the range.
483 This overapproximates in the case we for instance moved an address-taken
484 operation upward, without also moving a dereference to it upwards.
485 But it's conservatively correct as a variable never can hold values
486 before its name is mentioned at least once.
487
88d599dc 488 We then do a mostly classical bitmap liveness algorithm. */
47598145
MM
489
490 FOR_ALL_BB (bb)
3f9b14ff 491 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
47598145 492
9b44f5d9
MM
493 rpo = XNEWVEC (int, last_basic_block);
494 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
495
47598145
MM
496 changed = true;
497 while (changed)
498 {
9b44f5d9 499 int i;
47598145 500 changed = false;
9b44f5d9 501 for (i = 0; i < n_bbs; i++)
47598145 502 {
9b44f5d9
MM
503 bitmap active;
504 bb = BASIC_BLOCK (rpo[i]);
505 active = (bitmap)bb->aux;
81bfd197 506 add_scope_conflicts_1 (bb, work, false);
47598145
MM
507 if (bitmap_ior_into (active, work))
508 changed = true;
509 }
510 }
511
512 FOR_EACH_BB (bb)
81bfd197 513 add_scope_conflicts_1 (bb, work, true);
47598145 514
9b44f5d9 515 free (rpo);
47598145
MM
516 BITMAP_FREE (work);
517 FOR_ALL_BB (bb)
518 BITMAP_FREE (bb->aux);
519}
520
1f6d3a08 521/* A subroutine of partition_stack_vars. A comparison function for qsort,
3a42502d 522 sorting an array of indices by the properties of the object. */
1f6d3a08
RH
523
524static int
3a42502d 525stack_var_cmp (const void *a, const void *b)
1f6d3a08 526{
3a42502d
RH
527 size_t ia = *(const size_t *)a;
528 size_t ib = *(const size_t *)b;
529 unsigned int aligna = stack_vars[ia].alignb;
530 unsigned int alignb = stack_vars[ib].alignb;
531 HOST_WIDE_INT sizea = stack_vars[ia].size;
532 HOST_WIDE_INT sizeb = stack_vars[ib].size;
533 tree decla = stack_vars[ia].decl;
534 tree declb = stack_vars[ib].decl;
535 bool largea, largeb;
4e3825db 536 unsigned int uida, uidb;
1f6d3a08 537
3a42502d
RH
538 /* Primary compare on "large" alignment. Large comes first. */
539 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
540 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
541 if (largea != largeb)
542 return (int)largeb - (int)largea;
543
544 /* Secondary compare on size, decreasing */
3a42502d 545 if (sizea > sizeb)
6ddfda8a
ER
546 return -1;
547 if (sizea < sizeb)
1f6d3a08 548 return 1;
3a42502d
RH
549
550 /* Tertiary compare on true alignment, decreasing. */
551 if (aligna < alignb)
552 return -1;
553 if (aligna > alignb)
554 return 1;
555
556 /* Final compare on ID for sort stability, increasing.
557 Two SSA names are compared by their version, SSA names come before
558 non-SSA names, and two normal decls are compared by their DECL_UID. */
4e3825db
MM
559 if (TREE_CODE (decla) == SSA_NAME)
560 {
561 if (TREE_CODE (declb) == SSA_NAME)
562 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
563 else
564 return -1;
565 }
566 else if (TREE_CODE (declb) == SSA_NAME)
567 return 1;
568 else
569 uida = DECL_UID (decla), uidb = DECL_UID (declb);
79f802f5 570 if (uida < uidb)
79f802f5 571 return 1;
3a42502d
RH
572 if (uida > uidb)
573 return -1;
1f6d3a08
RH
574 return 0;
575}
576
55b34b5f
RG
577
578/* If the points-to solution *PI points to variables that are in a partition
579 together with other variables add all partition members to the pointed-to
580 variables bitmap. */
581
582static void
583add_partitioned_vars_to_ptset (struct pt_solution *pt,
584 struct pointer_map_t *decls_to_partitions,
585 struct pointer_set_t *visited, bitmap temp)
586{
587 bitmap_iterator bi;
588 unsigned i;
589 bitmap *part;
590
591 if (pt->anything
592 || pt->vars == NULL
593 /* The pointed-to vars bitmap is shared, it is enough to
594 visit it once. */
c3284718 595 || pointer_set_insert (visited, pt->vars))
55b34b5f
RG
596 return;
597
598 bitmap_clear (temp);
599
600 /* By using a temporary bitmap to store all members of the partitions
601 we have to add we make sure to visit each of the partitions only
602 once. */
603 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
604 if ((!temp
605 || !bitmap_bit_p (temp, i))
606 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
607 (void *)(size_t) i)))
608 bitmap_ior_into (temp, *part);
609 if (!bitmap_empty_p (temp))
610 bitmap_ior_into (pt->vars, temp);
611}
612
613/* Update points-to sets based on partition info, so we can use them on RTL.
614 The bitmaps representing stack partitions will be saved until expand,
615 where partitioned decls used as bases in memory expressions will be
616 rewritten. */
617
618static void
619update_alias_info_with_stack_vars (void)
620{
621 struct pointer_map_t *decls_to_partitions = NULL;
622 size_t i, j;
623 tree var = NULL_TREE;
624
625 for (i = 0; i < stack_vars_num; i++)
626 {
627 bitmap part = NULL;
628 tree name;
629 struct ptr_info_def *pi;
630
631 /* Not interested in partitions with single variable. */
632 if (stack_vars[i].representative != i
633 || stack_vars[i].next == EOC)
634 continue;
635
636 if (!decls_to_partitions)
637 {
638 decls_to_partitions = pointer_map_create ();
639 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
640 }
641
642 /* Create an SSA_NAME that points to the partition for use
643 as base during alias-oracle queries on RTL for bases that
644 have been partitioned. */
645 if (var == NULL_TREE)
646 var = create_tmp_var (ptr_type_node, NULL);
647 name = make_ssa_name (var, NULL);
648
649 /* Create bitmaps representing partitions. They will be used for
650 points-to sets later, so use GGC alloc. */
651 part = BITMAP_GGC_ALLOC ();
652 for (j = i; j != EOC; j = stack_vars[j].next)
653 {
654 tree decl = stack_vars[j].decl;
25a6a873 655 unsigned int uid = DECL_PT_UID (decl);
55b34b5f
RG
656 bitmap_set_bit (part, uid);
657 *((bitmap *) pointer_map_insert (decls_to_partitions,
658 (void *)(size_t) uid)) = part;
659 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
660 decl)) = name;
88d8330d
EB
661 if (TREE_ADDRESSABLE (decl))
662 TREE_ADDRESSABLE (name) = 1;
55b34b5f
RG
663 }
664
665 /* Make the SSA name point to all partition members. */
666 pi = get_ptr_info (name);
d3553615 667 pt_solution_set (&pi->pt, part, false);
55b34b5f
RG
668 }
669
670 /* Make all points-to sets that contain one member of a partition
671 contain all members of the partition. */
672 if (decls_to_partitions)
673 {
674 unsigned i;
675 struct pointer_set_t *visited = pointer_set_create ();
3f9b14ff 676 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
55b34b5f
RG
677
678 for (i = 1; i < num_ssa_names; i++)
679 {
680 tree name = ssa_name (i);
681 struct ptr_info_def *pi;
682
683 if (name
684 && POINTER_TYPE_P (TREE_TYPE (name))
685 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
686 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
687 visited, temp);
688 }
689
690 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
691 decls_to_partitions, visited, temp);
55b34b5f
RG
692
693 pointer_set_destroy (visited);
694 pointer_map_destroy (decls_to_partitions);
695 BITMAP_FREE (temp);
696 }
697}
698
1f6d3a08
RH
699/* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
700 partitioning algorithm. Partitions A and B are known to be non-conflicting.
6ddfda8a 701 Merge them into a single partition A. */
1f6d3a08
RH
702
703static void
6ddfda8a 704union_stack_vars (size_t a, size_t b)
1f6d3a08 705{
2bdbbe94
MM
706 struct stack_var *vb = &stack_vars[b];
707 bitmap_iterator bi;
708 unsigned u;
1f6d3a08 709
6ddfda8a
ER
710 gcc_assert (stack_vars[b].next == EOC);
711 /* Add B to A's partition. */
712 stack_vars[b].next = stack_vars[a].next;
713 stack_vars[b].representative = a;
1f6d3a08
RH
714 stack_vars[a].next = b;
715
716 /* Update the required alignment of partition A to account for B. */
717 if (stack_vars[a].alignb < stack_vars[b].alignb)
718 stack_vars[a].alignb = stack_vars[b].alignb;
719
720 /* Update the interference graph and merge the conflicts. */
2bdbbe94
MM
721 if (vb->conflicts)
722 {
723 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
724 add_stack_var_conflict (a, stack_vars[u].representative);
725 BITMAP_FREE (vb->conflicts);
726 }
1f6d3a08
RH
727}
728
729/* A subroutine of expand_used_vars. Binpack the variables into
730 partitions constrained by the interference graph. The overall
731 algorithm used is as follows:
732
6ddfda8a 733 Sort the objects by size in descending order.
1f6d3a08
RH
734 For each object A {
735 S = size(A)
736 O = 0
737 loop {
738 Look for the largest non-conflicting object B with size <= S.
739 UNION (A, B)
1f6d3a08
RH
740 }
741 }
742*/
743
744static void
745partition_stack_vars (void)
746{
747 size_t si, sj, n = stack_vars_num;
748
749 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
750 for (si = 0; si < n; ++si)
751 stack_vars_sorted[si] = si;
752
753 if (n == 1)
754 return;
755
3a42502d 756 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
1f6d3a08 757
1f6d3a08
RH
758 for (si = 0; si < n; ++si)
759 {
760 size_t i = stack_vars_sorted[si];
3a42502d 761 unsigned int ialign = stack_vars[i].alignb;
f3ddd692 762 HOST_WIDE_INT isize = stack_vars[i].size;
1f6d3a08 763
6ddfda8a
ER
764 /* Ignore objects that aren't partition representatives. If we
765 see a var that is not a partition representative, it must
766 have been merged earlier. */
767 if (stack_vars[i].representative != i)
768 continue;
769
770 for (sj = si + 1; sj < n; ++sj)
1f6d3a08
RH
771 {
772 size_t j = stack_vars_sorted[sj];
1f6d3a08 773 unsigned int jalign = stack_vars[j].alignb;
f3ddd692 774 HOST_WIDE_INT jsize = stack_vars[j].size;
1f6d3a08
RH
775
776 /* Ignore objects that aren't partition representatives. */
777 if (stack_vars[j].representative != j)
778 continue;
779
3a42502d
RH
780 /* Do not mix objects of "small" (supported) alignment
781 and "large" (unsupported) alignment. */
782 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
783 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
f3ddd692
JJ
784 break;
785
786 /* For Address Sanitizer do not mix objects with different
787 sizes, as the shorter vars wouldn't be adequately protected.
788 Don't do that for "large" (unsupported) alignment objects,
789 those aren't protected anyway. */
de5a5fa1 790 if ((flag_sanitize & SANITIZE_ADDRESS) && isize != jsize
f3ddd692
JJ
791 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
792 break;
793
794 /* Ignore conflicting objects. */
795 if (stack_var_conflict_p (i, j))
3a42502d
RH
796 continue;
797
1f6d3a08 798 /* UNION the objects, placing J at OFFSET. */
6ddfda8a 799 union_stack_vars (i, j);
1f6d3a08
RH
800 }
801 }
55b34b5f 802
9b999dc5 803 update_alias_info_with_stack_vars ();
1f6d3a08
RH
804}
805
806/* A debugging aid for expand_used_vars. Dump the generated partitions. */
807
808static void
809dump_stack_var_partition (void)
810{
811 size_t si, i, j, n = stack_vars_num;
812
813 for (si = 0; si < n; ++si)
814 {
815 i = stack_vars_sorted[si];
816
817 /* Skip variables that aren't partition representatives, for now. */
818 if (stack_vars[i].representative != i)
819 continue;
820
821 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
822 " align %u\n", (unsigned long) i, stack_vars[i].size,
823 stack_vars[i].alignb);
824
825 for (j = i; j != EOC; j = stack_vars[j].next)
826 {
827 fputc ('\t', dump_file);
828 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
1f6d3a08 829 }
6ddfda8a 830 fputc ('\n', dump_file);
1f6d3a08
RH
831 }
832}
833
3a42502d 834/* Assign rtl to DECL at BASE + OFFSET. */
1f6d3a08
RH
835
836static void
3a42502d
RH
837expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
838 HOST_WIDE_INT offset)
1f6d3a08 839{
3a42502d 840 unsigned align;
1f6d3a08 841 rtx x;
c22cacf3 842
1f6d3a08
RH
843 /* If this fails, we've overflowed the stack frame. Error nicely? */
844 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
845
0a81f074 846 x = plus_constant (Pmode, base, offset);
4e3825db 847 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
1f6d3a08 848
4e3825db
MM
849 if (TREE_CODE (decl) != SSA_NAME)
850 {
851 /* Set alignment we actually gave this decl if it isn't an SSA name.
852 If it is we generate stack slots only accidentally so it isn't as
853 important, we'll simply use the alignment that is already set. */
3a42502d
RH
854 if (base == virtual_stack_vars_rtx)
855 offset -= frame_phase;
4e3825db
MM
856 align = offset & -offset;
857 align *= BITS_PER_UNIT;
3a42502d
RH
858 if (align == 0 || align > base_align)
859 align = base_align;
860
861 /* One would think that we could assert that we're not decreasing
862 alignment here, but (at least) the i386 port does exactly this
863 via the MINIMUM_ALIGNMENT hook. */
4e3825db
MM
864
865 DECL_ALIGN (decl) = align;
866 DECL_USER_ALIGN (decl) = 0;
867 }
868
869 set_mem_attributes (x, SSAVAR (decl), true);
870 set_rtl (decl, x);
1f6d3a08
RH
871}
872
f3ddd692
JJ
873struct stack_vars_data
874{
875 /* Vector of offset pairs, always end of some padding followed
876 by start of the padding that needs Address Sanitizer protection.
877 The vector is in reversed, highest offset pairs come first. */
9771b263 878 vec<HOST_WIDE_INT> asan_vec;
f3ddd692
JJ
879
880 /* Vector of partition representative decls in between the paddings. */
9771b263 881 vec<tree> asan_decl_vec;
f3ddd692
JJ
882};
883
1f6d3a08
RH
884/* A subroutine of expand_used_vars. Give each partition representative
885 a unique location within the stack frame. Update each partition member
886 with that location. */
887
888static void
f3ddd692 889expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
1f6d3a08
RH
890{
891 size_t si, i, j, n = stack_vars_num;
3a42502d
RH
892 HOST_WIDE_INT large_size = 0, large_alloc = 0;
893 rtx large_base = NULL;
894 unsigned large_align = 0;
895 tree decl;
896
897 /* Determine if there are any variables requiring "large" alignment.
898 Since these are dynamically allocated, we only process these if
899 no predicate involved. */
900 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
901 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
902 {
903 /* Find the total size of these variables. */
904 for (si = 0; si < n; ++si)
905 {
906 unsigned alignb;
907
908 i = stack_vars_sorted[si];
909 alignb = stack_vars[i].alignb;
910
911 /* Stop when we get to the first decl with "small" alignment. */
912 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
913 break;
914
915 /* Skip variables that aren't partition representatives. */
916 if (stack_vars[i].representative != i)
917 continue;
918
919 /* Skip variables that have already had rtl assigned. See also
920 add_stack_var where we perpetrate this pc_rtx hack. */
921 decl = stack_vars[i].decl;
922 if ((TREE_CODE (decl) == SSA_NAME
923 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
924 : DECL_RTL (decl)) != pc_rtx)
925 continue;
926
927 large_size += alignb - 1;
928 large_size &= -(HOST_WIDE_INT)alignb;
929 large_size += stack_vars[i].size;
930 }
931
932 /* If there were any, allocate space. */
933 if (large_size > 0)
934 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
935 large_align, true);
936 }
1f6d3a08
RH
937
938 for (si = 0; si < n; ++si)
939 {
3a42502d
RH
940 rtx base;
941 unsigned base_align, alignb;
1f6d3a08
RH
942 HOST_WIDE_INT offset;
943
944 i = stack_vars_sorted[si];
945
946 /* Skip variables that aren't partition representatives, for now. */
947 if (stack_vars[i].representative != i)
948 continue;
949
7d69de61
RH
950 /* Skip variables that have already had rtl assigned. See also
951 add_stack_var where we perpetrate this pc_rtx hack. */
3a42502d
RH
952 decl = stack_vars[i].decl;
953 if ((TREE_CODE (decl) == SSA_NAME
954 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
955 : DECL_RTL (decl)) != pc_rtx)
7d69de61
RH
956 continue;
957
c22cacf3 958 /* Check the predicate to see whether this variable should be
7d69de61 959 allocated in this pass. */
f3ddd692 960 if (pred && !pred (i))
7d69de61
RH
961 continue;
962
3a42502d
RH
963 alignb = stack_vars[i].alignb;
964 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
965 {
de5a5fa1 966 if ((flag_sanitize & SANITIZE_ADDRESS) && pred)
f3ddd692
JJ
967 {
968 HOST_WIDE_INT prev_offset = frame_offset;
969 tree repr_decl = NULL_TREE;
970
971 offset
972 = alloc_stack_frame_space (stack_vars[i].size
973 + ASAN_RED_ZONE_SIZE,
974 MAX (alignb, ASAN_RED_ZONE_SIZE));
9771b263
DN
975 data->asan_vec.safe_push (prev_offset);
976 data->asan_vec.safe_push (offset + stack_vars[i].size);
f3ddd692
JJ
977 /* Find best representative of the partition.
978 Prefer those with DECL_NAME, even better
979 satisfying asan_protect_stack_decl predicate. */
980 for (j = i; j != EOC; j = stack_vars[j].next)
981 if (asan_protect_stack_decl (stack_vars[j].decl)
982 && DECL_NAME (stack_vars[j].decl))
983 {
984 repr_decl = stack_vars[j].decl;
985 break;
986 }
987 else if (repr_decl == NULL_TREE
988 && DECL_P (stack_vars[j].decl)
989 && DECL_NAME (stack_vars[j].decl))
990 repr_decl = stack_vars[j].decl;
991 if (repr_decl == NULL_TREE)
992 repr_decl = stack_vars[i].decl;
9771b263 993 data->asan_decl_vec.safe_push (repr_decl);
f3ddd692
JJ
994 }
995 else
996 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
3a42502d
RH
997 base = virtual_stack_vars_rtx;
998 base_align = crtl->max_used_stack_slot_alignment;
999 }
1000 else
1001 {
1002 /* Large alignment is only processed in the last pass. */
1003 if (pred)
1004 continue;
533f611a 1005 gcc_assert (large_base != NULL);
3a42502d
RH
1006
1007 large_alloc += alignb - 1;
1008 large_alloc &= -(HOST_WIDE_INT)alignb;
1009 offset = large_alloc;
1010 large_alloc += stack_vars[i].size;
1011
1012 base = large_base;
1013 base_align = large_align;
1014 }
1f6d3a08
RH
1015
1016 /* Create rtl for each variable based on their location within the
1017 partition. */
1018 for (j = i; j != EOC; j = stack_vars[j].next)
f8da8190 1019 {
f8da8190 1020 expand_one_stack_var_at (stack_vars[j].decl,
3a42502d 1021 base, base_align,
6ddfda8a 1022 offset);
f8da8190 1023 }
1f6d3a08 1024 }
3a42502d
RH
1025
1026 gcc_assert (large_alloc == large_size);
1f6d3a08
RH
1027}
1028
ff28a94d
JH
1029/* Take into account all sizes of partitions and reset DECL_RTLs. */
1030static HOST_WIDE_INT
1031account_stack_vars (void)
1032{
1033 size_t si, j, i, n = stack_vars_num;
1034 HOST_WIDE_INT size = 0;
1035
1036 for (si = 0; si < n; ++si)
1037 {
1038 i = stack_vars_sorted[si];
1039
1040 /* Skip variables that aren't partition representatives, for now. */
1041 if (stack_vars[i].representative != i)
1042 continue;
1043
1044 size += stack_vars[i].size;
1045 for (j = i; j != EOC; j = stack_vars[j].next)
4e3825db 1046 set_rtl (stack_vars[j].decl, NULL);
ff28a94d
JH
1047 }
1048 return size;
1049}
1050
1f6d3a08
RH
1051/* A subroutine of expand_one_var. Called to immediately assign rtl
1052 to a variable to be allocated in the stack frame. */
1053
1054static void
1055expand_one_stack_var (tree var)
1056{
3a42502d
RH
1057 HOST_WIDE_INT size, offset;
1058 unsigned byte_align;
1f6d3a08 1059
4e3825db 1060 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
6f197850 1061 byte_align = align_local_variable (SSAVAR (var));
3a42502d
RH
1062
1063 /* We handle highly aligned variables in expand_stack_vars. */
1064 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1f6d3a08 1065
3a42502d
RH
1066 offset = alloc_stack_frame_space (size, byte_align);
1067
1068 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1069 crtl->max_used_stack_slot_alignment, offset);
1f6d3a08
RH
1070}
1071
1f6d3a08
RH
1072/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1073 that will reside in a hard register. */
1074
1075static void
1076expand_one_hard_reg_var (tree var)
1077{
1078 rest_of_decl_compilation (var, 0, 0);
1079}
1080
1081/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1082 that will reside in a pseudo register. */
1083
1084static void
1085expand_one_register_var (tree var)
1086{
4e3825db
MM
1087 tree decl = SSAVAR (var);
1088 tree type = TREE_TYPE (decl);
cde0f3fd 1089 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1f6d3a08
RH
1090 rtx x = gen_reg_rtx (reg_mode);
1091
4e3825db 1092 set_rtl (var, x);
1f6d3a08
RH
1093
1094 /* Note if the object is a user variable. */
4e3825db
MM
1095 if (!DECL_ARTIFICIAL (decl))
1096 mark_user_reg (x);
1f6d3a08 1097
61021c2c 1098 if (POINTER_TYPE_P (type))
d466b407 1099 mark_reg_pointer (x, get_pointer_alignment (var));
1f6d3a08
RH
1100}
1101
1102/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
128a79fb 1103 has some associated error, e.g. its type is error-mark. We just need
1f6d3a08
RH
1104 to pick something that won't crash the rest of the compiler. */
1105
1106static void
1107expand_one_error_var (tree var)
1108{
1109 enum machine_mode mode = DECL_MODE (var);
1110 rtx x;
1111
1112 if (mode == BLKmode)
1113 x = gen_rtx_MEM (BLKmode, const0_rtx);
1114 else if (mode == VOIDmode)
1115 x = const0_rtx;
1116 else
1117 x = gen_reg_rtx (mode);
1118
1119 SET_DECL_RTL (var, x);
1120}
1121
c22cacf3 1122/* A subroutine of expand_one_var. VAR is a variable that will be
1f6d3a08
RH
1123 allocated to the local stack frame. Return true if we wish to
1124 add VAR to STACK_VARS so that it will be coalesced with other
1125 variables. Return false to allocate VAR immediately.
1126
1127 This function is used to reduce the number of variables considered
1128 for coalescing, which reduces the size of the quadratic problem. */
1129
1130static bool
1131defer_stack_allocation (tree var, bool toplevel)
1132{
7d69de61 1133 /* If stack protection is enabled, *all* stack variables must be deferred,
f3ddd692
JJ
1134 so that we can re-order the strings to the top of the frame.
1135 Similarly for Address Sanitizer. */
de5a5fa1 1136 if (flag_stack_protect || (flag_sanitize & SANITIZE_ADDRESS))
7d69de61
RH
1137 return true;
1138
3a42502d
RH
1139 /* We handle "large" alignment via dynamic allocation. We want to handle
1140 this extra complication in only one place, so defer them. */
1141 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1142 return true;
1143
1f6d3a08
RH
1144 /* Variables in the outermost scope automatically conflict with
1145 every other variable. The only reason to want to defer them
1146 at all is that, after sorting, we can more efficiently pack
1147 small variables in the stack frame. Continue to defer at -O2. */
1148 if (toplevel && optimize < 2)
1149 return false;
1150
1151 /* Without optimization, *most* variables are allocated from the
1152 stack, which makes the quadratic problem large exactly when we
c22cacf3 1153 want compilation to proceed as quickly as possible. On the
1f6d3a08
RH
1154 other hand, we don't want the function's stack frame size to
1155 get completely out of hand. So we avoid adding scalars and
1156 "small" aggregates to the list at all. */
4d5b5e9f
ER
1157 if (optimize == 0
1158 && (tree_low_cst (DECL_SIZE_UNIT (var), 1)
1159 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)))
1f6d3a08
RH
1160 return false;
1161
1162 return true;
1163}
1164
1165/* A subroutine of expand_used_vars. Expand one variable according to
2a7e31df 1166 its flavor. Variables to be placed on the stack are not actually
b8698a0f 1167 expanded yet, merely recorded.
ff28a94d
JH
1168 When REALLY_EXPAND is false, only add stack values to be allocated.
1169 Return stack usage this variable is supposed to take.
1170*/
1f6d3a08 1171
ff28a94d
JH
1172static HOST_WIDE_INT
1173expand_one_var (tree var, bool toplevel, bool really_expand)
1f6d3a08 1174{
3a42502d 1175 unsigned int align = BITS_PER_UNIT;
4e3825db 1176 tree origvar = var;
3a42502d 1177
4e3825db
MM
1178 var = SSAVAR (var);
1179
3a42502d 1180 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
2e3f842f 1181 {
2e3f842f
L
1182 /* Because we don't know if VAR will be in register or on stack,
1183 we conservatively assume it will be on stack even if VAR is
1184 eventually put into register after RA pass. For non-automatic
1185 variables, which won't be on stack, we collect alignment of
1186 type and ignore user specified alignment. */
1187 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
ae58e548
JJ
1188 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1189 TYPE_MODE (TREE_TYPE (var)),
1190 TYPE_ALIGN (TREE_TYPE (var)));
f3184b4c
JJ
1191 else if (DECL_HAS_VALUE_EXPR_P (var)
1192 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1193 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1194 or variables which were assigned a stack slot already by
1195 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1196 changed from the offset chosen to it. */
1197 align = crtl->stack_alignment_estimated;
2e3f842f 1198 else
ae58e548 1199 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
2e3f842f 1200
3a42502d
RH
1201 /* If the variable alignment is very large we'll dynamicaly allocate
1202 it, which means that in-frame portion is just a pointer. */
1203 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1204 align = POINTER_SIZE;
1205 }
1206
1207 if (SUPPORTS_STACK_ALIGNMENT
1208 && crtl->stack_alignment_estimated < align)
1209 {
1210 /* stack_alignment_estimated shouldn't change after stack
1211 realign decision made */
c3284718 1212 gcc_assert (!crtl->stack_realign_processed);
3a42502d 1213 crtl->stack_alignment_estimated = align;
2e3f842f
L
1214 }
1215
3a42502d
RH
1216 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1217 So here we only make sure stack_alignment_needed >= align. */
1218 if (crtl->stack_alignment_needed < align)
1219 crtl->stack_alignment_needed = align;
1220 if (crtl->max_used_stack_slot_alignment < align)
1221 crtl->max_used_stack_slot_alignment = align;
1222
4e3825db
MM
1223 if (TREE_CODE (origvar) == SSA_NAME)
1224 {
1225 gcc_assert (TREE_CODE (var) != VAR_DECL
1226 || (!DECL_EXTERNAL (var)
1227 && !DECL_HAS_VALUE_EXPR_P (var)
1228 && !TREE_STATIC (var)
4e3825db
MM
1229 && TREE_TYPE (var) != error_mark_node
1230 && !DECL_HARD_REGISTER (var)
1231 && really_expand));
1232 }
1233 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
4846b435 1234 ;
1f6d3a08
RH
1235 else if (DECL_EXTERNAL (var))
1236 ;
833b3afe 1237 else if (DECL_HAS_VALUE_EXPR_P (var))
1f6d3a08
RH
1238 ;
1239 else if (TREE_STATIC (var))
7e8b322a 1240 ;
eb7adebc 1241 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1f6d3a08
RH
1242 ;
1243 else if (TREE_TYPE (var) == error_mark_node)
ff28a94d
JH
1244 {
1245 if (really_expand)
1246 expand_one_error_var (var);
1247 }
4e3825db 1248 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
ff28a94d
JH
1249 {
1250 if (really_expand)
1251 expand_one_hard_reg_var (var);
1252 }
1f6d3a08 1253 else if (use_register_for_decl (var))
ff28a94d
JH
1254 {
1255 if (really_expand)
4e3825db 1256 expand_one_register_var (origvar);
ff28a94d 1257 }
56099f00 1258 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
7604eb4e 1259 {
56099f00 1260 /* Reject variables which cover more than half of the address-space. */
7604eb4e
JJ
1261 if (really_expand)
1262 {
1263 error ("size of variable %q+D is too large", var);
1264 expand_one_error_var (var);
1265 }
1266 }
1f6d3a08 1267 else if (defer_stack_allocation (var, toplevel))
4e3825db 1268 add_stack_var (origvar);
1f6d3a08 1269 else
ff28a94d 1270 {
bd9f1b4b 1271 if (really_expand)
4e3825db 1272 expand_one_stack_var (origvar);
ff28a94d
JH
1273 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1274 }
1275 return 0;
1f6d3a08
RH
1276}
1277
1278/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1279 expanding variables. Those variables that can be put into registers
1280 are allocated pseudos; those that can't are put on the stack.
1281
1282 TOPLEVEL is true if this is the outermost BLOCK. */
1283
1284static void
1285expand_used_vars_for_block (tree block, bool toplevel)
1286{
1f6d3a08
RH
1287 tree t;
1288
1f6d3a08 1289 /* Expand all variables at this level. */
910ad8de 1290 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1ace6185
JJ
1291 if (TREE_USED (t)
1292 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1293 || !DECL_NONSHAREABLE (t)))
ff28a94d 1294 expand_one_var (t, toplevel, true);
1f6d3a08 1295
1f6d3a08
RH
1296 /* Expand all variables at containing levels. */
1297 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1298 expand_used_vars_for_block (t, false);
1f6d3a08
RH
1299}
1300
1301/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1302 and clear TREE_USED on all local variables. */
1303
1304static void
1305clear_tree_used (tree block)
1306{
1307 tree t;
1308
910ad8de 1309 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1f6d3a08 1310 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1ace6185
JJ
1311 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1312 || !DECL_NONSHAREABLE (t))
1f6d3a08
RH
1313 TREE_USED (t) = 0;
1314
1315 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1316 clear_tree_used (t);
1317}
1318
f6bc1c4a
HS
1319enum {
1320 SPCT_FLAG_DEFAULT = 1,
1321 SPCT_FLAG_ALL = 2,
1322 SPCT_FLAG_STRONG = 3
1323};
1324
7d69de61
RH
1325/* Examine TYPE and determine a bit mask of the following features. */
1326
1327#define SPCT_HAS_LARGE_CHAR_ARRAY 1
1328#define SPCT_HAS_SMALL_CHAR_ARRAY 2
1329#define SPCT_HAS_ARRAY 4
1330#define SPCT_HAS_AGGREGATE 8
1331
1332static unsigned int
1333stack_protect_classify_type (tree type)
1334{
1335 unsigned int ret = 0;
1336 tree t;
1337
1338 switch (TREE_CODE (type))
1339 {
1340 case ARRAY_TYPE:
1341 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1342 if (t == char_type_node
1343 || t == signed_char_type_node
1344 || t == unsigned_char_type_node)
1345 {
15362b89
JJ
1346 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1347 unsigned HOST_WIDE_INT len;
7d69de61 1348
15362b89
JJ
1349 if (!TYPE_SIZE_UNIT (type)
1350 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1351 len = max;
7d69de61 1352 else
15362b89 1353 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
7d69de61
RH
1354
1355 if (len < max)
1356 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1357 else
1358 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1359 }
1360 else
1361 ret = SPCT_HAS_ARRAY;
1362 break;
1363
1364 case UNION_TYPE:
1365 case QUAL_UNION_TYPE:
1366 case RECORD_TYPE:
1367 ret = SPCT_HAS_AGGREGATE;
1368 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1369 if (TREE_CODE (t) == FIELD_DECL)
1370 ret |= stack_protect_classify_type (TREE_TYPE (t));
1371 break;
1372
1373 default:
1374 break;
1375 }
1376
1377 return ret;
1378}
1379
a4d05547
KH
1380/* Return nonzero if DECL should be segregated into the "vulnerable" upper
1381 part of the local stack frame. Remember if we ever return nonzero for
7d69de61
RH
1382 any variable in this function. The return value is the phase number in
1383 which the variable should be allocated. */
1384
1385static int
1386stack_protect_decl_phase (tree decl)
1387{
1388 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1389 int ret = 0;
1390
1391 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1392 has_short_buffer = true;
1393
f6bc1c4a
HS
1394 if (flag_stack_protect == SPCT_FLAG_ALL
1395 || flag_stack_protect == SPCT_FLAG_STRONG)
7d69de61
RH
1396 {
1397 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1398 && !(bits & SPCT_HAS_AGGREGATE))
1399 ret = 1;
1400 else if (bits & SPCT_HAS_ARRAY)
1401 ret = 2;
1402 }
1403 else
1404 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1405
1406 if (ret)
1407 has_protected_decls = true;
1408
1409 return ret;
1410}
1411
1412/* Two helper routines that check for phase 1 and phase 2. These are used
1413 as callbacks for expand_stack_vars. */
1414
1415static bool
f3ddd692
JJ
1416stack_protect_decl_phase_1 (size_t i)
1417{
1418 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1419}
1420
1421static bool
1422stack_protect_decl_phase_2 (size_t i)
7d69de61 1423{
f3ddd692 1424 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
7d69de61
RH
1425}
1426
f3ddd692
JJ
1427/* And helper function that checks for asan phase (with stack protector
1428 it is phase 3). This is used as callback for expand_stack_vars.
1429 Returns true if any of the vars in the partition need to be protected. */
1430
7d69de61 1431static bool
f3ddd692 1432asan_decl_phase_3 (size_t i)
7d69de61 1433{
f3ddd692
JJ
1434 while (i != EOC)
1435 {
1436 if (asan_protect_stack_decl (stack_vars[i].decl))
1437 return true;
1438 i = stack_vars[i].next;
1439 }
1440 return false;
7d69de61
RH
1441}
1442
1443/* Ensure that variables in different stack protection phases conflict
1444 so that they are not merged and share the same stack slot. */
1445
1446static void
1447add_stack_protection_conflicts (void)
1448{
1449 size_t i, j, n = stack_vars_num;
1450 unsigned char *phase;
1451
1452 phase = XNEWVEC (unsigned char, n);
1453 for (i = 0; i < n; ++i)
1454 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1455
1456 for (i = 0; i < n; ++i)
1457 {
1458 unsigned char ph_i = phase[i];
9b44f5d9 1459 for (j = i + 1; j < n; ++j)
7d69de61
RH
1460 if (ph_i != phase[j])
1461 add_stack_var_conflict (i, j);
1462 }
1463
1464 XDELETEVEC (phase);
1465}
1466
1467/* Create a decl for the guard at the top of the stack frame. */
1468
1469static void
1470create_stack_guard (void)
1471{
c2255bc4
AH
1472 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1473 VAR_DECL, NULL, ptr_type_node);
7d69de61
RH
1474 TREE_THIS_VOLATILE (guard) = 1;
1475 TREE_USED (guard) = 1;
1476 expand_one_stack_var (guard);
cb91fab0 1477 crtl->stack_protect_guard = guard;
7d69de61
RH
1478}
1479
ff28a94d 1480/* Prepare for expanding variables. */
b8698a0f 1481static void
ff28a94d
JH
1482init_vars_expansion (void)
1483{
3f9b14ff
SB
1484 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1485 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
ff28a94d 1486
3f9b14ff
SB
1487 /* A map from decl to stack partition. */
1488 decl_to_stack_part = pointer_map_create ();
ff28a94d
JH
1489
1490 /* Initialize local stack smashing state. */
1491 has_protected_decls = false;
1492 has_short_buffer = false;
1493}
1494
1495/* Free up stack variable graph data. */
1496static void
1497fini_vars_expansion (void)
1498{
3f9b14ff
SB
1499 bitmap_obstack_release (&stack_var_bitmap_obstack);
1500 if (stack_vars)
1501 XDELETEVEC (stack_vars);
1502 if (stack_vars_sorted)
1503 XDELETEVEC (stack_vars_sorted);
ff28a94d 1504 stack_vars = NULL;
9b44f5d9 1505 stack_vars_sorted = NULL;
ff28a94d 1506 stack_vars_alloc = stack_vars_num = 0;
47598145
MM
1507 pointer_map_destroy (decl_to_stack_part);
1508 decl_to_stack_part = NULL;
ff28a94d
JH
1509}
1510
30925d94
AO
1511/* Make a fair guess for the size of the stack frame of the function
1512 in NODE. This doesn't have to be exact, the result is only used in
1513 the inline heuristics. So we don't want to run the full stack var
1514 packing algorithm (which is quadratic in the number of stack vars).
1515 Instead, we calculate the total size of all stack vars. This turns
1516 out to be a pretty fair estimate -- packing of stack vars doesn't
1517 happen very often. */
b5a430f3 1518
ff28a94d 1519HOST_WIDE_INT
30925d94 1520estimated_stack_frame_size (struct cgraph_node *node)
ff28a94d
JH
1521{
1522 HOST_WIDE_INT size = 0;
b5a430f3 1523 size_t i;
bb7e6d55 1524 tree var;
67348ccc 1525 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
30925d94 1526
bb7e6d55 1527 push_cfun (fn);
ff28a94d 1528
3f9b14ff
SB
1529 init_vars_expansion ();
1530
824f71b9
RG
1531 FOR_EACH_LOCAL_DECL (fn, i, var)
1532 if (auto_var_in_fn_p (var, fn->decl))
1533 size += expand_one_var (var, true, false);
b5a430f3 1534
ff28a94d
JH
1535 if (stack_vars_num > 0)
1536 {
b5a430f3
SB
1537 /* Fake sorting the stack vars for account_stack_vars (). */
1538 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1539 for (i = 0; i < stack_vars_num; ++i)
1540 stack_vars_sorted[i] = i;
ff28a94d 1541 size += account_stack_vars ();
ff28a94d 1542 }
3f9b14ff
SB
1543
1544 fini_vars_expansion ();
2e1ec94f 1545 pop_cfun ();
ff28a94d
JH
1546 return size;
1547}
1548
f6bc1c4a
HS
1549/* Helper routine to check if a record or union contains an array field. */
1550
1551static int
1552record_or_union_type_has_array_p (const_tree tree_type)
1553{
1554 tree fields = TYPE_FIELDS (tree_type);
1555 tree f;
1556
1557 for (f = fields; f; f = DECL_CHAIN (f))
1558 if (TREE_CODE (f) == FIELD_DECL)
1559 {
1560 tree field_type = TREE_TYPE (f);
1561 if (RECORD_OR_UNION_TYPE_P (field_type)
1562 && record_or_union_type_has_array_p (field_type))
1563 return 1;
1564 if (TREE_CODE (field_type) == ARRAY_TYPE)
1565 return 1;
1566 }
1567 return 0;
1568}
1569
1f6d3a08 1570/* Expand all variables used in the function. */
727a31fa 1571
f3ddd692 1572static rtx
727a31fa
RH
1573expand_used_vars (void)
1574{
c021f10b 1575 tree var, outer_block = DECL_INITIAL (current_function_decl);
6e1aa848 1576 vec<tree> maybe_local_decls = vNULL;
f3ddd692 1577 rtx var_end_seq = NULL_RTX;
70b5e7dc 1578 struct pointer_map_t *ssa_name_decls;
4e3825db 1579 unsigned i;
c021f10b 1580 unsigned len;
f6bc1c4a 1581 bool gen_stack_protect_signal = false;
727a31fa 1582
1f6d3a08
RH
1583 /* Compute the phase of the stack frame for this function. */
1584 {
1585 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1586 int off = STARTING_FRAME_OFFSET % align;
1587 frame_phase = off ? align - off : 0;
1588 }
727a31fa 1589
3f9b14ff
SB
1590 /* Set TREE_USED on all variables in the local_decls. */
1591 FOR_EACH_LOCAL_DECL (cfun, i, var)
1592 TREE_USED (var) = 1;
1593 /* Clear TREE_USED on all variables associated with a block scope. */
1594 clear_tree_used (DECL_INITIAL (current_function_decl));
1595
ff28a94d 1596 init_vars_expansion ();
7d69de61 1597
70b5e7dc 1598 ssa_name_decls = pointer_map_create ();
4e3825db
MM
1599 for (i = 0; i < SA.map->num_partitions; i++)
1600 {
1601 tree var = partition_to_var (SA.map, i);
1602
ea057359 1603 gcc_assert (!virtual_operand_p (var));
70b5e7dc
RG
1604
1605 /* Assign decls to each SSA name partition, share decls for partitions
1606 we could have coalesced (those with the same type). */
1607 if (SSA_NAME_VAR (var) == NULL_TREE)
1608 {
1609 void **slot = pointer_map_insert (ssa_name_decls, TREE_TYPE (var));
1610 if (!*slot)
1611 *slot = (void *) create_tmp_reg (TREE_TYPE (var), NULL);
1612 replace_ssa_name_symbol (var, (tree) *slot);
1613 }
1614
cfb9edba
EB
1615 /* Always allocate space for partitions based on VAR_DECLs. But for
1616 those based on PARM_DECLs or RESULT_DECLs and which matter for the
1617 debug info, there is no need to do so if optimization is disabled
1618 because all the SSA_NAMEs based on these DECLs have been coalesced
1619 into a single partition, which is thus assigned the canonical RTL
1620 location of the DECLs. */
4e3825db
MM
1621 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1622 expand_one_var (var, true, true);
cfb9edba 1623 else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize)
4e3825db
MM
1624 {
1625 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1626 contain the default def (representing the parm or result itself)
1627 we don't do anything here. But those which don't contain the
1628 default def (representing a temporary based on the parm/result)
1629 we need to allocate space just like for normal VAR_DECLs. */
1630 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1631 {
1632 expand_one_var (var, true, true);
1633 gcc_assert (SA.partition_to_pseudo[i]);
1634 }
1635 }
1636 }
70b5e7dc 1637 pointer_map_destroy (ssa_name_decls);
4e3825db 1638
f6bc1c4a
HS
1639 if (flag_stack_protect == SPCT_FLAG_STRONG)
1640 FOR_EACH_LOCAL_DECL (cfun, i, var)
1641 if (!is_global_var (var))
1642 {
1643 tree var_type = TREE_TYPE (var);
1644 /* Examine local referenced variables that have their addresses taken,
1645 contain an array, or are arrays. */
1646 if (TREE_CODE (var) == VAR_DECL
1647 && (TREE_CODE (var_type) == ARRAY_TYPE
1648 || TREE_ADDRESSABLE (var)
1649 || (RECORD_OR_UNION_TYPE_P (var_type)
1650 && record_or_union_type_has_array_p (var_type))))
1651 {
1652 gen_stack_protect_signal = true;
1653 break;
1654 }
1655 }
1656
cb91fab0 1657 /* At this point all variables on the local_decls with TREE_USED
1f6d3a08 1658 set are not associated with any block scope. Lay them out. */
c021f10b 1659
9771b263 1660 len = vec_safe_length (cfun->local_decls);
c021f10b 1661 FOR_EACH_LOCAL_DECL (cfun, i, var)
1f6d3a08 1662 {
1f6d3a08
RH
1663 bool expand_now = false;
1664
4e3825db
MM
1665 /* Expanded above already. */
1666 if (is_gimple_reg (var))
eb7adebc
MM
1667 {
1668 TREE_USED (var) = 0;
3adcf52c 1669 goto next;
eb7adebc 1670 }
1f6d3a08
RH
1671 /* We didn't set a block for static or extern because it's hard
1672 to tell the difference between a global variable (re)declared
1673 in a local scope, and one that's really declared there to
1674 begin with. And it doesn't really matter much, since we're
1675 not giving them stack space. Expand them now. */
4e3825db 1676 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1f6d3a08
RH
1677 expand_now = true;
1678
1679 /* If the variable is not associated with any block, then it
1680 was created by the optimizers, and could be live anywhere
1681 in the function. */
1682 else if (TREE_USED (var))
1683 expand_now = true;
1684
1685 /* Finally, mark all variables on the list as used. We'll use
1686 this in a moment when we expand those associated with scopes. */
1687 TREE_USED (var) = 1;
1688
1689 if (expand_now)
3adcf52c
JM
1690 expand_one_var (var, true, true);
1691
1692 next:
1693 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
802e9f8e 1694 {
3adcf52c
JM
1695 rtx rtl = DECL_RTL_IF_SET (var);
1696
1697 /* Keep artificial non-ignored vars in cfun->local_decls
1698 chain until instantiate_decls. */
1699 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 1700 add_local_decl (cfun, var);
6c6366f6 1701 else if (rtl == NULL_RTX)
c021f10b
NF
1702 /* If rtl isn't set yet, which can happen e.g. with
1703 -fstack-protector, retry before returning from this
1704 function. */
9771b263 1705 maybe_local_decls.safe_push (var);
802e9f8e 1706 }
1f6d3a08 1707 }
1f6d3a08 1708
c021f10b
NF
1709 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1710
1711 +-----------------+-----------------+
1712 | ...processed... | ...duplicates...|
1713 +-----------------+-----------------+
1714 ^
1715 +-- LEN points here.
1716
1717 We just want the duplicates, as those are the artificial
1718 non-ignored vars that we want to keep until instantiate_decls.
1719 Move them down and truncate the array. */
9771b263
DN
1720 if (!vec_safe_is_empty (cfun->local_decls))
1721 cfun->local_decls->block_remove (0, len);
c021f10b 1722
1f6d3a08
RH
1723 /* At this point, all variables within the block tree with TREE_USED
1724 set are actually used by the optimized function. Lay them out. */
1725 expand_used_vars_for_block (outer_block, true);
1726
1727 if (stack_vars_num > 0)
1728 {
47598145 1729 add_scope_conflicts ();
1f6d3a08 1730
c22cacf3 1731 /* If stack protection is enabled, we don't share space between
7d69de61
RH
1732 vulnerable data and non-vulnerable data. */
1733 if (flag_stack_protect)
1734 add_stack_protection_conflicts ();
1735
c22cacf3 1736 /* Now that we have collected all stack variables, and have computed a
1f6d3a08
RH
1737 minimal interference graph, attempt to save some stack space. */
1738 partition_stack_vars ();
1739 if (dump_file)
1740 dump_stack_var_partition ();
7d69de61
RH
1741 }
1742
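  /* Decide whether to create a stack guard: -fstack-protector-all always
     creates one; -fstack-protector-strong also reacts to the signal
     computed above; the default -fstack-protector mode only reacts to
     alloca calls and explicitly protected decls.  */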
f6bc1c4a
HS
1743 switch (flag_stack_protect)
1744 {
1745 case SPCT_FLAG_ALL:
1746 create_stack_guard ();
1747 break;
1748
1749 case SPCT_FLAG_STRONG:
1750 if (gen_stack_protect_signal
1751 || cfun->calls_alloca || has_protected_decls)
1752 create_stack_guard ();
1753 break;
1754
1755 case SPCT_FLAG_DEFAULT:
1756 if (cfun->calls_alloca || has_protected_decls)
c3284718 1757 create_stack_guard ();
f6bc1c4a
HS
1758 break;
1759
1760 default:
1761 ;
1762 }
1f6d3a08 1763
7d69de61
RH
1764 /* Assign rtl to each variable based on these partitions. */
1765 if (stack_vars_num > 0)
1766 {
f3ddd692
JJ
1767 struct stack_vars_data data;
1768
6e1aa848
DN
1769 data.asan_vec = vNULL;
1770 data.asan_decl_vec = vNULL;
f3ddd692 1771
7d69de61
RH
1772 /* Reorder decls to be protected by iterating over the variables
1773 array multiple times, and allocating out of each phase in turn. */
c22cacf3 1774 /* ??? We could probably integrate this into the qsort we did
7d69de61
RH
1775 earlier, such that we naturally see these variables first,
1776 and thus naturally allocate things in the right order. */
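      /* The phases keep vulnerable character arrays (phase 1) grouped
	 together, followed by other arrays and array-containing aggregates
	 (phase 2), so that an overflowing write is more likely to hit the
	 guard than unrelated locals.  */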
1777 if (has_protected_decls)
1778 {
1779 /* Phase 1 contains only character arrays. */
f3ddd692 1780 expand_stack_vars (stack_protect_decl_phase_1, &data);
7d69de61
RH
1781
1782 /* Phase 2 contains other kinds of arrays. */
1783 if (flag_stack_protect == 2)
f3ddd692 1784 expand_stack_vars (stack_protect_decl_phase_2, &data);
7d69de61
RH
1785 }
1786
de5a5fa1 1787 if (flag_sanitize & SANITIZE_ADDRESS)
f3ddd692
JJ
1788 /* Phase 3, any partitions that need asan protection
1789 in addition to phase 1 and 2. */
1790 expand_stack_vars (asan_decl_phase_3, &data);
1791
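  /* If any partition received asan protection, record the frame offsets
     bracketing the red zone and build the instrumentation sequence with
     asan_emit_stack_protection; it is handed back to our caller as
     VAR_END_SEQ.  */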
9771b263 1792 if (!data.asan_vec.is_empty ())
f3ddd692
JJ
1793 {
1794 HOST_WIDE_INT prev_offset = frame_offset;
1795 HOST_WIDE_INT offset
1796 = alloc_stack_frame_space (ASAN_RED_ZONE_SIZE,
1797 ASAN_RED_ZONE_SIZE);
9771b263
DN
1798 data.asan_vec.safe_push (prev_offset);
1799 data.asan_vec.safe_push (offset);
f3ddd692
JJ
1800
1801 var_end_seq
1802 = asan_emit_stack_protection (virtual_stack_vars_rtx,
9771b263 1803 data.asan_vec.address (),
c3284718 1804 data.asan_decl_vec.address (),
9771b263 1805 data.asan_vec.length ());
f3ddd692
JJ
1806 }
1807
1808 expand_stack_vars (NULL, &data);
1809
9771b263
DN
1810 data.asan_vec.release ();
1811 data.asan_decl_vec.release ();
1f6d3a08
RH
1812 }
1813
3f9b14ff
SB
1814 fini_vars_expansion ();
1815
6c6366f6
JJ
1816 /* If there were any artificial non-ignored vars without rtl
1817 found earlier, see if deferred stack allocation hasn't assigned
1818 rtl to them. */
9771b263 1819 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
6c6366f6 1820 {
6c6366f6
JJ
1821 rtx rtl = DECL_RTL_IF_SET (var);
1822
6c6366f6
JJ
1823 /* Keep artificial non-ignored vars in cfun->local_decls
1824 chain until instantiate_decls. */
1825 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 1826 add_local_decl (cfun, var);
6c6366f6 1827 }
9771b263 1828 maybe_local_decls.release ();
6c6366f6 1829
1f6d3a08
RH
1830 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1831 if (STACK_ALIGNMENT_NEEDED)
1832 {
1833 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1834 if (!FRAME_GROWS_DOWNWARD)
1835 frame_offset += align - 1;
1836 frame_offset &= -align;
1837 }
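  /* For illustration: with a 16-byte boundary and an upward-growing frame,
     a frame_offset of 20 becomes (20 + 15) & -16 == 32; for a
     downward-growing frame the mask alone rounds the negative offset away
     from zero.  */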
f3ddd692
JJ
1838
1839 return var_end_seq;
727a31fa
RH
1840}
1841
1842
b7211528
SB
1843/* If we need to produce a detailed dump, print the tree representation
1844 for STMT to the dump file. SINCE is the last RTX after which the RTL
1845 generated for STMT should have been appended. */
1846
1847static void
726a989a 1848maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
b7211528
SB
1849{
1850 if (dump_file && (dump_flags & TDF_DETAILS))
1851 {
1852 fprintf (dump_file, "\n;; ");
b5b8b0ac
AO
1853 print_gimple_stmt (dump_file, stmt, 0,
1854 TDF_SLIM | (dump_flags & TDF_LINENO));
b7211528
SB
1855 fprintf (dump_file, "\n");
1856
1857 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1858 }
1859}
1860
8b11009b
ZD
1861/* Maps the blocks that do not contain tree labels to rtx labels. */
1862
1863static struct pointer_map_t *lab_rtx_for_bb;
1864
a9b77cd1
ZD
1865/* Returns the label_rtx expression for a label starting basic block BB. */
1866
1867static rtx
726a989a 1868label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
a9b77cd1 1869{
726a989a
RB
1870 gimple_stmt_iterator gsi;
1871 tree lab;
1872 gimple lab_stmt;
8b11009b 1873 void **elt;
a9b77cd1
ZD
1874
1875 if (bb->flags & BB_RTL)
1876 return block_label (bb);
1877
8b11009b
ZD
1878 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1879 if (elt)
ae50c0cb 1880 return (rtx) *elt;
8b11009b
ZD
1881
1882 /* Find the tree label if it is present. */
b8698a0f 1883
726a989a 1884 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
a9b77cd1 1885 {
726a989a
RB
1886 lab_stmt = gsi_stmt (gsi);
1887 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
a9b77cd1
ZD
1888 break;
1889
726a989a 1890 lab = gimple_label_label (lab_stmt);
a9b77cd1
ZD
1891 if (DECL_NONLOCAL (lab))
1892 break;
1893
1894 return label_rtx (lab);
1895 }
1896
8b11009b
ZD
1897 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1898 *elt = gen_label_rtx ();
ae50c0cb 1899 return (rtx) *elt;
a9b77cd1
ZD
1900}
1901
726a989a 1902
529ff441
MM
1903/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1904 of a basic block where we just expanded the conditional at the end,
315adeda
MM
1905 possibly clean up the CFG and instruction sequence. LAST is the
1906 last instruction before the just emitted jump sequence. */
529ff441
MM
1907
1908static void
315adeda 1909maybe_cleanup_end_of_block (edge e, rtx last)
529ff441
MM
1910{
1911 /* Special case: when jumpif decides that the condition is
1912 trivial it emits an unconditional jump (and the necessary
1913 barrier). But we still have two edges, the fallthru one is
1914 wrong. purge_dead_edges would clean this up later. Unfortunately
1915 we have to insert insns (and split edges) before
1916 find_many_sub_basic_blocks and hence before purge_dead_edges.
1917 But splitting edges might create new blocks which depend on the
1918 fact that if there are two edges there's no barrier. So the
1919 barrier would get lost and verify_flow_info would ICE. Instead
1920 of auditing all edge splitters to care for the barrier (which
1921 normally isn't there in a cleaned CFG), fix it here. */
1922 if (BARRIER_P (get_last_insn ()))
1923 {
529ff441
MM
1924 rtx insn;
1925 remove_edge (e);
 1926 /* Now we have a single successor block; if we have insns to
1927 insert on the remaining edge we potentially will insert
1928 it at the end of this block (if the dest block isn't feasible)
1929 in order to avoid splitting the edge. This insertion will take
1930 place in front of the last jump. But we might have emitted
1931 multiple jumps (conditional and one unconditional) to the
1932 same destination. Inserting in front of the last one then
1933 is a problem. See PR 40021. We fix this by deleting all
1934 jumps except the last unconditional one. */
1935 insn = PREV_INSN (get_last_insn ());
1936 /* Make sure we have an unconditional jump. Otherwise we're
1937 confused. */
1938 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
315adeda 1939 for (insn = PREV_INSN (insn); insn != last;)
529ff441
MM
1940 {
1941 insn = PREV_INSN (insn);
1942 if (JUMP_P (NEXT_INSN (insn)))
90eb3e33 1943 {
8a269cb7 1944 if (!any_condjump_p (NEXT_INSN (insn)))
90eb3e33
JJ
1945 {
1946 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1947 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1948 }
1949 delete_insn (NEXT_INSN (insn));
1950 }
529ff441
MM
1951 }
1952 }
1953}
1954
726a989a 1955/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
80c7a9eb
RH
1956 Returns a new basic block if we've terminated the current basic
1957 block and created a new one. */
1958
1959static basic_block
726a989a 1960expand_gimple_cond (basic_block bb, gimple stmt)
80c7a9eb
RH
1961{
1962 basic_block new_bb, dest;
1963 edge new_edge;
1964 edge true_edge;
1965 edge false_edge;
b7211528 1966 rtx last2, last;
28ed065e
MM
1967 enum tree_code code;
1968 tree op0, op1;
1969
1970 code = gimple_cond_code (stmt);
1971 op0 = gimple_cond_lhs (stmt);
1972 op1 = gimple_cond_rhs (stmt);
1973 /* We're sometimes presented with such code:
1974 D.123_1 = x < y;
1975 if (D.123_1 != 0)
1976 ...
1977 This would expand to two comparisons which then later might
1978 be cleaned up by combine. But some pattern matchers like if-conversion
1979 work better when there's only one compare, so make up for this
 1980 here as a special exception if TER would have made the same change. */
31348d52 1981 if (SA.values
28ed065e 1982 && TREE_CODE (op0) == SSA_NAME
31348d52
RB
1983 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
1984 && TREE_CODE (op1) == INTEGER_CST
1985 && ((gimple_cond_code (stmt) == NE_EXPR
1986 && integer_zerop (op1))
1987 || (gimple_cond_code (stmt) == EQ_EXPR
1988 && integer_onep (op1)))
28ed065e
MM
1989 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1990 {
1991 gimple second = SSA_NAME_DEF_STMT (op0);
e83f4b68 1992 if (gimple_code (second) == GIMPLE_ASSIGN)
28ed065e 1993 {
e83f4b68
MM
1994 enum tree_code code2 = gimple_assign_rhs_code (second);
1995 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1996 {
1997 code = code2;
1998 op0 = gimple_assign_rhs1 (second);
1999 op1 = gimple_assign_rhs2 (second);
2000 }
2001 /* If jumps are cheap turn some more codes into
2002 jumpy sequences. */
2003 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
2004 {
2005 if ((code2 == BIT_AND_EXPR
2006 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2007 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2008 || code2 == TRUTH_AND_EXPR)
2009 {
2010 code = TRUTH_ANDIF_EXPR;
2011 op0 = gimple_assign_rhs1 (second);
2012 op1 = gimple_assign_rhs2 (second);
2013 }
2014 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2015 {
2016 code = TRUTH_ORIF_EXPR;
2017 op0 = gimple_assign_rhs1 (second);
2018 op1 = gimple_assign_rhs2 (second);
2019 }
2020 }
28ed065e
MM
2021 }
2022 }
b7211528
SB
2023
2024 last2 = last = get_last_insn ();
80c7a9eb
RH
2025
2026 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5368224f 2027 set_curr_insn_location (gimple_location (stmt));
80c7a9eb
RH
2028
2029 /* These flags have no purpose in RTL land. */
2030 true_edge->flags &= ~EDGE_TRUE_VALUE;
2031 false_edge->flags &= ~EDGE_FALSE_VALUE;
2032
2033 /* We can either have a pure conditional jump with one fallthru edge or
2034 two-way jump that needs to be decomposed into two basic blocks. */
a9b77cd1 2035 if (false_edge->dest == bb->next_bb)
80c7a9eb 2036 {
40e90eac
JJ
2037 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2038 true_edge->probability);
726a989a 2039 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2f13f2de 2040 if (true_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2041 set_curr_insn_location (true_edge->goto_locus);
a9b77cd1 2042 false_edge->flags |= EDGE_FALLTHRU;
315adeda 2043 maybe_cleanup_end_of_block (false_edge, last);
80c7a9eb
RH
2044 return NULL;
2045 }
a9b77cd1 2046 if (true_edge->dest == bb->next_bb)
80c7a9eb 2047 {
40e90eac
JJ
2048 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2049 false_edge->probability);
726a989a 2050 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2f13f2de 2051 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2052 set_curr_insn_location (false_edge->goto_locus);
a9b77cd1 2053 true_edge->flags |= EDGE_FALLTHRU;
315adeda 2054 maybe_cleanup_end_of_block (true_edge, last);
80c7a9eb
RH
2055 return NULL;
2056 }
80c7a9eb 2057
40e90eac
JJ
2058 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2059 true_edge->probability);
80c7a9eb 2060 last = get_last_insn ();
2f13f2de 2061 if (false_edge->goto_locus != UNKNOWN_LOCATION)
5368224f 2062 set_curr_insn_location (false_edge->goto_locus);
a9b77cd1 2063 emit_jump (label_rtx_for_bb (false_edge->dest));
80c7a9eb
RH
2064
2065 BB_END (bb) = last;
2066 if (BARRIER_P (BB_END (bb)))
2067 BB_END (bb) = PREV_INSN (BB_END (bb));
2068 update_bb_for_insn (bb);
2069
2070 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2071 dest = false_edge->dest;
2072 redirect_edge_succ (false_edge, new_bb);
2073 false_edge->flags |= EDGE_FALLTHRU;
2074 new_bb->count = false_edge->count;
2075 new_bb->frequency = EDGE_FREQUENCY (false_edge);
7d776ee2
RG
2076 if (current_loops && bb->loop_father)
2077 add_bb_to_loop (new_bb, bb->loop_father);
80c7a9eb
RH
2078 new_edge = make_edge (new_bb, dest, 0);
2079 new_edge->probability = REG_BR_PROB_BASE;
2080 new_edge->count = new_bb->count;
2081 if (BARRIER_P (BB_END (new_bb)))
2082 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2083 update_bb_for_insn (new_bb);
2084
726a989a 2085 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
c22cacf3 2086
2f13f2de 2087 if (true_edge->goto_locus != UNKNOWN_LOCATION)
7787b4aa 2088 {
5368224f
DC
2089 set_curr_insn_location (true_edge->goto_locus);
2090 true_edge->goto_locus = curr_insn_location ();
7787b4aa 2091 }
7787b4aa 2092
80c7a9eb
RH
2093 return new_bb;
2094}
2095
0a35513e
AH
2096/* Mark all calls that can have a transaction restart. */
2097
2098static void
2099mark_transaction_restart_calls (gimple stmt)
2100{
2101 struct tm_restart_node dummy;
2102 void **slot;
2103
2104 if (!cfun->gimple_df->tm_restart)
2105 return;
2106
2107 dummy.stmt = stmt;
2108 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
2109 if (slot)
2110 {
2111 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
2112 tree list = n->label_or_list;
2113 rtx insn;
2114
2115 for (insn = next_real_insn (get_last_insn ());
2116 !CALL_P (insn);
2117 insn = next_real_insn (insn))
2118 continue;
2119
2120 if (TREE_CODE (list) == LABEL_DECL)
2121 add_reg_note (insn, REG_TM, label_rtx (list));
2122 else
2123 for (; list ; list = TREE_CHAIN (list))
2124 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2125 }
2126}
2127
28ed065e
MM
2128/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2129 statement STMT. */
2130
2131static void
2132expand_call_stmt (gimple stmt)
2133{
25583c4f 2134 tree exp, decl, lhs;
e23817b3 2135 bool builtin_p;
e7925582 2136 size_t i;
28ed065e 2137
25583c4f
RS
2138 if (gimple_call_internal_p (stmt))
2139 {
2140 expand_internal_call (stmt);
2141 return;
2142 }
2143
28ed065e
MM
2144 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2145
2146 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
e23817b3
RG
2147 decl = gimple_call_fndecl (stmt);
2148 builtin_p = decl && DECL_BUILT_IN (decl);
2149
e7925582
EB
2150 /* If this is not a builtin function, the function type through which the
2151 call is made may be different from the type of the function. */
2152 if (!builtin_p)
2153 CALL_EXPR_FN (exp)
b25aa0e8
EB
2154 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2155 CALL_EXPR_FN (exp));
e7925582 2156
28ed065e
MM
2157 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2158 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2159
2160 for (i = 0; i < gimple_call_num_args (stmt); i++)
e23817b3
RG
2161 {
2162 tree arg = gimple_call_arg (stmt, i);
2163 gimple def;
2164 /* TER addresses into arguments of builtin functions so we have a
2165 chance to infer more correct alignment information. See PR39954. */
2166 if (builtin_p
2167 && TREE_CODE (arg) == SSA_NAME
2168 && (def = get_gimple_for_ssa_name (arg))
2169 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2170 arg = gimple_assign_rhs1 (def);
2171 CALL_EXPR_ARG (exp, i) = arg;
2172 }
28ed065e 2173
93f28ca7 2174 if (gimple_has_side_effects (stmt))
28ed065e
MM
2175 TREE_SIDE_EFFECTS (exp) = 1;
2176
93f28ca7 2177 if (gimple_call_nothrow_p (stmt))
28ed065e
MM
2178 TREE_NOTHROW (exp) = 1;
2179
2180 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2181 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
63d2a353
MM
2182 if (decl
2183 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13e49da9
TV
2184 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2185 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
63d2a353
MM
2186 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2187 else
2188 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
28ed065e
MM
2189 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2190 SET_EXPR_LOCATION (exp, gimple_location (stmt));
28ed065e 2191
ddb555ed
JJ
2192 /* Ensure RTL is created for debug args. */
2193 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2194 {
9771b263 2195 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
ddb555ed
JJ
2196 unsigned int ix;
2197 tree dtemp;
2198
2199 if (debug_args)
9771b263 2200 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
ddb555ed
JJ
2201 {
2202 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2203 expand_debug_expr (dtemp);
2204 }
2205 }
2206
25583c4f 2207 lhs = gimple_call_lhs (stmt);
28ed065e
MM
2208 if (lhs)
2209 expand_assignment (lhs, exp, false);
2210 else
2211 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
0a35513e
AH
2212
2213 mark_transaction_restart_calls (stmt);
28ed065e
MM
2214}
2215
862d0b35
DN
2216
2217/* Generate RTL for an asm statement (explicit assembler code).
2218 STRING is a STRING_CST node containing the assembler code text,
2219 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2220 insn is volatile; don't optimize it. */
2221
2222static void
2223expand_asm_loc (tree string, int vol, location_t locus)
2224{
2225 rtx body;
2226
2227 if (TREE_CODE (string) == ADDR_EXPR)
2228 string = TREE_OPERAND (string, 0);
2229
2230 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2231 ggc_strdup (TREE_STRING_POINTER (string)),
2232 locus);
2233
2234 MEM_VOLATILE_P (body) = vol;
2235
2236 emit_insn (body);
2237}
2238
2239/* Return the number of times character C occurs in string S. */
2240static int
2241n_occurrences (int c, const char *s)
2242{
2243 int n = 0;
2244 while (*s)
2245 n += (*s++ == c);
2246 return n;
2247}
2248
2249/* A subroutine of expand_asm_operands. Check that all operands have
2250 the same number of alternatives. Return true if so. */
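/* For example, an output constraint "=r,m" contains one comma and thus
   describes two alternatives; every other operand's constraint must then
   contain exactly one comma as well.  */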
2251
2252static bool
2253check_operand_nalternatives (tree outputs, tree inputs)
2254{
2255 if (outputs || inputs)
2256 {
2257 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
2258 int nalternatives
2259 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
2260 tree next = inputs;
2261
2262 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2263 {
2264 error ("too many alternatives in %<asm%>");
2265 return false;
2266 }
2267
2268 tmp = outputs;
2269 while (tmp)
2270 {
2271 const char *constraint
2272 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2273
2274 if (n_occurrences (',', constraint) != nalternatives)
2275 {
2276 error ("operand constraints for %<asm%> differ "
2277 "in number of alternatives");
2278 return false;
2279 }
2280
2281 if (TREE_CHAIN (tmp))
2282 tmp = TREE_CHAIN (tmp);
2283 else
2284 tmp = next, next = 0;
2285 }
2286 }
2287
2288 return true;
2289}
2290
 2291/* Check for overlap between registers marked in CLOBBERED_REGS and
 2292 anything inappropriate in T. Emit an error and return true if an
 2293 overlap is found, false if T is OK. */
2294
2295static bool
2296tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2297{
2298 /* Conflicts between asm-declared register variables and the clobber
2299 list are not allowed. */
2300 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2301
2302 if (overlap)
2303 {
2304 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2305 DECL_NAME (overlap));
2306
2307 /* Reset registerness to stop multiple errors emitted for a single
2308 variable. */
2309 DECL_REGISTER (overlap) = 0;
2310 return true;
2311 }
2312
2313 return false;
2314}
2315
2316/* Generate RTL for an asm statement with arguments.
2317 STRING is the instruction template.
2318 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2319 Each output or input has an expression in the TREE_VALUE and
2320 a tree list in TREE_PURPOSE which in turn contains a constraint
2321 name in TREE_VALUE (or NULL_TREE) and a constraint string
2322 in TREE_PURPOSE.
2323 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2324 that is clobbered by this insn.
2325
2326 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2327 should be the fallthru basic block of the asm goto.
2328
2329 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2330 Some elements of OUTPUTS may be replaced with trees representing temporary
2331 values. The caller should copy those temporary values to the originally
2332 specified lvalues.
2333
2334 VOL nonzero means the insn is volatile; don't optimize it. */
2335
2336static void
2337expand_asm_operands (tree string, tree outputs, tree inputs,
2338 tree clobbers, tree labels, basic_block fallthru_bb,
2339 int vol, location_t locus)
2340{
2341 rtvec argvec, constraintvec, labelvec;
2342 rtx body;
2343 int ninputs = list_length (inputs);
2344 int noutputs = list_length (outputs);
2345 int nlabels = list_length (labels);
2346 int ninout;
2347 int nclobbers;
2348 HARD_REG_SET clobbered_regs;
2349 int clobber_conflict_found = 0;
2350 tree tail;
2351 tree t;
2352 int i;
2353 /* Vector of RTX's of evaluated output operands. */
2354 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
2355 int *inout_opnum = XALLOCAVEC (int, noutputs);
2356 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
2357 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
2358 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
2359 int old_generating_concat_p = generating_concat_p;
2360 rtx fallthru_label = NULL_RTX;
2361
2362 /* An ASM with no outputs needs to be treated as volatile, for now. */
2363 if (noutputs == 0)
2364 vol = 1;
2365
2366 if (! check_operand_nalternatives (outputs, inputs))
2367 return;
2368
2369 string = resolve_asm_operand_names (string, outputs, inputs, labels);
2370
2371 /* Collect constraints. */
2372 i = 0;
2373 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
2374 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2375 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
2376 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2377
2378 /* Sometimes we wish to automatically clobber registers across an asm.
2379 Case in point is when the i386 backend moved from cc0 to a hard reg --
2380 maintaining source-level compatibility means automatically clobbering
2381 the flags register. */
2382 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
2383
2384 /* Count the number of meaningful clobbered registers, ignoring what
2385 we would ignore later. */
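  /* As the checks below and the clobber expansion further down indicate,
     decode_reg_name_and_count returns a hard register number on success,
     -2 for an unknown name, -3 for "cc" and -4 for "memory".  */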
2386 nclobbers = 0;
2387 CLEAR_HARD_REG_SET (clobbered_regs);
2388 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2389 {
2390 const char *regname;
2391 int nregs;
2392
2393 if (TREE_VALUE (tail) == error_mark_node)
2394 return;
2395 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2396
2397 i = decode_reg_name_and_count (regname, &nregs);
2398 if (i == -4)
2399 ++nclobbers;
2400 else if (i == -2)
2401 error ("unknown register name %qs in %<asm%>", regname);
2402
2403 /* Mark clobbered registers. */
2404 if (i >= 0)
2405 {
2406 int reg;
2407
2408 for (reg = i; reg < i + nregs; reg++)
2409 {
2410 ++nclobbers;
2411
2412 /* Clobbering the PIC register is an error. */
2413 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2414 {
2415 error ("PIC register clobbered by %qs in %<asm%>", regname);
2416 return;
2417 }
2418
2419 SET_HARD_REG_BIT (clobbered_regs, reg);
2420 }
2421 }
2422 }
2423
2424 /* First pass over inputs and outputs checks validity and sets
2425 mark_addressable if needed. */
2426
2427 ninout = 0;
2428 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2429 {
2430 tree val = TREE_VALUE (tail);
2431 tree type = TREE_TYPE (val);
2432 const char *constraint;
2433 bool is_inout;
2434 bool allows_reg;
2435 bool allows_mem;
2436
2437 /* If there's an erroneous arg, emit no insn. */
2438 if (type == error_mark_node)
2439 return;
2440
2441 /* Try to parse the output constraint. If that fails, there's
2442 no point in going further. */
2443 constraint = constraints[i];
2444 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2445 &allows_mem, &allows_reg, &is_inout))
2446 return;
2447
2448 if (! allows_reg
2449 && (allows_mem
2450 || is_inout
2451 || (DECL_P (val)
2452 && REG_P (DECL_RTL (val))
2453 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2454 mark_addressable (val);
2455
2456 if (is_inout)
2457 ninout++;
2458 }
2459
2460 ninputs += ninout;
2461 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
2462 {
2463 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2464 return;
2465 }
2466
2467 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
2468 {
2469 bool allows_reg, allows_mem;
2470 const char *constraint;
2471
2472 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
2473 would get VOIDmode and that could cause a crash in reload. */
2474 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
2475 return;
2476
2477 constraint = constraints[i + noutputs];
2478 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2479 constraints, &allows_mem, &allows_reg))
2480 return;
2481
2482 if (! allows_reg && allows_mem)
2483 mark_addressable (TREE_VALUE (tail));
2484 }
2485
2486 /* Second pass evaluates arguments. */
2487
2488 /* Make sure stack is consistent for asm goto. */
2489 if (nlabels > 0)
2490 do_pending_stack_adjust ();
2491
2492 ninout = 0;
2493 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2494 {
2495 tree val = TREE_VALUE (tail);
2496 tree type = TREE_TYPE (val);
2497 bool is_inout;
2498 bool allows_reg;
2499 bool allows_mem;
2500 rtx op;
2501 bool ok;
2502
2503 ok = parse_output_constraint (&constraints[i], i, ninputs,
2504 noutputs, &allows_mem, &allows_reg,
2505 &is_inout);
2506 gcc_assert (ok);
2507
2508 /* If an output operand is not a decl or indirect ref and our constraint
2509 allows a register, make a temporary to act as an intermediate.
2510 Make the asm insn write into that, then our caller will copy it to
2511 the real output operand. Likewise for promoted variables. */
2512
2513 generating_concat_p = 0;
2514
2515 real_output_rtx[i] = NULL_RTX;
2516 if ((TREE_CODE (val) == INDIRECT_REF
2517 && allows_mem)
2518 || (DECL_P (val)
2519 && (allows_mem || REG_P (DECL_RTL (val)))
2520 && ! (REG_P (DECL_RTL (val))
2521 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2522 || ! allows_reg
2523 || is_inout)
2524 {
2525 op = expand_expr (val, NULL_RTX, VOIDmode,
2526 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2527 if (MEM_P (op))
2528 op = validize_mem (op);
2529
2530 if (! allows_reg && !MEM_P (op))
2531 error ("output number %d not directly addressable", i);
2532 if ((! allows_mem && MEM_P (op))
2533 || GET_CODE (op) == CONCAT)
2534 {
2535 real_output_rtx[i] = op;
2536 op = gen_reg_rtx (GET_MODE (op));
2537 if (is_inout)
2538 emit_move_insn (op, real_output_rtx[i]);
2539 }
2540 }
2541 else
2542 {
2543 op = assign_temp (type, 0, 1);
2544 op = validize_mem (op);
2545 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
2546 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
2547 TREE_VALUE (tail) = make_tree (type, op);
2548 }
2549 output_rtx[i] = op;
2550
2551 generating_concat_p = old_generating_concat_p;
2552
2553 if (is_inout)
2554 {
2555 inout_mode[ninout] = TYPE_MODE (type);
2556 inout_opnum[ninout++] = i;
2557 }
2558
2559 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2560 clobber_conflict_found = 1;
2561 }
2562
2563 /* Make vectors for the expression-rtx, constraint strings,
2564 and named operands. */
2565
2566 argvec = rtvec_alloc (ninputs);
2567 constraintvec = rtvec_alloc (ninputs);
2568 labelvec = rtvec_alloc (nlabels);
2569
2570 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
2571 : GET_MODE (output_rtx[0])),
2572 ggc_strdup (TREE_STRING_POINTER (string)),
2573 empty_string, 0, argvec, constraintvec,
2574 labelvec, locus);
2575
2576 MEM_VOLATILE_P (body) = vol;
2577
2578 /* Eval the inputs and put them into ARGVEC.
2579 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
2580
2581 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
2582 {
2583 bool allows_reg, allows_mem;
2584 const char *constraint;
2585 tree val, type;
2586 rtx op;
2587 bool ok;
2588
2589 constraint = constraints[i + noutputs];
2590 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2591 constraints, &allows_mem, &allows_reg);
2592 gcc_assert (ok);
2593
2594 generating_concat_p = 0;
2595
2596 val = TREE_VALUE (tail);
2597 type = TREE_TYPE (val);
2598 /* EXPAND_INITIALIZER will not generate code for valid initializer
2599 constants, but will still generate code for other types of operand.
2600 This is the behavior we want for constant constraints. */
2601 op = expand_expr (val, NULL_RTX, VOIDmode,
2602 allows_reg ? EXPAND_NORMAL
2603 : allows_mem ? EXPAND_MEMORY
2604 : EXPAND_INITIALIZER);
2605
2606 /* Never pass a CONCAT to an ASM. */
2607 if (GET_CODE (op) == CONCAT)
2608 op = force_reg (GET_MODE (op), op);
2609 else if (MEM_P (op))
2610 op = validize_mem (op);
2611
2612 if (asm_operand_ok (op, constraint, NULL) <= 0)
2613 {
2614 if (allows_reg && TYPE_MODE (type) != BLKmode)
2615 op = force_reg (TYPE_MODE (type), op);
2616 else if (!allows_mem)
2617 warning (0, "asm operand %d probably doesn%'t match constraints",
2618 i + noutputs);
2619 else if (MEM_P (op))
2620 {
2621 /* We won't recognize either volatile memory or memory
 2622 with a queued address as an available memory_operand
2623 at this point. Ignore it: clearly this *is* a memory. */
2624 }
2625 else
2626 gcc_unreachable ();
2627 }
2628
2629 generating_concat_p = old_generating_concat_p;
2630 ASM_OPERANDS_INPUT (body, i) = op;
2631
2632 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
2633 = gen_rtx_ASM_INPUT (TYPE_MODE (type),
2634 ggc_strdup (constraints[i + noutputs]));
2635
2636 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2637 clobber_conflict_found = 1;
2638 }
2639
2640 /* Protect all the operands from the queue now that they have all been
2641 evaluated. */
2642
2643 generating_concat_p = 0;
2644
2645 /* For in-out operands, copy output rtx to input rtx. */
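  /* An in-out ("+") operand was split into an output plus a hidden
     matching input; the matching input's constraint is simply the decimal
     number of the corresponding output, which the sprintf below builds.  */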
2646 for (i = 0; i < ninout; i++)
2647 {
2648 int j = inout_opnum[i];
2649 char buffer[16];
2650
2651 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
2652 = output_rtx[j];
2653
2654 sprintf (buffer, "%d", j);
2655 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
2656 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
2657 }
2658
2659 /* Copy labels to the vector. */
2660 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
2661 {
2662 rtx r;
2663 /* If asm goto has any labels in the fallthru basic block, use
2664 a label that we emit immediately after the asm goto. Expansion
2665 may insert further instructions into the same basic block after
2666 asm goto and if we don't do this, insertion of instructions on
2667 the fallthru edge might misbehave. See PR58670. */
2668 if (fallthru_bb
2669 && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
2670 {
2671 if (fallthru_label == NULL_RTX)
2672 fallthru_label = gen_label_rtx ();
2673 r = fallthru_label;
2674 }
2675 else
2676 r = label_rtx (TREE_VALUE (tail));
2677 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
2678 }
2679
2680 generating_concat_p = old_generating_concat_p;
2681
2682 /* Now, for each output, construct an rtx
2683 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
2684 ARGVEC CONSTRAINTS OPNAMES))
2685 If there is more than one, put them inside a PARALLEL. */
2686
2687 if (nlabels > 0 && nclobbers == 0)
2688 {
2689 gcc_assert (noutputs == 0);
2690 emit_jump_insn (body);
2691 }
2692 else if (noutputs == 0 && nclobbers == 0)
2693 {
2694 /* No output operands: put in a raw ASM_OPERANDS rtx. */
2695 emit_insn (body);
2696 }
2697 else if (noutputs == 1 && nclobbers == 0)
2698 {
2699 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
2700 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
2701 }
2702 else
2703 {
2704 rtx obody = body;
2705 int num = noutputs;
2706
2707 if (num == 0)
2708 num = 1;
2709
2710 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
2711
2712 /* For each output operand, store a SET. */
2713 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2714 {
2715 XVECEXP (body, 0, i)
2716 = gen_rtx_SET (VOIDmode,
2717 output_rtx[i],
2718 gen_rtx_ASM_OPERANDS
2719 (GET_MODE (output_rtx[i]),
2720 ggc_strdup (TREE_STRING_POINTER (string)),
2721 ggc_strdup (constraints[i]),
2722 i, argvec, constraintvec, labelvec, locus));
2723
2724 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
2725 }
2726
2727 /* If there are no outputs (but there are some clobbers)
2728 store the bare ASM_OPERANDS into the PARALLEL. */
2729
2730 if (i == 0)
2731 XVECEXP (body, 0, i++) = obody;
2732
2733 /* Store (clobber REG) for each clobbered register specified. */
2734
2735 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2736 {
2737 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2738 int reg, nregs;
2739 int j = decode_reg_name_and_count (regname, &nregs);
2740 rtx clobbered_reg;
2741
2742 if (j < 0)
2743 {
2744 if (j == -3) /* `cc', which is not a register */
2745 continue;
2746
2747 if (j == -4) /* `memory', don't cache memory across asm */
2748 {
2749 XVECEXP (body, 0, i++)
2750 = gen_rtx_CLOBBER (VOIDmode,
2751 gen_rtx_MEM
2752 (BLKmode,
2753 gen_rtx_SCRATCH (VOIDmode)));
2754 continue;
2755 }
2756
2757 /* Ignore unknown register, error already signaled. */
2758 continue;
2759 }
2760
2761 for (reg = j; reg < j + nregs; reg++)
2762 {
2763 /* Use QImode since that's guaranteed to clobber just
 2764 one reg. */
2765 clobbered_reg = gen_rtx_REG (QImode, reg);
2766
 2767 /* Do a sanity check for overlap between clobbers and
 2768 inputs or outputs that hasn't already been
 2769 handled. Such overlap should have been detected and
2770 reported above. */
2771 if (!clobber_conflict_found)
2772 {
2773 int opno;
2774
2775 /* We test the old body (obody) contents to avoid
2776 tripping over the under-construction body. */
2777 for (opno = 0; opno < noutputs; opno++)
2778 if (reg_overlap_mentioned_p (clobbered_reg,
2779 output_rtx[opno]))
2780 internal_error
2781 ("asm clobber conflict with output operand");
2782
2783 for (opno = 0; opno < ninputs - ninout; opno++)
2784 if (reg_overlap_mentioned_p (clobbered_reg,
2785 ASM_OPERANDS_INPUT (obody,
2786 opno)))
2787 internal_error
2788 ("asm clobber conflict with input operand");
2789 }
2790
2791 XVECEXP (body, 0, i++)
2792 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
2793 }
2794 }
2795
2796 if (nlabels > 0)
2797 emit_jump_insn (body);
2798 else
2799 emit_insn (body);
2800 }
2801
2802 if (fallthru_label)
2803 emit_label (fallthru_label);
2804
2805 /* For any outputs that needed reloading into registers, spill them
2806 back to where they belong. */
2807 for (i = 0; i < noutputs; ++i)
2808 if (real_output_rtx[i])
2809 emit_move_insn (real_output_rtx[i], output_rtx[i]);
2810
2811 crtl->has_asm_statement = 1;
2812 free_temp_slots ();
2813}
2814
2815
2816static void
2817expand_asm_stmt (gimple stmt)
2818{
2819 int noutputs;
2820 tree outputs, tail, t;
2821 tree *o;
2822 size_t i, n;
2823 const char *s;
2824 tree str, out, in, cl, labels;
2825 location_t locus = gimple_location (stmt);
2826 basic_block fallthru_bb = NULL;
2827
2828 /* Meh... convert the gimple asm operands into real tree lists.
2829 Eventually we should make all routines work on the vectors instead
2830 of relying on TREE_CHAIN. */
2831 out = NULL_TREE;
2832 n = gimple_asm_noutputs (stmt);
2833 if (n > 0)
2834 {
2835 t = out = gimple_asm_output_op (stmt, 0);
2836 for (i = 1; i < n; i++)
2837 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
2838 }
2839
2840 in = NULL_TREE;
2841 n = gimple_asm_ninputs (stmt);
2842 if (n > 0)
2843 {
2844 t = in = gimple_asm_input_op (stmt, 0);
2845 for (i = 1; i < n; i++)
2846 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
2847 }
2848
2849 cl = NULL_TREE;
2850 n = gimple_asm_nclobbers (stmt);
2851 if (n > 0)
2852 {
2853 t = cl = gimple_asm_clobber_op (stmt, 0);
2854 for (i = 1; i < n; i++)
2855 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
2856 }
2857
2858 labels = NULL_TREE;
2859 n = gimple_asm_nlabels (stmt);
2860 if (n > 0)
2861 {
2862 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
2863 if (fallthru)
2864 fallthru_bb = fallthru->dest;
2865 t = labels = gimple_asm_label_op (stmt, 0);
2866 for (i = 1; i < n; i++)
2867 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
2868 }
2869
2870 s = gimple_asm_string (stmt);
2871 str = build_string (strlen (s), s);
2872
2873 if (gimple_asm_input_p (stmt))
2874 {
2875 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
2876 return;
2877 }
2878
2879 outputs = out;
2880 noutputs = gimple_asm_noutputs (stmt);
2881 /* o[I] is the place that output number I should be written. */
2882 o = (tree *) alloca (noutputs * sizeof (tree));
2883
2884 /* Record the contents of OUTPUTS before it is modified. */
2885 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2886 o[i] = TREE_VALUE (tail);
2887
2888 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
2889 OUTPUTS some trees for where the values were actually stored. */
2890 expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
2891 gimple_asm_volatile_p (stmt), locus);
2892
2893 /* Copy all the intermediate outputs into the specified outputs. */
2894 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2895 {
2896 if (o[i] != TREE_VALUE (tail))
2897 {
2898 expand_assignment (o[i], TREE_VALUE (tail), false);
2899 free_temp_slots ();
2900
2901 /* Restore the original value so that it's correct the next
2902 time we expand this function. */
2903 TREE_VALUE (tail) = o[i];
2904 }
2905 }
2906}
2907
2908/* Emit code to jump to the address
2909 specified by the pointer expression EXP. */
2910
2911static void
2912expand_computed_goto (tree exp)
2913{
2914 rtx x = expand_normal (exp);
2915
2916 x = convert_memory_address (Pmode, x);
2917
2918 do_pending_stack_adjust ();
2919 emit_indirect_jump (x);
2920}
2921
2922/* Generate RTL code for a `goto' statement with target label LABEL.
2923 LABEL should be a LABEL_DECL tree node that was or will later be
2924 defined with `expand_label'. */
2925
2926static void
2927expand_goto (tree label)
2928{
2929#ifdef ENABLE_CHECKING
2930 /* Check for a nonlocal goto to a containing function. Should have
2931 gotten translated to __builtin_nonlocal_goto. */
2932 tree context = decl_function_context (label);
2933 gcc_assert (!context || context == current_function_decl);
2934#endif
2935
2936 emit_jump (label_rtx (label));
2937}
2938
2939/* Output a return with no value. */
2940
2941static void
2942expand_null_return_1 (void)
2943{
2944 clear_pending_stack_adjust ();
2945 do_pending_stack_adjust ();
2946 emit_jump (return_label);
2947}
2948
2949/* Generate RTL to return from the current function, with no value.
2950 (That is, we do not do anything about returning any value.) */
2951
2952void
2953expand_null_return (void)
2954{
2955 /* If this function was declared to return a value, but we
2956 didn't, clobber the return registers so that they are not
2957 propagated live to the rest of the function. */
2958 clobber_return_register ();
2959
2960 expand_null_return_1 ();
2961}
2962
2963/* Generate RTL to return from the current function, with value VAL. */
2964
2965static void
2966expand_value_return (rtx val)
2967{
2968 /* Copy the value to the return location unless it's already there. */
2969
2970 tree decl = DECL_RESULT (current_function_decl);
2971 rtx return_reg = DECL_RTL (decl);
2972 if (return_reg != val)
2973 {
2974 tree funtype = TREE_TYPE (current_function_decl);
2975 tree type = TREE_TYPE (decl);
2976 int unsignedp = TYPE_UNSIGNED (type);
2977 enum machine_mode old_mode = DECL_MODE (decl);
2978 enum machine_mode mode;
2979 if (DECL_BY_REFERENCE (decl))
2980 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
2981 else
2982 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
2983
2984 if (mode != old_mode)
2985 val = convert_modes (mode, old_mode, val, unsignedp);
2986
2987 if (GET_CODE (return_reg) == PARALLEL)
2988 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
2989 else
2990 emit_move_insn (return_reg, val);
2991 }
2992
2993 expand_null_return_1 ();
2994}
2995
2996/* Generate RTL to evaluate the expression RETVAL and return it
2997 from the current function. */
2998
2999static void
3000expand_return (tree retval)
3001{
3002 rtx result_rtl;
3003 rtx val = 0;
3004 tree retval_rhs;
3005
3006 /* If function wants no value, give it none. */
3007 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3008 {
3009 expand_normal (retval);
3010 expand_null_return ();
3011 return;
3012 }
3013
3014 if (retval == error_mark_node)
3015 {
3016 /* Treat this like a return of no value from a function that
3017 returns a value. */
3018 expand_null_return ();
3019 return;
3020 }
3021 else if ((TREE_CODE (retval) == MODIFY_EXPR
3022 || TREE_CODE (retval) == INIT_EXPR)
3023 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3024 retval_rhs = TREE_OPERAND (retval, 1);
3025 else
3026 retval_rhs = retval;
3027
3028 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3029
3030 /* If we are returning the RESULT_DECL, then the value has already
3031 been stored into it, so we don't have to do anything special. */
3032 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3033 expand_value_return (result_rtl);
3034
3035 /* If the result is an aggregate that is being returned in one (or more)
3036 registers, load the registers here. */
3037
3038 else if (retval_rhs != 0
3039 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3040 && REG_P (result_rtl))
3041 {
3042 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3043 if (val)
3044 {
3045 /* Use the mode of the result value on the return register. */
3046 PUT_MODE (result_rtl, GET_MODE (val));
3047 expand_value_return (val);
3048 }
3049 else
3050 expand_null_return ();
3051 }
3052 else if (retval_rhs != 0
3053 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3054 && (REG_P (result_rtl)
3055 || (GET_CODE (result_rtl) == PARALLEL)))
3056 {
3057 /* Calculate the return value into a temporary (usually a pseudo
3058 reg). */
3059 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3060 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3061
3062 val = assign_temp (nt, 0, 1);
3063 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3064 val = force_not_mem (val);
3065 /* Return the calculated value. */
3066 expand_value_return (val);
3067 }
3068 else
3069 {
3070 /* No hard reg used; calculate value into hard return reg. */
3071 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3072 expand_value_return (result_rtl);
3073 }
3074}
3075
28ed065e
MM
3076/* A subroutine of expand_gimple_stmt, expanding one gimple statement
3077 STMT that doesn't require special handling for outgoing edges. That
3078 is no tailcalls and no GIMPLE_COND. */
3079
3080static void
3081expand_gimple_stmt_1 (gimple stmt)
3082{
3083 tree op0;
c82fee88 3084
5368224f 3085 set_curr_insn_location (gimple_location (stmt));
c82fee88 3086
28ed065e
MM
3087 switch (gimple_code (stmt))
3088 {
3089 case GIMPLE_GOTO:
3090 op0 = gimple_goto_dest (stmt);
3091 if (TREE_CODE (op0) == LABEL_DECL)
3092 expand_goto (op0);
3093 else
3094 expand_computed_goto (op0);
3095 break;
3096 case GIMPLE_LABEL:
3097 expand_label (gimple_label_label (stmt));
3098 break;
3099 case GIMPLE_NOP:
3100 case GIMPLE_PREDICT:
3101 break;
28ed065e
MM
3102 case GIMPLE_SWITCH:
3103 expand_case (stmt);
3104 break;
3105 case GIMPLE_ASM:
3106 expand_asm_stmt (stmt);
3107 break;
3108 case GIMPLE_CALL:
3109 expand_call_stmt (stmt);
3110 break;
3111
3112 case GIMPLE_RETURN:
3113 op0 = gimple_return_retval (stmt);
3114
3115 if (op0 && op0 != error_mark_node)
3116 {
3117 tree result = DECL_RESULT (current_function_decl);
3118
3119 /* If we are not returning the current function's RESULT_DECL,
3120 build an assignment to it. */
3121 if (op0 != result)
3122 {
3123 /* I believe that a function's RESULT_DECL is unique. */
3124 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3125
 3126 /* ??? We'd like to simply use expand_assignment here,
3127 but this fails if the value is of BLKmode but the return
3128 decl is a register. expand_return has special handling
3129 for this combination, which eventually should move
3130 to common code. See comments there. Until then, let's
3131 build a modify expression :-/ */
3132 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3133 result, op0);
3134 }
3135 }
3136 if (!op0)
3137 expand_null_return ();
3138 else
3139 expand_return (op0);
3140 break;
3141
3142 case GIMPLE_ASSIGN:
3143 {
3144 tree lhs = gimple_assign_lhs (stmt);
3145
3146 /* Tree expand used to fiddle with |= and &= of two bitfield
 3147 COMPONENT_REFs here. This can't happen with gimple; the LHS
3148 of binary assigns must be a gimple reg. */
3149
3150 if (TREE_CODE (lhs) != SSA_NAME
3151 || get_gimple_rhs_class (gimple_expr_code (stmt))
3152 == GIMPLE_SINGLE_RHS)
3153 {
3154 tree rhs = gimple_assign_rhs1 (stmt);
3155 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3156 == GIMPLE_SINGLE_RHS);
3157 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
3158 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
47598145
MM
3159 if (TREE_CLOBBER_P (rhs))
3160 /* This is a clobber to mark the going out of scope for
3161 this LHS. */
3162 ;
3163 else
3164 expand_assignment (lhs, rhs,
3165 gimple_assign_nontemporal_move_p (stmt));
28ed065e
MM
3166 }
3167 else
3168 {
3169 rtx target, temp;
3170 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
3171 struct separate_ops ops;
3172 bool promoted = false;
3173
3174 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3175 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3176 promoted = true;
3177
3178 ops.code = gimple_assign_rhs_code (stmt);
3179 ops.type = TREE_TYPE (lhs);
3180 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
3181 {
0354c0c7
BS
3182 case GIMPLE_TERNARY_RHS:
3183 ops.op2 = gimple_assign_rhs3 (stmt);
3184 /* Fallthru */
28ed065e
MM
3185 case GIMPLE_BINARY_RHS:
3186 ops.op1 = gimple_assign_rhs2 (stmt);
3187 /* Fallthru */
3188 case GIMPLE_UNARY_RHS:
3189 ops.op0 = gimple_assign_rhs1 (stmt);
3190 break;
3191 default:
3192 gcc_unreachable ();
3193 }
3194 ops.location = gimple_location (stmt);
3195
3196 /* If we want to use a nontemporal store, force the value to
3197 register first. If we store into a promoted register,
3198 don't directly expand to target. */
3199 temp = nontemporal || promoted ? NULL_RTX : target;
3200 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3201 EXPAND_NORMAL);
3202
3203 if (temp == target)
3204 ;
3205 else if (promoted)
3206 {
4e18a7d4 3207 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
28ed065e
MM
3208 /* If TEMP is a VOIDmode constant, use convert_modes to make
3209 sure that we properly convert it. */
3210 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3211 {
3212 temp = convert_modes (GET_MODE (target),
3213 TYPE_MODE (ops.type),
4e18a7d4 3214 temp, unsignedp);
28ed065e 3215 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4e18a7d4 3216 GET_MODE (target), temp, unsignedp);
28ed065e
MM
3217 }
3218
4e18a7d4 3219 convert_move (SUBREG_REG (target), temp, unsignedp);
28ed065e
MM
3220 }
3221 else if (nontemporal && emit_storent_insn (target, temp))
3222 ;
3223 else
3224 {
3225 temp = force_operand (temp, target);
3226 if (temp != target)
3227 emit_move_insn (target, temp);
3228 }
3229 }
3230 }
3231 break;
3232
3233 default:
3234 gcc_unreachable ();
3235 }
3236}
3237
3238/* Expand one gimple statement STMT and return the last RTL instruction
3239 before any of the newly generated ones.
3240
3241 In addition to generating the necessary RTL instructions this also
3242 sets REG_EH_REGION notes if necessary and sets the current source
3243 location for diagnostics. */
3244
3245static rtx
3246expand_gimple_stmt (gimple stmt)
3247{
28ed065e 3248 location_t saved_location = input_location;
c82fee88
EB
3249 rtx last = get_last_insn ();
3250 int lp_nr;
28ed065e 3251
28ed065e
MM
3252 gcc_assert (cfun);
3253
c82fee88
EB
3254 /* We need to save and restore the current source location so that errors
3255 discovered during expansion are emitted with the right location. But
3256 it would be better if the diagnostic routines used the source location
3257 embedded in the tree nodes rather than globals. */
28ed065e 3258 if (gimple_has_location (stmt))
c82fee88 3259 input_location = gimple_location (stmt);
28ed065e
MM
3260
3261 expand_gimple_stmt_1 (stmt);
c82fee88 3262
28ed065e
MM
3263 /* Free any temporaries used to evaluate this statement. */
3264 free_temp_slots ();
3265
3266 input_location = saved_location;
3267
3268 /* Mark all insns that may trap. */
1d65f45c
RH
3269 lp_nr = lookup_stmt_eh_lp (stmt);
3270 if (lp_nr)
28ed065e
MM
3271 {
3272 rtx insn;
3273 for (insn = next_real_insn (last); insn;
3274 insn = next_real_insn (insn))
3275 {
3276 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3277 /* If we want exceptions for non-call insns, any
3278 may_trap_p instruction may throw. */
3279 && GET_CODE (PATTERN (insn)) != CLOBBER
3280 && GET_CODE (PATTERN (insn)) != USE
1d65f45c
RH
3281 && insn_could_throw_p (insn))
3282 make_reg_eh_region_note (insn, 0, lp_nr);
28ed065e
MM
3283 }
3284 }
3285
3286 return last;
3287}
3288
726a989a 3289/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
224e770b
RH
3290 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3291 generated a tail call (something that might be denied by the ABI
cea49550
RH
3292 rules governing the call; see calls.c).
3293
3294 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3295 can still reach the rest of BB. The case here is __builtin_sqrt,
3296 where the NaN result goes through the external function (with a
3297 tailcall) and the normal result happens via a sqrt instruction. */
80c7a9eb
RH
3298
3299static basic_block
726a989a 3300expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
80c7a9eb 3301{
b7211528 3302 rtx last2, last;
224e770b 3303 edge e;
628f6a4e 3304 edge_iterator ei;
224e770b
RH
3305 int probability;
3306 gcov_type count;
80c7a9eb 3307
28ed065e 3308 last2 = last = expand_gimple_stmt (stmt);
80c7a9eb
RH
3309
3310 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
3311 if (CALL_P (last) && SIBLING_CALL_P (last))
3312 goto found;
80c7a9eb 3313
726a989a 3314 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 3315
cea49550 3316 *can_fallthru = true;
224e770b 3317 return NULL;
80c7a9eb 3318
224e770b
RH
3319 found:
3320 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3321 Any instructions emitted here are about to be deleted. */
3322 do_pending_stack_adjust ();
3323
3324 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3325 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3326 EH or abnormal edges, we shouldn't have created a tail call in
3327 the first place. So it seems to me we should just be removing
3328 all edges here, or redirecting the existing fallthru edge to
3329 the exit block. */
3330
224e770b
RH
3331 probability = 0;
3332 count = 0;
224e770b 3333
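  /* Accumulate the counts and probabilities of the normal outgoing edges
     removed below; they are transferred to the new abnormal sibcall edge
     to the exit block afterwards.  */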
628f6a4e
BE
3334 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3335 {
224e770b
RH
3336 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3337 {
3338 if (e->dest != EXIT_BLOCK_PTR)
80c7a9eb 3339 {
224e770b
RH
3340 e->dest->count -= e->count;
3341 e->dest->frequency -= EDGE_FREQUENCY (e);
3342 if (e->dest->count < 0)
c22cacf3 3343 e->dest->count = 0;
224e770b 3344 if (e->dest->frequency < 0)
c22cacf3 3345 e->dest->frequency = 0;
80c7a9eb 3346 }
224e770b
RH
3347 count += e->count;
3348 probability += e->probability;
3349 remove_edge (e);
80c7a9eb 3350 }
628f6a4e
BE
3351 else
3352 ei_next (&ei);
80c7a9eb
RH
3353 }
3354
224e770b
RH
3355 /* This is somewhat ugly: the call_expr expander often emits instructions
3356 after the sibcall (to perform the function return). These confuse the
12eff7b7 3357 find_many_sub_basic_blocks code, so we need to get rid of them. */
224e770b 3358 last = NEXT_INSN (last);
341c100f 3359 gcc_assert (BARRIER_P (last));
cea49550
RH
3360
3361 *can_fallthru = false;
224e770b
RH
3362 while (NEXT_INSN (last))
3363 {
 3364 /* For instance, an sqrt builtin expander may expand an if with a
 3365 sibcall in the then branch and a label for the else branch. */
3366 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
3367 {
3368 *can_fallthru = true;
3369 break;
3370 }
224e770b
RH
3371 delete_insn (NEXT_INSN (last));
3372 }
3373
3374 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
3375 e->probability += probability;
3376 e->count += count;
3377 BB_END (bb) = last;
3378 update_bb_for_insn (bb);
3379
3380 if (NEXT_INSN (last))
3381 {
3382 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3383
3384 last = BB_END (bb);
3385 if (BARRIER_P (last))
3386 BB_END (bb) = PREV_INSN (last);
3387 }
3388
726a989a 3389 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 3390
224e770b 3391 return bb;
80c7a9eb
RH
3392}
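/* Editorial sketch, not part of cfgexpand.c: the kind of user code that
   typically leads to the conditional tail call described in the comment
   above expand_gimple_tailcall.  Assuming a target with a hardware sqrt
   instruction and default -fmath-errno semantics, the in-range result is
   produced by the sqrt insn while the out-of-range path ends in a sibling
   call to the library sqrt (the "NaN result goes through the external
   function" case).  The function name below is hypothetical.  */
#if 0  /* illustration only; never compiled as part of this file */
#include <math.h>

double
example_sqrt_tailcall (double x)
{
  /* This return may expand into: a hardware sqrt insn, an unordered (NaN)
     comparison on the result, and a sibling call to the library sqrt on
     the slow path.  */
  return sqrt (x);
}
#endif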
3393
b5b8b0ac
AO
3394/* Return the difference between the floor and the truncated result of
3395 a signed division by OP1 with remainder MOD. */
3396static rtx
3397floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3398{
3399 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3400 return gen_rtx_IF_THEN_ELSE
3401 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3402 gen_rtx_IF_THEN_ELSE
3403 (mode, gen_rtx_LT (BImode,
3404 gen_rtx_DIV (mode, op1, mod),
3405 const0_rtx),
3406 constm1_rtx, const0_rtx),
3407 const0_rtx);
3408}
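/* Editorial worked example (values assumed, not from the original source):
   dividing -7 by 2 with truncating division gives quotient -3 and remainder
   MOD = -1, while the floor quotient is -4.  In the expression above,
   MOD != 0 and OP1 / MOD = 2 / -1 = -2 < 0, so the adjustment is -1 and
   floor = trunc + (-1) = -4 as expected.  */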
3409
3410/* Return the difference between the ceil and the truncated result of
3411 a signed division by OP1 with remainder MOD. */
3412static rtx
3413ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3414{
3415 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3416 return gen_rtx_IF_THEN_ELSE
3417 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3418 gen_rtx_IF_THEN_ELSE
3419 (mode, gen_rtx_GT (BImode,
3420 gen_rtx_DIV (mode, op1, mod),
3421 const0_rtx),
3422 const1_rtx, const0_rtx),
3423 const0_rtx);
3424}
3425
3426/* Return the difference between the ceil and the truncated result of
3427 an unsigned division by OP1 with remainder MOD. */
3428static rtx
3429ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3430{
3431 /* (mod != 0 ? 1 : 0) */
3432 return gen_rtx_IF_THEN_ELSE
3433 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3434 const1_rtx, const0_rtx);
3435}
3436
3437/* Return the difference between the rounded and the truncated result
3438 of a signed division by OP1 with remainder MOD. Halfway cases are
3439 rounded away from zero, rather than to the nearest even number. */
3440static rtx
3441round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3442{
3443 /* (abs (mod) >= abs (op1) - abs (mod)
3444 ? (op1 / mod > 0 ? 1 : -1)
3445 : 0) */
3446 return gen_rtx_IF_THEN_ELSE
3447 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3448 gen_rtx_MINUS (mode,
3449 gen_rtx_ABS (mode, op1),
3450 gen_rtx_ABS (mode, mod))),
3451 gen_rtx_IF_THEN_ELSE
3452 (mode, gen_rtx_GT (BImode,
3453 gen_rtx_DIV (mode, op1, mod),
3454 const0_rtx),
3455 const1_rtx, constm1_rtx),
3456 const0_rtx);
3457}
3458
3459/* Return the difference between the rounded and the truncated result
3460 of an unsigned division by OP1 with remainder MOD. Halfway cases
3461 are rounded away from zero, rather than to the nearest even
3462 number. */
3463static rtx
3464round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3465{
3466 /* (mod >= op1 - mod ? 1 : 0) */
3467 return gen_rtx_IF_THEN_ELSE
3468 (mode, gen_rtx_GE (BImode, mod,
3469 gen_rtx_MINUS (mode, op1, mod)),
3470 const1_rtx, const0_rtx);
3471}
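/* Editorial sketch, not part of cfgexpand.c: mirrors the signed *_adjust
   formulas above on host integers, assuming C99 truncating division and
   abs from <stdlib.h>.  It checks the intended identities
   floor_div = trunc + floor_adjust, ceil_div = trunc + ceil_adjust and
   round_div = trunc + round_adjust for one sample value.  */
#if 0  /* illustration only; never compiled as part of this file */
#include <stdio.h>
#include <stdlib.h>

static int
floor_sdiv_adj (int a, int b)
{
  int m = a % b;
  return (m != 0 && b / m < 0) ? -1 : 0;
}

static int
ceil_sdiv_adj (int a, int b)
{
  int m = a % b;
  return (m != 0 && b / m > 0) ? 1 : 0;
}

static int
round_sdiv_adj (int a, int b)
{
  int m = a % b;
  /* The m != 0 guard only avoids a host division by zero; when m == 0 the
     comparison below is false anyway.  */
  if (m != 0 && abs (m) >= abs (b) - abs (m))
    return b / m > 0 ? 1 : -1;
  return 0;
}

int
main (void)
{
  int a = -7, b = 2;                         /* -7 / 2 = -3.5 */
  printf ("trunc %d floor %d ceil %d round %d\n",
	  a / b,                             /* -3 */
	  a / b + floor_sdiv_adj (a, b),     /* -4 */
	  a / b + ceil_sdiv_adj (a, b),      /* -3 */
	  a / b + round_sdiv_adj (a, b));    /* -4: halfway, away from 0 */
  return 0;
}
#endif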
3472
dda2da58
AO
3473/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3474 any rtl. */
3475
3476static rtx
f61c6f34
JJ
3477convert_debug_memory_address (enum machine_mode mode, rtx x,
3478 addr_space_t as)
dda2da58
AO
3479{
3480 enum machine_mode xmode = GET_MODE (x);
3481
3482#ifndef POINTERS_EXTEND_UNSIGNED
f61c6f34
JJ
3483 gcc_assert (mode == Pmode
3484 || mode == targetm.addr_space.address_mode (as));
dda2da58
AO
3485 gcc_assert (xmode == mode || xmode == VOIDmode);
3486#else
f61c6f34 3487 rtx temp;
f61c6f34 3488
639d4bb8 3489 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
dda2da58
AO
3490
3491 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3492 return x;
3493
69660a70 3494 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
dda2da58
AO
3495 x = simplify_gen_subreg (mode, x, xmode,
3496 subreg_lowpart_offset
3497 (mode, xmode));
3498 else if (POINTERS_EXTEND_UNSIGNED > 0)
3499 x = gen_rtx_ZERO_EXTEND (mode, x);
3500 else if (!POINTERS_EXTEND_UNSIGNED)
3501 x = gen_rtx_SIGN_EXTEND (mode, x);
3502 else
f61c6f34
JJ
3503 {
3504 switch (GET_CODE (x))
3505 {
3506 case SUBREG:
3507 if ((SUBREG_PROMOTED_VAR_P (x)
3508 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3509 || (GET_CODE (SUBREG_REG (x)) == PLUS
3510 && REG_P (XEXP (SUBREG_REG (x), 0))
3511 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3512 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3513 && GET_MODE (SUBREG_REG (x)) == mode)
3514 return SUBREG_REG (x);
3515 break;
3516 case LABEL_REF:
3517 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
3518 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3519 return temp;
3520 case SYMBOL_REF:
3521 temp = shallow_copy_rtx (x);
3522 PUT_MODE (temp, mode);
3523 return temp;
3524 case CONST:
3525 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3526 if (temp)
3527 temp = gen_rtx_CONST (mode, temp);
3528 return temp;
3529 case PLUS:
3530 case MINUS:
3531 if (CONST_INT_P (XEXP (x, 1)))
3532 {
3533 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3534 if (temp)
3535 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3536 }
3537 break;
3538 default:
3539 break;
3540 }
3541 /* Don't know how to express ptr_extend as an operation in debug info. */
3542 return NULL;
3543 }
dda2da58
AO
3544#endif /* POINTERS_EXTEND_UNSIGNED */
3545
3546 return x;
3547}
3548
12c5ffe5
EB
3549/* Return an RTX equivalent to the value of the parameter DECL. */
3550
3551static rtx
3552expand_debug_parm_decl (tree decl)
3553{
3554 rtx incoming = DECL_INCOMING_RTL (decl);
3555
3556 if (incoming
3557 && GET_MODE (incoming) != BLKmode
3558 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3559 || (MEM_P (incoming)
3560 && REG_P (XEXP (incoming, 0))
3561 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3562 {
3563 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3564
3565#ifdef HAVE_window_save
3566 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3567 If the target machine has an explicit window save instruction, the
3568 actual entry value is the corresponding OUTGOING_REGNO instead. */
3569 if (REG_P (incoming)
3570 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
3571 incoming
3572 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
3573 OUTGOING_REGNO (REGNO (incoming)), 0);
3574 else if (MEM_P (incoming))
3575 {
3576 rtx reg = XEXP (incoming, 0);
3577 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
3578 {
3579 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
3580 incoming = replace_equiv_address_nv (incoming, reg);
3581 }
6cfa417f
JJ
3582 else
3583 incoming = copy_rtx (incoming);
12c5ffe5
EB
3584 }
3585#endif
3586
3587 ENTRY_VALUE_EXP (rtl) = incoming;
3588 return rtl;
3589 }
3590
3591 if (incoming
3592 && GET_MODE (incoming) != BLKmode
3593 && !TREE_ADDRESSABLE (decl)
3594 && MEM_P (incoming)
3595 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3596 || (GET_CODE (XEXP (incoming, 0)) == PLUS
3597 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
3598 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
6cfa417f 3599 return copy_rtx (incoming);
12c5ffe5
EB
3600
3601 return NULL_RTX;
3602}
3603
3604/* Return an RTX equivalent to the value of the tree expression EXP. */
b5b8b0ac
AO
3605
3606static rtx
3607expand_debug_expr (tree exp)
3608{
3609 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3610 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2ba172e0 3611 enum machine_mode inner_mode = VOIDmode;
b5b8b0ac 3612 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
09e881c9 3613 addr_space_t as;
b5b8b0ac
AO
3614
3615 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3616 {
3617 case tcc_expression:
3618 switch (TREE_CODE (exp))
3619 {
3620 case COND_EXPR:
7ece48b1 3621 case DOT_PROD_EXPR:
0354c0c7
BS
3622 case WIDEN_MULT_PLUS_EXPR:
3623 case WIDEN_MULT_MINUS_EXPR:
0f59b812 3624 case FMA_EXPR:
b5b8b0ac
AO
3625 goto ternary;
3626
3627 case TRUTH_ANDIF_EXPR:
3628 case TRUTH_ORIF_EXPR:
3629 case TRUTH_AND_EXPR:
3630 case TRUTH_OR_EXPR:
3631 case TRUTH_XOR_EXPR:
3632 goto binary;
3633
3634 case TRUTH_NOT_EXPR:
3635 goto unary;
3636
3637 default:
3638 break;
3639 }
3640 break;
3641
3642 ternary:
3643 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
3644 if (!op2)
3645 return NULL_RTX;
3646 /* Fall through. */
3647
3648 binary:
3649 case tcc_binary:
3650 case tcc_comparison:
3651 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3652 if (!op1)
3653 return NULL_RTX;
3654 /* Fall through. */
3655
3656 unary:
3657 case tcc_unary:
2ba172e0 3658 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
3659 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3660 if (!op0)
3661 return NULL_RTX;
3662 break;
3663
3664 case tcc_type:
3665 case tcc_statement:
3666 gcc_unreachable ();
3667
3668 case tcc_constant:
3669 case tcc_exceptional:
3670 case tcc_declaration:
3671 case tcc_reference:
3672 case tcc_vl_exp:
3673 break;
3674 }
3675
3676 switch (TREE_CODE (exp))
3677 {
3678 case STRING_CST:
3679 if (!lookup_constant_def (exp))
3680 {
e1b243a8
JJ
3681 if (strlen (TREE_STRING_POINTER (exp)) + 1
3682 != (size_t) TREE_STRING_LENGTH (exp))
3683 return NULL_RTX;
b5b8b0ac
AO
3684 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
3685 op0 = gen_rtx_MEM (BLKmode, op0);
3686 set_mem_attributes (op0, exp, 0);
3687 return op0;
3688 }
3689 /* Fall through... */
3690
3691 case INTEGER_CST:
3692 case REAL_CST:
3693 case FIXED_CST:
3694 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
3695 return op0;
3696
3697 case COMPLEX_CST:
3698 gcc_assert (COMPLEX_MODE_P (mode));
3699 op0 = expand_debug_expr (TREE_REALPART (exp));
b5b8b0ac 3700 op1 = expand_debug_expr (TREE_IMAGPART (exp));
b5b8b0ac
AO
3701 return gen_rtx_CONCAT (mode, op0, op1);
3702
0ca5af51
AO
3703 case DEBUG_EXPR_DECL:
3704 op0 = DECL_RTL_IF_SET (exp);
3705
3706 if (op0)
3707 return op0;
3708
3709 op0 = gen_rtx_DEBUG_EXPR (mode);
e4fb38bd 3710 DEBUG_EXPR_TREE_DECL (op0) = exp;
0ca5af51
AO
3711 SET_DECL_RTL (exp, op0);
3712
3713 return op0;
3714
b5b8b0ac
AO
3715 case VAR_DECL:
3716 case PARM_DECL:
3717 case FUNCTION_DECL:
3718 case LABEL_DECL:
3719 case CONST_DECL:
3720 case RESULT_DECL:
3721 op0 = DECL_RTL_IF_SET (exp);
3722
3723 /* This decl was probably optimized away. */
3724 if (!op0)
e1b243a8
JJ
3725 {
3726 if (TREE_CODE (exp) != VAR_DECL
3727 || DECL_EXTERNAL (exp)
3728 || !TREE_STATIC (exp)
3729 || !DECL_NAME (exp)
0fba566c 3730 || DECL_HARD_REGISTER (exp)
7d5fc814 3731 || DECL_IN_CONSTANT_POOL (exp)
0fba566c 3732 || mode == VOIDmode)
e1b243a8
JJ
3733 return NULL;
3734
b1aa0655 3735 op0 = make_decl_rtl_for_debug (exp);
e1b243a8
JJ
3736 if (!MEM_P (op0)
3737 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
3738 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
3739 return NULL;
3740 }
3741 else
3742 op0 = copy_rtx (op0);
b5b8b0ac 3743
06796564
JJ
3744 if (GET_MODE (op0) == BLKmode
3745 /* If op0 is not BLKmode but MODE is BLKmode, the adjust_mode
3746 code below would ICE. While it is likely a FE bug,
3747 try to be robust here. See PR43166. */
132b4e82
JJ
3748 || mode == BLKmode
3749 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
b5b8b0ac
AO
3750 {
3751 gcc_assert (MEM_P (op0));
3752 op0 = adjust_address_nv (op0, mode, 0);
3753 return op0;
3754 }
3755
3756 /* Fall through. */
3757
3758 adjust_mode:
3759 case PAREN_EXPR:
3760 case NOP_EXPR:
3761 case CONVERT_EXPR:
3762 {
2ba172e0 3763 inner_mode = GET_MODE (op0);
b5b8b0ac
AO
3764
3765 if (mode == inner_mode)
3766 return op0;
3767
3768 if (inner_mode == VOIDmode)
3769 {
2a8e30fb
MM
3770 if (TREE_CODE (exp) == SSA_NAME)
3771 inner_mode = TYPE_MODE (TREE_TYPE (exp));
3772 else
3773 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
3774 if (mode == inner_mode)
3775 return op0;
3776 }
3777
3778 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3779 {
3780 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3781 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3782 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3783 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3784 else
3785 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3786 }
3787 else if (FLOAT_MODE_P (mode))
3788 {
2a8e30fb 3789 gcc_assert (TREE_CODE (exp) != SSA_NAME);
b5b8b0ac
AO
3790 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3791 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
3792 else
3793 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
3794 }
3795 else if (FLOAT_MODE_P (inner_mode))
3796 {
3797 if (unsignedp)
3798 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3799 else
3800 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3801 }
3802 else if (CONSTANT_P (op0)
69660a70 3803 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
b5b8b0ac
AO
3804 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3805 subreg_lowpart_offset (mode,
3806 inner_mode));
1b47fe3f
JJ
3807 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
3808 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
3809 : unsignedp)
2ba172e0 3810 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
b5b8b0ac 3811 else
2ba172e0 3812 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
b5b8b0ac
AO
3813
3814 return op0;
3815 }
3816
70f34814 3817 case MEM_REF:
71f3a3f5
JJ
3818 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
3819 {
3820 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
3821 TREE_OPERAND (exp, 0),
3822 TREE_OPERAND (exp, 1));
3823 if (newexp)
3824 return expand_debug_expr (newexp);
3825 }
3826 /* FALLTHROUGH */
b5b8b0ac 3827 case INDIRECT_REF:
0a81f074 3828 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
3829 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3830 if (!op0)
3831 return NULL;
3832
cb115041
JJ
3833 if (TREE_CODE (exp) == MEM_REF)
3834 {
583ac69c
JJ
3835 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
3836 || (GET_CODE (op0) == PLUS
3837 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
3838 /* (mem (debug_implicit_ptr)) might confuse aliasing.
3839 Instead just use get_inner_reference. */
3840 goto component_ref;
3841
cb115041
JJ
3842 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3843 if (!op1 || !CONST_INT_P (op1))
3844 return NULL;
3845
0a81f074 3846 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
cb115041
JJ
3847 }
3848
09e881c9 3849 if (POINTER_TYPE_P (TREE_TYPE (exp)))
75421dcd 3850 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
09e881c9 3851 else
75421dcd 3852 as = ADDR_SPACE_GENERIC;
b5b8b0ac 3853
f61c6f34
JJ
3854 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
3855 op0, as);
3856 if (op0 == NULL_RTX)
3857 return NULL;
b5b8b0ac 3858
f61c6f34 3859 op0 = gen_rtx_MEM (mode, op0);
b5b8b0ac 3860 set_mem_attributes (op0, exp, 0);
71f3a3f5
JJ
3861 if (TREE_CODE (exp) == MEM_REF
3862 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
3863 set_mem_expr (op0, NULL_TREE);
09e881c9 3864 set_mem_addr_space (op0, as);
b5b8b0ac
AO
3865
3866 return op0;
3867
3868 case TARGET_MEM_REF:
4d948885
RG
3869 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
3870 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
b5b8b0ac
AO
3871 return NULL;
3872
3873 op0 = expand_debug_expr
4e25ca6b 3874 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
b5b8b0ac
AO
3875 if (!op0)
3876 return NULL;
3877
f61c6f34
JJ
3878 if (POINTER_TYPE_P (TREE_TYPE (exp)))
3879 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
3880 else
3881 as = ADDR_SPACE_GENERIC;
3882
3883 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
3884 op0, as);
3885 if (op0 == NULL_RTX)
3886 return NULL;
b5b8b0ac
AO
3887
3888 op0 = gen_rtx_MEM (mode, op0);
3889
3890 set_mem_attributes (op0, exp, 0);
09e881c9 3891 set_mem_addr_space (op0, as);
b5b8b0ac
AO
3892
3893 return op0;
3894
583ac69c 3895 component_ref:
b5b8b0ac
AO
3896 case ARRAY_REF:
3897 case ARRAY_RANGE_REF:
3898 case COMPONENT_REF:
3899 case BIT_FIELD_REF:
3900 case REALPART_EXPR:
3901 case IMAGPART_EXPR:
3902 case VIEW_CONVERT_EXPR:
3903 {
3904 enum machine_mode mode1;
3905 HOST_WIDE_INT bitsize, bitpos;
3906 tree offset;
3907 int volatilep = 0;
3908 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3909 &mode1, &unsignedp, &volatilep, false);
3910 rtx orig_op0;
3911
4f2a9af8
JJ
3912 if (bitsize == 0)
3913 return NULL;
3914
b5b8b0ac
AO
3915 orig_op0 = op0 = expand_debug_expr (tem);
3916
3917 if (!op0)
3918 return NULL;
3919
3920 if (offset)
3921 {
dda2da58
AO
3922 enum machine_mode addrmode, offmode;
3923
aa847cc8
JJ
3924 if (!MEM_P (op0))
3925 return NULL;
b5b8b0ac 3926
dda2da58
AO
3927 op0 = XEXP (op0, 0);
3928 addrmode = GET_MODE (op0);
3929 if (addrmode == VOIDmode)
3930 addrmode = Pmode;
3931
b5b8b0ac
AO
3932 op1 = expand_debug_expr (offset);
3933 if (!op1)
3934 return NULL;
3935
dda2da58
AO
3936 offmode = GET_MODE (op1);
3937 if (offmode == VOIDmode)
3938 offmode = TYPE_MODE (TREE_TYPE (offset));
3939
3940 if (addrmode != offmode)
3941 op1 = simplify_gen_subreg (addrmode, op1, offmode,
3942 subreg_lowpart_offset (addrmode,
3943 offmode));
3944
3945 /* Don't use offset_address here; we don't need a
3946 recognizable address, and we don't want to generate
3947 code. */
2ba172e0
JJ
3948 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
3949 op0, op1));
b5b8b0ac
AO
3950 }
3951
3952 if (MEM_P (op0))
3953 {
4f2a9af8
JJ
3954 if (mode1 == VOIDmode)
3955 /* Bitfield. */
3956 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
b5b8b0ac
AO
3957 if (bitpos >= BITS_PER_UNIT)
3958 {
3959 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
3960 bitpos %= BITS_PER_UNIT;
3961 }
3962 else if (bitpos < 0)
3963 {
4f2a9af8
JJ
3964 HOST_WIDE_INT units
3965 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
b5b8b0ac
AO
3966 op0 = adjust_address_nv (op0, mode1, units);
3967 bitpos += units * BITS_PER_UNIT;
3968 }
3969 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
3970 op0 = adjust_address_nv (op0, mode, 0);
3971 else if (GET_MODE (op0) != mode1)
3972 op0 = adjust_address_nv (op0, mode1, 0);
3973 else
3974 op0 = copy_rtx (op0);
3975 if (op0 == orig_op0)
3976 op0 = shallow_copy_rtx (op0);
3977 set_mem_attributes (op0, exp, 0);
3978 }
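/* Editorial worked example (values assumed): with BITS_PER_UNIT == 8, a
   reference at BITPOS == 19 with BITSIZE == 5 is handled above by moving
   the MEM forward by 19 / 8 == 2 bytes and reducing BITPOS to 19 % 8 == 3;
   the extraction code below then only deals with the remaining sub-byte
   offset.  */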
3979
3980 if (bitpos == 0 && mode == GET_MODE (op0))
3981 return op0;
3982
2d3fc6aa
JJ
3983 if (bitpos < 0)
3984 return NULL;
3985
88c04a5d
JJ
3986 if (GET_MODE (op0) == BLKmode)
3987 return NULL;
3988
b5b8b0ac
AO
3989 if ((bitpos % BITS_PER_UNIT) == 0
3990 && bitsize == GET_MODE_BITSIZE (mode1))
3991 {
3992 enum machine_mode opmode = GET_MODE (op0);
3993
b5b8b0ac 3994 if (opmode == VOIDmode)
9712cba0 3995 opmode = TYPE_MODE (TREE_TYPE (tem));
b5b8b0ac
AO
3996
3997 /* This condition may hold if we're expanding the address
3998 right past the end of an array that turned out not to
3999 be addressable (i.e., the address was only computed in
4000 debug stmts). The gen_subreg below would rightfully
4001 crash, and the address doesn't really exist, so just
4002 drop it. */
4003 if (bitpos >= GET_MODE_BITSIZE (opmode))
4004 return NULL;
4005
7d5d39bb
JJ
4006 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4007 return simplify_gen_subreg (mode, op0, opmode,
4008 bitpos / BITS_PER_UNIT);
b5b8b0ac
AO
4009 }
4010
4011 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4012 && TYPE_UNSIGNED (TREE_TYPE (exp))
4013 ? SIGN_EXTRACT
4014 : ZERO_EXTRACT, mode,
4015 GET_MODE (op0) != VOIDmode
9712cba0
JJ
4016 ? GET_MODE (op0)
4017 : TYPE_MODE (TREE_TYPE (tem)),
b5b8b0ac
AO
4018 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4019 }
4020
b5b8b0ac 4021 case ABS_EXPR:
2ba172e0 4022 return simplify_gen_unary (ABS, mode, op0, mode);
b5b8b0ac
AO
4023
4024 case NEGATE_EXPR:
2ba172e0 4025 return simplify_gen_unary (NEG, mode, op0, mode);
b5b8b0ac
AO
4026
4027 case BIT_NOT_EXPR:
2ba172e0 4028 return simplify_gen_unary (NOT, mode, op0, mode);
b5b8b0ac
AO
4029
4030 case FLOAT_EXPR:
2ba172e0
JJ
4031 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4032 0)))
4033 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4034 inner_mode);
b5b8b0ac
AO
4035
4036 case FIX_TRUNC_EXPR:
2ba172e0
JJ
4037 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4038 inner_mode);
b5b8b0ac
AO
4039
4040 case POINTER_PLUS_EXPR:
576319a7
DD
4041 /* For the rare target where pointers are not the same size as
4042 size_t, we need to check for mismatched modes and correct
4043 the addend. */
4044 if (op0 && op1
4045 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4046 && GET_MODE (op0) != GET_MODE (op1))
4047 {
8369f38a
DD
4048 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4049 /* If OP0 is a partial mode, then we must truncate, even if it has
4050 the same bitsize as OP1, because GCC's representation of partial modes
4051 is opaque. */
4052 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4053 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
2ba172e0
JJ
4054 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4055 GET_MODE (op1));
576319a7
DD
4056 else
4057 /* We always sign-extend, regardless of the signedness of
4058 the operand, because the operand is always unsigned
4059 here even if the original C expression is signed. */
2ba172e0
JJ
4060 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4061 GET_MODE (op1));
576319a7
DD
4062 }
4063 /* Fall through. */
b5b8b0ac 4064 case PLUS_EXPR:
2ba172e0 4065 return simplify_gen_binary (PLUS, mode, op0, op1);
b5b8b0ac
AO
4066
4067 case MINUS_EXPR:
2ba172e0 4068 return simplify_gen_binary (MINUS, mode, op0, op1);
b5b8b0ac
AO
4069
4070 case MULT_EXPR:
2ba172e0 4071 return simplify_gen_binary (MULT, mode, op0, op1);
b5b8b0ac
AO
4072
4073 case RDIV_EXPR:
4074 case TRUNC_DIV_EXPR:
4075 case EXACT_DIV_EXPR:
4076 if (unsignedp)
2ba172e0 4077 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac 4078 else
2ba172e0 4079 return simplify_gen_binary (DIV, mode, op0, op1);
b5b8b0ac
AO
4080
4081 case TRUNC_MOD_EXPR:
2ba172e0 4082 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
b5b8b0ac
AO
4083
4084 case FLOOR_DIV_EXPR:
4085 if (unsignedp)
2ba172e0 4086 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac
AO
4087 else
4088 {
2ba172e0
JJ
4089 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4090 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4091 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0 4092 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4093 }
4094
4095 case FLOOR_MOD_EXPR:
4096 if (unsignedp)
2ba172e0 4097 return simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac
AO
4098 else
4099 {
2ba172e0 4100 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4101 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4102 adj = simplify_gen_unary (NEG, mode,
4103 simplify_gen_binary (MULT, mode, adj, op1),
4104 mode);
4105 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4106 }
4107
4108 case CEIL_DIV_EXPR:
4109 if (unsignedp)
4110 {
2ba172e0
JJ
4111 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4112 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4113 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0 4114 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4115 }
4116 else
4117 {
2ba172e0
JJ
4118 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4119 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4120 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0 4121 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4122 }
4123
4124 case CEIL_MOD_EXPR:
4125 if (unsignedp)
4126 {
2ba172e0 4127 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4128 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
4129 adj = simplify_gen_unary (NEG, mode,
4130 simplify_gen_binary (MULT, mode, adj, op1),
4131 mode);
4132 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4133 }
4134 else
4135 {
2ba172e0 4136 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4137 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4138 adj = simplify_gen_unary (NEG, mode,
4139 simplify_gen_binary (MULT, mode, adj, op1),
4140 mode);
4141 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4142 }
4143
4144 case ROUND_DIV_EXPR:
4145 if (unsignedp)
4146 {
2ba172e0
JJ
4147 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4148 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4149 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0 4150 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4151 }
4152 else
4153 {
2ba172e0
JJ
4154 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4155 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4156 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0 4157 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
4158 }
4159
4160 case ROUND_MOD_EXPR:
4161 if (unsignedp)
4162 {
2ba172e0 4163 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 4164 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
4165 adj = simplify_gen_unary (NEG, mode,
4166 simplify_gen_binary (MULT, mode, adj, op1),
4167 mode);
4168 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4169 }
4170 else
4171 {
2ba172e0 4172 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 4173 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
4174 adj = simplify_gen_unary (NEG, mode,
4175 simplify_gen_binary (MULT, mode, adj, op1),
4176 mode);
4177 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
4178 }
4179
4180 case LSHIFT_EXPR:
2ba172e0 4181 return simplify_gen_binary (ASHIFT, mode, op0, op1);
b5b8b0ac
AO
4182
4183 case RSHIFT_EXPR:
4184 if (unsignedp)
2ba172e0 4185 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
b5b8b0ac 4186 else
2ba172e0 4187 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
b5b8b0ac
AO
4188
4189 case LROTATE_EXPR:
2ba172e0 4190 return simplify_gen_binary (ROTATE, mode, op0, op1);
b5b8b0ac
AO
4191
4192 case RROTATE_EXPR:
2ba172e0 4193 return simplify_gen_binary (ROTATERT, mode, op0, op1);
b5b8b0ac
AO
4194
4195 case MIN_EXPR:
2ba172e0 4196 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
b5b8b0ac
AO
4197
4198 case MAX_EXPR:
2ba172e0 4199 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
b5b8b0ac
AO
4200
4201 case BIT_AND_EXPR:
4202 case TRUTH_AND_EXPR:
2ba172e0 4203 return simplify_gen_binary (AND, mode, op0, op1);
b5b8b0ac
AO
4204
4205 case BIT_IOR_EXPR:
4206 case TRUTH_OR_EXPR:
2ba172e0 4207 return simplify_gen_binary (IOR, mode, op0, op1);
b5b8b0ac
AO
4208
4209 case BIT_XOR_EXPR:
4210 case TRUTH_XOR_EXPR:
2ba172e0 4211 return simplify_gen_binary (XOR, mode, op0, op1);
b5b8b0ac
AO
4212
4213 case TRUTH_ANDIF_EXPR:
4214 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4215
4216 case TRUTH_ORIF_EXPR:
4217 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4218
4219 case TRUTH_NOT_EXPR:
2ba172e0 4220 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
b5b8b0ac
AO
4221
4222 case LT_EXPR:
2ba172e0
JJ
4223 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4224 op0, op1);
b5b8b0ac
AO
4225
4226 case LE_EXPR:
2ba172e0
JJ
4227 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4228 op0, op1);
b5b8b0ac
AO
4229
4230 case GT_EXPR:
2ba172e0
JJ
4231 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4232 op0, op1);
b5b8b0ac
AO
4233
4234 case GE_EXPR:
2ba172e0
JJ
4235 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4236 op0, op1);
b5b8b0ac
AO
4237
4238 case EQ_EXPR:
2ba172e0 4239 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4240
4241 case NE_EXPR:
2ba172e0 4242 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4243
4244 case UNORDERED_EXPR:
2ba172e0 4245 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4246
4247 case ORDERED_EXPR:
2ba172e0 4248 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4249
4250 case UNLT_EXPR:
2ba172e0 4251 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4252
4253 case UNLE_EXPR:
2ba172e0 4254 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4255
4256 case UNGT_EXPR:
2ba172e0 4257 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4258
4259 case UNGE_EXPR:
2ba172e0 4260 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4261
4262 case UNEQ_EXPR:
2ba172e0 4263 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4264
4265 case LTGT_EXPR:
2ba172e0 4266 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
4267
4268 case COND_EXPR:
4269 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4270
4271 case COMPLEX_EXPR:
4272 gcc_assert (COMPLEX_MODE_P (mode));
4273 if (GET_MODE (op0) == VOIDmode)
4274 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4275 if (GET_MODE (op1) == VOIDmode)
4276 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4277 return gen_rtx_CONCAT (mode, op0, op1);
4278
d02a5a4b
JJ
4279 case CONJ_EXPR:
4280 if (GET_CODE (op0) == CONCAT)
4281 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2ba172e0
JJ
4282 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4283 XEXP (op0, 1),
4284 GET_MODE_INNER (mode)));
d02a5a4b
JJ
4285 else
4286 {
4287 enum machine_mode imode = GET_MODE_INNER (mode);
4288 rtx re, im;
4289
4290 if (MEM_P (op0))
4291 {
4292 re = adjust_address_nv (op0, imode, 0);
4293 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4294 }
4295 else
4296 {
4297 enum machine_mode ifmode = int_mode_for_mode (mode);
4298 enum machine_mode ihmode = int_mode_for_mode (imode);
4299 rtx halfsize;
4300 if (ifmode == BLKmode || ihmode == BLKmode)
4301 return NULL;
4302 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4303 re = op0;
4304 if (mode != ifmode)
4305 re = gen_rtx_SUBREG (ifmode, re, 0);
4306 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4307 if (imode != ihmode)
4308 re = gen_rtx_SUBREG (imode, re, 0);
4309 im = copy_rtx (op0);
4310 if (mode != ifmode)
4311 im = gen_rtx_SUBREG (ifmode, im, 0);
4312 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4313 if (imode != ihmode)
4314 im = gen_rtx_SUBREG (imode, im, 0);
4315 }
4316 im = gen_rtx_NEG (imode, im);
4317 return gen_rtx_CONCAT (mode, re, im);
4318 }
4319
b5b8b0ac
AO
4320 case ADDR_EXPR:
4321 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4322 if (!op0 || !MEM_P (op0))
c8a27c40
JJ
4323 {
4324 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4325 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4326 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
f8cca67b
JJ
4327 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4328 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
c8a27c40
JJ
4329 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4330
4331 if (handled_component_p (TREE_OPERAND (exp, 0)))
4332 {
4333 HOST_WIDE_INT bitoffset, bitsize, maxsize;
4334 tree decl
4335 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
4336 &bitoffset, &bitsize, &maxsize);
4337 if ((TREE_CODE (decl) == VAR_DECL
4338 || TREE_CODE (decl) == PARM_DECL
4339 || TREE_CODE (decl) == RESULT_DECL)
f8cca67b
JJ
4340 && (!TREE_ADDRESSABLE (decl)
4341 || target_for_debug_bind (decl))
c8a27c40
JJ
4342 && (bitoffset % BITS_PER_UNIT) == 0
4343 && bitsize > 0
4344 && bitsize == maxsize)
0a81f074
RS
4345 {
4346 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4347 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4348 }
c8a27c40
JJ
4349 }
4350
9430b7ba
JJ
4351 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4352 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4353 == ADDR_EXPR)
4354 {
4355 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4356 0));
4357 if (op0 != NULL
4358 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4359 || (GET_CODE (op0) == PLUS
4360 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4361 && CONST_INT_P (XEXP (op0, 1)))))
4362 {
4363 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4364 1));
4365 if (!op1 || !CONST_INT_P (op1))
4366 return NULL;
4367
4368 return plus_constant (mode, op0, INTVAL (op1));
4369 }
4370 }
4371
c8a27c40
JJ
4372 return NULL;
4373 }
b5b8b0ac 4374
f61c6f34
JJ
4375 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
4376 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
dda2da58
AO
4377
4378 return op0;
b5b8b0ac
AO
4379
4380 case VECTOR_CST:
d2a12ae7
RG
4381 {
4382 unsigned i;
4383
4384 op0 = gen_rtx_CONCATN
4385 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4386
4387 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4388 {
4389 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4390 if (!op1)
4391 return NULL;
4392 XVECEXP (op0, 0, i) = op1;
4393 }
4394
4395 return op0;
4396 }
b5b8b0ac
AO
4397
4398 case CONSTRUCTOR:
47598145
MM
4399 if (TREE_CLOBBER_P (exp))
4400 return NULL;
4401 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
b5b8b0ac
AO
4402 {
4403 unsigned i;
4404 tree val;
4405
4406 op0 = gen_rtx_CONCATN
4407 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4408
4409 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4410 {
4411 op1 = expand_debug_expr (val);
4412 if (!op1)
4413 return NULL;
4414 XVECEXP (op0, 0, i) = op1;
4415 }
4416
4417 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4418 {
4419 op1 = expand_debug_expr
e8160c9a 4420 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
b5b8b0ac
AO
4421
4422 if (!op1)
4423 return NULL;
4424
4425 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4426 XVECEXP (op0, 0, i) = op1;
4427 }
4428
4429 return op0;
4430 }
4431 else
4432 goto flag_unsupported;
4433
4434 case CALL_EXPR:
4435 /* ??? Maybe handle some builtins? */
4436 return NULL;
4437
4438 case SSA_NAME:
4439 {
2a8e30fb
MM
4440 gimple g = get_gimple_for_ssa_name (exp);
4441 if (g)
4442 {
4443 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
4444 if (!op0)
4445 return NULL;
4446 }
4447 else
4448 {
4449 int part = var_to_partition (SA.map, exp);
b5b8b0ac 4450
2a8e30fb 4451 if (part == NO_PARTITION)
a58a8e4b
JJ
4452 {
4453 /* If this is a reference to an incoming value of a parameter
4454 that is never used in the code or where the incoming
4455 value is never used in the code, use PARM_DECL's
4456 DECL_RTL if set. */
4457 if (SSA_NAME_IS_DEFAULT_DEF (exp)
4458 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
4459 {
12c5ffe5
EB
4460 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4461 if (op0)
4462 goto adjust_mode;
a58a8e4b 4463 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
12c5ffe5
EB
4464 if (op0)
4465 goto adjust_mode;
a58a8e4b
JJ
4466 }
4467 return NULL;
4468 }
b5b8b0ac 4469
2a8e30fb 4470 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
b5b8b0ac 4471
abfea58d 4472 op0 = copy_rtx (SA.partition_to_pseudo[part]);
2a8e30fb 4473 }
b5b8b0ac
AO
4474 goto adjust_mode;
4475 }
4476
4477 case ERROR_MARK:
4478 return NULL;
4479
7ece48b1
JJ
4480 /* Vector stuff. For most of the codes we don't have rtl codes. */
4481 case REALIGN_LOAD_EXPR:
4482 case REDUC_MAX_EXPR:
4483 case REDUC_MIN_EXPR:
4484 case REDUC_PLUS_EXPR:
4485 case VEC_COND_EXPR:
7ece48b1
JJ
4486 case VEC_LSHIFT_EXPR:
4487 case VEC_PACK_FIX_TRUNC_EXPR:
4488 case VEC_PACK_SAT_EXPR:
4489 case VEC_PACK_TRUNC_EXPR:
4490 case VEC_RSHIFT_EXPR:
4491 case VEC_UNPACK_FLOAT_HI_EXPR:
4492 case VEC_UNPACK_FLOAT_LO_EXPR:
4493 case VEC_UNPACK_HI_EXPR:
4494 case VEC_UNPACK_LO_EXPR:
4495 case VEC_WIDEN_MULT_HI_EXPR:
4496 case VEC_WIDEN_MULT_LO_EXPR:
3f30a9a6
RH
4497 case VEC_WIDEN_MULT_EVEN_EXPR:
4498 case VEC_WIDEN_MULT_ODD_EXPR:
36ba4aae
IR
4499 case VEC_WIDEN_LSHIFT_HI_EXPR:
4500 case VEC_WIDEN_LSHIFT_LO_EXPR:
3f3af9df 4501 case VEC_PERM_EXPR:
7ece48b1
JJ
4502 return NULL;
4503
98449720 4504 /* Misc codes. */
7ece48b1
JJ
4505 case ADDR_SPACE_CONVERT_EXPR:
4506 case FIXED_CONVERT_EXPR:
4507 case OBJ_TYPE_REF:
4508 case WITH_SIZE_EXPR:
4509 return NULL;
4510
4511 case DOT_PROD_EXPR:
4512 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4513 && SCALAR_INT_MODE_P (mode))
4514 {
2ba172e0
JJ
4515 op0
4516 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4517 0)))
4518 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4519 inner_mode);
4520 op1
4521 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4522 1)))
4523 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
4524 inner_mode);
4525 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4526 return simplify_gen_binary (PLUS, mode, op0, op2);
7ece48b1
JJ
4527 }
4528 return NULL;
4529
4530 case WIDEN_MULT_EXPR:
0354c0c7
BS
4531 case WIDEN_MULT_PLUS_EXPR:
4532 case WIDEN_MULT_MINUS_EXPR:
7ece48b1
JJ
4533 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4534 && SCALAR_INT_MODE_P (mode))
4535 {
2ba172e0 4536 inner_mode = GET_MODE (op0);
7ece48b1 4537 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5b58b39b 4538 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
7ece48b1 4539 else
5b58b39b 4540 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
7ece48b1 4541 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5b58b39b 4542 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
7ece48b1 4543 else
5b58b39b 4544 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
2ba172e0 4545 op0 = simplify_gen_binary (MULT, mode, op0, op1);
0354c0c7
BS
4546 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
4547 return op0;
4548 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
2ba172e0 4549 return simplify_gen_binary (PLUS, mode, op0, op2);
0354c0c7 4550 else
2ba172e0 4551 return simplify_gen_binary (MINUS, mode, op2, op0);
7ece48b1
JJ
4552 }
4553 return NULL;
4554
98449720
RH
4555 case MULT_HIGHPART_EXPR:
4556 /* ??? Similar to the above. */
4557 return NULL;
4558
7ece48b1 4559 case WIDEN_SUM_EXPR:
3f3af9df 4560 case WIDEN_LSHIFT_EXPR:
7ece48b1
JJ
4561 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4562 && SCALAR_INT_MODE_P (mode))
4563 {
2ba172e0
JJ
4564 op0
4565 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4566 0)))
4567 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4568 inner_mode);
3f3af9df
JJ
4569 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
4570 ? ASHIFT : PLUS, mode, op0, op1);
7ece48b1
JJ
4571 }
4572 return NULL;
4573
0f59b812 4574 case FMA_EXPR:
2ba172e0 4575 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
0f59b812 4576
b5b8b0ac
AO
4577 default:
4578 flag_unsupported:
4579#ifdef ENABLE_CHECKING
4580 debug_tree (exp);
4581 gcc_unreachable ();
4582#else
4583 return NULL;
4584#endif
4585 }
4586}
4587
ddb555ed
JJ
4588/* Return an RTX equivalent to the source bind value of the tree expression
4589 EXP. */
4590
4591static rtx
4592expand_debug_source_expr (tree exp)
4593{
4594 rtx op0 = NULL_RTX;
4595 enum machine_mode mode = VOIDmode, inner_mode;
4596
4597 switch (TREE_CODE (exp))
4598 {
4599 case PARM_DECL:
4600 {
ddb555ed 4601 mode = DECL_MODE (exp);
12c5ffe5
EB
4602 op0 = expand_debug_parm_decl (exp);
4603 if (op0)
4604 break;
ddb555ed
JJ
4605 /* Check whether this is an argument that has been completely
4606 optimized out. */
4607 if (!DECL_RTL_SET_P (exp)
12c5ffe5 4608 && !DECL_INCOMING_RTL (exp)
ddb555ed
JJ
4609 && DECL_ABSTRACT_ORIGIN (current_function_decl))
4610 {
7b575cfa 4611 tree aexp = DECL_ORIGIN (exp);
ddb555ed
JJ
4612 if (DECL_CONTEXT (aexp)
4613 == DECL_ABSTRACT_ORIGIN (current_function_decl))
4614 {
9771b263 4615 vec<tree, va_gc> **debug_args;
ddb555ed
JJ
4616 unsigned int ix;
4617 tree ddecl;
ddb555ed
JJ
4618 debug_args = decl_debug_args_lookup (current_function_decl);
4619 if (debug_args != NULL)
4620 {
9771b263 4621 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
ddb555ed
JJ
4622 ix += 2)
4623 if (ddecl == aexp)
4624 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
4625 }
4626 }
4627 }
4628 break;
4629 }
4630 default:
4631 break;
4632 }
4633
4634 if (op0 == NULL_RTX)
4635 return NULL_RTX;
4636
4637 inner_mode = GET_MODE (op0);
4638 if (mode == inner_mode)
4639 return op0;
4640
4641 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4642 {
4643 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4644 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4645 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4646 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4647 else
4648 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4649 }
4650 else if (FLOAT_MODE_P (mode))
4651 gcc_unreachable ();
4652 else if (FLOAT_MODE_P (inner_mode))
4653 {
4654 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4655 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4656 else
4657 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4658 }
4659 else if (CONSTANT_P (op0)
4660 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
4661 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4662 subreg_lowpart_offset (mode, inner_mode));
4663 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4664 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4665 else
4666 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4667
4668 return op0;
4669}
4670
6cfa417f
JJ
4671/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
4672 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
4673 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
4674
4675static void
4676avoid_complex_debug_insns (rtx insn, rtx *exp_p, int depth)
4677{
4678 rtx exp = *exp_p;
4679
4680 if (exp == NULL_RTX)
4681 return;
4682
4683 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
4684 return;
4685
4686 if (depth == 4)
4687 {
4688 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
4689 rtx dval = make_debug_expr_from_rtl (exp);
4690
4691 /* Emit a debug bind insn before INSN. */
4692 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
4693 DEBUG_EXPR_TREE_DECL (dval), exp,
4694 VAR_INIT_STATUS_INITIALIZED);
4695
4696 emit_debug_insn_before (bind, insn);
4697 *exp_p = dval;
4698 return;
4699 }
4700
4701 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
4702 int i, j;
4703 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
4704 switch (*format_ptr++)
4705 {
4706 case 'e':
4707 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
4708 break;
4709
4710 case 'E':
4711 case 'V':
4712 for (j = 0; j < XVECLEN (exp, i); j++)
4713 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
4714 break;
4715
4716 default:
4717 break;
4718 }
4719}
4720
b5b8b0ac
AO
4721/* Expand the _LOCs in debug insns. We run this after expanding all
4722 regular insns, so that any variables referenced in the function
4723 will have their DECL_RTLs set. */
4724
4725static void
4726expand_debug_locations (void)
4727{
4728 rtx insn;
4729 rtx last = get_last_insn ();
4730 int save_strict_alias = flag_strict_aliasing;
4731
4732 /* New alias sets while setting up memory attributes cause
4733 -fcompare-debug failures, even though it doesn't bring about any
4734 codegen changes. */
4735 flag_strict_aliasing = 0;
4736
4737 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4738 if (DEBUG_INSN_P (insn))
4739 {
4740 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
6cfa417f 4741 rtx val, prev_insn, insn2;
b5b8b0ac
AO
4742 enum machine_mode mode;
4743
4744 if (value == NULL_TREE)
4745 val = NULL_RTX;
4746 else
4747 {
ddb555ed
JJ
4748 if (INSN_VAR_LOCATION_STATUS (insn)
4749 == VAR_INIT_STATUS_UNINITIALIZED)
4750 val = expand_debug_source_expr (value);
4751 else
4752 val = expand_debug_expr (value);
b5b8b0ac
AO
4753 gcc_assert (last == get_last_insn ());
4754 }
4755
4756 if (!val)
4757 val = gen_rtx_UNKNOWN_VAR_LOC ();
4758 else
4759 {
4760 mode = GET_MODE (INSN_VAR_LOCATION (insn));
4761
4762 gcc_assert (mode == GET_MODE (val)
4763 || (GET_MODE (val) == VOIDmode
33ffb5c5 4764 && (CONST_SCALAR_INT_P (val)
b5b8b0ac 4765 || GET_CODE (val) == CONST_FIXED
b5b8b0ac
AO
4766 || GET_CODE (val) == LABEL_REF)));
4767 }
4768
4769 INSN_VAR_LOCATION_LOC (insn) = val;
6cfa417f
JJ
4770 prev_insn = PREV_INSN (insn);
4771 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
4772 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
b5b8b0ac
AO
4773 }
4774
4775 flag_strict_aliasing = save_strict_alias;
4776}
4777
242229bb
JH
4778/* Expand basic block BB from GIMPLE trees to RTL. */
4779
4780static basic_block
f3ddd692 4781expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
242229bb 4782{
726a989a
RB
4783 gimple_stmt_iterator gsi;
4784 gimple_seq stmts;
4785 gimple stmt = NULL;
242229bb
JH
4786 rtx note, last;
4787 edge e;
628f6a4e 4788 edge_iterator ei;
8b11009b 4789 void **elt;
242229bb
JH
4790
4791 if (dump_file)
726a989a
RB
4792 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
4793 bb->index);
4794
4795 /* Note that since we are now transitioning from GIMPLE to RTL, we
4796 cannot use the gsi_*_bb() routines because they expect the basic
4797 block to be in GIMPLE, instead of RTL. Therefore, we need to
4798 access the BB sequence directly. */
4799 stmts = bb_seq (bb);
3e8b732e
MM
4800 bb->il.gimple.seq = NULL;
4801 bb->il.gimple.phi_nodes = NULL;
bf08ebeb 4802 rtl_profile_for_bb (bb);
5e2d947c
JH
4803 init_rtl_bb_info (bb);
4804 bb->flags |= BB_RTL;
4805
a9b77cd1
ZD
4806 /* Remove the RETURN_EXPR if we may fall through to the exit
4807 instead. */
726a989a
RB
4808 gsi = gsi_last (stmts);
4809 if (!gsi_end_p (gsi)
4810 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
a9b77cd1 4811 {
726a989a 4812 gimple ret_stmt = gsi_stmt (gsi);
a9b77cd1
ZD
4813
4814 gcc_assert (single_succ_p (bb));
4815 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
4816
4817 if (bb->next_bb == EXIT_BLOCK_PTR
726a989a 4818 && !gimple_return_retval (ret_stmt))
a9b77cd1 4819 {
726a989a 4820 gsi_remove (&gsi, false);
a9b77cd1
ZD
4821 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
4822 }
4823 }
4824
726a989a
RB
4825 gsi = gsi_start (stmts);
4826 if (!gsi_end_p (gsi))
8b11009b 4827 {
726a989a
RB
4828 stmt = gsi_stmt (gsi);
4829 if (gimple_code (stmt) != GIMPLE_LABEL)
4830 stmt = NULL;
8b11009b 4831 }
242229bb 4832
8b11009b
ZD
4833 elt = pointer_map_contains (lab_rtx_for_bb, bb);
4834
4835 if (stmt || elt)
242229bb
JH
4836 {
4837 last = get_last_insn ();
4838
8b11009b
ZD
4839 if (stmt)
4840 {
28ed065e 4841 expand_gimple_stmt (stmt);
726a989a 4842 gsi_next (&gsi);
8b11009b
ZD
4843 }
4844
4845 if (elt)
ae50c0cb 4846 emit_label ((rtx) *elt);
242229bb 4847
caf93cb0 4848 /* Java emits line number notes at the top of labels.
c22cacf3 4849 ??? Make this go away once line number notes are obsoleted. */
242229bb 4850 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 4851 if (NOTE_P (BB_HEAD (bb)))
242229bb 4852 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
242229bb 4853 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528 4854
726a989a 4855 maybe_dump_rtl_for_gimple_stmt (stmt, last);
242229bb
JH
4856 }
4857 else
4858 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
4859
4860 NOTE_BASIC_BLOCK (note) = bb;
4861
726a989a 4862 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 4863 {
cea49550 4864 basic_block new_bb;
242229bb 4865
b5b8b0ac 4866 stmt = gsi_stmt (gsi);
2a8e30fb
MM
4867
4868 /* If this statement is a non-debug one, and we generate debug
4869 insns, then this one might be the last real use of a TERed
4870 SSA_NAME, but where there are still some debug uses further
4871 down. Expanding the current SSA name in such further debug
4872 uses by its RHS might lead to wrong debug info, as coalescing
4873 might make the operands of such RHS be placed into the same
4874 pseudo as something else. Like so:
4875 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
4876 use(a_1);
4877 a_2 = ...
4878 #DEBUG ... => a_1
4879 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
4880 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
4881 the write to a_2 would actually have clobbered the place which
4882 formerly held a_0.
4883
4884 So, instead of that, we recognize the situation, and generate
4885 debug temporaries at the last real use of TERed SSA names:
4886 a_1 = a_0 + 1;
4887 #DEBUG #D1 => a_1
4888 use(a_1);
4889 a_2 = ...
4890 #DEBUG ... => #D1
4891 */
4892 if (MAY_HAVE_DEBUG_INSNS
4893 && SA.values
4894 && !is_gimple_debug (stmt))
4895 {
4896 ssa_op_iter iter;
4897 tree op;
4898 gimple def;
4899
5368224f 4900 location_t sloc = curr_insn_location ();
2a8e30fb
MM
4901
4902 /* Look for SSA names that have their last use here (TERed
4903 names always have only one real use). */
4904 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4905 if ((def = get_gimple_for_ssa_name (op)))
4906 {
4907 imm_use_iterator imm_iter;
4908 use_operand_p use_p;
4909 bool have_debug_uses = false;
4910
4911 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
4912 {
4913 if (gimple_debug_bind_p (USE_STMT (use_p)))
4914 {
4915 have_debug_uses = true;
4916 break;
4917 }
4918 }
4919
4920 if (have_debug_uses)
4921 {
4922 /* OP is a TERed SSA name, with DEF its defining
4923 statement, and where OP is used in further debug
4924 instructions. Generate a debug temporary, and
4925 replace all uses of OP in debug insns with that
4926 temporary. */
4927 gimple debugstmt;
4928 tree value = gimple_assign_rhs_to_tree (def);
4929 tree vexpr = make_node (DEBUG_EXPR_DECL);
4930 rtx val;
4931 enum machine_mode mode;
4932
5368224f 4933 set_curr_insn_location (gimple_location (def));
2a8e30fb
MM
4934
4935 DECL_ARTIFICIAL (vexpr) = 1;
4936 TREE_TYPE (vexpr) = TREE_TYPE (value);
4937 if (DECL_P (value))
4938 mode = DECL_MODE (value);
4939 else
4940 mode = TYPE_MODE (TREE_TYPE (value));
4941 DECL_MODE (vexpr) = mode;
4942
4943 val = gen_rtx_VAR_LOCATION
4944 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
4945
e8c6bb74 4946 emit_debug_insn (val);
2a8e30fb
MM
4947
4948 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
4949 {
4950 if (!gimple_debug_bind_p (debugstmt))
4951 continue;
4952
4953 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
4954 SET_USE (use_p, vexpr);
4955
4956 update_stmt (debugstmt);
4957 }
4958 }
4959 }
5368224f 4960 set_curr_insn_location (sloc);
2a8e30fb
MM
4961 }
4962
a5883ba0 4963 currently_expanding_gimple_stmt = stmt;
b5b8b0ac 4964
242229bb
JH
4965 /* Expand this statement, then evaluate the resulting RTL and
4966 fixup the CFG accordingly. */
726a989a 4967 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 4968 {
726a989a 4969 new_bb = expand_gimple_cond (bb, stmt);
cea49550
RH
4970 if (new_bb)
4971 return new_bb;
4972 }
b5b8b0ac
AO
4973 else if (gimple_debug_bind_p (stmt))
4974 {
5368224f 4975 location_t sloc = curr_insn_location ();
b5b8b0ac
AO
4976 gimple_stmt_iterator nsi = gsi;
4977
4978 for (;;)
4979 {
4980 tree var = gimple_debug_bind_get_var (stmt);
4981 tree value;
4982 rtx val;
4983 enum machine_mode mode;
4984
ec8c1492
JJ
4985 if (TREE_CODE (var) != DEBUG_EXPR_DECL
4986 && TREE_CODE (var) != LABEL_DECL
4987 && !target_for_debug_bind (var))
4988 goto delink_debug_stmt;
4989
b5b8b0ac
AO
4990 if (gimple_debug_bind_has_value_p (stmt))
4991 value = gimple_debug_bind_get_value (stmt);
4992 else
4993 value = NULL_TREE;
4994
4995 last = get_last_insn ();
4996
5368224f 4997 set_curr_insn_location (gimple_location (stmt));
b5b8b0ac
AO
4998
4999 if (DECL_P (var))
5000 mode = DECL_MODE (var);
5001 else
5002 mode = TYPE_MODE (TREE_TYPE (var));
5003
5004 val = gen_rtx_VAR_LOCATION
5005 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5006
e16b6fd0 5007 emit_debug_insn (val);
b5b8b0ac
AO
5008
5009 if (dump_file && (dump_flags & TDF_DETAILS))
5010 {
5011 /* We can't dump the insn with a TREE where an RTX
5012 is expected. */
e8c6bb74 5013 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
b5b8b0ac 5014 maybe_dump_rtl_for_gimple_stmt (stmt, last);
e8c6bb74 5015 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
b5b8b0ac
AO
5016 }
5017
ec8c1492 5018 delink_debug_stmt:
2a8e30fb
MM
5019 /* In order not to generate too many debug temporaries,
5020 we delink all uses of debug statements we already expanded.
5021 Therefore debug statements between definition and real
5022 use of TERed SSA names will continue to use the SSA name,
5023 and not be replaced with debug temps. */
5024 delink_stmt_imm_use (stmt);
5025
b5b8b0ac
AO
5026 gsi = nsi;
5027 gsi_next (&nsi);
5028 if (gsi_end_p (nsi))
5029 break;
5030 stmt = gsi_stmt (nsi);
5031 if (!gimple_debug_bind_p (stmt))
5032 break;
5033 }
5034
5368224f 5035 set_curr_insn_location (sloc);
ddb555ed
JJ
5036 }
5037 else if (gimple_debug_source_bind_p (stmt))
5038 {
5368224f 5039 location_t sloc = curr_insn_location ();
ddb555ed
JJ
5040 tree var = gimple_debug_source_bind_get_var (stmt);
5041 tree value = gimple_debug_source_bind_get_value (stmt);
5042 rtx val;
5043 enum machine_mode mode;
5044
5045 last = get_last_insn ();
5046
5368224f 5047 set_curr_insn_location (gimple_location (stmt));
ddb555ed
JJ
5048
5049 mode = DECL_MODE (var);
5050
5051 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5052 VAR_INIT_STATUS_UNINITIALIZED);
5053
5054 emit_debug_insn (val);
5055
5056 if (dump_file && (dump_flags & TDF_DETAILS))
5057 {
5058 /* We can't dump the insn with a TREE where an RTX
5059 is expected. */
5060 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5061 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5062 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5063 }
5064
5368224f 5065 set_curr_insn_location (sloc);
b5b8b0ac 5066 }
80c7a9eb 5067 else
242229bb 5068 {
f3ddd692
JJ
5069 if (is_gimple_call (stmt)
5070 && gimple_call_tail_p (stmt)
5071 && disable_tail_calls)
5072 gimple_call_set_tail (stmt, false);
5073
726a989a 5074 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
cea49550
RH
5075 {
5076 bool can_fallthru;
5077 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
5078 if (new_bb)
5079 {
5080 if (can_fallthru)
5081 bb = new_bb;
5082 else
5083 return new_bb;
5084 }
5085 }
4d7a65ea 5086 else
b7211528 5087 {
4e3825db 5088 def_operand_p def_p;
4e3825db
MM
5089 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5090
5091 if (def_p != NULL)
5092 {
5093 /* Ignore this stmt if it is in the list of
5094 replaceable expressions. */
5095 if (SA.values
b8698a0f 5096 && bitmap_bit_p (SA.values,
e97809c6 5097 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4e3825db
MM
5098 continue;
5099 }
28ed065e 5100 last = expand_gimple_stmt (stmt);
726a989a 5101 maybe_dump_rtl_for_gimple_stmt (stmt, last);
b7211528 5102 }
242229bb
JH
5103 }
5104 }
5105
a5883ba0
MM
5106 currently_expanding_gimple_stmt = NULL;
5107
7241571e 5108 /* Expand implicit goto and convert goto_locus. */
a9b77cd1
ZD
5109 FOR_EACH_EDGE (e, ei, bb->succs)
5110 {
2f13f2de 5111 if (e->goto_locus != UNKNOWN_LOCATION)
5368224f 5112 set_curr_insn_location (e->goto_locus);
7241571e
JJ
5113 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5114 {
5115 emit_jump (label_rtx_for_bb (e->dest));
5116 e->flags &= ~EDGE_FALLTHRU;
5117 }
a9b77cd1
ZD
5118 }
5119
ae761c45
AH
5120 /* Expanded RTL can create a jump in the last instruction of a block.
5121 This jump might later be assumed to be a jump to the successor and break edge insertion.
5122 We need to insert a dummy move to prevent this. PR41440. */
5123 if (single_succ_p (bb)
5124 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5125 && (last = get_last_insn ())
5126 && JUMP_P (last))
5127 {
5128 rtx dummy = gen_reg_rtx (SImode);
5129 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5130 }
5131
242229bb
JH
5132 do_pending_stack_adjust ();
5133
3f117656 5134 /* Find the block tail. The last insn in the block is the insn
242229bb
JH
5135 before a barrier and/or table jump insn. */
5136 last = get_last_insn ();
4b4bf941 5137 if (BARRIER_P (last))
242229bb
JH
5138 last = PREV_INSN (last);
5139 if (JUMP_TABLE_DATA_P (last))
5140 last = PREV_INSN (PREV_INSN (last));
5141 BB_END (bb) = last;
caf93cb0 5142
242229bb 5143 update_bb_for_insn (bb);
80c7a9eb 5144
242229bb
JH
5145 return bb;
5146}
5147
5148
5149/* Create a basic block for initialization code. */
5150
5151static basic_block
5152construct_init_block (void)
5153{
5154 basic_block init_block, first_block;
fd44f634
JH
5155 edge e = NULL;
5156 int flags;
275a4187 5157
fd44f634
JH
5158 /* Multiple entry points not supported yet. */
5159 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
5e2d947c
JH
5160 init_rtl_bb_info (ENTRY_BLOCK_PTR);
5161 init_rtl_bb_info (EXIT_BLOCK_PTR);
5162 ENTRY_BLOCK_PTR->flags |= BB_RTL;
5163 EXIT_BLOCK_PTR->flags |= BB_RTL;
242229bb 5164
fd44f634 5165 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
275a4187 5166
fd44f634
JH
5167 /* When the entry edge points to the first basic block, we don't need a jump;
5168 otherwise we have to jump to the proper target. */
5169 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
5170 {
726a989a 5171 tree label = gimple_block_label (e->dest);
fd44f634
JH
5172
5173 emit_jump (label_rtx (label));
5174 flags = 0;
275a4187 5175 }
fd44f634
JH
5176 else
5177 flags = EDGE_FALLTHRU;
242229bb
JH
5178
5179 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5180 get_last_insn (),
5181 ENTRY_BLOCK_PTR);
5182 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
5183 init_block->count = ENTRY_BLOCK_PTR->count;
7d776ee2
RG
5184 if (current_loops && ENTRY_BLOCK_PTR->loop_father)
5185 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
242229bb
JH
5186 if (e)
5187 {
5188 first_block = e->dest;
5189 redirect_edge_succ (e, init_block);
fd44f634 5190 e = make_edge (init_block, first_block, flags);
242229bb
JH
5191 }
5192 else
5193 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
5194 e->probability = REG_BR_PROB_BASE;
5195 e->count = ENTRY_BLOCK_PTR->count;
5196
5197 update_bb_for_insn (init_block);
5198 return init_block;
5199}
5200
55e092c4
JH
5201/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5202 found in the block tree. */
5203
5204static void
5205set_block_levels (tree block, int level)
5206{
5207 while (block)
5208 {
5209 BLOCK_NUMBER (block) = level;
5210 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5211 block = BLOCK_CHAIN (block);
5212 }
5213}
242229bb
JH
5214
5215/* Create a block containing landing pads and similar stuff. */
5216
5217static void
5218construct_exit_block (void)
5219{
5220 rtx head = get_last_insn ();
5221 rtx end;
5222 basic_block exit_block;
628f6a4e
BE
5223 edge e, e2;
5224 unsigned ix;
5225 edge_iterator ei;
071a42f9 5226 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
242229bb 5227
bf08ebeb
JH
5228 rtl_profile_for_bb (EXIT_BLOCK_PTR);
5229
caf93cb0 5230 /* Make sure the locus is set to the end of the function, so that
242229bb 5231 epilogue line numbers and warnings are set properly. */
2f13f2de 5232 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
242229bb
JH
5233 input_location = cfun->function_end_locus;
5234
242229bb
JH
5235 /* Generate rtl for function exit. */
5236 expand_function_end ();
5237
5238 end = get_last_insn ();
5239 if (head == end)
5240 return;
071a42f9
JH
5241 /* While emitting the function end we could have moved the end of the last
5242    basic block.  */
5243 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4b4bf941 5244 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 5245 head = NEXT_INSN (head);
80c7a9eb
RH
5246 exit_block = create_basic_block (NEXT_INSN (head), end,
5247 EXIT_BLOCK_PTR->prev_bb);
242229bb
JH
5248 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
5249 exit_block->count = EXIT_BLOCK_PTR->count;
7d776ee2
RG
5250 if (current_loops && EXIT_BLOCK_PTR->loop_father)
5251 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
628f6a4e
BE
5252
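  /* Redirect every non-abnormal predecessor edge of EXIT to the new
     exit_block.  redirect_edge_succ removes the redirected edge from EXIT's
     predecessor vector, so IX only advances past edges we keep.  */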
5253 ix = 0;
5254 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
242229bb 5255 {
8fb790fd 5256 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
242229bb 5257 if (!(e->flags & EDGE_ABNORMAL))
628f6a4e
BE
5258 redirect_edge_succ (e, exit_block);
5259 else
5260 ix++;
242229bb 5261 }
628f6a4e 5262
242229bb
JH
5263 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
5264 e->probability = REG_BR_PROB_BASE;
5265 e->count = EXIT_BLOCK_PTR->count;
628f6a4e 5266 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
242229bb
JH
5267 if (e2 != e)
5268 {
c22cacf3 5269 e->count -= e2->count;
242229bb
JH
5270 exit_block->count -= e2->count;
5271 exit_block->frequency -= EDGE_FREQUENCY (e2);
5272 }
5273 if (e->count < 0)
5274 e->count = 0;
5275 if (exit_block->count < 0)
5276 exit_block->count = 0;
5277 if (exit_block->frequency < 0)
5278 exit_block->frequency = 0;
5279 update_bb_for_insn (exit_block);
5280}
5281
c22cacf3 5282/* Helper function for discover_nonconstant_array_refs.
a1b23b2f
UW
5283 Look for ARRAY_REF nodes with non-constant indexes and mark them
5284 addressable. */
5285
5286static tree
5287discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5288 void *data ATTRIBUTE_UNUSED)
5289{
5290 tree t = *tp;
5291
5292 if (IS_TYPE_OR_DECL_P (t))
5293 *walk_subtrees = 0;
5294 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5295 {
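      /* Strip handled components whose offsets are gimple invariants; if an
	 ARRAY_REF or ARRAY_RANGE_REF with a non-constant index remains, its
	 non-BLKmode DECL base below must be made addressable.  */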
5296 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5297 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5298 && (!TREE_OPERAND (t, 2)
5299 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5300 || (TREE_CODE (t) == COMPONENT_REF
5301 && (!TREE_OPERAND (t,2)
5302 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5303 || TREE_CODE (t) == BIT_FIELD_REF
5304 || TREE_CODE (t) == REALPART_EXPR
5305 || TREE_CODE (t) == IMAGPART_EXPR
5306 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1043771b 5307 || CONVERT_EXPR_P (t))
a1b23b2f
UW
5308 t = TREE_OPERAND (t, 0);
5309
5310 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5311 {
5312 t = get_base_address (t);
6f11d690
RG
5313 if (t && DECL_P (t)
5314 && DECL_MODE (t) != BLKmode)
a1b23b2f
UW
5315 TREE_ADDRESSABLE (t) = 1;
5316 }
5317
5318 *walk_subtrees = 0;
5319 }
5320
5321 return NULL_TREE;
5322}
5323
5324/* RTL expansion is not able to compile array references with variable
5325    offsets for arrays stored in a single register.  Discover such
5326 expressions and mark variables as addressable to avoid this
5327 scenario. */
5328
5329static void
5330discover_nonconstant_array_refs (void)
5331{
5332 basic_block bb;
726a989a 5333 gimple_stmt_iterator gsi;
a1b23b2f
UW
5334
5335 FOR_EACH_BB (bb)
726a989a
RB
5336 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5337 {
5338 gimple stmt = gsi_stmt (gsi);
aa847cc8
JJ
5339 if (!is_gimple_debug (stmt))
5340 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
726a989a 5341 }
a1b23b2f
UW
5342}
5343
2e3f842f
L
5344/* This function sets crtl->args.internal_arg_pointer to a virtual
5345 register if DRAP is needed. Local register allocator will replace
5346 virtual_incoming_args_rtx with the virtual register. */
5347
5348static void
5349expand_stack_alignment (void)
5350{
5351 rtx drap_rtx;
e939805b 5352 unsigned int preferred_stack_boundary;
2e3f842f
L
5353
5354 if (! SUPPORTS_STACK_ALIGNMENT)
5355 return;
b8698a0f 5356
2e3f842f
L
5357 if (cfun->calls_alloca
5358 || cfun->has_nonlocal_label
5359 || crtl->has_nonlocal_goto)
5360 crtl->need_drap = true;
5361
890b9b96
L
5362 /* Call update_stack_boundary here again to update the incoming stack
5363    boundary.  It may set the incoming stack alignment to a different
5364    value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
5365    use the minimum incoming stack alignment to check whether it is OK
5366    to perform sibcall optimization, since sibcall optimization will
5367    only align the outgoing stack to the incoming stack boundary.  */
5368 if (targetm.calls.update_stack_boundary)
5369 targetm.calls.update_stack_boundary ();
5370
5371 /* The incoming stack frame has to be aligned at least at
5372 parm_stack_boundary. */
5373 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
2e3f842f 5374
2e3f842f
L
5375 /* Update crtl->stack_alignment_estimated and use it later to align
5376 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
5377 exceptions since callgraph doesn't collect incoming stack alignment
5378 in this case. */
8f4f502f 5379 if (cfun->can_throw_non_call_exceptions
2e3f842f
L
5380 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
5381 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5382 else
5383 preferred_stack_boundary = crtl->preferred_stack_boundary;
5384 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
5385 crtl->stack_alignment_estimated = preferred_stack_boundary;
5386 if (preferred_stack_boundary > crtl->stack_alignment_needed)
5387 crtl->stack_alignment_needed = preferred_stack_boundary;
5388
890b9b96
L
5389 gcc_assert (crtl->stack_alignment_needed
5390 <= crtl->stack_alignment_estimated);
5391
2e3f842f 5392 crtl->stack_realign_needed
e939805b 5393 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
d2d93c32 5394 crtl->stack_realign_tried = crtl->stack_realign_needed;
2e3f842f
L
5395
5396 crtl->stack_realign_processed = true;
5397
5398 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
5399 alignment. */
5400 gcc_assert (targetm.calls.get_drap_rtx != NULL);
b8698a0f 5401 drap_rtx = targetm.calls.get_drap_rtx ();
2e3f842f 5402
d015f7cc
L
5403 /* stack_realign_drap and drap_rtx must match. */
5404 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
5405
2e3f842f
L
5406 /* Do nothing if NULL is returned, which means DRAP is not needed. */
5407 if (NULL != drap_rtx)
5408 {
5409 crtl->args.internal_arg_pointer = drap_rtx;
5410
5411 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
5412 needed. */
5413 fixup_tail_calls ();
5414 }
5415}
862d0b35
DN
5416\f
5417
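/* On targets without init-section support, `main' must call the libgcc
   helper __main so that global constructors still run before user code;
   emit that call here (see the NAME__MAIN fallback near the top of this
   file).  */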
5418static void
5419expand_main_function (void)
5420{
5421#if (defined(INVOKE__main) \
5422 || (!defined(HAS_INIT_SECTION) \
5423 && !defined(INIT_SECTION_ASM_OP) \
5424 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
5425 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
5426#endif
5427}
5428\f
5429
5430/* Expand code to initialize the stack_protect_guard. This is invoked at
5431 the beginning of a function to be protected. */
5432
5433#ifndef HAVE_stack_protect_set
5434# define HAVE_stack_protect_set 0
5435# define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
5436#endif
5437
5438static void
5439stack_protect_prologue (void)
5440{
5441 tree guard_decl = targetm.stack_protect_guard ();
5442 rtx x, y;
5443
5444 x = expand_normal (crtl->stack_protect_guard);
5445 y = expand_normal (guard_decl);
5446
5447 /* Allow the target to copy from Y to X without leaking Y into a
5448 register. */
5449 if (HAVE_stack_protect_set)
5450 {
5451 rtx insn = gen_stack_protect_set (x, y);
5452 if (insn)
5453 {
5454 emit_insn (insn);
5455 return;
5456 }
5457 }
5458
5459 /* Otherwise do a straight move. */
5460 emit_move_insn (x, y);
5461}
2e3f842f 5462
242229bb
JH
5463/* Translate the intermediate representation contained in the CFG
5464 from GIMPLE trees to RTL.
5465
5466 We do conversion per basic block and preserve/update the tree CFG.
5467 This implies we have to do some magic as the CFG can simultaneously
5468 consist of basic blocks containing RTL and GIMPLE trees. This can
61ada8ae 5469    confuse the CFG hooks, so be careful not to manipulate the CFG during
242229bb
JH
5470 the expansion. */
5471
c2924966 5472static unsigned int
726a989a 5473gimple_expand_cfg (void)
242229bb
JH
5474{
5475 basic_block bb, init_block;
5476 sbitmap blocks;
0ef90296
ZD
5477 edge_iterator ei;
5478 edge e;
f3ddd692 5479 rtx var_seq, var_ret_seq;
4e3825db
MM
5480 unsigned i;
5481
f029db69 5482 timevar_push (TV_OUT_OF_SSA);
4e3825db 5483 rewrite_out_of_ssa (&SA);
f029db69 5484 timevar_pop (TV_OUT_OF_SSA);
c302207e 5485 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
242229bb 5486
be147e84
RG
5487 /* Make sure all values used by the optimization passes have sane
5488 defaults. */
5489 reg_renumber = 0;
5490
4586b4ca
SB
5491 /* Some backends want to know that we are expanding to RTL. */
5492 currently_expanding_to_rtl = 1;
cd7d9fd7
RG
5493 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
5494 free_dominance_info (CDI_DOMINATORS);
4586b4ca 5495
bf08ebeb
JH
5496 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
5497
5368224f 5498 insn_locations_init ();
fe8a7779 5499 if (!DECL_IS_BUILTIN (current_function_decl))
1751ecd6
AH
5500 {
5501 /* Eventually, all FEs should explicitly set function_start_locus. */
2f13f2de 5502 if (LOCATION_LOCUS (cfun->function_start_locus) == UNKNOWN_LOCATION)
5368224f 5503 set_curr_insn_location
1751ecd6
AH
5504 (DECL_SOURCE_LOCATION (current_function_decl));
5505 else
5368224f 5506 set_curr_insn_location (cfun->function_start_locus);
1751ecd6 5507 }
9ff70652 5508 else
5368224f
DC
5509 set_curr_insn_location (UNKNOWN_LOCATION);
5510 prologue_location = curr_insn_location ();
55e092c4 5511
2b21299c
JJ
5512#ifdef INSN_SCHEDULING
5513 init_sched_attrs ();
5514#endif
5515
55e092c4
JH
5516 /* Make sure the first insn is a note even if we don't want linenums.
5517 This makes sure the first insn will never be deleted.
5518 Also, final expects a note to appear there. */
5519 emit_note (NOTE_INSN_DELETED);
6429e3be 5520
a1b23b2f
UW
5521 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
5522 discover_nonconstant_array_refs ();
5523
e41b2a33 5524 targetm.expand_to_rtl_hook ();
cb91fab0 5525 crtl->stack_alignment_needed = STACK_BOUNDARY;
2e3f842f 5526 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
890b9b96 5527 crtl->stack_alignment_estimated = 0;
cb91fab0
JH
5528 crtl->preferred_stack_boundary = STACK_BOUNDARY;
5529 cfun->cfg->max_jumptable_ents = 0;
5530
ae9fd6b7
JH
5531 /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
5532    of the function section at expansion time to predict the distance of calls.  */
5533 resolve_unique_section (current_function_decl, 0, flag_function_sections);
5534
727a31fa 5535 /* Expand the variables recorded during gimple lowering. */
f029db69 5536 timevar_push (TV_VAR_EXPAND);
3a42502d
RH
5537 start_sequence ();
5538
f3ddd692 5539 var_ret_seq = expand_used_vars ();
3a42502d
RH
5540
5541 var_seq = get_insns ();
5542 end_sequence ();
f029db69 5543 timevar_pop (TV_VAR_EXPAND);
242229bb 5544
7d69de61
RH
5545 /* Honor stack protection warnings. */
5546 if (warn_stack_protect)
5547 {
e3b5732b 5548 if (cfun->calls_alloca)
b8698a0f 5549 warning (OPT_Wstack_protector,
3b123595
SB
5550 "stack protector not protecting local variables: "
5551 "variable length buffer");
cb91fab0 5552 if (has_short_buffer && !crtl->stack_protect_guard)
b8698a0f 5553 warning (OPT_Wstack_protector,
3b123595
SB
5554 "stack protector not protecting function: "
5555 "all local arrays are less than %d bytes long",
7d69de61
RH
5556 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
5557 }
5558
242229bb 5559 /* Set up parameters and prepare for return, for the function. */
b79c5284 5560 expand_function_start (current_function_decl);
242229bb 5561
3a42502d
RH
5562 /* If we emitted any instructions for setting up the variables,
5563 emit them before the FUNCTION_START note. */
5564 if (var_seq)
5565 {
5566 emit_insn_before (var_seq, parm_birth_insn);
5567
5568 /* In expand_function_end we'll insert the alloca save/restore
5569    before parm_birth_insn.  We've just inserted an alloca call.
5570 Adjust the pointer to match. */
5571 parm_birth_insn = var_seq;
5572 }
5573
4e3825db
MM
5574 /* Now that we also have the parameter RTXs, copy them over to our
5575 partitions. */
5576 for (i = 0; i < SA.map->num_partitions; i++)
5577 {
5578 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
5579
5580 if (TREE_CODE (var) != VAR_DECL
5581 && !SA.partition_to_pseudo[i])
5582 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
5583 gcc_assert (SA.partition_to_pseudo[i]);
eb7adebc
MM
5584
5585 /* If this decl was marked as living in multiple places, reset
5586 this now to NULL. */
5587 if (DECL_RTL_IF_SET (var) == pc_rtx)
5588 SET_DECL_RTL (var, NULL);
5589
4e3825db
MM
5590 /* Some RTL parts really want to look at DECL_RTL(x) when x
5591 was a decl marked in REG_ATTR or MEM_ATTR. We could use
5592    SET_DECL_RTL here to make this available, but that would mean
5593    selecting one of the potentially many RTLs for one DECL.  Instead
5594 of doing that we simply reset the MEM_EXPR of the RTL in question,
5595 then nobody can get at it and hence nobody can call DECL_RTL on it. */
5596 if (!DECL_RTL_SET_P (var))
5597 {
5598 if (MEM_P (SA.partition_to_pseudo[i]))
5599 set_mem_expr (SA.partition_to_pseudo[i], NULL);
5600 }
5601 }
5602
d466b407
MM
5603 /* If we have a class containing differently aligned pointers
5604 we need to merge those into the corresponding RTL pointer
5605 alignment. */
5606 for (i = 1; i < num_ssa_names; i++)
5607 {
5608 tree name = ssa_name (i);
5609 int part;
5610 rtx r;
5611
5612 if (!name
d466b407
MM
5613 /* We might have generated new SSA names in
5614 update_alias_info_with_stack_vars. They will have a NULL
5615    defining statement and won't be part of the partitioning,
5616 so ignore those. */
5617 || !SSA_NAME_DEF_STMT (name))
5618 continue;
5619 part = var_to_partition (SA.map, name);
5620 if (part == NO_PARTITION)
5621 continue;
70b5e7dc
RG
5622
5623 /* Adjust all partition members to get the underlying decl of
5624 the representative which we might have created in expand_one_var. */
5625 if (SSA_NAME_VAR (name) == NULL_TREE)
5626 {
5627 tree leader = partition_to_var (SA.map, part);
5628 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
5629 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
5630 }
5631 if (!POINTER_TYPE_P (TREE_TYPE (name)))
5632 continue;
5633
d466b407
MM
5634 r = SA.partition_to_pseudo[part];
5635 if (REG_P (r))
5636 mark_reg_pointer (r, get_pointer_alignment (name));
5637 }
5638
242229bb
JH
5639 /* If this function is `main', emit a call to `__main'
5640 to run global initializers, etc. */
5641 if (DECL_NAME (current_function_decl)
5642 && MAIN_NAME_P (DECL_NAME (current_function_decl))
5643 && DECL_FILE_SCOPE_P (current_function_decl))
5644 expand_main_function ();
5645
7d69de61
RH
5646 /* Initialize the stack_protect_guard field. This must happen after the
5647 call to __main (if any) so that the external decl is initialized. */
cb91fab0 5648 if (crtl->stack_protect_guard)
7d69de61
RH
5649 stack_protect_prologue ();
5650
4e3825db
MM
5651 expand_phi_nodes (&SA);
5652
3fbd86b1 5653 /* Register rtl specific functions for cfg. */
242229bb
JH
5654 rtl_register_cfg_hooks ();
5655
5656 init_block = construct_init_block ();
5657
0ef90296 5658 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4e3825db 5659 remaining edges later. */
0ef90296
ZD
5660 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
5661 e->flags &= ~EDGE_EXECUTABLE;
5662
8b11009b 5663 lab_rtx_for_bb = pointer_map_create ();
242229bb 5664 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
f3ddd692 5665 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
bf08ebeb 5666
b5b8b0ac
AO
5667 if (MAY_HAVE_DEBUG_INSNS)
5668 expand_debug_locations ();
5669
452aa9c5
RG
5670 /* Free stuff we no longer need after GIMPLE optimizations. */
5671 free_dominance_info (CDI_DOMINATORS);
5672 free_dominance_info (CDI_POST_DOMINATORS);
5673 delete_tree_cfg_annotations ();
5674
f029db69 5675 timevar_push (TV_OUT_OF_SSA);
4e3825db 5676 finish_out_of_ssa (&SA);
f029db69 5677 timevar_pop (TV_OUT_OF_SSA);
4e3825db 5678
f029db69 5679 timevar_push (TV_POST_EXPAND);
91753e21
RG
5680 /* We are no longer in SSA form. */
5681 cfun->gimple_df->in_ssa_p = false;
7d776ee2
RG
5682 if (current_loops)
5683 loops_state_clear (LOOP_CLOSED_SSA);
91753e21 5684
bf08ebeb
JH
5685 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
5686 conservatively to true until they are all profile aware. */
8b11009b 5687 pointer_map_destroy (lab_rtx_for_bb);
cb91fab0 5688 free_histograms ();
242229bb
JH
5689
5690 construct_exit_block ();
5368224f 5691 insn_locations_finalize ();
242229bb 5692
f3ddd692
JJ
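  /* If expand_used_vars queued a sequence to be run on function return
     (e.g. the ASan stack-unpoisoning code), emit it right after the return
     label, skipping a NOTE_INSN_BASIC_BLOCK that may directly follow it.  */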
5693 if (var_ret_seq)
5694 {
5695 rtx after = return_label;
5696 rtx next = NEXT_INSN (after);
5697 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
5698 after = next;
5699 emit_insn_after (var_ret_seq, after);
5700 }
5701
1d65f45c 5702 /* Zap the tree EH table. */
e8a2a782 5703 set_eh_throw_stmt_table (cfun, NULL);
242229bb 5704
42821aff
MM
5705 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence to
5706    split edges, which edge insertions might do.  */
242229bb 5707 rebuild_jump_labels (get_insns ());
242229bb 5708
4e3825db
MM
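  /* Commit RTL sequences queued on edges during expansion.  Sequences on the
     lone successor edge of the entry block are emitted around parm_birth_insn
     instead of splitting the edge; everything else goes through
     commit_one_edge_insertion.  */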
5709 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
5710 {
5711 edge e;
5712 edge_iterator ei;
5713 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5714 {
5715 if (e->insns.r)
bc470c24 5716 {
42821aff 5717 rebuild_jump_labels_chain (e->insns.r);
e40191f1
TV
5718 /* Put insns after parm birth, but before
5719 		 NOTE_INSN_FUNCTION_BEG.  */
bc470c24 5720 if (e->src == ENTRY_BLOCK_PTR
e40191f1 5721 && single_succ_p (ENTRY_BLOCK_PTR))
bc470c24
JJ
5722 {
5723 rtx insns = e->insns.r;
5724 e->insns.r = NULL_RTX;
e40191f1
TV
5725 if (NOTE_P (parm_birth_insn)
5726 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
5727 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
5728 else
5729 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
bc470c24
JJ
5730 }
5731 else
5732 commit_one_edge_insertion (e);
5733 }
4e3825db
MM
5734 else
5735 ei_next (&ei);
5736 }
5737 }
5738
5739 /* We're done expanding trees to RTL. */
5740 currently_expanding_to_rtl = 0;
5741
5742 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
5743 {
5744 edge e;
5745 edge_iterator ei;
5746 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5747 {
5748 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
5749 e->flags &= ~EDGE_EXECUTABLE;
5750
5751 /* At the moment not all abnormal edges match the RTL
5752 representation. It is safe to remove them here as
5753 find_many_sub_basic_blocks will rediscover them.
5754 In the future we should get this fixed properly. */
5755 if ((e->flags & EDGE_ABNORMAL)
5756 && !(e->flags & EDGE_SIBCALL))
5757 remove_edge (e);
5758 else
5759 ei_next (&ei);
5760 }
5761 }
5762
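  /* Expansion may have introduced new jumps and labels inside what used to be
     a single GIMPLE basic block, so rediscover sub-basic-block structure
     across the whole function.  */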
242229bb 5763 blocks = sbitmap_alloc (last_basic_block);
f61e445a 5764 bitmap_ones (blocks);
242229bb 5765 find_many_sub_basic_blocks (blocks);
242229bb 5766 sbitmap_free (blocks);
4e3825db 5767 purge_all_dead_edges ();
242229bb 5768
2e3f842f
L
5769 expand_stack_alignment ();
5770
be147e84
RG
5771 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
5772 function. */
5773 if (crtl->tail_call_emit)
5774 fixup_tail_calls ();
5775
dac1fbf8
RG
5776 /* After initial rtl generation, call back to finish generating
5777 exception support code. We need to do this before cleaning up
5778 the CFG as the code does not expect dead landing pads. */
5779 if (cfun->eh->region_tree != NULL)
5780 finish_eh_generation ();
5781
5782 /* Remove unreachable blocks, otherwise we cannot compute dominators
5783 which are needed for loop state verification. As a side-effect
5784 this also compacts blocks.
5785 ??? We cannot remove trivially dead insns here as for example
5786 the DRAP reg on i?86 is not magically live at this point.
5787 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
5788 cleanup_cfg (CLEANUP_NO_INSN_DEL);
5789
242229bb 5790#ifdef ENABLE_CHECKING
62e5bf5d 5791 verify_flow_info ();
242229bb 5792#endif
9f8628ba 5793
be147e84
RG
5794 /* Initialize pseudos allocated for hard registers. */
5795 emit_initial_value_sets ();
5796
5797 /* And finally unshare all RTL. */
5798 unshare_all_rtl ();
5799
9f8628ba
PB
5800 /* There's no need to defer outputting this function any more; we
5801 know we want to output it. */
5802 DECL_DEFER_OUTPUT (current_function_decl) = 0;
5803
5804 /* Now that we're done expanding trees to RTL, we shouldn't have any
5805 more CONCATs anywhere. */
5806 generating_concat_p = 0;
5807
b7211528
SB
5808 if (dump_file)
5809 {
5810 fprintf (dump_file,
5811 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
5812 /* And the pass manager will dump RTL for us. */
5813 }
ef330312
PB
5814
5815 /* If we're emitting a nested function, make sure its parent gets
5816 emitted as well. Doing otherwise confuses debug info. */
c22cacf3 5817 {
ef330312
PB
5818 tree parent;
5819 for (parent = DECL_CONTEXT (current_function_decl);
c22cacf3
MS
5820 parent != NULL_TREE;
5821 parent = get_containing_scope (parent))
ef330312 5822 if (TREE_CODE (parent) == FUNCTION_DECL)
c22cacf3 5823 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
ef330312 5824 }
c22cacf3 5825
ef330312
PB
5826 /* We are now committed to emitting code for this function. Do any
5827 preparation, such as emitting abstract debug info for the inline
5828 before it gets mangled by optimization. */
5829 if (cgraph_function_possibly_inlined_p (current_function_decl))
5830 (*debug_hooks->outlining_inline_function) (current_function_decl);
5831
5832 TREE_ASM_WRITTEN (current_function_decl) = 1;
4bb1e037
AP
5833
5834 /* After expanding, the return labels are no longer needed. */
5835 return_label = NULL;
5836 naked_return_label = NULL;
0a35513e
AH
5837
5838 /* After expanding, the tm_restart map is no longer needed. */
5839 if (cfun->gimple_df->tm_restart)
5840 {
5841 htab_delete (cfun->gimple_df->tm_restart);
5842 cfun->gimple_df->tm_restart = NULL;
5843 }
5844
55e092c4
JH
5845 /* Tag the blocks with a depth number so that change_scope can find
5846 the common parent easily. */
5847 set_block_levels (DECL_INITIAL (cfun->decl), 0);
bf08ebeb 5848 default_rtl_profile ();
be147e84 5849
f029db69 5850 timevar_pop (TV_POST_EXPAND);
be147e84 5851
c2924966 5852 return 0;
242229bb
JH
5853}
5854
27a4cd48
DM
5855namespace {
5856
5857const pass_data pass_data_expand =
242229bb 5858{
27a4cd48
DM
5859 RTL_PASS, /* type */
5860 "expand", /* name */
5861 OPTGROUP_NONE, /* optinfo_flags */
5862 false, /* has_gate */
5863 true, /* has_execute */
5864 TV_EXPAND, /* tv_id */
5865 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6f37411d 5866 | PROP_gimple_lcx
27a4cd48
DM
5867 | PROP_gimple_lvec ), /* properties_required */
5868 PROP_rtl, /* properties_provided */
5869 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
5870 ( TODO_verify_ssa | TODO_verify_flow
5871 | TODO_verify_stmts ), /* todo_flags_start */
5872 0, /* todo_flags_finish */
242229bb 5873};
27a4cd48
DM
5874
5875class pass_expand : public rtl_opt_pass
5876{
5877public:
c3284718
RS
5878 pass_expand (gcc::context *ctxt)
5879 : rtl_opt_pass (pass_data_expand, ctxt)
27a4cd48
DM
5880 {}
5881
5882 /* opt_pass methods: */
5883 unsigned int execute () { return gimple_expand_cfg (); }
5884
5885}; // class pass_expand
5886
5887} // anon namespace
5888
5889rtl_opt_pass *
5890make_pass_expand (gcc::context *ctxt)
5891{
5892 return new pass_expand (ctxt);
5893}