1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
33 #include "tree-pass.h"
34 #include "except.h"
35 #include "flags.h"
36 #include "diagnostic.h"
37 #include "gimple-pretty-print.h"
38 #include "toplev.h"
39 #include "debug.h"
40 #include "params.h"
41 #include "tree-inline.h"
42 #include "value-prof.h"
43 #include "target.h"
44 #include "ssaexpand.h"
45 #include "bitmap.h"
46 #include "sbitmap.h"
47 #include "cfgloop.h"
48 #include "regs.h" /* For reg_renumber. */
49 #include "insn-attr.h" /* For INSN_SCHEDULING. */
50
51 /* This variable holds information helping the rewriting of SSA trees
52 into RTL. */
53 struct ssaexpand SA;
54
55 /* This variable holds the currently expanded gimple statement for purposes
56 of communicating the profile info to the builtin expanders. */
57 gimple currently_expanding_gimple_stmt;
58
59 static rtx expand_debug_expr (tree);
60
61 /* Return an expression tree corresponding to the RHS of GIMPLE
62 statement STMT. */
63
64 tree
65 gimple_assign_rhs_to_tree (gimple stmt)
66 {
67 tree t;
68 enum gimple_rhs_class grhs_class;
69
70 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
71
72 if (grhs_class == GIMPLE_TERNARY_RHS)
73 t = build3 (gimple_assign_rhs_code (stmt),
74 TREE_TYPE (gimple_assign_lhs (stmt)),
75 gimple_assign_rhs1 (stmt),
76 gimple_assign_rhs2 (stmt),
77 gimple_assign_rhs3 (stmt));
78 else if (grhs_class == GIMPLE_BINARY_RHS)
79 t = build2 (gimple_assign_rhs_code (stmt),
80 TREE_TYPE (gimple_assign_lhs (stmt)),
81 gimple_assign_rhs1 (stmt),
82 gimple_assign_rhs2 (stmt));
83 else if (grhs_class == GIMPLE_UNARY_RHS)
84 t = build1 (gimple_assign_rhs_code (stmt),
85 TREE_TYPE (gimple_assign_lhs (stmt)),
86 gimple_assign_rhs1 (stmt));
87 else if (grhs_class == GIMPLE_SINGLE_RHS)
88 {
89 t = gimple_assign_rhs1 (stmt);
90 /* Avoid modifying this tree in place below. */
91 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
92 && gimple_location (stmt) != EXPR_LOCATION (t))
93 || (gimple_block (stmt)
94 && currently_expanding_to_rtl
95 && EXPR_P (t)
96 && gimple_block (stmt) != TREE_BLOCK (t)))
97 t = copy_node (t);
98 }
99 else
100 gcc_unreachable ();
101
102 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
103 SET_EXPR_LOCATION (t, gimple_location (stmt));
104 if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
105 TREE_BLOCK (t) = gimple_block (stmt);
106
107 return t;
108 }
109
110
111 #ifndef STACK_ALIGNMENT_NEEDED
112 #define STACK_ALIGNMENT_NEEDED 1
113 #endif
114
115 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
116
117 /* Associate declaration T with storage space X. If T is not an
118 SSA name this is exactly SET_DECL_RTL, otherwise make the
119 partition of T associated with X. */
120 static inline void
121 set_rtl (tree t, rtx x)
122 {
123 if (TREE_CODE (t) == SSA_NAME)
124 {
125 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
126 if (x && !MEM_P (x))
127 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
128 /* For the benefit of debug information at -O0 (where vartracking
129 doesn't run) record the place also in the base DECL if it's
130 a normal variable (not a parameter). */
131 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
132 {
133 tree var = SSA_NAME_VAR (t);
134 /* If we don't yet have something recorded, just record it now. */
135 if (!DECL_RTL_SET_P (var))
136 SET_DECL_RTL (var, x);
137 /* If we have it set already to "multiple places" don't
138 change this. */
139 else if (DECL_RTL (var) == pc_rtx)
140 ;
141 /* If we have something recorded and it's not the same place
142 as we want to record now, we have multiple partitions for the
143 same base variable, with different places. We can't just
144 randomly choose one, hence we have to say that we don't know.
145 This only happens with optimization, and there var-tracking
146 will figure out the right thing. */
147 else if (DECL_RTL (var) != x)
148 SET_DECL_RTL (var, pc_rtx);
149 }
150 }
151 else
152 SET_DECL_RTL (t, x);
153 }
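
/* Illustrative sketch, not part of the pass: the small "where does the base
   DECL live" lattice used above, with plain pointers standing in for rtx
   values and (void *) 1 standing in for pc_rtx ("multiple places").  The
   name example_merge_place is hypothetical.  NEW_PLACE is assumed to be a
   real location, as the caller above only records when x != pc_rtx.  */
#if 0
static void *
example_merge_place (void *recorded, void *new_place)
{
  void *multiple = (void *) 1;  /* stands in for pc_rtx */

  if (recorded == NULL)
    return new_place;           /* nothing known yet: record the place */
  if (recorded == multiple)
    return multiple;            /* already "don't know": stay there */
  if (recorded != new_place)
    return multiple;            /* two different places: give up */
  return recorded;              /* same place again: nothing changes */
}
#endif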
154
155 /* This structure holds data relevant to one variable that will be
156 placed in a stack slot. */
157 struct stack_var
158 {
159 /* The variable. */
160 tree decl;
161
162 /* Initially, the size of the variable. Later, the size of the partition,
163 if this variable becomes its partition's representative. */
164 HOST_WIDE_INT size;
165
166 /* The *byte* alignment required for this variable. Or, as with the
167 size, the alignment for this partition. */
168 unsigned int alignb;
169
170 /* The partition representative. */
171 size_t representative;
172
173 /* The next stack variable in the partition, or EOC. */
174 size_t next;
175
176 /* The numbers of conflicting stack variables. */
177 bitmap conflicts;
178 };
179
180 #define EOC ((size_t)-1)
181
182 /* We have an array of such objects while deciding allocation. */
183 static struct stack_var *stack_vars;
184 static size_t stack_vars_alloc;
185 static size_t stack_vars_num;
186 static struct pointer_map_t *decl_to_stack_part;
187
188 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
189 is non-decreasing. */
190 static size_t *stack_vars_sorted;
191
192 /* The phase of the stack frame. This is the known misalignment of
193 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
194 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
195 static int frame_phase;
196
197 /* Used during expand_used_vars to remember if we saw any decls for
198 which we'd like to enable stack smashing protection. */
199 static bool has_protected_decls;
200
201 /* Used during expand_used_vars. Remember if we saw a character buffer
202 smaller than our cutoff threshold. Used for -Wstack-protector. */
203 static bool has_short_buffer;
204
205 /* Compute the byte alignment to use for DECL. Ignore alignment
206 we can't satisfy with the expected alignment of the stack boundary. */
207
208 static unsigned int
209 align_local_variable (tree decl)
210 {
211 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
212 DECL_ALIGN (decl) = align;
213 return align / BITS_PER_UNIT;
214 }
215
216 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
217 Return the frame offset. */
218
219 static HOST_WIDE_INT
220 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
221 {
222 HOST_WIDE_INT offset, new_frame_offset;
223
224 new_frame_offset = frame_offset;
225 if (FRAME_GROWS_DOWNWARD)
226 {
227 new_frame_offset -= size + frame_phase;
228 new_frame_offset &= -align;
229 new_frame_offset += frame_phase;
230 offset = new_frame_offset;
231 }
232 else
233 {
234 new_frame_offset -= frame_phase;
235 new_frame_offset += align - 1;
236 new_frame_offset &= -align;
237 new_frame_offset += frame_phase;
238 offset = new_frame_offset;
239 new_frame_offset += size;
240 }
241 frame_offset = new_frame_offset;
242
243 if (frame_offset_overflow (frame_offset, cfun->decl))
244 frame_offset = offset = 0;
245
246 return offset;
247 }
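
/* Illustrative sketch, not part of the pass: the downward-growing branch
   above, restated with plain "long" standing in for HOST_WIDE_INT.  The
   name example_alloc_downward is hypothetical.  With frame_phase == 0,
   frame_offset == -8, size == 12 and align == 16:
   -8 - 12 == -20, and -20 & -16 == -32, so the new slot lands at offset -32,
   which also becomes the new frame_offset.  */
#if 0
static long
example_alloc_downward (long frame_offset, long frame_phase,
                        long size, unsigned long align)
{
  long off = frame_offset;

  off -= size + frame_phase;    /* make room for the object */
  off &= -(long) align;         /* round down to a multiple of ALIGN */
  off += frame_phase;           /* restore the frame's known misalignment */
  return off;                   /* new frame_offset == offset of the slot */
}
#endif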
248
249 /* Accumulate DECL into STACK_VARS. */
250
251 static void
252 add_stack_var (tree decl)
253 {
254 struct stack_var *v;
255
256 if (stack_vars_num >= stack_vars_alloc)
257 {
258 if (stack_vars_alloc)
259 stack_vars_alloc = stack_vars_alloc * 3 / 2;
260 else
261 stack_vars_alloc = 32;
262 stack_vars
263 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
264 }
265 if (!decl_to_stack_part)
266 decl_to_stack_part = pointer_map_create ();
267
268 v = &stack_vars[stack_vars_num];
269 * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
270
271 v->decl = decl;
272 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
273 /* Ensure that all variables have size, so that &a != &b for any two
274 variables that are simultaneously live. */
275 if (v->size == 0)
276 v->size = 1;
277 v->alignb = align_local_variable (SSAVAR (decl));
278 /* An alignment of zero can mightily confuse us later. */
279 gcc_assert (v->alignb != 0);
280
281 /* All variables are initially in their own partition. */
282 v->representative = stack_vars_num;
283 v->next = EOC;
284
285 /* All variables initially conflict with no other. */
286 v->conflicts = NULL;
287
288 /* Ensure that this decl doesn't get put onto the list twice. */
289 set_rtl (decl, pc_rtx);
290
291 stack_vars_num++;
292 }
293
294 /* Make the decls associated with luids X and Y conflict. */
295
296 static void
297 add_stack_var_conflict (size_t x, size_t y)
298 {
299 struct stack_var *a = &stack_vars[x];
300 struct stack_var *b = &stack_vars[y];
301 if (!a->conflicts)
302 a->conflicts = BITMAP_ALLOC (NULL);
303 if (!b->conflicts)
304 b->conflicts = BITMAP_ALLOC (NULL);
305 bitmap_set_bit (a->conflicts, y);
306 bitmap_set_bit (b->conflicts, x);
307 }
308
309 /* Check whether the decls associated with luids X and Y conflict. */
310
311 static bool
312 stack_var_conflict_p (size_t x, size_t y)
313 {
314 struct stack_var *a = &stack_vars[x];
315 struct stack_var *b = &stack_vars[y];
316 if (x == y)
317 return false;
318 /* Partitions containing an SSA name result from gimple registers
319 with things like unsupported modes. They are top-level and
320 hence conflict with everything else. */
321 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
322 return true;
323
324 if (!a->conflicts || !b->conflicts)
325 return false;
326 return bitmap_bit_p (a->conflicts, y);
327 }
328
329 /* Callback for walk_stmt_load_store_addr_ops. If OP is a decl touched by add_stack_var
330 enter its partition number into bitmap DATA. */
331
332 static bool
333 visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
334 {
335 bitmap active = (bitmap)data;
336 op = get_base_address (op);
337 if (op
338 && DECL_P (op)
339 && DECL_RTL_IF_SET (op) == pc_rtx)
340 {
341 size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
342 if (v)
343 bitmap_set_bit (active, *v);
344 }
345 return false;
346 }
347
348 /* Callback for walk_stmt_load_store_addr_ops. If OP is a decl touched by add_stack_var
349 record conflicts between it and all currently active other partitions
350 from bitmap DATA. */
351
352 static bool
353 visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
354 {
355 bitmap active = (bitmap)data;
356 op = get_base_address (op);
357 if (op
358 && DECL_P (op)
359 && DECL_RTL_IF_SET (op) == pc_rtx)
360 {
361 size_t *v =
362 (size_t *) pointer_map_contains (decl_to_stack_part, op);
363 if (v && bitmap_set_bit (active, *v))
364 {
365 size_t num = *v;
366 bitmap_iterator bi;
367 unsigned i;
368 gcc_assert (num < stack_vars_num);
369 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
370 add_stack_var_conflict (num, i);
371 }
372 }
373 return false;
374 }
375
376 /* Helper routine for add_scope_conflicts, calculating the active partitions
377 at the end of BB, leaving the result in WORK. We're called to generate
378 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
379 liveness. */
380
381 static void
382 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
383 {
384 edge e;
385 edge_iterator ei;
386 gimple_stmt_iterator gsi;
387 bool (*visit)(gimple, tree, void *);
388
389 bitmap_clear (work);
390 FOR_EACH_EDGE (e, ei, bb->preds)
391 bitmap_ior_into (work, (bitmap)e->src->aux);
392
393 visit = visit_op;
394
395 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
396 {
397 gimple stmt = gsi_stmt (gsi);
398 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
399 }
400 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
401 {
402 gimple stmt = gsi_stmt (gsi);
403
404 if (gimple_clobber_p (stmt))
405 {
406 tree lhs = gimple_assign_lhs (stmt);
407 size_t *v;
408 /* Nested function lowering might introduce LHSs
409 that are COMPONENT_REFs. */
410 if (TREE_CODE (lhs) != VAR_DECL)
411 continue;
412 if (DECL_RTL_IF_SET (lhs) == pc_rtx
413 && (v = (size_t *)
414 pointer_map_contains (decl_to_stack_part, lhs)))
415 bitmap_clear_bit (work, *v);
416 }
417 else if (!is_gimple_debug (stmt))
418 {
419 if (for_conflict
420 && visit == visit_op)
421 {
422 /* If this is the first real instruction in this BB we need
423 to add conflicts for everything live at this point now.
424 Unlike classical liveness for named objects we can't
425 rely on seeing a def/use of the names we're interested in.
426 There might merely be indirect loads/stores. We'd not add any
427 conflicts for such partitions. */
428 bitmap_iterator bi;
429 unsigned i;
430 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
431 {
432 struct stack_var *a = &stack_vars[i];
433 if (!a->conflicts)
434 a->conflicts = BITMAP_ALLOC (NULL);
435 bitmap_ior_into (a->conflicts, work);
436 }
437 visit = visit_conflict;
438 }
439 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
440 }
441 }
442 }
443
444 /* Generate stack partition conflicts between all partitions that are
445 simultaneously live. */
446
447 static void
448 add_scope_conflicts (void)
449 {
450 basic_block bb;
451 bool changed;
452 bitmap work = BITMAP_ALLOC (NULL);
453 int *rpo;
454 int n_bbs;
455
456 /* We approximate the live range of a stack variable by taking the first
457 mention of its name as starting point(s), and by the end-of-scope
458 death clobber added by gimplify as ending point(s) of the range.
459 This overapproximates in the case where we, for instance, moved an
460 address-taken operation upward without also moving a dereference to it.
461 But it's conservatively correct, as a variable can never hold values
462 before its name is mentioned at least once.
463
464 We then do a mostly classical bitmap liveness algorithm. */
465
466 FOR_ALL_BB (bb)
467 bb->aux = BITMAP_ALLOC (NULL);
468
469 rpo = XNEWVEC (int, last_basic_block);
470 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
471
472 changed = true;
473 while (changed)
474 {
475 int i;
476 changed = false;
477 for (i = 0; i < n_bbs; i++)
478 {
479 bitmap active;
480 bb = BASIC_BLOCK (rpo[i]);
481 active = (bitmap)bb->aux;
482 add_scope_conflicts_1 (bb, work, false);
483 if (bitmap_ior_into (active, work))
484 changed = true;
485 }
486 }
487
488 FOR_EACH_BB (bb)
489 add_scope_conflicts_1 (bb, work, true);
490
491 free (rpo);
492 BITMAP_FREE (work);
493 FOR_ALL_BB (bb)
494 BITMAP_FREE (bb->aux);
495 }
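
/* Illustrative sketch, not part of the pass: the same fixed-point iteration
   on a toy diamond CFG (block 0 -> 1, 0 -> 2, 1 -> 3, 2 -> 3), with an
   unsigned int standing in for each block's bitmap of active partitions.
   The name example_active_at_join and the partition numbers 5 and 6 are
   made up for the example.  */
#if 0
static unsigned
example_active_at_join (void)
{
  /* Partition 5 is first mentioned in block 1, partition 6 in block 2.  */
  unsigned gen[4] = { 0, 1u << 5, 1u << 6, 0 };
  unsigned out[4] = { 0, 0, 0, 0 };
  int changed = 1;

  while (changed)
    {
      int b;
      changed = 0;
      for (b = 0; b < 4; b++)
        {
          unsigned in = 0;
          /* Union over the predecessors, like the FOR_EACH_EDGE loop with
             bitmap_ior_into above.  */
          if (b == 1 || b == 2)
            in = out[0];
          else if (b == 3)
            in = out[1] | out[2];
          if ((in | gen[b]) != out[b])
            {
              out[b] = in | gen[b];
              changed = 1;
            }
        }
    }

  /* out[3] has both bits 5 and 6 set: the two partitions are live at the
     join simultaneously and therefore must be recorded as conflicting.  */
  return out[3];
}
#endif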
496
497 /* A subroutine of partition_stack_vars. A comparison function for qsort,
498 sorting an array of indices by the properties of the object. */
499
500 static int
501 stack_var_cmp (const void *a, const void *b)
502 {
503 size_t ia = *(const size_t *)a;
504 size_t ib = *(const size_t *)b;
505 unsigned int aligna = stack_vars[ia].alignb;
506 unsigned int alignb = stack_vars[ib].alignb;
507 HOST_WIDE_INT sizea = stack_vars[ia].size;
508 HOST_WIDE_INT sizeb = stack_vars[ib].size;
509 tree decla = stack_vars[ia].decl;
510 tree declb = stack_vars[ib].decl;
511 bool largea, largeb;
512 unsigned int uida, uidb;
513
514 /* Primary compare on "large" alignment. Large comes first. */
515 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
516 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
517 if (largea != largeb)
518 return (int)largeb - (int)largea;
519
520 /* Secondary compare on size, decreasing. */
521 if (sizea > sizeb)
522 return -1;
523 if (sizea < sizeb)
524 return 1;
525
526 /* Tertiary compare on true alignment, decreasing. */
527 if (aligna < alignb)
528 return -1;
529 if (aligna > alignb)
530 return 1;
531
532 /* Final compare on ID for sort stability, increasing.
533 Two SSA names are compared by their version, SSA names come before
534 non-SSA names, and two normal decls are compared by their DECL_UID. */
535 if (TREE_CODE (decla) == SSA_NAME)
536 {
537 if (TREE_CODE (declb) == SSA_NAME)
538 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
539 else
540 return -1;
541 }
542 else if (TREE_CODE (declb) == SSA_NAME)
543 return 1;
544 else
545 uida = DECL_UID (decla), uidb = DECL_UID (declb);
546 if (uida < uidb)
547 return 1;
548 if (uida > uidb)
549 return -1;
550 return 0;
551 }
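
/* Illustrative worked example, not part of the pass: on a hypothetical
   target where MAX_SUPPORTED_STACK_ALIGNMENT is 128 bits, three partition
   representatives
     a: alignb 32 (256 bits, "large"), size  8
     b: alignb  8 ( 64 bits),          size 64
     c: alignb 16 (128 bits),          size 64
   sort as a, c, b: the dynamically allocated "large" object first, then the
   two equal-sized objects with the more-aligned one ahead.  */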
552
553
554 /* If the points-to solution *PT points to variables that are in a partition
555 together with other variables, add all partition members to the pointed-to
556 variables bitmap. */
557
558 static void
559 add_partitioned_vars_to_ptset (struct pt_solution *pt,
560 struct pointer_map_t *decls_to_partitions,
561 struct pointer_set_t *visited, bitmap temp)
562 {
563 bitmap_iterator bi;
564 unsigned i;
565 bitmap *part;
566
567 if (pt->anything
568 || pt->vars == NULL
569 /* The pointed-to vars bitmap is shared, it is enough to
570 visit it once. */
571 || pointer_set_insert (visited, pt->vars))
572 return;
573
574 bitmap_clear (temp);
575
576 /* By using a temporary bitmap to store all members of the partitions
577 we have to add, we make sure to visit each of the partitions only
578 once. */
579 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
580 if ((!temp
581 || !bitmap_bit_p (temp, i))
582 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
583 (void *)(size_t) i)))
584 bitmap_ior_into (temp, *part);
585 if (!bitmap_empty_p (temp))
586 bitmap_ior_into (pt->vars, temp);
587 }
588
589 /* Update points-to sets based on partition info, so we can use them on RTL.
590 The bitmaps representing stack partitions will be saved until expand,
591 where partitioned decls used as bases in memory expressions will be
592 rewritten. */
593
594 static void
595 update_alias_info_with_stack_vars (void)
596 {
597 struct pointer_map_t *decls_to_partitions = NULL;
598 size_t i, j;
599 tree var = NULL_TREE;
600
601 for (i = 0; i < stack_vars_num; i++)
602 {
603 bitmap part = NULL;
604 tree name;
605 struct ptr_info_def *pi;
606
607 /* Not interested in partitions with a single variable. */
608 if (stack_vars[i].representative != i
609 || stack_vars[i].next == EOC)
610 continue;
611
612 if (!decls_to_partitions)
613 {
614 decls_to_partitions = pointer_map_create ();
615 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
616 }
617
618 /* Create an SSA_NAME that points to the partition for use
619 as base during alias-oracle queries on RTL for bases that
620 have been partitioned. */
621 if (var == NULL_TREE)
622 var = create_tmp_var (ptr_type_node, NULL);
623 name = make_ssa_name (var, NULL);
624
625 /* Create bitmaps representing partitions. They will be used for
626 points-to sets later, so use GGC alloc. */
627 part = BITMAP_GGC_ALLOC ();
628 for (j = i; j != EOC; j = stack_vars[j].next)
629 {
630 tree decl = stack_vars[j].decl;
631 unsigned int uid = DECL_PT_UID (decl);
632 bitmap_set_bit (part, uid);
633 *((bitmap *) pointer_map_insert (decls_to_partitions,
634 (void *)(size_t) uid)) = part;
635 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
636 decl)) = name;
637 }
638
639 /* Make the SSA name point to all partition members. */
640 pi = get_ptr_info (name);
641 pt_solution_set (&pi->pt, part, false);
642 }
643
644 /* Make all points-to sets that contain one member of a partition
645 contain all members of the partition. */
646 if (decls_to_partitions)
647 {
648 unsigned i;
649 struct pointer_set_t *visited = pointer_set_create ();
650 bitmap temp = BITMAP_ALLOC (NULL);
651
652 for (i = 1; i < num_ssa_names; i++)
653 {
654 tree name = ssa_name (i);
655 struct ptr_info_def *pi;
656
657 if (name
658 && POINTER_TYPE_P (TREE_TYPE (name))
659 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
660 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
661 visited, temp);
662 }
663
664 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
665 decls_to_partitions, visited, temp);
666
667 pointer_set_destroy (visited);
668 pointer_map_destroy (decls_to_partitions);
669 BITMAP_FREE (temp);
670 }
671 }
672
673 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
674 partitioning algorithm. Partitions A and B are known to be non-conflicting.
675 Merge them into a single partition A. */
676
677 static void
678 union_stack_vars (size_t a, size_t b)
679 {
680 struct stack_var *vb = &stack_vars[b];
681 bitmap_iterator bi;
682 unsigned u;
683
684 gcc_assert (stack_vars[b].next == EOC);
685 /* Add B to A's partition. */
686 stack_vars[b].next = stack_vars[a].next;
687 stack_vars[b].representative = a;
688 stack_vars[a].next = b;
689
690 /* Update the required alignment of partition A to account for B. */
691 if (stack_vars[a].alignb < stack_vars[b].alignb)
692 stack_vars[a].alignb = stack_vars[b].alignb;
693
694 /* Update the interference graph and merge the conflicts. */
695 if (vb->conflicts)
696 {
697 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
698 add_stack_var_conflict (a, stack_vars[u].representative);
699 BITMAP_FREE (vb->conflicts);
700 }
701 }
702
703 /* A subroutine of expand_used_vars. Binpack the variables into
704 partitions constrained by the interference graph. The overall
705 algorithm used is as follows:
706
707 Sort the objects by size in descending order.
708 For each object A {
709 S = size(A)
710 loop {
711 Look for the largest non-conflicting object B with size <= S.
712 If no such B exists, stop.
713 UNION (A, B)
714 }
715 }
716 */
717
718 static void
719 partition_stack_vars (void)
720 {
721 size_t si, sj, n = stack_vars_num;
722
723 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
724 for (si = 0; si < n; ++si)
725 stack_vars_sorted[si] = si;
726
727 if (n == 1)
728 return;
729
730 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
731
732 for (si = 0; si < n; ++si)
733 {
734 size_t i = stack_vars_sorted[si];
735 unsigned int ialign = stack_vars[i].alignb;
736
737 /* Ignore objects that aren't partition representatives. If we
738 see a var that is not a partition representative, it must
739 have been merged earlier. */
740 if (stack_vars[i].representative != i)
741 continue;
742
743 for (sj = si + 1; sj < n; ++sj)
744 {
745 size_t j = stack_vars_sorted[sj];
746 unsigned int jalign = stack_vars[j].alignb;
747
748 /* Ignore objects that aren't partition representatives. */
749 if (stack_vars[j].representative != j)
750 continue;
751
752 /* Ignore conflicting objects. */
753 if (stack_var_conflict_p (i, j))
754 continue;
755
756 /* Do not mix objects of "small" (supported) alignment
757 and "large" (unsupported) alignment. */
758 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
759 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
760 continue;
761
762 /* UNION the objects, placing J in I's partition. */
763 union_stack_vars (i, j);
764 }
765 }
766
767 update_alias_info_with_stack_vars ();
768 }
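
/* Illustrative sketch, not part of the pass: the same greedy merge with a
   plain byte matrix standing in for the conflict bitmaps and an index range
   assumed to be pre-sorted by decreasing size (so a merged object is never
   larger than its representative and can simply reuse its slot).  The name
   example_partition is hypothetical.  */
#if 0
static void
example_partition (int n, unsigned char *conflict /* n*n, symmetric */,
                   int *representative)
{
  int i, j, k;

  for (i = 0; i < n; i++)
    representative[i] = i;

  for (i = 0; i < n; i++)
    {
      if (representative[i] != i)
        continue;                       /* already merged into an earlier one */
      for (j = i + 1; j < n; j++)
        {
          if (representative[j] != j)
            continue;
          if (conflict[i * n + j])
            continue;                   /* lifetimes overlap: keep apart */

          representative[j] = i;        /* J will share I's storage */

          /* I's partition now conflicts with everything J conflicted with,
             mirroring the bitmap merge in union_stack_vars.  */
          for (k = 0; k < n; k++)
            if (conflict[j * n + k])
              conflict[i * n + k] = conflict[k * n + i] = 1;
        }
    }
}
#endif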
769
770 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
771
772 static void
773 dump_stack_var_partition (void)
774 {
775 size_t si, i, j, n = stack_vars_num;
776
777 for (si = 0; si < n; ++si)
778 {
779 i = stack_vars_sorted[si];
780
781 /* Skip variables that aren't partition representatives, for now. */
782 if (stack_vars[i].representative != i)
783 continue;
784
785 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
786 " align %u\n", (unsigned long) i, stack_vars[i].size,
787 stack_vars[i].alignb);
788
789 for (j = i; j != EOC; j = stack_vars[j].next)
790 {
791 fputc ('\t', dump_file);
792 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
793 }
794 fputc ('\n', dump_file);
795 }
796 }
797
798 /* Assign rtl to DECL at BASE + OFFSET. */
799
800 static void
801 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
802 HOST_WIDE_INT offset)
803 {
804 unsigned align;
805 rtx x;
806
807 /* If this fails, we've overflowed the stack frame. Error nicely? */
808 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
809
810 x = plus_constant (Pmode, base, offset);
811 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
812
813 if (TREE_CODE (decl) != SSA_NAME)
814 {
815 /* Set the alignment we actually gave this decl if it isn't an SSA name.
816 If it is, we generate stack slots only accidentally, so it isn't as
817 important; we'll simply use the alignment that is already set. */
818 if (base == virtual_stack_vars_rtx)
819 offset -= frame_phase;
820 align = offset & -offset;
821 align *= BITS_PER_UNIT;
822 if (align == 0 || align > base_align)
823 align = base_align;
824
825 /* One would think that we could assert that we're not decreasing
826 alignment here, but (at least) the i386 port does exactly this
827 via the MINIMUM_ALIGNMENT hook. */
828
829 DECL_ALIGN (decl) = align;
830 DECL_USER_ALIGN (decl) = 0;
831 }
832
833 set_mem_attributes (x, SSAVAR (decl), true);
834 set_rtl (decl, x);
835 }
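
/* Illustrative aside, not part of the pass: "offset & -offset" above isolates
   the lowest set bit of OFFSET, i.e. the largest power of two dividing it,
   which is the alignment the chosen offset actually guarantees.  The name
   example_alignment_from_offset is hypothetical.  */
#if 0
static unsigned long
example_alignment_from_offset (long offset)
{
  /* 24 -> 8, 32 -> 32, 20 -> 4; 0 yields 0, which the caller above replaces
     with base_align.  */
  return (unsigned long) (offset & -offset);
}
#endif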
836
837 /* A subroutine of expand_used_vars. Give each partition representative
838 a unique location within the stack frame. Update each partition member
839 with that location. */
840
841 static void
842 expand_stack_vars (bool (*pred) (tree))
843 {
844 size_t si, i, j, n = stack_vars_num;
845 HOST_WIDE_INT large_size = 0, large_alloc = 0;
846 rtx large_base = NULL;
847 unsigned large_align = 0;
848 tree decl;
849
850 /* Determine if there are any variables requiring "large" alignment.
851 Since these are dynamically allocated, we only process these if
852 no predicate is involved. */
853 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
854 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
855 {
856 /* Find the total size of these variables. */
857 for (si = 0; si < n; ++si)
858 {
859 unsigned alignb;
860
861 i = stack_vars_sorted[si];
862 alignb = stack_vars[i].alignb;
863
864 /* Stop when we get to the first decl with "small" alignment. */
865 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
866 break;
867
868 /* Skip variables that aren't partition representatives. */
869 if (stack_vars[i].representative != i)
870 continue;
871
872 /* Skip variables that have already had rtl assigned. See also
873 add_stack_var where we perpetrate this pc_rtx hack. */
874 decl = stack_vars[i].decl;
875 if ((TREE_CODE (decl) == SSA_NAME
876 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
877 : DECL_RTL (decl)) != pc_rtx)
878 continue;
879
880 large_size += alignb - 1;
881 large_size &= -(HOST_WIDE_INT)alignb;
882 large_size += stack_vars[i].size;
883 }
884
885 /* If there were any, allocate space. */
886 if (large_size > 0)
887 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
888 large_align, true);
889 }
890
891 for (si = 0; si < n; ++si)
892 {
893 rtx base;
894 unsigned base_align, alignb;
895 HOST_WIDE_INT offset;
896
897 i = stack_vars_sorted[si];
898
899 /* Skip variables that aren't partition representatives, for now. */
900 if (stack_vars[i].representative != i)
901 continue;
902
903 /* Skip variables that have already had rtl assigned. See also
904 add_stack_var where we perpetrate this pc_rtx hack. */
905 decl = stack_vars[i].decl;
906 if ((TREE_CODE (decl) == SSA_NAME
907 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
908 : DECL_RTL (decl)) != pc_rtx)
909 continue;
910
911 /* Check the predicate to see whether this variable should be
912 allocated in this pass. */
913 if (pred && !pred (decl))
914 continue;
915
916 alignb = stack_vars[i].alignb;
917 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
918 {
919 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
920 base = virtual_stack_vars_rtx;
921 base_align = crtl->max_used_stack_slot_alignment;
922 }
923 else
924 {
925 /* Large alignment is only processed in the last pass. */
926 if (pred)
927 continue;
928 gcc_assert (large_base != NULL);
929
930 large_alloc += alignb - 1;
931 large_alloc &= -(HOST_WIDE_INT)alignb;
932 offset = large_alloc;
933 large_alloc += stack_vars[i].size;
934
935 base = large_base;
936 base_align = large_align;
937 }
938
939 /* Create rtl for each variable based on their location within the
940 partition. */
941 for (j = i; j != EOC; j = stack_vars[j].next)
942 {
943 expand_one_stack_var_at (stack_vars[j].decl,
944 base, base_align,
945 offset);
946 }
947 }
948
949 gcc_assert (large_alloc == large_size);
950 }
951
952 /* Take into account all sizes of partitions and reset DECL_RTLs. */
953 static HOST_WIDE_INT
954 account_stack_vars (void)
955 {
956 size_t si, j, i, n = stack_vars_num;
957 HOST_WIDE_INT size = 0;
958
959 for (si = 0; si < n; ++si)
960 {
961 i = stack_vars_sorted[si];
962
963 /* Skip variables that aren't partition representatives, for now. */
964 if (stack_vars[i].representative != i)
965 continue;
966
967 size += stack_vars[i].size;
968 for (j = i; j != EOC; j = stack_vars[j].next)
969 set_rtl (stack_vars[j].decl, NULL);
970 }
971 return size;
972 }
973
974 /* A subroutine of expand_one_var. Called to immediately assign rtl
975 to a variable to be allocated in the stack frame. */
976
977 static void
978 expand_one_stack_var (tree var)
979 {
980 HOST_WIDE_INT size, offset;
981 unsigned byte_align;
982
983 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
984 byte_align = align_local_variable (SSAVAR (var));
985
986 /* We handle highly aligned variables in expand_stack_vars. */
987 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
988
989 offset = alloc_stack_frame_space (size, byte_align);
990
991 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
992 crtl->max_used_stack_slot_alignment, offset);
993 }
994
995 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
996 that will reside in a hard register. */
997
998 static void
999 expand_one_hard_reg_var (tree var)
1000 {
1001 rest_of_decl_compilation (var, 0, 0);
1002 }
1003
1004 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1005 that will reside in a pseudo register. */
1006
1007 static void
1008 expand_one_register_var (tree var)
1009 {
1010 tree decl = SSAVAR (var);
1011 tree type = TREE_TYPE (decl);
1012 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1013 rtx x = gen_reg_rtx (reg_mode);
1014
1015 set_rtl (var, x);
1016
1017 /* Note if the object is a user variable. */
1018 if (!DECL_ARTIFICIAL (decl))
1019 mark_user_reg (x);
1020
1021 if (POINTER_TYPE_P (type))
1022 mark_reg_pointer (x, get_pointer_alignment (var));
1023 }
1024
1025 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1026 has some associated error, e.g. its type is error-mark. We just need
1027 to pick something that won't crash the rest of the compiler. */
1028
1029 static void
1030 expand_one_error_var (tree var)
1031 {
1032 enum machine_mode mode = DECL_MODE (var);
1033 rtx x;
1034
1035 if (mode == BLKmode)
1036 x = gen_rtx_MEM (BLKmode, const0_rtx);
1037 else if (mode == VOIDmode)
1038 x = const0_rtx;
1039 else
1040 x = gen_reg_rtx (mode);
1041
1042 SET_DECL_RTL (var, x);
1043 }
1044
1045 /* A subroutine of expand_one_var. VAR is a variable that will be
1046 allocated to the local stack frame. Return true if we wish to
1047 add VAR to STACK_VARS so that it will be coalesced with other
1048 variables. Return false to allocate VAR immediately.
1049
1050 This function is used to reduce the number of variables considered
1051 for coalescing, which reduces the size of the quadratic problem. */
1052
1053 static bool
1054 defer_stack_allocation (tree var, bool toplevel)
1055 {
1056 /* If stack protection is enabled, *all* stack variables must be deferred,
1057 so that we can re-order the strings to the top of the frame. */
1058 if (flag_stack_protect)
1059 return true;
1060
1061 /* We handle "large" alignment via dynamic allocation. We want to handle
1062 this extra complication in only one place, so defer them. */
1063 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1064 return true;
1065
1066 /* Variables in the outermost scope automatically conflict with
1067 every other variable. The only reason to want to defer them
1068 at all is that, after sorting, we can more efficiently pack
1069 small variables in the stack frame. Continue to defer at -O2. */
1070 if (toplevel && optimize < 2)
1071 return false;
1072
1073 /* Without optimization, *most* variables are allocated from the
1074 stack, which makes the quadratic problem large exactly when we
1075 want compilation to proceed as quickly as possible. On the
1076 other hand, we don't want the function's stack frame size to
1077 get completely out of hand. So we avoid adding scalars and
1078 "small" aggregates to the list at all. */
1079 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1080 return false;
1081
1082 return true;
1083 }
1084
1085 /* A subroutine of expand_used_vars. Expand one variable according to
1086 its flavor. Variables to be placed on the stack are not actually
1087 expanded yet, merely recorded.
1088 When REALLY_EXPAND is false, only add stack values to be allocated.
1089 Return the stack usage this variable is supposed to take.
1090 */
1091
1092 static HOST_WIDE_INT
1093 expand_one_var (tree var, bool toplevel, bool really_expand)
1094 {
1095 unsigned int align = BITS_PER_UNIT;
1096 tree origvar = var;
1097
1098 var = SSAVAR (var);
1099
1100 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1101 {
1102 /* Because we don't know if VAR will be in a register or on the stack,
1103 we conservatively assume it will be on the stack even if VAR is
1104 eventually put into a register after the RA pass. For non-automatic
1105 variables, which won't be on the stack, we collect the alignment of
1106 the type and ignore user-specified alignment. */
1107 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1108 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1109 TYPE_MODE (TREE_TYPE (var)),
1110 TYPE_ALIGN (TREE_TYPE (var)));
1111 else if (DECL_HAS_VALUE_EXPR_P (var)
1112 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1113 /* Don't consider debug-only variables with DECL_HAS_VALUE_EXPR_P set
1114 or variables which were already assigned a stack slot by
1115 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1116 changed according to the offset chosen for it. */
1117 align = crtl->stack_alignment_estimated;
1118 else
1119 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1120
1121 /* If the variable alignment is very large we'll dynamically allocate
1122 it, which means that the in-frame portion is just a pointer. */
1123 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1124 align = POINTER_SIZE;
1125 }
1126
1127 if (SUPPORTS_STACK_ALIGNMENT
1128 && crtl->stack_alignment_estimated < align)
1129 {
1130 /* stack_alignment_estimated shouldn't change after the stack
1131 realign decision has been made. */
1132 gcc_assert (!crtl->stack_realign_processed);
1133 crtl->stack_alignment_estimated = align;
1134 }
1135
1136 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1137 So here we only make sure stack_alignment_needed >= align. */
1138 if (crtl->stack_alignment_needed < align)
1139 crtl->stack_alignment_needed = align;
1140 if (crtl->max_used_stack_slot_alignment < align)
1141 crtl->max_used_stack_slot_alignment = align;
1142
1143 if (TREE_CODE (origvar) == SSA_NAME)
1144 {
1145 gcc_assert (TREE_CODE (var) != VAR_DECL
1146 || (!DECL_EXTERNAL (var)
1147 && !DECL_HAS_VALUE_EXPR_P (var)
1148 && !TREE_STATIC (var)
1149 && TREE_TYPE (var) != error_mark_node
1150 && !DECL_HARD_REGISTER (var)
1151 && really_expand));
1152 }
1153 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1154 ;
1155 else if (DECL_EXTERNAL (var))
1156 ;
1157 else if (DECL_HAS_VALUE_EXPR_P (var))
1158 ;
1159 else if (TREE_STATIC (var))
1160 ;
1161 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1162 ;
1163 else if (TREE_TYPE (var) == error_mark_node)
1164 {
1165 if (really_expand)
1166 expand_one_error_var (var);
1167 }
1168 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1169 {
1170 if (really_expand)
1171 expand_one_hard_reg_var (var);
1172 }
1173 else if (use_register_for_decl (var))
1174 {
1175 if (really_expand)
1176 expand_one_register_var (origvar);
1177 }
1178 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1179 {
1180 /* Reject variables which cover more than half of the address space. */
1181 if (really_expand)
1182 {
1183 error ("size of variable %q+D is too large", var);
1184 expand_one_error_var (var);
1185 }
1186 }
1187 else if (defer_stack_allocation (var, toplevel))
1188 add_stack_var (origvar);
1189 else
1190 {
1191 if (really_expand)
1192 expand_one_stack_var (origvar);
1193 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1194 }
1195 return 0;
1196 }
1197
1198 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1199 expanding variables. Those variables that can be put into registers
1200 are allocated pseudos; those that can't are put on the stack.
1201
1202 TOPLEVEL is true if this is the outermost BLOCK. */
1203
1204 static void
1205 expand_used_vars_for_block (tree block, bool toplevel)
1206 {
1207 tree t;
1208
1209 /* Expand all variables at this level. */
1210 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1211 if (TREE_USED (t)
1212 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1213 || !DECL_NONSHAREABLE (t)))
1214 expand_one_var (t, toplevel, true);
1215
1216 /* Expand all variables at contained levels. */
1217 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1218 expand_used_vars_for_block (t, false);
1219 }
1220
1221 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1222 and clear TREE_USED on all local variables. */
1223
1224 static void
1225 clear_tree_used (tree block)
1226 {
1227 tree t;
1228
1229 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1230 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1231 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1232 || !DECL_NONSHAREABLE (t))
1233 TREE_USED (t) = 0;
1234
1235 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1236 clear_tree_used (t);
1237 }
1238
1239 /* Examine TYPE and determine a bit mask of the following features. */
1240
1241 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1242 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1243 #define SPCT_HAS_ARRAY 4
1244 #define SPCT_HAS_AGGREGATE 8
1245
1246 static unsigned int
1247 stack_protect_classify_type (tree type)
1248 {
1249 unsigned int ret = 0;
1250 tree t;
1251
1252 switch (TREE_CODE (type))
1253 {
1254 case ARRAY_TYPE:
1255 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1256 if (t == char_type_node
1257 || t == signed_char_type_node
1258 || t == unsigned_char_type_node)
1259 {
1260 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1261 unsigned HOST_WIDE_INT len;
1262
1263 if (!TYPE_SIZE_UNIT (type)
1264 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1265 len = max;
1266 else
1267 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1268
1269 if (len < max)
1270 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1271 else
1272 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1273 }
1274 else
1275 ret = SPCT_HAS_ARRAY;
1276 break;
1277
1278 case UNION_TYPE:
1279 case QUAL_UNION_TYPE:
1280 case RECORD_TYPE:
1281 ret = SPCT_HAS_AGGREGATE;
1282 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1283 if (TREE_CODE (t) == FIELD_DECL)
1284 ret |= stack_protect_classify_type (TREE_TYPE (t));
1285 break;
1286
1287 default:
1288 break;
1289 }
1290
1291 return ret;
1292 }
1293
1294 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1295 part of the local stack frame. Remember if we ever return nonzero for
1296 any variable in this function. The return value is the phase number in
1297 which the variable should be allocated. */
1298
1299 static int
1300 stack_protect_decl_phase (tree decl)
1301 {
1302 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1303 int ret = 0;
1304
1305 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1306 has_short_buffer = true;
1307
1308 if (flag_stack_protect == 2)
1309 {
1310 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1311 && !(bits & SPCT_HAS_AGGREGATE))
1312 ret = 1;
1313 else if (bits & SPCT_HAS_ARRAY)
1314 ret = 2;
1315 }
1316 else
1317 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1318
1319 if (ret)
1320 has_protected_decls = true;
1321
1322 return ret;
1323 }
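
/* Illustrative examples, not part of the pass: how a few hypothetical locals
   classify under the default --param ssp-buffer-size=8.  */
#if 0
char big[16];   /* SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY: phase 1 with
                   -fstack-protector and with -fstack-protector-all.  */
char small[4];  /* SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY: phase 1 only
                   with -fstack-protector-all; otherwise phase 0, though it
                   does set has_short_buffer for -Wstack-protector.  */
int ints[4];    /* SPCT_HAS_ARRAY only: phase 2 with -fstack-protector-all,
                   phase 0 otherwise.  */
#endif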
1324
1325 /* Two helper routines that check for phase 1 and phase 2. These are used
1326 as callbacks for expand_stack_vars. */
1327
1328 static bool
1329 stack_protect_decl_phase_1 (tree decl)
1330 {
1331 return stack_protect_decl_phase (decl) == 1;
1332 }
1333
1334 static bool
1335 stack_protect_decl_phase_2 (tree decl)
1336 {
1337 return stack_protect_decl_phase (decl) == 2;
1338 }
1339
1340 /* Ensure that variables in different stack protection phases conflict
1341 so that they are not merged and share the same stack slot. */
1342
1343 static void
1344 add_stack_protection_conflicts (void)
1345 {
1346 size_t i, j, n = stack_vars_num;
1347 unsigned char *phase;
1348
1349 phase = XNEWVEC (unsigned char, n);
1350 for (i = 0; i < n; ++i)
1351 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1352
1353 for (i = 0; i < n; ++i)
1354 {
1355 unsigned char ph_i = phase[i];
1356 for (j = i + 1; j < n; ++j)
1357 if (ph_i != phase[j])
1358 add_stack_var_conflict (i, j);
1359 }
1360
1361 XDELETEVEC (phase);
1362 }
1363
1364 /* Create a decl for the guard at the top of the stack frame. */
1365
1366 static void
1367 create_stack_guard (void)
1368 {
1369 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1370 VAR_DECL, NULL, ptr_type_node);
1371 TREE_THIS_VOLATILE (guard) = 1;
1372 TREE_USED (guard) = 1;
1373 expand_one_stack_var (guard);
1374 crtl->stack_protect_guard = guard;
1375 }
1376
1377 /* Prepare for expanding variables. */
1378 static void
1379 init_vars_expansion (void)
1380 {
1381 tree t;
1382 unsigned ix;
1383 /* Set TREE_USED on all variables in the local_decls. */
1384 FOR_EACH_LOCAL_DECL (cfun, ix, t)
1385 TREE_USED (t) = 1;
1386
1387 /* Clear TREE_USED on all variables associated with a block scope. */
1388 clear_tree_used (DECL_INITIAL (current_function_decl));
1389
1390 /* Initialize local stack smashing state. */
1391 has_protected_decls = false;
1392 has_short_buffer = false;
1393 }
1394
1395 /* Free up stack variable graph data. */
1396 static void
1397 fini_vars_expansion (void)
1398 {
1399 size_t i, n = stack_vars_num;
1400 for (i = 0; i < n; i++)
1401 BITMAP_FREE (stack_vars[i].conflicts);
1402 XDELETEVEC (stack_vars);
1403 XDELETEVEC (stack_vars_sorted);
1404 stack_vars = NULL;
1405 stack_vars_sorted = NULL;
1406 stack_vars_alloc = stack_vars_num = 0;
1407 pointer_map_destroy (decl_to_stack_part);
1408 decl_to_stack_part = NULL;
1409 }
1410
1411 /* Make a fair guess for the size of the stack frame of the function
1412 in NODE. This doesn't have to be exact; the result is only used in
1413 the inline heuristics. So we don't want to run the full stack var
1414 packing algorithm (which is quadratic in the number of stack vars).
1415 Instead, we calculate the total size of all stack vars. This turns
1416 out to be a pretty fair estimate -- packing of stack vars doesn't
1417 happen very often. */
1418
1419 HOST_WIDE_INT
1420 estimated_stack_frame_size (struct cgraph_node *node)
1421 {
1422 HOST_WIDE_INT size = 0;
1423 size_t i;
1424 tree var;
1425 tree old_cur_fun_decl = current_function_decl;
1426 struct function *fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
1427
1428 current_function_decl = node->symbol.decl;
1429 push_cfun (fn);
1430
1431 FOR_EACH_LOCAL_DECL (fn, i, var)
1432 if (auto_var_in_fn_p (var, fn->decl))
1433 size += expand_one_var (var, true, false);
1434
1435 if (stack_vars_num > 0)
1436 {
1437 /* Fake sorting the stack vars for account_stack_vars (). */
1438 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1439 for (i = 0; i < stack_vars_num; ++i)
1440 stack_vars_sorted[i] = i;
1441 size += account_stack_vars ();
1442 fini_vars_expansion ();
1443 }
1444 pop_cfun ();
1445 current_function_decl = old_cur_fun_decl;
1446 return size;
1447 }
1448
1449 /* Expand all variables used in the function. */
1450
1451 static void
1452 expand_used_vars (void)
1453 {
1454 tree var, outer_block = DECL_INITIAL (current_function_decl);
1455 VEC(tree,heap) *maybe_local_decls = NULL;
1456 struct pointer_map_t *ssa_name_decls;
1457 unsigned i;
1458 unsigned len;
1459
1460 /* Compute the phase of the stack frame for this function. */
1461 {
1462 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1463 int off = STARTING_FRAME_OFFSET % align;
1464 frame_phase = off ? align - off : 0;
1465 }
1466
1467 init_vars_expansion ();
1468
1469 ssa_name_decls = pointer_map_create ();
1470 for (i = 0; i < SA.map->num_partitions; i++)
1471 {
1472 tree var = partition_to_var (SA.map, i);
1473
1474 gcc_assert (is_gimple_reg (var));
1475
1476 /* Assign decls to each SSA name partition; share decls for partitions
1477 we could have coalesced (those with the same type). */
1478 if (SSA_NAME_VAR (var) == NULL_TREE)
1479 {
1480 void **slot = pointer_map_insert (ssa_name_decls, TREE_TYPE (var));
1481 if (!*slot)
1482 *slot = (void *) create_tmp_reg (TREE_TYPE (var), NULL);
1483 replace_ssa_name_symbol (var, (tree) *slot);
1484 }
1485
1486 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1487 expand_one_var (var, true, true);
1488 else
1489 {
1490 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1491 contain the default def (representing the parm or result itself)
1492 we don't do anything here. But for those which don't contain the
1493 default def (representing a temporary based on the parm/result)
1494 we need to allocate space just like for normal VAR_DECLs. */
1495 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1496 {
1497 expand_one_var (var, true, true);
1498 gcc_assert (SA.partition_to_pseudo[i]);
1499 }
1500 }
1501 }
1502 pointer_map_destroy (ssa_name_decls);
1503
1504 /* At this point all variables on the local_decls with TREE_USED
1505 set are not associated with any block scope. Lay them out. */
1506
1507 len = VEC_length (tree, cfun->local_decls);
1508 FOR_EACH_LOCAL_DECL (cfun, i, var)
1509 {
1510 bool expand_now = false;
1511
1512 /* Expanded above already. */
1513 if (is_gimple_reg (var))
1514 {
1515 TREE_USED (var) = 0;
1516 goto next;
1517 }
1518 /* We didn't set a block for static or extern because it's hard
1519 to tell the difference between a global variable (re)declared
1520 in a local scope, and one that's really declared there to
1521 begin with. And it doesn't really matter much, since we're
1522 not giving them stack space. Expand them now. */
1523 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1524 expand_now = true;
1525
1526 /* If the variable is not associated with any block, then it
1527 was created by the optimizers, and could be live anywhere
1528 in the function. */
1529 else if (TREE_USED (var))
1530 expand_now = true;
1531
1532 /* Finally, mark all variables on the list as used. We'll use
1533 this in a moment when we expand those associated with scopes. */
1534 TREE_USED (var) = 1;
1535
1536 if (expand_now)
1537 expand_one_var (var, true, true);
1538
1539 next:
1540 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1541 {
1542 rtx rtl = DECL_RTL_IF_SET (var);
1543
1544 /* Keep artificial non-ignored vars in cfun->local_decls
1545 chain until instantiate_decls. */
1546 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1547 add_local_decl (cfun, var);
1548 else if (rtl == NULL_RTX)
1549 /* If rtl isn't set yet, which can happen e.g. with
1550 -fstack-protector, retry before returning from this
1551 function. */
1552 VEC_safe_push (tree, heap, maybe_local_decls, var);
1553 }
1554 }
1555
1556 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1557
1558 +-----------------+-----------------+
1559 | ...processed... | ...duplicates...|
1560 +-----------------+-----------------+
1561 ^
1562 +-- LEN points here.
1563
1564 We just want the duplicates, as those are the artificial
1565 non-ignored vars that we want to keep until instantiate_decls.
1566 Move them down and truncate the array. */
1567 if (!VEC_empty (tree, cfun->local_decls))
1568 VEC_block_remove (tree, cfun->local_decls, 0, len);
1569
1570 /* At this point, all variables within the block tree with TREE_USED
1571 set are actually used by the optimized function. Lay them out. */
1572 expand_used_vars_for_block (outer_block, true);
1573
1574 if (stack_vars_num > 0)
1575 {
1576 add_scope_conflicts ();
1577
1578 /* If stack protection is enabled, we don't share space between
1579 vulnerable data and non-vulnerable data. */
1580 if (flag_stack_protect)
1581 add_stack_protection_conflicts ();
1582
1583 /* Now that we have collected all stack variables, and have computed a
1584 minimal interference graph, attempt to save some stack space. */
1585 partition_stack_vars ();
1586 if (dump_file)
1587 dump_stack_var_partition ();
1588 }
1589
1590 /* There are several conditions under which we should create a
1591 stack guard: protect-all, alloca used, protected decls present. */
1592 if (flag_stack_protect == 2
1593 || (flag_stack_protect
1594 && (cfun->calls_alloca || has_protected_decls)))
1595 create_stack_guard ();
1596
1597 /* Assign rtl to each variable based on these partitions. */
1598 if (stack_vars_num > 0)
1599 {
1600 /* Reorder decls to be protected by iterating over the variables
1601 array multiple times, and allocating out of each phase in turn. */
1602 /* ??? We could probably integrate this into the qsort we did
1603 earlier, such that we naturally see these variables first,
1604 and thus naturally allocate things in the right order. */
1605 if (has_protected_decls)
1606 {
1607 /* Phase 1 contains only character arrays. */
1608 expand_stack_vars (stack_protect_decl_phase_1);
1609
1610 /* Phase 2 contains other kinds of arrays. */
1611 if (flag_stack_protect == 2)
1612 expand_stack_vars (stack_protect_decl_phase_2);
1613 }
1614
1615 expand_stack_vars (NULL);
1616
1617 fini_vars_expansion ();
1618 }
1619
1620 /* If there were any artificial non-ignored vars without rtl
1621 found earlier, see if deferred stack allocation hasn't assigned
1622 rtl to them. */
1623 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
1624 {
1625 rtx rtl = DECL_RTL_IF_SET (var);
1626
1627 /* Keep artificial non-ignored vars in cfun->local_decls
1628 chain until instantiate_decls. */
1629 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1630 add_local_decl (cfun, var);
1631 }
1632 VEC_free (tree, heap, maybe_local_decls);
1633
1634 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1635 if (STACK_ALIGNMENT_NEEDED)
1636 {
1637 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1638 if (!FRAME_GROWS_DOWNWARD)
1639 frame_offset += align - 1;
1640 frame_offset &= -align;
1641 }
1642 }
1643
1644
1645 /* If we need to produce a detailed dump, print the tree representation
1646 for STMT to the dump file. SINCE is the last RTX after which the RTL
1647 generated for STMT should have been appended. */
1648
1649 static void
1650 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1651 {
1652 if (dump_file && (dump_flags & TDF_DETAILS))
1653 {
1654 fprintf (dump_file, "\n;; ");
1655 print_gimple_stmt (dump_file, stmt, 0,
1656 TDF_SLIM | (dump_flags & TDF_LINENO));
1657 fprintf (dump_file, "\n");
1658
1659 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1660 }
1661 }
1662
1663 /* Maps the blocks that do not contain tree labels to rtx labels. */
1664
1665 static struct pointer_map_t *lab_rtx_for_bb;
1666
1667 /* Returns the label_rtx expression for a label starting basic block BB. */
1668
1669 static rtx
1670 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1671 {
1672 gimple_stmt_iterator gsi;
1673 tree lab;
1674 gimple lab_stmt;
1675 void **elt;
1676
1677 if (bb->flags & BB_RTL)
1678 return block_label (bb);
1679
1680 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1681 if (elt)
1682 return (rtx) *elt;
1683
1684 /* Find the tree label if it is present. */
1685
1686 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1687 {
1688 lab_stmt = gsi_stmt (gsi);
1689 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1690 break;
1691
1692 lab = gimple_label_label (lab_stmt);
1693 if (DECL_NONLOCAL (lab))
1694 break;
1695
1696 return label_rtx (lab);
1697 }
1698
1699 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1700 *elt = gen_label_rtx ();
1701 return (rtx) *elt;
1702 }
1703
1704
1705 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1706 of a basic block where we just expanded the conditional at the end,
1707 possibly clean up the CFG and instruction sequence. LAST is the
1708 last instruction before the just emitted jump sequence. */
1709
1710 static void
1711 maybe_cleanup_end_of_block (edge e, rtx last)
1712 {
1713 /* Special case: when jumpif decides that the condition is
1714 trivial it emits an unconditional jump (and the necessary
1715 barrier). But we still have two edges, and the fallthru one is
1716 wrong. purge_dead_edges would clean this up later. Unfortunately
1717 we have to insert insns (and split edges) before
1718 find_many_sub_basic_blocks and hence before purge_dead_edges.
1719 But splitting edges might create new blocks which depend on the
1720 fact that if there are two edges there's no barrier. So the
1721 barrier would get lost and verify_flow_info would ICE. Instead
1722 of auditing all edge splitters to care for the barrier (which
1723 normally isn't there in a cleaned CFG), fix it here. */
1724 if (BARRIER_P (get_last_insn ()))
1725 {
1726 rtx insn;
1727 remove_edge (e);
1728 /* Now, we have a single successor block, if we have insns to
1729 insert on the remaining edge we potentially will insert
1730 it at the end of this block (if the dest block isn't feasible)
1731 in order to avoid splitting the edge. This insertion will take
1732 place in front of the last jump. But we might have emitted
1733 multiple jumps (conditional and one unconditional) to the
1734 same destination. Inserting in front of the last one then
1735 is a problem. See PR 40021. We fix this by deleting all
1736 jumps except the last unconditional one. */
1737 insn = PREV_INSN (get_last_insn ());
1738 /* Make sure we have an unconditional jump. Otherwise we're
1739 confused. */
1740 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1741 for (insn = PREV_INSN (insn); insn != last;)
1742 {
1743 insn = PREV_INSN (insn);
1744 if (JUMP_P (NEXT_INSN (insn)))
1745 {
1746 if (!any_condjump_p (NEXT_INSN (insn)))
1747 {
1748 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1749 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1750 }
1751 delete_insn (NEXT_INSN (insn));
1752 }
1753 }
1754 }
1755 }
1756
1757 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1758 Returns a new basic block if we've terminated the current basic
1759 block and created a new one. */
1760
1761 static basic_block
1762 expand_gimple_cond (basic_block bb, gimple stmt)
1763 {
1764 basic_block new_bb, dest;
1765 edge new_edge;
1766 edge true_edge;
1767 edge false_edge;
1768 rtx last2, last;
1769 enum tree_code code;
1770 tree op0, op1;
1771
1772 code = gimple_cond_code (stmt);
1773 op0 = gimple_cond_lhs (stmt);
1774 op1 = gimple_cond_rhs (stmt);
1775 /* We're sometimes presented with such code:
1776 D.123_1 = x < y;
1777 if (D.123_1 != 0)
1778 ...
1779 This would expand to two comparisons which then later might
1780 be cleaned up by combine. But some pattern matchers like if-conversion
1781 work better when there's only one compare, so make up for this
1782 here as a special exception if TER would have made the same change. */
1783 if (gimple_cond_single_var_p (stmt)
1784 && SA.values
1785 && TREE_CODE (op0) == SSA_NAME
1786 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1787 {
1788 gimple second = SSA_NAME_DEF_STMT (op0);
1789 if (gimple_code (second) == GIMPLE_ASSIGN)
1790 {
1791 enum tree_code code2 = gimple_assign_rhs_code (second);
1792 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1793 {
1794 code = code2;
1795 op0 = gimple_assign_rhs1 (second);
1796 op1 = gimple_assign_rhs2 (second);
1797 }
1798 /* If jumps are cheap, turn some more codes into
1799 jumpy sequences. */
1800 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1801 {
1802 if ((code2 == BIT_AND_EXPR
1803 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1804 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1805 || code2 == TRUTH_AND_EXPR)
1806 {
1807 code = TRUTH_ANDIF_EXPR;
1808 op0 = gimple_assign_rhs1 (second);
1809 op1 = gimple_assign_rhs2 (second);
1810 }
1811 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1812 {
1813 code = TRUTH_ORIF_EXPR;
1814 op0 = gimple_assign_rhs1 (second);
1815 op1 = gimple_assign_rhs2 (second);
1816 }
1817 }
1818 }
1819 }
1820
1821 last2 = last = get_last_insn ();
1822
1823 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1824 set_curr_insn_source_location (gimple_location (stmt));
1825 set_curr_insn_block (gimple_block (stmt));
1826
1827 /* These flags have no purpose in RTL land. */
1828 true_edge->flags &= ~EDGE_TRUE_VALUE;
1829 false_edge->flags &= ~EDGE_FALSE_VALUE;
1830
1831 /* We can either have a pure conditional jump with one fallthru edge or
1832 a two-way jump that needs to be decomposed into two basic blocks. */
1833 if (false_edge->dest == bb->next_bb)
1834 {
1835 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1836 true_edge->probability);
1837 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1838 if (true_edge->goto_locus)
1839 {
1840 set_curr_insn_source_location (true_edge->goto_locus);
1841 set_curr_insn_block (true_edge->goto_block);
1842 true_edge->goto_locus = curr_insn_locator ();
1843 }
1844 true_edge->goto_block = NULL;
1845 false_edge->flags |= EDGE_FALLTHRU;
1846 maybe_cleanup_end_of_block (false_edge, last);
1847 return NULL;
1848 }
1849 if (true_edge->dest == bb->next_bb)
1850 {
1851 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1852 false_edge->probability);
1853 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1854 if (false_edge->goto_locus)
1855 {
1856 set_curr_insn_source_location (false_edge->goto_locus);
1857 set_curr_insn_block (false_edge->goto_block);
1858 false_edge->goto_locus = curr_insn_locator ();
1859 }
1860 false_edge->goto_block = NULL;
1861 true_edge->flags |= EDGE_FALLTHRU;
1862 maybe_cleanup_end_of_block (true_edge, last);
1863 return NULL;
1864 }
1865
1866 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1867 true_edge->probability);
1868 last = get_last_insn ();
1869 if (false_edge->goto_locus)
1870 {
1871 set_curr_insn_source_location (false_edge->goto_locus);
1872 set_curr_insn_block (false_edge->goto_block);
1873 false_edge->goto_locus = curr_insn_locator ();
1874 }
1875 false_edge->goto_block = NULL;
1876 emit_jump (label_rtx_for_bb (false_edge->dest));
1877
1878 BB_END (bb) = last;
1879 if (BARRIER_P (BB_END (bb)))
1880 BB_END (bb) = PREV_INSN (BB_END (bb));
1881 update_bb_for_insn (bb);
1882
1883 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1884 dest = false_edge->dest;
1885 redirect_edge_succ (false_edge, new_bb);
1886 false_edge->flags |= EDGE_FALLTHRU;
1887 new_bb->count = false_edge->count;
1888 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1889 if (current_loops && bb->loop_father)
1890 add_bb_to_loop (new_bb, bb->loop_father);
1891 new_edge = make_edge (new_bb, dest, 0);
1892 new_edge->probability = REG_BR_PROB_BASE;
1893 new_edge->count = new_bb->count;
1894 if (BARRIER_P (BB_END (new_bb)))
1895 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1896 update_bb_for_insn (new_bb);
1897
1898 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1899
1900 if (true_edge->goto_locus)
1901 {
1902 set_curr_insn_source_location (true_edge->goto_locus);
1903 set_curr_insn_block (true_edge->goto_block);
1904 true_edge->goto_locus = curr_insn_locator ();
1905 }
1906 true_edge->goto_block = NULL;
1907
1908 return new_bb;
1909 }
1910
1911 /* Mark all calls that can have a transaction restart. */
1912
1913 static void
1914 mark_transaction_restart_calls (gimple stmt)
1915 {
1916 struct tm_restart_node dummy;
1917 void **slot;
1918
1919 if (!cfun->gimple_df->tm_restart)
1920 return;
1921
1922 dummy.stmt = stmt;
1923 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
1924 if (slot)
1925 {
1926 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
1927 tree list = n->label_or_list;
1928 rtx insn;
1929
1930 for (insn = next_real_insn (get_last_insn ());
1931 !CALL_P (insn);
1932 insn = next_real_insn (insn))
1933 continue;
1934
1935 if (TREE_CODE (list) == LABEL_DECL)
1936 add_reg_note (insn, REG_TM, label_rtx (list));
1937 else
1938 for (; list ; list = TREE_CHAIN (list))
1939 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
1940 }
1941 }
1942
1943 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1944 statement STMT. */
1945
1946 static void
1947 expand_call_stmt (gimple stmt)
1948 {
1949 tree exp, decl, lhs;
1950 bool builtin_p;
1951 size_t i;
1952
1953 if (gimple_call_internal_p (stmt))
1954 {
1955 expand_internal_call (stmt);
1956 return;
1957 }
1958
1959 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1960
1961 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
1962 decl = gimple_call_fndecl (stmt);
1963 builtin_p = decl && DECL_BUILT_IN (decl);
1964
1965 /* If this is not a builtin function, the function type through which the
1966 call is made may be different from the type of the function. */
1967 if (!builtin_p)
1968 CALL_EXPR_FN (exp)
1969 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
1970 CALL_EXPR_FN (exp));
1971
1972 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1973 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1974
1975 for (i = 0; i < gimple_call_num_args (stmt); i++)
1976 {
1977 tree arg = gimple_call_arg (stmt, i);
1978 gimple def;
1979 /* TER substitutes ADDR_EXPRs into arguments of builtin functions so we have a
1980 chance to infer more correct alignment information. See PR39954. */
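/* Illustrative sketch (editorial addition, names made up): given gimple such as

     p_1 = &buf;
     __builtin_memcpy (p_1, q_2, 8);

   substituting the ADDR_EXPR &buf back for p_1 here lets the builtin
   expander see the alignment of `buf` directly.  */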
1981 if (builtin_p
1982 && TREE_CODE (arg) == SSA_NAME
1983 && (def = get_gimple_for_ssa_name (arg))
1984 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1985 arg = gimple_assign_rhs1 (def);
1986 CALL_EXPR_ARG (exp, i) = arg;
1987 }
1988
1989 if (gimple_has_side_effects (stmt))
1990 TREE_SIDE_EFFECTS (exp) = 1;
1991
1992 if (gimple_call_nothrow_p (stmt))
1993 TREE_NOTHROW (exp) = 1;
1994
1995 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1996 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1997 if (decl
1998 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1999 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2000 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2001 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2002 else
2003 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2004 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2005 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2006 TREE_BLOCK (exp) = gimple_block (stmt);
2007
2008 /* Ensure RTL is created for debug args. */
2009 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2010 {
2011 VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
2012 unsigned int ix;
2013 tree dtemp;
2014
2015 if (debug_args)
2016 for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
2017 {
2018 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2019 expand_debug_expr (dtemp);
2020 }
2021 }
2022
2023 lhs = gimple_call_lhs (stmt);
2024 if (lhs)
2025 expand_assignment (lhs, exp, false);
2026 else
2027 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2028
2029 mark_transaction_restart_calls (stmt);
2030 }
2031
2032 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2033 STMT that doesn't require special handling for outgoing edges. That
2034 is, no tailcalls and no GIMPLE_COND. */
2035
2036 static void
2037 expand_gimple_stmt_1 (gimple stmt)
2038 {
2039 tree op0;
2040
2041 set_curr_insn_source_location (gimple_location (stmt));
2042 set_curr_insn_block (gimple_block (stmt));
2043
2044 switch (gimple_code (stmt))
2045 {
2046 case GIMPLE_GOTO:
2047 op0 = gimple_goto_dest (stmt);
2048 if (TREE_CODE (op0) == LABEL_DECL)
2049 expand_goto (op0);
2050 else
2051 expand_computed_goto (op0);
2052 break;
2053 case GIMPLE_LABEL:
2054 expand_label (gimple_label_label (stmt));
2055 break;
2056 case GIMPLE_NOP:
2057 case GIMPLE_PREDICT:
2058 break;
2059 case GIMPLE_SWITCH:
2060 expand_case (stmt);
2061 break;
2062 case GIMPLE_ASM:
2063 expand_asm_stmt (stmt);
2064 break;
2065 case GIMPLE_CALL:
2066 expand_call_stmt (stmt);
2067 break;
2068
2069 case GIMPLE_RETURN:
2070 op0 = gimple_return_retval (stmt);
2071
2072 if (op0 && op0 != error_mark_node)
2073 {
2074 tree result = DECL_RESULT (current_function_decl);
2075
2076 /* If we are not returning the current function's RESULT_DECL,
2077 build an assignment to it. */
2078 if (op0 != result)
2079 {
2080 /* I believe that a function's RESULT_DECL is unique. */
2081 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2082
2083 /* ??? We'd like to simply use expand_assignment here,
2084 but this fails if the value is of BLKmode but the return
2085 decl is a register. expand_return has special handling
2086 for this combination, which eventually should move
2087 to common code. See comments there. Until then, let's
2088 build a modify expression :-/ */
2089 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2090 result, op0);
2091 }
2092 }
2093 if (!op0)
2094 expand_null_return ();
2095 else
2096 expand_return (op0);
2097 break;
2098
2099 case GIMPLE_ASSIGN:
2100 {
2101 tree lhs = gimple_assign_lhs (stmt);
2102
2103 /* Tree expand used to fiddle with |= and &= of two bitfield
2104 COMPONENT_REFs here. This can't happen with gimple; the LHS
2105 of binary assigns must be a gimple reg. */
2106
2107 if (TREE_CODE (lhs) != SSA_NAME
2108 || get_gimple_rhs_class (gimple_expr_code (stmt))
2109 == GIMPLE_SINGLE_RHS)
2110 {
2111 tree rhs = gimple_assign_rhs1 (stmt);
2112 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2113 == GIMPLE_SINGLE_RHS);
2114 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2115 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2116 if (TREE_CLOBBER_P (rhs))
2117 /* This is a clobber marking that this LHS is going out
2118 of scope. */
2119 ;
2120 else
2121 expand_assignment (lhs, rhs,
2122 gimple_assign_nontemporal_move_p (stmt));
2123 }
2124 else
2125 {
2126 rtx target, temp;
2127 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2128 struct separate_ops ops;
2129 bool promoted = false;
2130
2131 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2132 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2133 promoted = true;
2134
2135 ops.code = gimple_assign_rhs_code (stmt);
2136 ops.type = TREE_TYPE (lhs);
2137 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2138 {
2139 case GIMPLE_TERNARY_RHS:
2140 ops.op2 = gimple_assign_rhs3 (stmt);
2141 /* Fallthru */
2142 case GIMPLE_BINARY_RHS:
2143 ops.op1 = gimple_assign_rhs2 (stmt);
2144 /* Fallthru */
2145 case GIMPLE_UNARY_RHS:
2146 ops.op0 = gimple_assign_rhs1 (stmt);
2147 break;
2148 default:
2149 gcc_unreachable ();
2150 }
2151 ops.location = gimple_location (stmt);
2152
2153 /* If we want to use a nontemporal store, force the value into a
2154 register first. If we store into a promoted register,
2155 don't expand directly to the target. */
2156 temp = nontemporal || promoted ? NULL_RTX : target;
2157 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2158 EXPAND_NORMAL);
2159
2160 if (temp == target)
2161 ;
2162 else if (promoted)
2163 {
2164 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2165 /* If TEMP is a VOIDmode constant, use convert_modes to make
2166 sure that we properly convert it. */
2167 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2168 {
2169 temp = convert_modes (GET_MODE (target),
2170 TYPE_MODE (ops.type),
2171 temp, unsignedp);
2172 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2173 GET_MODE (target), temp, unsignedp);
2174 }
2175
2176 convert_move (SUBREG_REG (target), temp, unsignedp);
2177 }
2178 else if (nontemporal && emit_storent_insn (target, temp))
2179 ;
2180 else
2181 {
2182 temp = force_operand (temp, target);
2183 if (temp != target)
2184 emit_move_insn (target, temp);
2185 }
2186 }
2187 }
2188 break;
2189
2190 default:
2191 gcc_unreachable ();
2192 }
2193 }
2194
2195 /* Expand one gimple statement STMT and return the last RTL instruction
2196 before any of the newly generated ones.
2197
2198 In addition to generating the necessary RTL instructions this also
2199 sets REG_EH_REGION notes if necessary and sets the current source
2200 location for diagnostics. */
2201
2202 static rtx
2203 expand_gimple_stmt (gimple stmt)
2204 {
2205 location_t saved_location = input_location;
2206 rtx last = get_last_insn ();
2207 int lp_nr;
2208
2209 gcc_assert (cfun);
2210
2211 /* We need to save and restore the current source location so that errors
2212 discovered during expansion are emitted with the right location. But
2213 it would be better if the diagnostic routines used the source location
2214 embedded in the tree nodes rather than globals. */
2215 if (gimple_has_location (stmt))
2216 input_location = gimple_location (stmt);
2217
2218 expand_gimple_stmt_1 (stmt);
2219
2220 /* Free any temporaries used to evaluate this statement. */
2221 free_temp_slots ();
2222
2223 input_location = saved_location;
2224
2225 /* Mark all insns that may trap. */
2226 lp_nr = lookup_stmt_eh_lp (stmt);
2227 if (lp_nr)
2228 {
2229 rtx insn;
2230 for (insn = next_real_insn (last); insn;
2231 insn = next_real_insn (insn))
2232 {
2233 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2234 /* If we want exceptions for non-call insns, any
2235 may_trap_p instruction may throw. */
2236 && GET_CODE (PATTERN (insn)) != CLOBBER
2237 && GET_CODE (PATTERN (insn)) != USE
2238 && insn_could_throw_p (insn))
2239 make_reg_eh_region_note (insn, 0, lp_nr);
2240 }
2241 }
2242
2243 return last;
2244 }
2245
2246 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2247 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2248 generated a tail call (something that might be denied by the ABI
2249 rules governing the call; see calls.c).
2250
2251 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2252 can still reach the rest of BB. The case here is __builtin_sqrt,
2253 where the NaN result goes through the external function (with a
2254 tailcall) and the normal result happens via a sqrt instruction. */
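
/* Illustrative sketch (editorial addition, not from the original sources):
   a function such as

     double f (double x) { return __builtin_sqrt (x); }

   may, on a target with a hardware sqrt instruction and with math error
   handling enabled, expand into the inline sqrt for the normal case plus a
   sibcall to the library sqrt for the error path, which is the conditional
   tail call described above.  */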
2255
2256 static basic_block
2257 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2258 {
2259 rtx last2, last;
2260 edge e;
2261 edge_iterator ei;
2262 int probability;
2263 gcov_type count;
2264
2265 last2 = last = expand_gimple_stmt (stmt);
2266
2267 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2268 if (CALL_P (last) && SIBLING_CALL_P (last))
2269 goto found;
2270
2271 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2272
2273 *can_fallthru = true;
2274 return NULL;
2275
2276 found:
2277 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2278 Any instructions emitted here are about to be deleted. */
2279 do_pending_stack_adjust ();
2280
2281 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2282 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2283 EH or abnormal edges, we shouldn't have created a tail call in
2284 the first place. So it seems to me we should just be removing
2285 all edges here, or redirecting the existing fallthru edge to
2286 the exit block. */
2287
2288 probability = 0;
2289 count = 0;
2290
2291 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2292 {
2293 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2294 {
2295 if (e->dest != EXIT_BLOCK_PTR)
2296 {
2297 e->dest->count -= e->count;
2298 e->dest->frequency -= EDGE_FREQUENCY (e);
2299 if (e->dest->count < 0)
2300 e->dest->count = 0;
2301 if (e->dest->frequency < 0)
2302 e->dest->frequency = 0;
2303 }
2304 count += e->count;
2305 probability += e->probability;
2306 remove_edge (e);
2307 }
2308 else
2309 ei_next (&ei);
2310 }
2311
2312 /* This is somewhat ugly: the call_expr expander often emits instructions
2313 after the sibcall (to perform the function return). These confuse the
2314 find_many_sub_basic_blocks code, so we need to get rid of them. */
2315 last = NEXT_INSN (last);
2316 gcc_assert (BARRIER_P (last));
2317
2318 *can_fallthru = false;
2319 while (NEXT_INSN (last))
2320 {
2321 /* For instance, a sqrt builtin expander expands an if with a
2322 sibcall in the then arm and a label for the else arm. */
2323 if (LABEL_P (NEXT_INSN (last)))
2324 {
2325 *can_fallthru = true;
2326 break;
2327 }
2328 delete_insn (NEXT_INSN (last));
2329 }
2330
2331 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2332 e->probability += probability;
2333 e->count += count;
2334 BB_END (bb) = last;
2335 update_bb_for_insn (bb);
2336
2337 if (NEXT_INSN (last))
2338 {
2339 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2340
2341 last = BB_END (bb);
2342 if (BARRIER_P (last))
2343 BB_END (bb) = PREV_INSN (last);
2344 }
2345
2346 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2347
2348 return bb;
2349 }
2350
2351 /* Return the difference between the floor and the truncated result of
2352 a signed division by OP1 with remainder MOD. */
2353 static rtx
2354 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2355 {
2356 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2357 return gen_rtx_IF_THEN_ELSE
2358 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2359 gen_rtx_IF_THEN_ELSE
2360 (mode, gen_rtx_LT (BImode,
2361 gen_rtx_DIV (mode, op1, mod),
2362 const0_rtx),
2363 constm1_rtx, const0_rtx),
2364 const0_rtx);
2365 }
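
/* Editorial sketch, not part of the original file: the same floor
   adjustment written in plain C for a concrete check.  It assumes
   OP1 != 0 and no overflow; the helper name is made up here.
   E.g. -7 / 2 truncates to -3 with remainder -1, the adjustment is -1,
   and -3 + -1 == -4 == floor (-7 / 2.0).  */
static int
floor_sdiv_adjust_example (int op0, int op1)
{
  int mod = op0 % op1;	/* Remainder of C's truncating division.  */

  /* -1 exactly when there is a remainder and the true quotient is
     negative, i.e. op1 and mod have opposite signs.  */
  return (mod != 0 && op1 / mod < 0) ? -1 : 0;
}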
2366
2367 /* Return the difference between the ceil and the truncated result of
2368 a signed division by OP1 with remainder MOD. */
2369 static rtx
2370 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2371 {
2372 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2373 return gen_rtx_IF_THEN_ELSE
2374 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2375 gen_rtx_IF_THEN_ELSE
2376 (mode, gen_rtx_GT (BImode,
2377 gen_rtx_DIV (mode, op1, mod),
2378 const0_rtx),
2379 const1_rtx, const0_rtx),
2380 const0_rtx);
2381 }
2382
2383 /* Return the difference between the ceil and the truncated result of
2384 an unsigned division by OP1 with remainder MOD. */
2385 static rtx
2386 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2387 {
2388 /* (mod != 0 ? 1 : 0) */
2389 return gen_rtx_IF_THEN_ELSE
2390 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2391 const1_rtx, const0_rtx);
2392 }
2393
2394 /* Return the difference between the rounded and the truncated result
2395 of a signed division by OP1 with remainder MOD. Halfway cases are
2396 rounded away from zero, rather than to the nearest even number. */
2397 static rtx
2398 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2399 {
2400 /* (abs (mod) >= abs (op1) - abs (mod)
2401 ? (op1 / mod > 0 ? 1 : -1)
2402 : 0) */
2403 return gen_rtx_IF_THEN_ELSE
2404 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2405 gen_rtx_MINUS (mode,
2406 gen_rtx_ABS (mode, op1),
2407 gen_rtx_ABS (mode, mod))),
2408 gen_rtx_IF_THEN_ELSE
2409 (mode, gen_rtx_GT (BImode,
2410 gen_rtx_DIV (mode, op1, mod),
2411 const0_rtx),
2412 const1_rtx, constm1_rtx),
2413 const0_rtx);
2414 }
2415
2416 /* Return the difference between the rounded and the truncated result
2417 of an unsigned division by OP1 with remainder MOD. Halfway cases
2418 are rounded away from zero, rather than to the nearest even
2419 number. */
2420 static rtx
2421 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2422 {
2423 /* (mod >= op1 - mod ? 1 : 0) */
2424 return gen_rtx_IF_THEN_ELSE
2425 (mode, gen_rtx_GE (BImode, mod,
2426 gen_rtx_MINUS (mode, op1, mod)),
2427 const1_rtx, const0_rtx);
2428 }
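
/* Editorial sketch, not part of the original file: the signed rounding
   adjustment above in plain C, with halfway cases rounded away from zero.
   Assumes OP1 != 0 and no overflow; the helper name is made up here.
   E.g. 7 / 2 truncates to 3 with remainder 1; |1| >= |2| - |1| and the
   quotient is positive, so the adjustment is +1 and the result is 4.  */
static int
round_sdiv_adjust_example (int op0, int op1)
{
  int mod = op0 % op1;
  int amod = mod < 0 ? -mod : mod;
  int aop1 = op1 < 0 ? -op1 : op1;

  if (amod >= aop1 - amod)
    /* op1 / mod is positive exactly when the true quotient is positive.  */
    return op1 / mod > 0 ? 1 : -1;
  return 0;
}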
2429
2430 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2431 any rtl. */
2432
2433 static rtx
2434 convert_debug_memory_address (enum machine_mode mode, rtx x,
2435 addr_space_t as)
2436 {
2437 enum machine_mode xmode = GET_MODE (x);
2438
2439 #ifndef POINTERS_EXTEND_UNSIGNED
2440 gcc_assert (mode == Pmode
2441 || mode == targetm.addr_space.address_mode (as));
2442 gcc_assert (xmode == mode || xmode == VOIDmode);
2443 #else
2444 rtx temp;
2445
2446 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
2447
2448 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2449 return x;
2450
2451 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2452 x = simplify_gen_subreg (mode, x, xmode,
2453 subreg_lowpart_offset
2454 (mode, xmode));
2455 else if (POINTERS_EXTEND_UNSIGNED > 0)
2456 x = gen_rtx_ZERO_EXTEND (mode, x);
2457 else if (!POINTERS_EXTEND_UNSIGNED)
2458 x = gen_rtx_SIGN_EXTEND (mode, x);
2459 else
2460 {
2461 switch (GET_CODE (x))
2462 {
2463 case SUBREG:
2464 if ((SUBREG_PROMOTED_VAR_P (x)
2465 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2466 || (GET_CODE (SUBREG_REG (x)) == PLUS
2467 && REG_P (XEXP (SUBREG_REG (x), 0))
2468 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2469 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2470 && GET_MODE (SUBREG_REG (x)) == mode)
2471 return SUBREG_REG (x);
2472 break;
2473 case LABEL_REF:
2474 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2475 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2476 return temp;
2477 case SYMBOL_REF:
2478 temp = shallow_copy_rtx (x);
2479 PUT_MODE (temp, mode);
2480 return temp;
2481 case CONST:
2482 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2483 if (temp)
2484 temp = gen_rtx_CONST (mode, temp);
2485 return temp;
2486 case PLUS:
2487 case MINUS:
2488 if (CONST_INT_P (XEXP (x, 1)))
2489 {
2490 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2491 if (temp)
2492 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2493 }
2494 break;
2495 default:
2496 break;
2497 }
2498 /* Don't know how to express ptr_extend as an operation in debug info. */
2499 return NULL;
2500 }
2501 #endif /* POINTERS_EXTEND_UNSIGNED */
2502
2503 return x;
2504 }
2505
2506 /* Return an RTX equivalent to the value of the parameter DECL. */
2507
2508 static rtx
2509 expand_debug_parm_decl (tree decl)
2510 {
2511 rtx incoming = DECL_INCOMING_RTL (decl);
2512
2513 if (incoming
2514 && GET_MODE (incoming) != BLKmode
2515 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2516 || (MEM_P (incoming)
2517 && REG_P (XEXP (incoming, 0))
2518 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2519 {
2520 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2521
2522 #ifdef HAVE_window_save
2523 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2524 If the target machine has an explicit window save instruction, the
2525 actual entry value is the corresponding OUTGOING_REGNO instead. */
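/* Illustrative note (editorial addition): with SPARC-style register
   windows, an argument the callee sees in %i0 was %o0 in the caller's
   window, and %o0 is what must be evaluated at the call site, hence
   the switch to the OUTGOING_REGNO below.  */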
2526 if (REG_P (incoming)
2527 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2528 incoming
2529 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2530 OUTGOING_REGNO (REGNO (incoming)), 0);
2531 else if (MEM_P (incoming))
2532 {
2533 rtx reg = XEXP (incoming, 0);
2534 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2535 {
2536 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2537 incoming = replace_equiv_address_nv (incoming, reg);
2538 }
2539 }
2540 #endif
2541
2542 ENTRY_VALUE_EXP (rtl) = incoming;
2543 return rtl;
2544 }
2545
2546 if (incoming
2547 && GET_MODE (incoming) != BLKmode
2548 && !TREE_ADDRESSABLE (decl)
2549 && MEM_P (incoming)
2550 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2551 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2552 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2553 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2554 return incoming;
2555
2556 return NULL_RTX;
2557 }
2558
2559 /* Return an RTX equivalent to the value of the tree expression EXP. */
2560
2561 static rtx
2562 expand_debug_expr (tree exp)
2563 {
2564 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2565 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2566 enum machine_mode inner_mode = VOIDmode;
2567 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2568 addr_space_t as;
2569
2570 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2571 {
2572 case tcc_expression:
2573 switch (TREE_CODE (exp))
2574 {
2575 case COND_EXPR:
2576 case DOT_PROD_EXPR:
2577 case WIDEN_MULT_PLUS_EXPR:
2578 case WIDEN_MULT_MINUS_EXPR:
2579 case FMA_EXPR:
2580 goto ternary;
2581
2582 case TRUTH_ANDIF_EXPR:
2583 case TRUTH_ORIF_EXPR:
2584 case TRUTH_AND_EXPR:
2585 case TRUTH_OR_EXPR:
2586 case TRUTH_XOR_EXPR:
2587 goto binary;
2588
2589 case TRUTH_NOT_EXPR:
2590 goto unary;
2591
2592 default:
2593 break;
2594 }
2595 break;
2596
2597 ternary:
2598 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2599 if (!op2)
2600 return NULL_RTX;
2601 /* Fall through. */
2602
2603 binary:
2604 case tcc_binary:
2605 case tcc_comparison:
2606 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2607 if (!op1)
2608 return NULL_RTX;
2609 /* Fall through. */
2610
2611 unary:
2612 case tcc_unary:
2613 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2614 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2615 if (!op0)
2616 return NULL_RTX;
2617 break;
2618
2619 case tcc_type:
2620 case tcc_statement:
2621 gcc_unreachable ();
2622
2623 case tcc_constant:
2624 case tcc_exceptional:
2625 case tcc_declaration:
2626 case tcc_reference:
2627 case tcc_vl_exp:
2628 break;
2629 }
2630
2631 switch (TREE_CODE (exp))
2632 {
2633 case STRING_CST:
2634 if (!lookup_constant_def (exp))
2635 {
2636 if (strlen (TREE_STRING_POINTER (exp)) + 1
2637 != (size_t) TREE_STRING_LENGTH (exp))
2638 return NULL_RTX;
2639 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2640 op0 = gen_rtx_MEM (BLKmode, op0);
2641 set_mem_attributes (op0, exp, 0);
2642 return op0;
2643 }
2644 /* Fall through... */
2645
2646 case INTEGER_CST:
2647 case REAL_CST:
2648 case FIXED_CST:
2649 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2650 return op0;
2651
2652 case COMPLEX_CST:
2653 gcc_assert (COMPLEX_MODE_P (mode));
2654 op0 = expand_debug_expr (TREE_REALPART (exp));
2655 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2656 return gen_rtx_CONCAT (mode, op0, op1);
2657
2658 case DEBUG_EXPR_DECL:
2659 op0 = DECL_RTL_IF_SET (exp);
2660
2661 if (op0)
2662 return op0;
2663
2664 op0 = gen_rtx_DEBUG_EXPR (mode);
2665 DEBUG_EXPR_TREE_DECL (op0) = exp;
2666 SET_DECL_RTL (exp, op0);
2667
2668 return op0;
2669
2670 case VAR_DECL:
2671 case PARM_DECL:
2672 case FUNCTION_DECL:
2673 case LABEL_DECL:
2674 case CONST_DECL:
2675 case RESULT_DECL:
2676 op0 = DECL_RTL_IF_SET (exp);
2677
2678 /* This decl was probably optimized away. */
2679 if (!op0)
2680 {
2681 if (TREE_CODE (exp) != VAR_DECL
2682 || DECL_EXTERNAL (exp)
2683 || !TREE_STATIC (exp)
2684 || !DECL_NAME (exp)
2685 || DECL_HARD_REGISTER (exp)
2686 || DECL_IN_CONSTANT_POOL (exp)
2687 || mode == VOIDmode)
2688 return NULL;
2689
2690 op0 = make_decl_rtl_for_debug (exp);
2691 if (!MEM_P (op0)
2692 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2693 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2694 return NULL;
2695 }
2696 else
2697 op0 = copy_rtx (op0);
2698
2699 if (GET_MODE (op0) == BLKmode
2700 /* If op0 is not BLKmode but MODE is, adjust_mode
2701 below would ICE. While it is likely a FE bug,
2702 try to be robust here. See PR43166. */
2703 || mode == BLKmode
2704 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2705 {
2706 gcc_assert (MEM_P (op0));
2707 op0 = adjust_address_nv (op0, mode, 0);
2708 return op0;
2709 }
2710
2711 /* Fall through. */
2712
2713 adjust_mode:
2714 case PAREN_EXPR:
2715 case NOP_EXPR:
2716 case CONVERT_EXPR:
2717 {
2718 inner_mode = GET_MODE (op0);
2719
2720 if (mode == inner_mode)
2721 return op0;
2722
2723 if (inner_mode == VOIDmode)
2724 {
2725 if (TREE_CODE (exp) == SSA_NAME)
2726 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2727 else
2728 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2729 if (mode == inner_mode)
2730 return op0;
2731 }
2732
2733 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2734 {
2735 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2736 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2737 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2738 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2739 else
2740 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2741 }
2742 else if (FLOAT_MODE_P (mode))
2743 {
2744 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2745 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2746 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2747 else
2748 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2749 }
2750 else if (FLOAT_MODE_P (inner_mode))
2751 {
2752 if (unsignedp)
2753 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2754 else
2755 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2756 }
2757 else if (CONSTANT_P (op0)
2758 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2759 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2760 subreg_lowpart_offset (mode,
2761 inner_mode));
2762 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2763 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2764 : unsignedp)
2765 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2766 else
2767 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2768
2769 return op0;
2770 }
2771
2772 case MEM_REF:
2773 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2774 {
2775 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2776 TREE_OPERAND (exp, 0),
2777 TREE_OPERAND (exp, 1));
2778 if (newexp)
2779 return expand_debug_expr (newexp);
2780 }
2781 /* FALLTHROUGH */
2782 case INDIRECT_REF:
2783 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2784 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2785 if (!op0)
2786 return NULL;
2787
2788 if (TREE_CODE (exp) == MEM_REF)
2789 {
2790 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2791 || (GET_CODE (op0) == PLUS
2792 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2793 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2794 Instead just use get_inner_reference. */
2795 goto component_ref;
2796
2797 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2798 if (!op1 || !CONST_INT_P (op1))
2799 return NULL;
2800
2801 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
2802 }
2803
2804 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2805 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2806 else
2807 as = ADDR_SPACE_GENERIC;
2808
2809 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2810 op0, as);
2811 if (op0 == NULL_RTX)
2812 return NULL;
2813
2814 op0 = gen_rtx_MEM (mode, op0);
2815 set_mem_attributes (op0, exp, 0);
2816 if (TREE_CODE (exp) == MEM_REF
2817 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2818 set_mem_expr (op0, NULL_TREE);
2819 set_mem_addr_space (op0, as);
2820
2821 return op0;
2822
2823 case TARGET_MEM_REF:
2824 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2825 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2826 return NULL;
2827
2828 op0 = expand_debug_expr
2829 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2830 if (!op0)
2831 return NULL;
2832
2833 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2834 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2835 else
2836 as = ADDR_SPACE_GENERIC;
2837
2838 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2839 op0, as);
2840 if (op0 == NULL_RTX)
2841 return NULL;
2842
2843 op0 = gen_rtx_MEM (mode, op0);
2844
2845 set_mem_attributes (op0, exp, 0);
2846 set_mem_addr_space (op0, as);
2847
2848 return op0;
2849
2850 component_ref:
2851 case ARRAY_REF:
2852 case ARRAY_RANGE_REF:
2853 case COMPONENT_REF:
2854 case BIT_FIELD_REF:
2855 case REALPART_EXPR:
2856 case IMAGPART_EXPR:
2857 case VIEW_CONVERT_EXPR:
2858 {
2859 enum machine_mode mode1;
2860 HOST_WIDE_INT bitsize, bitpos;
2861 tree offset;
2862 int volatilep = 0;
2863 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2864 &mode1, &unsignedp, &volatilep, false);
2865 rtx orig_op0;
2866
2867 if (bitsize == 0)
2868 return NULL;
2869
2870 orig_op0 = op0 = expand_debug_expr (tem);
2871
2872 if (!op0)
2873 return NULL;
2874
2875 if (offset)
2876 {
2877 enum machine_mode addrmode, offmode;
2878
2879 if (!MEM_P (op0))
2880 return NULL;
2881
2882 op0 = XEXP (op0, 0);
2883 addrmode = GET_MODE (op0);
2884 if (addrmode == VOIDmode)
2885 addrmode = Pmode;
2886
2887 op1 = expand_debug_expr (offset);
2888 if (!op1)
2889 return NULL;
2890
2891 offmode = GET_MODE (op1);
2892 if (offmode == VOIDmode)
2893 offmode = TYPE_MODE (TREE_TYPE (offset));
2894
2895 if (addrmode != offmode)
2896 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2897 subreg_lowpart_offset (addrmode,
2898 offmode));
2899
2900 /* Don't use offset_address here, we don't need a
2901 recognizable address, and we don't want to generate
2902 code. */
2903 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2904 op0, op1));
2905 }
2906
2907 if (MEM_P (op0))
2908 {
2909 if (mode1 == VOIDmode)
2910 /* Bitfield. */
2911 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2912 if (bitpos >= BITS_PER_UNIT)
2913 {
2914 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2915 bitpos %= BITS_PER_UNIT;
2916 }
2917 else if (bitpos < 0)
2918 {
2919 HOST_WIDE_INT units
2920 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2921 op0 = adjust_address_nv (op0, mode1, units);
2922 bitpos += units * BITS_PER_UNIT;
2923 }
2924 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2925 op0 = adjust_address_nv (op0, mode, 0);
2926 else if (GET_MODE (op0) != mode1)
2927 op0 = adjust_address_nv (op0, mode1, 0);
2928 else
2929 op0 = copy_rtx (op0);
2930 if (op0 == orig_op0)
2931 op0 = shallow_copy_rtx (op0);
2932 set_mem_attributes (op0, exp, 0);
2933 }
2934
2935 if (bitpos == 0 && mode == GET_MODE (op0))
2936 return op0;
2937
2938 if (bitpos < 0)
2939 return NULL;
2940
2941 if (GET_MODE (op0) == BLKmode)
2942 return NULL;
2943
2944 if ((bitpos % BITS_PER_UNIT) == 0
2945 && bitsize == GET_MODE_BITSIZE (mode1))
2946 {
2947 enum machine_mode opmode = GET_MODE (op0);
2948
2949 if (opmode == VOIDmode)
2950 opmode = TYPE_MODE (TREE_TYPE (tem));
2951
2952 /* This condition may hold if we're expanding the address
2953 right past the end of an array that turned out not to
2954 be addressable (i.e., the address was only computed in
2955 debug stmts). The gen_subreg below would rightfully
2956 crash, and the address doesn't really exist, so just
2957 drop it. */
2958 if (bitpos >= GET_MODE_BITSIZE (opmode))
2959 return NULL;
2960
2961 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2962 return simplify_gen_subreg (mode, op0, opmode,
2963 bitpos / BITS_PER_UNIT);
2964 }
2965
2966 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2967 && TYPE_UNSIGNED (TREE_TYPE (exp))
2968 ? SIGN_EXTRACT
2969 : ZERO_EXTRACT, mode,
2970 GET_MODE (op0) != VOIDmode
2971 ? GET_MODE (op0)
2972 : TYPE_MODE (TREE_TYPE (tem)),
2973 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2974 }
2975
2976 case ABS_EXPR:
2977 return simplify_gen_unary (ABS, mode, op0, mode);
2978
2979 case NEGATE_EXPR:
2980 return simplify_gen_unary (NEG, mode, op0, mode);
2981
2982 case BIT_NOT_EXPR:
2983 return simplify_gen_unary (NOT, mode, op0, mode);
2984
2985 case FLOAT_EXPR:
2986 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
2987 0)))
2988 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
2989 inner_mode);
2990
2991 case FIX_TRUNC_EXPR:
2992 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
2993 inner_mode);
2994
2995 case POINTER_PLUS_EXPR:
2996 /* For the rare target where pointers are not the same size as
2997 size_t, we need to check for mis-matched modes and correct
2998 the addend. */
2999 if (op0 && op1
3000 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3001 && GET_MODE (op0) != GET_MODE (op1))
3002 {
3003 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
3004 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3005 GET_MODE (op1));
3006 else
3007 /* We always sign-extend, regardless of the signedness of
3008 the operand, because the operand is always unsigned
3009 here even if the original C expression is signed. */
3010 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3011 GET_MODE (op1));
3012 }
3013 /* Fall through. */
3014 case PLUS_EXPR:
3015 return simplify_gen_binary (PLUS, mode, op0, op1);
3016
3017 case MINUS_EXPR:
3018 return simplify_gen_binary (MINUS, mode, op0, op1);
3019
3020 case MULT_EXPR:
3021 return simplify_gen_binary (MULT, mode, op0, op1);
3022
3023 case RDIV_EXPR:
3024 case TRUNC_DIV_EXPR:
3025 case EXACT_DIV_EXPR:
3026 if (unsignedp)
3027 return simplify_gen_binary (UDIV, mode, op0, op1);
3028 else
3029 return simplify_gen_binary (DIV, mode, op0, op1);
3030
3031 case TRUNC_MOD_EXPR:
3032 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3033
3034 case FLOOR_DIV_EXPR:
3035 if (unsignedp)
3036 return simplify_gen_binary (UDIV, mode, op0, op1);
3037 else
3038 {
3039 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3040 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3041 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3042 return simplify_gen_binary (PLUS, mode, div, adj);
3043 }
3044
3045 case FLOOR_MOD_EXPR:
3046 if (unsignedp)
3047 return simplify_gen_binary (UMOD, mode, op0, op1);
3048 else
3049 {
3050 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3051 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3052 adj = simplify_gen_unary (NEG, mode,
3053 simplify_gen_binary (MULT, mode, adj, op1),
3054 mode);
3055 return simplify_gen_binary (PLUS, mode, mod, adj);
3056 }
3057
3058 case CEIL_DIV_EXPR:
3059 if (unsignedp)
3060 {
3061 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3062 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3063 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3064 return simplify_gen_binary (PLUS, mode, div, adj);
3065 }
3066 else
3067 {
3068 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3069 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3070 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3071 return simplify_gen_binary (PLUS, mode, div, adj);
3072 }
3073
3074 case CEIL_MOD_EXPR:
3075 if (unsignedp)
3076 {
3077 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3078 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3079 adj = simplify_gen_unary (NEG, mode,
3080 simplify_gen_binary (MULT, mode, adj, op1),
3081 mode);
3082 return simplify_gen_binary (PLUS, mode, mod, adj);
3083 }
3084 else
3085 {
3086 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3087 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3088 adj = simplify_gen_unary (NEG, mode,
3089 simplify_gen_binary (MULT, mode, adj, op1),
3090 mode);
3091 return simplify_gen_binary (PLUS, mode, mod, adj);
3092 }
3093
3094 case ROUND_DIV_EXPR:
3095 if (unsignedp)
3096 {
3097 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3098 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3099 rtx adj = round_udiv_adjust (mode, mod, op1);
3100 return simplify_gen_binary (PLUS, mode, div, adj);
3101 }
3102 else
3103 {
3104 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3105 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3106 rtx adj = round_sdiv_adjust (mode, mod, op1);
3107 return simplify_gen_binary (PLUS, mode, div, adj);
3108 }
3109
3110 case ROUND_MOD_EXPR:
3111 if (unsignedp)
3112 {
3113 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3114 rtx adj = round_udiv_adjust (mode, mod, op1);
3115 adj = simplify_gen_unary (NEG, mode,
3116 simplify_gen_binary (MULT, mode, adj, op1),
3117 mode);
3118 return simplify_gen_binary (PLUS, mode, mod, adj);
3119 }
3120 else
3121 {
3122 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3123 rtx adj = round_sdiv_adjust (mode, mod, op1);
3124 adj = simplify_gen_unary (NEG, mode,
3125 simplify_gen_binary (MULT, mode, adj, op1),
3126 mode);
3127 return simplify_gen_binary (PLUS, mode, mod, adj);
3128 }
3129
3130 case LSHIFT_EXPR:
3131 return simplify_gen_binary (ASHIFT, mode, op0, op1);
3132
3133 case RSHIFT_EXPR:
3134 if (unsignedp)
3135 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3136 else
3137 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3138
3139 case LROTATE_EXPR:
3140 return simplify_gen_binary (ROTATE, mode, op0, op1);
3141
3142 case RROTATE_EXPR:
3143 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3144
3145 case MIN_EXPR:
3146 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3147
3148 case MAX_EXPR:
3149 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3150
3151 case BIT_AND_EXPR:
3152 case TRUTH_AND_EXPR:
3153 return simplify_gen_binary (AND, mode, op0, op1);
3154
3155 case BIT_IOR_EXPR:
3156 case TRUTH_OR_EXPR:
3157 return simplify_gen_binary (IOR, mode, op0, op1);
3158
3159 case BIT_XOR_EXPR:
3160 case TRUTH_XOR_EXPR:
3161 return simplify_gen_binary (XOR, mode, op0, op1);
3162
3163 case TRUTH_ANDIF_EXPR:
3164 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3165
3166 case TRUTH_ORIF_EXPR:
3167 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3168
3169 case TRUTH_NOT_EXPR:
3170 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3171
3172 case LT_EXPR:
3173 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3174 op0, op1);
3175
3176 case LE_EXPR:
3177 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3178 op0, op1);
3179
3180 case GT_EXPR:
3181 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3182 op0, op1);
3183
3184 case GE_EXPR:
3185 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3186 op0, op1);
3187
3188 case EQ_EXPR:
3189 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3190
3191 case NE_EXPR:
3192 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3193
3194 case UNORDERED_EXPR:
3195 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3196
3197 case ORDERED_EXPR:
3198 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3199
3200 case UNLT_EXPR:
3201 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3202
3203 case UNLE_EXPR:
3204 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3205
3206 case UNGT_EXPR:
3207 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3208
3209 case UNGE_EXPR:
3210 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3211
3212 case UNEQ_EXPR:
3213 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3214
3215 case LTGT_EXPR:
3216 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3217
3218 case COND_EXPR:
3219 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3220
3221 case COMPLEX_EXPR:
3222 gcc_assert (COMPLEX_MODE_P (mode));
3223 if (GET_MODE (op0) == VOIDmode)
3224 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3225 if (GET_MODE (op1) == VOIDmode)
3226 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3227 return gen_rtx_CONCAT (mode, op0, op1);
3228
3229 case CONJ_EXPR:
3230 if (GET_CODE (op0) == CONCAT)
3231 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3232 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3233 XEXP (op0, 1),
3234 GET_MODE_INNER (mode)));
3235 else
3236 {
3237 enum machine_mode imode = GET_MODE_INNER (mode);
3238 rtx re, im;
3239
3240 if (MEM_P (op0))
3241 {
3242 re = adjust_address_nv (op0, imode, 0);
3243 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3244 }
3245 else
3246 {
3247 enum machine_mode ifmode = int_mode_for_mode (mode);
3248 enum machine_mode ihmode = int_mode_for_mode (imode);
3249 rtx halfsize;
3250 if (ifmode == BLKmode || ihmode == BLKmode)
3251 return NULL;
3252 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3253 re = op0;
3254 if (mode != ifmode)
3255 re = gen_rtx_SUBREG (ifmode, re, 0);
3256 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3257 if (imode != ihmode)
3258 re = gen_rtx_SUBREG (imode, re, 0);
3259 im = copy_rtx (op0);
3260 if (mode != ifmode)
3261 im = gen_rtx_SUBREG (ifmode, im, 0);
3262 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3263 if (imode != ihmode)
3264 im = gen_rtx_SUBREG (imode, im, 0);
3265 }
3266 im = gen_rtx_NEG (imode, im);
3267 return gen_rtx_CONCAT (mode, re, im);
3268 }
3269
3270 case ADDR_EXPR:
3271 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3272 if (!op0 || !MEM_P (op0))
3273 {
3274 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3275 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3276 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3277 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3278 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3279 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3280
3281 if (handled_component_p (TREE_OPERAND (exp, 0)))
3282 {
3283 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3284 tree decl
3285 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3286 &bitoffset, &bitsize, &maxsize);
3287 if ((TREE_CODE (decl) == VAR_DECL
3288 || TREE_CODE (decl) == PARM_DECL
3289 || TREE_CODE (decl) == RESULT_DECL)
3290 && (!TREE_ADDRESSABLE (decl)
3291 || target_for_debug_bind (decl))
3292 && (bitoffset % BITS_PER_UNIT) == 0
3293 && bitsize > 0
3294 && bitsize == maxsize)
3295 {
3296 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
3297 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
3298 }
3299 }
3300
3301 return NULL;
3302 }
3303
3304 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3305 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3306
3307 return op0;
3308
3309 case VECTOR_CST:
3310 {
3311 unsigned i;
3312
3313 op0 = gen_rtx_CONCATN
3314 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3315
3316 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
3317 {
3318 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
3319 if (!op1)
3320 return NULL;
3321 XVECEXP (op0, 0, i) = op1;
3322 }
3323
3324 return op0;
3325 }
3326
3327 case CONSTRUCTOR:
3328 if (TREE_CLOBBER_P (exp))
3329 return NULL;
3330 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3331 {
3332 unsigned i;
3333 tree val;
3334
3335 op0 = gen_rtx_CONCATN
3336 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3337
3338 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3339 {
3340 op1 = expand_debug_expr (val);
3341 if (!op1)
3342 return NULL;
3343 XVECEXP (op0, 0, i) = op1;
3344 }
3345
3346 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3347 {
3348 op1 = expand_debug_expr
3349 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3350
3351 if (!op1)
3352 return NULL;
3353
3354 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3355 XVECEXP (op0, 0, i) = op1;
3356 }
3357
3358 return op0;
3359 }
3360 else
3361 goto flag_unsupported;
3362
3363 case CALL_EXPR:
3364 /* ??? Maybe handle some builtins? */
3365 return NULL;
3366
3367 case SSA_NAME:
3368 {
3369 gimple g = get_gimple_for_ssa_name (exp);
3370 if (g)
3371 {
3372 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3373 if (!op0)
3374 return NULL;
3375 }
3376 else
3377 {
3378 int part = var_to_partition (SA.map, exp);
3379
3380 if (part == NO_PARTITION)
3381 {
3382 /* If this is a reference to an incoming value of a parameter
3383 that is never used in the code, or where the incoming
3384 value is never used in the code, use PARM_DECL's
3385 DECL_RTL if set. */
3386 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3387 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3388 {
3389 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3390 if (op0)
3391 goto adjust_mode;
3392 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3393 if (op0)
3394 goto adjust_mode;
3395 }
3396 return NULL;
3397 }
3398
3399 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3400
3401 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3402 }
3403 goto adjust_mode;
3404 }
3405
3406 case ERROR_MARK:
3407 return NULL;
3408
3409 /* Vector stuff. For most of these tree codes there are no corresponding rtl codes. */
3410 case REALIGN_LOAD_EXPR:
3411 case REDUC_MAX_EXPR:
3412 case REDUC_MIN_EXPR:
3413 case REDUC_PLUS_EXPR:
3414 case VEC_COND_EXPR:
3415 case VEC_LSHIFT_EXPR:
3416 case VEC_PACK_FIX_TRUNC_EXPR:
3417 case VEC_PACK_SAT_EXPR:
3418 case VEC_PACK_TRUNC_EXPR:
3419 case VEC_RSHIFT_EXPR:
3420 case VEC_UNPACK_FLOAT_HI_EXPR:
3421 case VEC_UNPACK_FLOAT_LO_EXPR:
3422 case VEC_UNPACK_HI_EXPR:
3423 case VEC_UNPACK_LO_EXPR:
3424 case VEC_WIDEN_MULT_HI_EXPR:
3425 case VEC_WIDEN_MULT_LO_EXPR:
3426 case VEC_WIDEN_MULT_EVEN_EXPR:
3427 case VEC_WIDEN_MULT_ODD_EXPR:
3428 case VEC_WIDEN_LSHIFT_HI_EXPR:
3429 case VEC_WIDEN_LSHIFT_LO_EXPR:
3430 case VEC_PERM_EXPR:
3431 return NULL;
3432
3433 /* Misc codes. */
3434 case ADDR_SPACE_CONVERT_EXPR:
3435 case FIXED_CONVERT_EXPR:
3436 case OBJ_TYPE_REF:
3437 case WITH_SIZE_EXPR:
3438 return NULL;
3439
3440 case DOT_PROD_EXPR:
3441 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3442 && SCALAR_INT_MODE_P (mode))
3443 {
3444 op0
3445 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3446 0)))
3447 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3448 inner_mode);
3449 op1
3450 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3451 1)))
3452 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3453 inner_mode);
3454 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3455 return simplify_gen_binary (PLUS, mode, op0, op2);
3456 }
3457 return NULL;
3458
3459 case WIDEN_MULT_EXPR:
3460 case WIDEN_MULT_PLUS_EXPR:
3461 case WIDEN_MULT_MINUS_EXPR:
3462 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3463 && SCALAR_INT_MODE_P (mode))
3464 {
3465 inner_mode = GET_MODE (op0);
3466 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3467 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3468 else
3469 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3470 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3471 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3472 else
3473 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3474 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3475 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3476 return op0;
3477 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3478 return simplify_gen_binary (PLUS, mode, op0, op2);
3479 else
3480 return simplify_gen_binary (MINUS, mode, op2, op0);
3481 }
3482 return NULL;
3483
3484 case MULT_HIGHPART_EXPR:
3485 /* ??? Similar to the above. */
3486 return NULL;
3487
3488 case WIDEN_SUM_EXPR:
3489 case WIDEN_LSHIFT_EXPR:
3490 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3491 && SCALAR_INT_MODE_P (mode))
3492 {
3493 op0
3494 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3495 0)))
3496 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3497 inner_mode);
3498 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3499 ? ASHIFT : PLUS, mode, op0, op1);
3500 }
3501 return NULL;
3502
3503 case FMA_EXPR:
3504 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3505
3506 default:
3507 flag_unsupported:
3508 #ifdef ENABLE_CHECKING
3509 debug_tree (exp);
3510 gcc_unreachable ();
3511 #else
3512 return NULL;
3513 #endif
3514 }
3515 }
3516
3517 /* Return an RTX equivalent to the source bind value of the tree expression
3518 EXP. */
3519
3520 static rtx
3521 expand_debug_source_expr (tree exp)
3522 {
3523 rtx op0 = NULL_RTX;
3524 enum machine_mode mode = VOIDmode, inner_mode;
3525
3526 switch (TREE_CODE (exp))
3527 {
3528 case PARM_DECL:
3529 {
3530 mode = DECL_MODE (exp);
3531 op0 = expand_debug_parm_decl (exp);
3532 if (op0)
3533 break;
3534 /* Check whether this is an argument that has been completely
3535 optimized out. */
3536 if (!DECL_RTL_SET_P (exp)
3537 && !DECL_INCOMING_RTL (exp)
3538 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3539 {
3540 tree aexp = exp;
3541 if (DECL_ABSTRACT_ORIGIN (exp))
3542 aexp = DECL_ABSTRACT_ORIGIN (exp);
3543 if (DECL_CONTEXT (aexp)
3544 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3545 {
3546 VEC(tree, gc) **debug_args;
3547 unsigned int ix;
3548 tree ddecl;
3549 #ifdef ENABLE_CHECKING
3550 tree parm;
3551 for (parm = DECL_ARGUMENTS (current_function_decl);
3552 parm; parm = DECL_CHAIN (parm))
3553 gcc_assert (parm != exp
3554 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3555 #endif
3556 debug_args = decl_debug_args_lookup (current_function_decl);
3557 if (debug_args != NULL)
3558 {
3559 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3560 ix += 2)
3561 if (ddecl == aexp)
3562 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3563 }
3564 }
3565 }
3566 break;
3567 }
3568 default:
3569 break;
3570 }
3571
3572 if (op0 == NULL_RTX)
3573 return NULL_RTX;
3574
3575 inner_mode = GET_MODE (op0);
3576 if (mode == inner_mode)
3577 return op0;
3578
3579 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3580 {
3581 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3582 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3583 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3584 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3585 else
3586 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3587 }
3588 else if (FLOAT_MODE_P (mode))
3589 gcc_unreachable ();
3590 else if (FLOAT_MODE_P (inner_mode))
3591 {
3592 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3593 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3594 else
3595 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3596 }
3597 else if (CONSTANT_P (op0)
3598 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3599 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3600 subreg_lowpart_offset (mode, inner_mode));
3601 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3602 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3603 else
3604 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3605
3606 return op0;
3607 }
3608
3609 /* Expand the _LOCs in debug insns. We run this after expanding all
3610 regular insns, so that any variables referenced in the function
3611 will have their DECL_RTLs set. */
3612
3613 static void
3614 expand_debug_locations (void)
3615 {
3616 rtx insn;
3617 rtx last = get_last_insn ();
3618 int save_strict_alias = flag_strict_aliasing;
3619
3620 /* New alias sets while setting up memory attributes cause
3621 -fcompare-debug failures, even though they don't bring about any
3622 codegen changes. */
3623 flag_strict_aliasing = 0;
3624
3625 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3626 if (DEBUG_INSN_P (insn))
3627 {
3628 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3629 rtx val;
3630 enum machine_mode mode;
3631
3632 if (value == NULL_TREE)
3633 val = NULL_RTX;
3634 else
3635 {
3636 if (INSN_VAR_LOCATION_STATUS (insn)
3637 == VAR_INIT_STATUS_UNINITIALIZED)
3638 val = expand_debug_source_expr (value);
3639 else
3640 val = expand_debug_expr (value);
3641 gcc_assert (last == get_last_insn ());
3642 }
3643
3644 if (!val)
3645 val = gen_rtx_UNKNOWN_VAR_LOC ();
3646 else
3647 {
3648 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3649
3650 gcc_assert (mode == GET_MODE (val)
3651 || (GET_MODE (val) == VOIDmode
3652 && (CONST_INT_P (val)
3653 || GET_CODE (val) == CONST_FIXED
3654 || CONST_DOUBLE_AS_INT_P (val)
3655 || GET_CODE (val) == LABEL_REF)));
3656 }
3657
3658 INSN_VAR_LOCATION_LOC (insn) = val;
3659 }
3660
3661 flag_strict_aliasing = save_strict_alias;
3662 }
3663
3664 /* Expand basic block BB from GIMPLE trees to RTL. */
3665
3666 static basic_block
3667 expand_gimple_basic_block (basic_block bb)
3668 {
3669 gimple_stmt_iterator gsi;
3670 gimple_seq stmts;
3671 gimple stmt = NULL;
3672 rtx note, last;
3673 edge e;
3674 edge_iterator ei;
3675 void **elt;
3676
3677 if (dump_file)
3678 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3679 bb->index);
3680
3681 /* Note that since we are now transitioning from GIMPLE to RTL, we
3682 cannot use the gsi_*_bb() routines because they expect the basic
3683 block to be in GIMPLE, instead of RTL. Therefore, we need to
3684 access the BB sequence directly. */
3685 stmts = bb_seq (bb);
3686 bb->il.gimple.seq = NULL;
3687 bb->il.gimple.phi_nodes = NULL;
3688 rtl_profile_for_bb (bb);
3689 init_rtl_bb_info (bb);
3690 bb->flags |= BB_RTL;
3691
3692 /* Remove the RETURN_EXPR if we may fall through to the exit
3693 instead. */
3694 gsi = gsi_last (stmts);
3695 if (!gsi_end_p (gsi)
3696 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3697 {
3698 gimple ret_stmt = gsi_stmt (gsi);
3699
3700 gcc_assert (single_succ_p (bb));
3701 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3702
3703 if (bb->next_bb == EXIT_BLOCK_PTR
3704 && !gimple_return_retval (ret_stmt))
3705 {
3706 gsi_remove (&gsi, false);
3707 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3708 }
3709 }
3710
3711 gsi = gsi_start (stmts);
3712 if (!gsi_end_p (gsi))
3713 {
3714 stmt = gsi_stmt (gsi);
3715 if (gimple_code (stmt) != GIMPLE_LABEL)
3716 stmt = NULL;
3717 }
3718
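  /* An RTL label for this block may already have been created and recorded
     in lab_rtx_for_bb (by label_rtx_for_bb, for instance when an earlier
     block branched forward to this one); in that case it is emitted below
     rather than creating a fresh label.  */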
3719 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3720
3721 if (stmt || elt)
3722 {
3723 last = get_last_insn ();
3724
3725 if (stmt)
3726 {
3727 expand_gimple_stmt (stmt);
3728 gsi_next (&gsi);
3729 }
3730
3731 if (elt)
3732 emit_label ((rtx) *elt);
3733
3734       /* Java emits line number notes at the top of labels.
3735 ??? Make this go away once line number notes are obsoleted. */
3736 BB_HEAD (bb) = NEXT_INSN (last);
3737 if (NOTE_P (BB_HEAD (bb)))
3738 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3739 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3740
3741 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3742 }
3743 else
3744 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3745
3746 NOTE_BASIC_BLOCK (note) = bb;
3747
3748 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3749 {
3750 basic_block new_bb;
3751
3752 stmt = gsi_stmt (gsi);
3753
3754 /* If this statement is a non-debug one, and we generate debug
3755 insns, then this one might be the last real use of a TERed
3756 SSA_NAME, but where there are still some debug uses further
3757 down. Expanding the current SSA name in such further debug
3758 uses by their RHS might lead to wrong debug info, as coalescing
3759 might make the operands of such RHS be placed into the same
3760 pseudo as something else. Like so:
3761 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3762 use(a_1);
3763 a_2 = ...
3764 #DEBUG ... => a_1
3765 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3766 	 If we now expanded a_1 by its RHS (a_0 + 1) in the debug use,
3767 the write to a_2 would actually have clobbered the place which
3768 formerly held a_0.
3769
3770 So, instead of that, we recognize the situation, and generate
3771 debug temporaries at the last real use of TERed SSA names:
3772 a_1 = a_0 + 1;
3773 #DEBUG #D1 => a_1
3774 use(a_1);
3775 a_2 = ...
3776 #DEBUG ... => #D1
3777 */
3778 if (MAY_HAVE_DEBUG_INSNS
3779 && SA.values
3780 && !is_gimple_debug (stmt))
3781 {
3782 ssa_op_iter iter;
3783 tree op;
3784 gimple def;
3785
3786 location_t sloc = get_curr_insn_source_location ();
3787 tree sblock = get_curr_insn_block ();
3788
3789 /* Look for SSA names that have their last use here (TERed
3790 names always have only one real use). */
3791 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3792 if ((def = get_gimple_for_ssa_name (op)))
3793 {
3794 imm_use_iterator imm_iter;
3795 use_operand_p use_p;
3796 bool have_debug_uses = false;
3797
3798 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3799 {
3800 if (gimple_debug_bind_p (USE_STMT (use_p)))
3801 {
3802 have_debug_uses = true;
3803 break;
3804 }
3805 }
3806
3807 if (have_debug_uses)
3808 {
3809 		  /* OP is a TERed SSA name, with DEF its defining
3810 		     statement, and OP is used in further debug
3811 instructions. Generate a debug temporary, and
3812 replace all uses of OP in debug insns with that
3813 temporary. */
3814 gimple debugstmt;
3815 tree value = gimple_assign_rhs_to_tree (def);
3816 tree vexpr = make_node (DEBUG_EXPR_DECL);
3817 rtx val;
3818 enum machine_mode mode;
3819
3820 set_curr_insn_source_location (gimple_location (def));
3821 set_curr_insn_block (gimple_block (def));
3822
3823 DECL_ARTIFICIAL (vexpr) = 1;
3824 TREE_TYPE (vexpr) = TREE_TYPE (value);
3825 if (DECL_P (value))
3826 mode = DECL_MODE (value);
3827 else
3828 mode = TYPE_MODE (TREE_TYPE (value));
3829 DECL_MODE (vexpr) = mode;
3830
3831 val = gen_rtx_VAR_LOCATION
3832 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3833
3834 emit_debug_insn (val);
3835
3836 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3837 {
3838 if (!gimple_debug_bind_p (debugstmt))
3839 continue;
3840
3841 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3842 SET_USE (use_p, vexpr);
3843
3844 update_stmt (debugstmt);
3845 }
3846 }
3847 }
3848 set_curr_insn_source_location (sloc);
3849 set_curr_insn_block (sblock);
3850 }
3851
3852 currently_expanding_gimple_stmt = stmt;
3853
3854 /* Expand this statement, then evaluate the resulting RTL and
3855 fixup the CFG accordingly. */
3856 if (gimple_code (stmt) == GIMPLE_COND)
3857 {
3858 new_bb = expand_gimple_cond (bb, stmt);
3859 if (new_bb)
3860 return new_bb;
3861 }
3862 else if (gimple_debug_bind_p (stmt))
3863 {
3864 location_t sloc = get_curr_insn_source_location ();
3865 tree sblock = get_curr_insn_block ();
3866 gimple_stmt_iterator nsi = gsi;
3867
3868 for (;;)
3869 {
3870 tree var = gimple_debug_bind_get_var (stmt);
3871 tree value;
3872 rtx val;
3873 enum machine_mode mode;
3874
3875 if (TREE_CODE (var) != DEBUG_EXPR_DECL
3876 && TREE_CODE (var) != LABEL_DECL
3877 && !target_for_debug_bind (var))
3878 goto delink_debug_stmt;
3879
3880 if (gimple_debug_bind_has_value_p (stmt))
3881 value = gimple_debug_bind_get_value (stmt);
3882 else
3883 value = NULL_TREE;
3884
3885 last = get_last_insn ();
3886
3887 set_curr_insn_source_location (gimple_location (stmt));
3888 set_curr_insn_block (gimple_block (stmt));
3889
3890 if (DECL_P (var))
3891 mode = DECL_MODE (var);
3892 else
3893 mode = TYPE_MODE (TREE_TYPE (var));
3894
3895 val = gen_rtx_VAR_LOCATION
3896 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3897
3898 emit_debug_insn (val);
3899
3900 if (dump_file && (dump_flags & TDF_DETAILS))
3901 {
3902 /* We can't dump the insn with a TREE where an RTX
3903 is expected. */
3904 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3905 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3906 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3907 }
3908
3909 delink_debug_stmt:
3910 /* In order not to generate too many debug temporaries,
3911 we delink all uses of debug statements we already expanded.
3912 Therefore debug statements between definition and real
3913 use of TERed SSA names will continue to use the SSA name,
3914 and not be replaced with debug temps. */
3915 delink_stmt_imm_use (stmt);
3916
3917 gsi = nsi;
3918 gsi_next (&nsi);
3919 if (gsi_end_p (nsi))
3920 break;
3921 stmt = gsi_stmt (nsi);
3922 if (!gimple_debug_bind_p (stmt))
3923 break;
3924 }
3925
3926 set_curr_insn_source_location (sloc);
3927 set_curr_insn_block (sblock);
3928 }
3929 else if (gimple_debug_source_bind_p (stmt))
3930 {
3931 location_t sloc = get_curr_insn_source_location ();
3932 tree sblock = get_curr_insn_block ();
3933 tree var = gimple_debug_source_bind_get_var (stmt);
3934 tree value = gimple_debug_source_bind_get_value (stmt);
3935 rtx val;
3936 enum machine_mode mode;
3937
3938 last = get_last_insn ();
3939
3940 set_curr_insn_source_location (gimple_location (stmt));
3941 set_curr_insn_block (gimple_block (stmt));
3942
3943 mode = DECL_MODE (var);
3944
3945 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3946 VAR_INIT_STATUS_UNINITIALIZED);
3947
3948 emit_debug_insn (val);
3949
3950 if (dump_file && (dump_flags & TDF_DETAILS))
3951 {
3952 /* We can't dump the insn with a TREE where an RTX
3953 is expected. */
3954 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3955 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3956 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3957 }
3958
3959 set_curr_insn_source_location (sloc);
3960 set_curr_insn_block (sblock);
3961 }
3962 else
3963 {
3964 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3965 {
3966 bool can_fallthru;
3967 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3968 if (new_bb)
3969 {
3970 if (can_fallthru)
3971 bb = new_bb;
3972 else
3973 return new_bb;
3974 }
3975 }
3976 else
3977 {
3978 def_operand_p def_p;
3979 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3980
3981 if (def_p != NULL)
3982 {
3983 /* Ignore this stmt if it is in the list of
3984 replaceable expressions. */
3985 if (SA.values
3986 && bitmap_bit_p (SA.values,
3987 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3988 continue;
3989 }
3990 last = expand_gimple_stmt (stmt);
3991 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3992 }
3993 }
3994 }
3995
3996 currently_expanding_gimple_stmt = NULL;
3997
3998 /* Expand implicit goto and convert goto_locus. */
3999 FOR_EACH_EDGE (e, ei, bb->succs)
4000 {
4001 if (e->goto_locus && e->goto_block)
4002 {
4003 set_curr_insn_source_location (e->goto_locus);
4004 set_curr_insn_block (e->goto_block);
4005 e->goto_locus = curr_insn_locator ();
4006 }
4007 e->goto_block = NULL;
4008 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4009 {
4010 emit_jump (label_rtx_for_bb (e->dest));
4011 e->flags &= ~EDGE_FALLTHRU;
4012 }
4013 }
4014
4015   /* Expanded RTL can create a jump as the last instruction of the block.
4016      Later passes might take this for a jump to the successor and break edge
4017      insertion.  We need to insert a dummy move to prevent this.  PR41440. */
4018 if (single_succ_p (bb)
4019 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4020 && (last = get_last_insn ())
4021 && JUMP_P (last))
4022 {
4023 rtx dummy = gen_reg_rtx (SImode);
4024 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
4025 }
4026
4027 do_pending_stack_adjust ();
4028
4029 /* Find the block tail. The last insn in the block is the insn
4030 before a barrier and/or table jump insn. */
4031 last = get_last_insn ();
4032 if (BARRIER_P (last))
4033 last = PREV_INSN (last);
4034 if (JUMP_TABLE_DATA_P (last))
4035 last = PREV_INSN (PREV_INSN (last));
4036 BB_END (bb) = last;
4037
4038 update_bb_for_insn (bb);
4039
4040 return bb;
4041 }
4042
4043
4044 /* Create a basic block for initialization code. */
4045
4046 static basic_block
4047 construct_init_block (void)
4048 {
4049 basic_block init_block, first_block;
4050 edge e = NULL;
4051 int flags;
4052
4053 /* Multiple entry points not supported yet. */
4054 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4055 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4056 init_rtl_bb_info (EXIT_BLOCK_PTR);
4057 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4058 EXIT_BLOCK_PTR->flags |= BB_RTL;
4059
4060 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4061
4062   /* When the entry edge points to the first basic block, we don't need a
4063      jump; otherwise we have to jump to the proper target.  */
4064 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4065 {
4066 tree label = gimple_block_label (e->dest);
4067
4068 emit_jump (label_rtx (label));
4069 flags = 0;
4070 }
4071 else
4072 flags = EDGE_FALLTHRU;
4073
4074 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4075 get_last_insn (),
4076 ENTRY_BLOCK_PTR);
4077 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4078 init_block->count = ENTRY_BLOCK_PTR->count;
4079 if (current_loops && ENTRY_BLOCK_PTR->loop_father)
4080 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
4081 if (e)
4082 {
4083 first_block = e->dest;
4084 redirect_edge_succ (e, init_block);
4085 e = make_edge (init_block, first_block, flags);
4086 }
4087 else
4088 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4089 e->probability = REG_BR_PROB_BASE;
4090 e->count = ENTRY_BLOCK_PTR->count;
4091
4092 update_bb_for_insn (init_block);
4093 return init_block;
4094 }
4095
4096 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4097 found in the block tree. */
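/* For example, given a block tree of the form

     DECL_INITIAL (fndecl)          <- level 0
       BLOCK for { int i; ... }     <- level 1
         BLOCK for { int j; ... }   <- level 2

   the respective BLOCK_NUMBERs become 0, 1 and 2, and sibling blocks
   reached through BLOCK_CHAIN get the same depth as each other.  */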
4098
4099 static void
4100 set_block_levels (tree block, int level)
4101 {
4102 while (block)
4103 {
4104 BLOCK_NUMBER (block) = level;
4105 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4106 block = BLOCK_CHAIN (block);
4107 }
4108 }
4109
4110 /* Create a block containing landing pads and similar stuff. */
4111
4112 static void
4113 construct_exit_block (void)
4114 {
4115 rtx head = get_last_insn ();
4116 rtx end;
4117 basic_block exit_block;
4118 edge e, e2;
4119 unsigned ix;
4120 edge_iterator ei;
4121 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4122
4123 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4124
4125 /* Make sure the locus is set to the end of the function, so that
4126 epilogue line numbers and warnings are set properly. */
4127 if (cfun->function_end_locus != UNKNOWN_LOCATION)
4128 input_location = cfun->function_end_locus;
4129
4130 /* The following insns belong to the top scope. */
4131 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4132
4133 /* Generate rtl for function exit. */
4134 expand_function_end ();
4135
4136 end = get_last_insn ();
4137 if (head == end)
4138 return;
4139   /* While emitting the function end we may have moved the end of the
4140      last basic block.  */
4141 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4142 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4143 head = NEXT_INSN (head);
4144 exit_block = create_basic_block (NEXT_INSN (head), end,
4145 EXIT_BLOCK_PTR->prev_bb);
4146 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4147 exit_block->count = EXIT_BLOCK_PTR->count;
4148 if (current_loops && EXIT_BLOCK_PTR->loop_father)
4149 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
4150
4151 ix = 0;
4152 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4153 {
4154 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4155 if (!(e->flags & EDGE_ABNORMAL))
4156 redirect_edge_succ (e, exit_block);
4157 else
4158 ix++;
4159 }
4160
4161 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4162 e->probability = REG_BR_PROB_BASE;
4163 e->count = EXIT_BLOCK_PTR->count;
4164 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4165 if (e2 != e)
4166 {
4167 e->count -= e2->count;
4168 exit_block->count -= e2->count;
4169 exit_block->frequency -= EDGE_FREQUENCY (e2);
4170 }
4171 if (e->count < 0)
4172 e->count = 0;
4173 if (exit_block->count < 0)
4174 exit_block->count = 0;
4175 if (exit_block->frequency < 0)
4176 exit_block->frequency = 0;
4177 update_bb_for_insn (exit_block);
4178 }
4179
4180 /* Helper function for discover_nonconstant_array_refs.
4181    Look for ARRAY_REF nodes with non-constant indexes and mark their
4182    bases addressable.  */
4183
4184 static tree
4185 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4186 void *data ATTRIBUTE_UNUSED)
4187 {
4188 tree t = *tp;
4189
4190 if (IS_TYPE_OR_DECL_P (t))
4191 *walk_subtrees = 0;
4192 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4193 {
4194 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4195 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4196 && (!TREE_OPERAND (t, 2)
4197 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4198 || (TREE_CODE (t) == COMPONENT_REF
4199 && (!TREE_OPERAND (t,2)
4200 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4201 || TREE_CODE (t) == BIT_FIELD_REF
4202 || TREE_CODE (t) == REALPART_EXPR
4203 || TREE_CODE (t) == IMAGPART_EXPR
4204 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4205 || CONVERT_EXPR_P (t))
4206 t = TREE_OPERAND (t, 0);
4207
4208 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4209 {
4210 t = get_base_address (t);
4211 if (t && DECL_P (t)
4212 && DECL_MODE (t) != BLKmode)
4213 TREE_ADDRESSABLE (t) = 1;
4214 }
4215
4216 *walk_subtrees = 0;
4217 }
4218
4219 return NULL_TREE;
4220 }
4221
4222 /* RTL expansion is not able to compile array references with variable
4223    offsets for arrays stored in a single register.  Discover such
4224    expressions and mark the variables addressable to avoid this
4225 scenario. */
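/* For example, assuming a target where the whole array fits in a scalar
   mode:

     int f (int i)
     {
       int a[2] = { 1, 2 };
       return a[i];
     }

   "a" could otherwise be kept in a single register; because a[i] uses the
   non-constant index "i", the walk below marks "a" TREE_ADDRESSABLE so it
   is given a stack slot that can be indexed at run time.  */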
4226
4227 static void
4228 discover_nonconstant_array_refs (void)
4229 {
4230 basic_block bb;
4231 gimple_stmt_iterator gsi;
4232
4233 FOR_EACH_BB (bb)
4234 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4235 {
4236 gimple stmt = gsi_stmt (gsi);
4237 if (!is_gimple_debug (stmt))
4238 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
4239 }
4240 }
4241
4242 /* This function sets crtl->args.internal_arg_pointer to a virtual
4243    register if DRAP is needed.  The local register allocator will replace
4244 virtual_incoming_args_rtx with the virtual register. */
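/* Roughly: when the stack is realigned, incoming arguments can no longer be
   addressed at a fixed offset from the realigned stack pointer, so a
   separate DRAP (dynamic realign argument pointer) register may be needed;
   alloca and nonlocal gotos, checked below, force it.  The backend's
   get_drap_rtx hook returns that register, or NULL when no DRAP is needed,
   in which case crtl->args.internal_arg_pointer is left alone.  */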
4245
4246 static void
4247 expand_stack_alignment (void)
4248 {
4249 rtx drap_rtx;
4250 unsigned int preferred_stack_boundary;
4251
4252 if (! SUPPORTS_STACK_ALIGNMENT)
4253 return;
4254
4255 if (cfun->calls_alloca
4256 || cfun->has_nonlocal_label
4257 || crtl->has_nonlocal_goto)
4258 crtl->need_drap = true;
4259
4260 /* Call update_stack_boundary here again to update incoming stack
4261 boundary. It may set incoming stack alignment to a different
4262 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4263 use the minimum incoming stack alignment to check if it is OK
4264 to perform sibcall optimization since sibcall optimization will
4265 only align the outgoing stack to incoming stack boundary. */
4266 if (targetm.calls.update_stack_boundary)
4267 targetm.calls.update_stack_boundary ();
4268
4269 /* The incoming stack frame has to be aligned at least at
4270 parm_stack_boundary. */
4271 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4272
4273 /* Update crtl->stack_alignment_estimated and use it later to align
4274 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4275 exceptions since callgraph doesn't collect incoming stack alignment
4276 in this case. */
4277 if (cfun->can_throw_non_call_exceptions
4278 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4279 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4280 else
4281 preferred_stack_boundary = crtl->preferred_stack_boundary;
4282 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4283 crtl->stack_alignment_estimated = preferred_stack_boundary;
4284 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4285 crtl->stack_alignment_needed = preferred_stack_boundary;
4286
4287 gcc_assert (crtl->stack_alignment_needed
4288 <= crtl->stack_alignment_estimated);
4289
4290 crtl->stack_realign_needed
4291 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4292 crtl->stack_realign_tried = crtl->stack_realign_needed;
4293
4294 crtl->stack_realign_processed = true;
4295
4296 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4297 alignment. */
4298 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4299 drap_rtx = targetm.calls.get_drap_rtx ();
4300
4301 /* stack_realign_drap and drap_rtx must match. */
4302 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4303
4304 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4305 if (NULL != drap_rtx)
4306 {
4307 crtl->args.internal_arg_pointer = drap_rtx;
4308
4309 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4310 needed. */
4311 fixup_tail_calls ();
4312 }
4313 }
4314
4315 /* Translate the intermediate representation contained in the CFG
4316 from GIMPLE trees to RTL.
4317
4318 We do conversion per basic block and preserve/update the tree CFG.
4319 This implies we have to do some magic as the CFG can simultaneously
4320 consist of basic blocks containing RTL and GIMPLE trees. This can
4321 confuse the CFG hooks, so be careful to not manipulate CFG during
4322 the expansion. */
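/* In outline, the steps below are: take the function out of SSA form and
   set up the partition map, expand the variables and the function start,
   expand every basic block (and then the debug insn locations), construct
   the init and exit blocks, finish EH generation, rebuild jump labels,
   discover sub-basic-blocks and clean up the CFG.  */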
4323
4324 static unsigned int
4325 gimple_expand_cfg (void)
4326 {
4327 basic_block bb, init_block;
4328 sbitmap blocks;
4329 edge_iterator ei;
4330 edge e;
4331 rtx var_seq;
4332 unsigned i;
4333
4334 timevar_push (TV_OUT_OF_SSA);
4335 rewrite_out_of_ssa (&SA);
4336 timevar_pop (TV_OUT_OF_SSA);
4337 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
4338
4339 /* Make sure all values used by the optimization passes have sane
4340 defaults. */
4341 reg_renumber = 0;
4342
4343 /* Some backends want to know that we are expanding to RTL. */
4344 currently_expanding_to_rtl = 1;
4345 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
4346 free_dominance_info (CDI_DOMINATORS);
4347
4348 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4349
4350 insn_locators_alloc ();
4351 if (!DECL_IS_BUILTIN (current_function_decl))
4352 {
4353 /* Eventually, all FEs should explicitly set function_start_locus. */
4354 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4355 set_curr_insn_source_location
4356 (DECL_SOURCE_LOCATION (current_function_decl));
4357 else
4358 set_curr_insn_source_location (cfun->function_start_locus);
4359 }
4360 else
4361 set_curr_insn_source_location (UNKNOWN_LOCATION);
4362 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4363 prologue_locator = curr_insn_locator ();
4364
4365 #ifdef INSN_SCHEDULING
4366 init_sched_attrs ();
4367 #endif
4368
4369   /* Make sure the first insn is a note even if we don't want linenums.
4370 This makes sure the first insn will never be deleted.
4371 Also, final expects a note to appear there. */
4372 emit_note (NOTE_INSN_DELETED);
4373
4374 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4375 discover_nonconstant_array_refs ();
4376
4377 targetm.expand_to_rtl_hook ();
4378 crtl->stack_alignment_needed = STACK_BOUNDARY;
4379 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4380 crtl->stack_alignment_estimated = 0;
4381 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4382 cfun->cfg->max_jumptable_ents = 0;
4383
4384   /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
4385      of the function section at expansion time to predict the distance of calls.  */
4386 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4387
4388 /* Expand the variables recorded during gimple lowering. */
4389 timevar_push (TV_VAR_EXPAND);
4390 start_sequence ();
4391
4392 expand_used_vars ();
4393
4394 var_seq = get_insns ();
4395 end_sequence ();
4396 timevar_pop (TV_VAR_EXPAND);
4397
4398 /* Honor stack protection warnings. */
4399 if (warn_stack_protect)
4400 {
4401 if (cfun->calls_alloca)
4402 warning (OPT_Wstack_protector,
4403 "stack protector not protecting local variables: "
4404 "variable length buffer");
4405 if (has_short_buffer && !crtl->stack_protect_guard)
4406 warning (OPT_Wstack_protector,
4407 "stack protector not protecting function: "
4408 "all local arrays are less than %d bytes long",
4409 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4410 }
4411
4412 /* Set up parameters and prepare for return, for the function. */
4413 expand_function_start (current_function_decl);
4414
4415 /* If we emitted any instructions for setting up the variables,
4416 emit them before the FUNCTION_START note. */
4417 if (var_seq)
4418 {
4419 emit_insn_before (var_seq, parm_birth_insn);
4420
4421 /* In expand_function_end we'll insert the alloca save/restore
4422 	 before parm_birth_insn.  We've just inserted an alloca call.
4423 Adjust the pointer to match. */
4424 parm_birth_insn = var_seq;
4425 }
4426
4427 /* Now that we also have the parameter RTXs, copy them over to our
4428 partitions. */
4429 for (i = 0; i < SA.map->num_partitions; i++)
4430 {
4431 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4432
4433 if (TREE_CODE (var) != VAR_DECL
4434 && !SA.partition_to_pseudo[i])
4435 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4436 gcc_assert (SA.partition_to_pseudo[i]);
4437
4438 /* If this decl was marked as living in multiple places, reset
4439 this now to NULL. */
4440 if (DECL_RTL_IF_SET (var) == pc_rtx)
4441 SET_DECL_RTL (var, NULL);
4442
4443 /* Some RTL parts really want to look at DECL_RTL(x) when x
4444 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4445 	 SET_DECL_RTL here to make this available, but that would mean
4446 	 selecting one of the potentially many RTLs for one DECL.  Instead
4447 of doing that we simply reset the MEM_EXPR of the RTL in question,
4448 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4449 if (!DECL_RTL_SET_P (var))
4450 {
4451 if (MEM_P (SA.partition_to_pseudo[i]))
4452 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4453 }
4454 }
4455
4456 /* If we have a class containing differently aligned pointers
4457 we need to merge those into the corresponding RTL pointer
4458 alignment. */
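  /* Since mark_reg_pointer never raises the recorded alignment of an
     already-marked pseudo, the pseudo of each partition ends up with the
     most conservative alignment among the pointers coalesced into it.  */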
4459 for (i = 1; i < num_ssa_names; i++)
4460 {
4461 tree name = ssa_name (i);
4462 int part;
4463 rtx r;
4464
4465 if (!name
4466 /* We might have generated new SSA names in
4467 	   update_alias_info_with_stack_vars.  They will have NULL
4468 	   defining statements, and won't be part of the partitioning,
4469 so ignore those. */
4470 || !SSA_NAME_DEF_STMT (name))
4471 continue;
4472 part = var_to_partition (SA.map, name);
4473 if (part == NO_PARTITION)
4474 continue;
4475
4476 /* Adjust all partition members to get the underlying decl of
4477 the representative which we might have created in expand_one_var. */
4478 if (SSA_NAME_VAR (name) == NULL_TREE)
4479 {
4480 tree leader = partition_to_var (SA.map, part);
4481 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
4482 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
4483 }
4484 if (!POINTER_TYPE_P (TREE_TYPE (name)))
4485 continue;
4486
4487 r = SA.partition_to_pseudo[part];
4488 if (REG_P (r))
4489 mark_reg_pointer (r, get_pointer_alignment (name));
4490 }
4491
4492 /* If this function is `main', emit a call to `__main'
4493 to run global initializers, etc. */
4494 if (DECL_NAME (current_function_decl)
4495 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4496 && DECL_FILE_SCOPE_P (current_function_decl))
4497 expand_main_function ();
4498
4499 /* Initialize the stack_protect_guard field. This must happen after the
4500 call to __main (if any) so that the external decl is initialized. */
4501 if (crtl->stack_protect_guard)
4502 stack_protect_prologue ();
4503
4504 expand_phi_nodes (&SA);
4505
4506 /* Register rtl specific functions for cfg. */
4507 rtl_register_cfg_hooks ();
4508
4509 init_block = construct_init_block ();
4510
4511 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4512 remaining edges later. */
4513 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4514 e->flags &= ~EDGE_EXECUTABLE;
4515
4516 lab_rtx_for_bb = pointer_map_create ();
4517 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4518 bb = expand_gimple_basic_block (bb);
4519
4520 if (MAY_HAVE_DEBUG_INSNS)
4521 expand_debug_locations ();
4522
4523 /* Free stuff we no longer need after GIMPLE optimizations. */
4524 free_dominance_info (CDI_DOMINATORS);
4525 free_dominance_info (CDI_POST_DOMINATORS);
4526 delete_tree_cfg_annotations ();
4527
4528 timevar_push (TV_OUT_OF_SSA);
4529 finish_out_of_ssa (&SA);
4530 timevar_pop (TV_OUT_OF_SSA);
4531
4532 timevar_push (TV_POST_EXPAND);
4533 /* We are no longer in SSA form. */
4534 cfun->gimple_df->in_ssa_p = false;
4535 if (current_loops)
4536 loops_state_clear (LOOP_CLOSED_SSA);
4537
4538   /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4539 conservatively to true until they are all profile aware. */
4540 pointer_map_destroy (lab_rtx_for_bb);
4541 free_histograms ();
4542
4543 construct_exit_block ();
4544 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4545 insn_locators_finalize ();
4546
4547 /* Zap the tree EH table. */
4548 set_eh_throw_stmt_table (cfun, NULL);
4549
4550   /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4551      to split edges, which edge insertion might do.  */
4552 rebuild_jump_labels (get_insns ());
4553
4554 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4555 {
4556 edge e;
4557 edge_iterator ei;
4558 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4559 {
4560 if (e->insns.r)
4561 {
4562 rebuild_jump_labels_chain (e->insns.r);
4563 /* Avoid putting insns before parm_birth_insn. */
4564 if (e->src == ENTRY_BLOCK_PTR
4565 && single_succ_p (ENTRY_BLOCK_PTR)
4566 && parm_birth_insn)
4567 {
4568 rtx insns = e->insns.r;
4569 e->insns.r = NULL_RTX;
4570 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4571 }
4572 else
4573 commit_one_edge_insertion (e);
4574 }
4575 else
4576 ei_next (&ei);
4577 }
4578 }
4579
4580 /* We're done expanding trees to RTL. */
4581 currently_expanding_to_rtl = 0;
4582
4583 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4584 {
4585 edge e;
4586 edge_iterator ei;
4587 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4588 {
4589 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4590 e->flags &= ~EDGE_EXECUTABLE;
4591
4592 /* At the moment not all abnormal edges match the RTL
4593 representation. It is safe to remove them here as
4594 find_many_sub_basic_blocks will rediscover them.
4595 In the future we should get this fixed properly. */
4596 if ((e->flags & EDGE_ABNORMAL)
4597 && !(e->flags & EDGE_SIBCALL))
4598 remove_edge (e);
4599 else
4600 ei_next (&ei);
4601 }
4602 }
4603
4604 blocks = sbitmap_alloc (last_basic_block);
4605 sbitmap_ones (blocks);
4606 find_many_sub_basic_blocks (blocks);
4607 sbitmap_free (blocks);
4608 purge_all_dead_edges ();
4609
4610 expand_stack_alignment ();
4611
4612 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
4613 function. */
4614 if (crtl->tail_call_emit)
4615 fixup_tail_calls ();
4616
4617 /* After initial rtl generation, call back to finish generating
4618 exception support code. We need to do this before cleaning up
4619 the CFG as the code does not expect dead landing pads. */
4620 if (cfun->eh->region_tree != NULL)
4621 finish_eh_generation ();
4622
4623 /* Remove unreachable blocks, otherwise we cannot compute dominators
4624 which are needed for loop state verification. As a side-effect
4625 this also compacts blocks.
4626 ??? We cannot remove trivially dead insns here as for example
4627 the DRAP reg on i?86 is not magically live at this point.
4628 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
4629 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4630
4631 #ifdef ENABLE_CHECKING
4632 verify_flow_info ();
4633 #endif
4634
4635 /* Initialize pseudos allocated for hard registers. */
4636 emit_initial_value_sets ();
4637
4638 /* And finally unshare all RTL. */
4639 unshare_all_rtl ();
4640
4641 /* There's no need to defer outputting this function any more; we
4642 know we want to output it. */
4643 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4644
4645 /* Now that we're done expanding trees to RTL, we shouldn't have any
4646 more CONCATs anywhere. */
4647 generating_concat_p = 0;
4648
4649 if (dump_file)
4650 {
4651 fprintf (dump_file,
4652 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4653 /* And the pass manager will dump RTL for us. */
4654 }
4655
4656 /* If we're emitting a nested function, make sure its parent gets
4657 emitted as well. Doing otherwise confuses debug info. */
4658 {
4659 tree parent;
4660 for (parent = DECL_CONTEXT (current_function_decl);
4661 parent != NULL_TREE;
4662 parent = get_containing_scope (parent))
4663 if (TREE_CODE (parent) == FUNCTION_DECL)
4664 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4665 }
4666
4667 /* We are now committed to emitting code for this function. Do any
4668      preparation, such as emitting abstract debug info for the inline
4669      function before it gets mangled by optimization.  */
4670 if (cgraph_function_possibly_inlined_p (current_function_decl))
4671 (*debug_hooks->outlining_inline_function) (current_function_decl);
4672
4673 TREE_ASM_WRITTEN (current_function_decl) = 1;
4674
4675 /* After expanding, the return labels are no longer needed. */
4676 return_label = NULL;
4677 naked_return_label = NULL;
4678
4679 /* After expanding, the tm_restart map is no longer needed. */
4680 if (cfun->gimple_df->tm_restart)
4681 {
4682 htab_delete (cfun->gimple_df->tm_restart);
4683 cfun->gimple_df->tm_restart = NULL;
4684 }
4685
4686 /* Tag the blocks with a depth number so that change_scope can find
4687 the common parent easily. */
4688 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4689 default_rtl_profile ();
4690
4691 timevar_pop (TV_POST_EXPAND);
4692
4693 return 0;
4694 }
4695
4696 struct rtl_opt_pass pass_expand =
4697 {
4698 {
4699 RTL_PASS,
4700 "expand", /* name */
4701 NULL, /* gate */
4702 gimple_expand_cfg, /* execute */
4703 NULL, /* sub */
4704 NULL, /* next */
4705 0, /* static_pass_number */
4706 TV_EXPAND, /* tv_id */
4707 PROP_ssa | PROP_gimple_leh | PROP_cfg
4708 | PROP_gimple_lcx, /* properties_required */
4709 PROP_rtl, /* properties_provided */
4710 PROP_ssa | PROP_trees, /* properties_destroyed */
4711 TODO_verify_ssa | TODO_verify_flow
4712 | TODO_verify_stmts, /* todo_flags_start */
4713 TODO_ggc_collect /* todo_flags_finish */
4714 }
4715 };