gcc/cfgexpand.c
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
33 #include "timevar.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
36 #include "except.h"
37 #include "flags.h"
38 #include "diagnostic.h"
39 #include "tree-pretty-print.h"
40 #include "gimple-pretty-print.h"
41 #include "toplev.h"
42 #include "debug.h"
43 #include "params.h"
44 #include "tree-inline.h"
45 #include "value-prof.h"
46 #include "target.h"
47 #include "ssaexpand.h"
48 #include "bitmap.h"
49 #include "sbitmap.h"
50 #include "insn-attr.h" /* For INSN_SCHEDULING. */
51
52 /* This variable holds information helping the rewriting of SSA trees
53 into RTL. */
54 struct ssaexpand SA;
55
56 /* This variable holds the currently expanded gimple statement for purposes
57 of communicating the profile info to the builtin expanders. */
58 gimple currently_expanding_gimple_stmt;
59
60 static rtx expand_debug_expr (tree);
61
62 /* Return an expression tree corresponding to the RHS of GIMPLE
63 statement STMT. */
64
65 tree
66 gimple_assign_rhs_to_tree (gimple stmt)
67 {
68 tree t;
69 enum gimple_rhs_class grhs_class;
70
71 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
72
73 if (grhs_class == GIMPLE_TERNARY_RHS)
74 t = build3 (gimple_assign_rhs_code (stmt),
75 TREE_TYPE (gimple_assign_lhs (stmt)),
76 gimple_assign_rhs1 (stmt),
77 gimple_assign_rhs2 (stmt),
78 gimple_assign_rhs3 (stmt));
79 else if (grhs_class == GIMPLE_BINARY_RHS)
80 t = build2 (gimple_assign_rhs_code (stmt),
81 TREE_TYPE (gimple_assign_lhs (stmt)),
82 gimple_assign_rhs1 (stmt),
83 gimple_assign_rhs2 (stmt));
84 else if (grhs_class == GIMPLE_UNARY_RHS)
85 t = build1 (gimple_assign_rhs_code (stmt),
86 TREE_TYPE (gimple_assign_lhs (stmt)),
87 gimple_assign_rhs1 (stmt));
88 else if (grhs_class == GIMPLE_SINGLE_RHS)
89 {
90 t = gimple_assign_rhs1 (stmt);
91 /* Avoid modifying this tree in place below. */
92 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
93 && gimple_location (stmt) != EXPR_LOCATION (t))
94 || (gimple_block (stmt)
95 && currently_expanding_to_rtl
96 && EXPR_P (t)
97 && gimple_block (stmt) != TREE_BLOCK (t)))
98 t = copy_node (t);
99 }
100 else
101 gcc_unreachable ();
102
103 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
104 SET_EXPR_LOCATION (t, gimple_location (stmt));
105 if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
106 TREE_BLOCK (t) = gimple_block (stmt);
107
108 return t;
109 }
110
111
112 #ifndef STACK_ALIGNMENT_NEEDED
113 #define STACK_ALIGNMENT_NEEDED 1
114 #endif
115
116 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
117
118 /* Associate declaration T with storage space X. If T is not an
119 SSA name this is exactly SET_DECL_RTL; otherwise make the
120 partition of T associated with X. */
121 static inline void
122 set_rtl (tree t, rtx x)
123 {
124 if (TREE_CODE (t) == SSA_NAME)
125 {
126 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
127 if (x && !MEM_P (x))
128 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
129 /* For the benefit of debug information at -O0 (where vartracking
130 doesn't run) record the place also in the base DECL if it's
131 a normal variable (not a parameter). */
132 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
133 {
134 tree var = SSA_NAME_VAR (t);
135 /* If we don't yet have something recorded, just record it now. */
136 if (!DECL_RTL_SET_P (var))
137 SET_DECL_RTL (var, x);
138 /* If we have it set already to "multiple places" don't
139 change this. */
140 else if (DECL_RTL (var) == pc_rtx)
141 ;
142 /* If we have something recorded and it's not the same place
143 as we want to record now, we have multiple partitions for the
144 same base variable, with different places. We can't just
145 randomly choose one, hence we have to say that we don't know. This
146 This only happens with optimization, and there var-tracking
147 will figure out the right thing. */
148 else if (DECL_RTL (var) != x)
149 SET_DECL_RTL (var, pc_rtx);
150 }
151 }
152 else
153 SET_DECL_RTL (t, x);
154 }
155
156 /* This structure holds data relevant to one variable that will be
157 placed in a stack slot. */
158 struct stack_var
159 {
160 /* The Variable. */
161 tree decl;
162
163 /* Initially, the size of the variable. Later, the size of the partition,
164 if this variable becomes its partition's representative. */
165 HOST_WIDE_INT size;
166
167 /* The *byte* alignment required for this variable. Or, as with the
168 size, the alignment for this partition. */
169 unsigned int alignb;
170
171 /* The partition representative. */
172 size_t representative;
173
174 /* The next stack variable in the partition, or EOC. */
175 size_t next;
176
177 /* The numbers of conflicting stack variables. */
178 bitmap conflicts;
179 };
180
181 #define EOC ((size_t)-1)
182
183 /* We have an array of such objects while deciding allocation. */
184 static struct stack_var *stack_vars;
185 static size_t stack_vars_alloc;
186 static size_t stack_vars_num;
187
188 /* An array of indices sorted by stack_var_cmp; in particular, within each
189 alignment class stack_vars[stack_vars_sorted[i]].size is non-increasing. */
190 static size_t *stack_vars_sorted;
191
192 /* The phase of the stack frame. This is the known misalignment of
193 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
194 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
195 static int frame_phase;
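/* For illustration (the numbers here are hypothetical, not tied to any
   particular target): with a PREFERRED_STACK_BOUNDARY of 128 bits (a 16-byte
   boundary) and a STARTING_FRAME_OFFSET of 8, expand_used_vars computes
   off = 8 % 16 = 8 and hence frame_phase = 16 - 8 = 8.
   alloc_stack_frame_space below then hands out frame offsets congruent to
   frame_phase modulo the requested alignment, so that the corresponding
   stack addresses end up properly aligned.  */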
196
197 /* Used during expand_used_vars to remember if we saw any decls for
198 which we'd like to enable stack smashing protection. */
199 static bool has_protected_decls;
200
201 /* Used during expand_used_vars. Remember if we saw a character buffer
202 smaller than our cutoff threshold. Used for -Wstack-protector. */
203 static bool has_short_buffer;
204
205 /* Compute the byte alignment to use for DECL. Ignore alignment
206 requests we cannot honor given the expected alignment of the stack boundary. */
207
208 static unsigned int
209 align_local_variable (tree decl)
210 {
211 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
212 DECL_ALIGN (decl) = align;
213 return align / BITS_PER_UNIT;
214 }
215
216 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
217 Return the frame offset. */
218
219 static HOST_WIDE_INT
220 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
221 {
222 HOST_WIDE_INT offset, new_frame_offset;
223
224 new_frame_offset = frame_offset;
225 if (FRAME_GROWS_DOWNWARD)
226 {
227 new_frame_offset -= size + frame_phase;
228 new_frame_offset &= -align;
229 new_frame_offset += frame_phase;
230 offset = new_frame_offset;
231 }
232 else
233 {
234 new_frame_offset -= frame_phase;
235 new_frame_offset += align - 1;
236 new_frame_offset &= -align;
237 new_frame_offset += frame_phase;
238 offset = new_frame_offset;
239 new_frame_offset += size;
240 }
241 frame_offset = new_frame_offset;
242
243 if (frame_offset_overflow (frame_offset, cfun->decl))
244 frame_offset = offset = 0;
245
246 return offset;
247 }
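/* A worked example of the function above, with hypothetical values: assuming
   FRAME_GROWS_DOWNWARD, frame_phase == 0 and an incoming frame_offset of 0,
   a request for 12 bytes at 8-byte alignment computes
     new_frame_offset = (0 - 12) & -8 = -16,
   so the slot is returned at offset -16 and frame_offset becomes -16; a
   subsequent 4-byte request at 4-byte alignment would then land at -20.  */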
248
249 /* Accumulate DECL into STACK_VARS. */
250
251 static void
252 add_stack_var (tree decl)
253 {
254 struct stack_var *v;
255
256 if (stack_vars_num >= stack_vars_alloc)
257 {
258 if (stack_vars_alloc)
259 stack_vars_alloc = stack_vars_alloc * 3 / 2;
260 else
261 stack_vars_alloc = 32;
262 stack_vars
263 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
264 }
265 v = &stack_vars[stack_vars_num];
266
267 v->decl = decl;
268 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
269 /* Ensure that all variables have size, so that &a != &b for any two
270 variables that are simultaneously live. */
271 if (v->size == 0)
272 v->size = 1;
273 v->alignb = align_local_variable (SSAVAR (decl));
274 /* An alignment of zero can mightily confuse us later. */
275 gcc_assert (v->alignb != 0);
276
277 /* All variables are initially in their own partition. */
278 v->representative = stack_vars_num;
279 v->next = EOC;
280
281 /* All variables initially conflict with no other. */
282 v->conflicts = NULL;
283
284 /* Ensure that this decl doesn't get put onto the list twice. */
285 set_rtl (decl, pc_rtx);
286
287 stack_vars_num++;
288 }
289
290 /* Make the decls associated with indices X and Y conflict. */
291
292 static void
293 add_stack_var_conflict (size_t x, size_t y)
294 {
295 struct stack_var *a = &stack_vars[x];
296 struct stack_var *b = &stack_vars[y];
297 if (!a->conflicts)
298 a->conflicts = BITMAP_ALLOC (NULL);
299 if (!b->conflicts)
300 b->conflicts = BITMAP_ALLOC (NULL);
301 bitmap_set_bit (a->conflicts, y);
302 bitmap_set_bit (b->conflicts, x);
303 }
304
305 /* Check whether the decls associated with indices X and Y conflict. */
306
307 static bool
308 stack_var_conflict_p (size_t x, size_t y)
309 {
310 struct stack_var *a = &stack_vars[x];
311 struct stack_var *b = &stack_vars[y];
312 if (!a->conflicts || !b->conflicts)
313 return false;
314 return bitmap_bit_p (a->conflicts, y);
315 }
316
317 /* Returns true if TYPE is or contains a union type. */
318
319 static bool
320 aggregate_contains_union_type (tree type)
321 {
322 tree field;
323
324 if (TREE_CODE (type) == UNION_TYPE
325 || TREE_CODE (type) == QUAL_UNION_TYPE)
326 return true;
327 if (TREE_CODE (type) == ARRAY_TYPE)
328 return aggregate_contains_union_type (TREE_TYPE (type));
329 if (TREE_CODE (type) != RECORD_TYPE)
330 return false;
331
332 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
333 if (TREE_CODE (field) == FIELD_DECL)
334 if (aggregate_contains_union_type (TREE_TYPE (field)))
335 return true;
336
337 return false;
338 }
339
340 /* A subroutine of expand_used_vars. If two variables X and Y have alias
341 sets that do not conflict, then add a conflict for these variables
342 in the interference graph. We also need to make sure to add conflicts
343 for union-containing structures. Otherwise RTL alias analysis, using
344 type-based aliasing rules, may decide that for two overlapping
345 union temporaries { short s; int i; } accesses to the same memory through
346 different types do not alias, and happily reorder stores across
347 life-time boundaries of the temporaries (see PR25654).
348 We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */
349
350 static void
351 add_alias_set_conflicts (void)
352 {
353 size_t i, j, n = stack_vars_num;
354
355 for (i = 0; i < n; ++i)
356 {
357 tree type_i = TREE_TYPE (stack_vars[i].decl);
358 bool aggr_i = AGGREGATE_TYPE_P (type_i);
359 bool contains_union;
360
361 contains_union = aggregate_contains_union_type (type_i);
362 for (j = 0; j < i; ++j)
363 {
364 tree type_j = TREE_TYPE (stack_vars[j].decl);
365 bool aggr_j = AGGREGATE_TYPE_P (type_j);
366 if (aggr_i != aggr_j
367 /* Either the objects conflict by means of type based
368 aliasing rules, or we need to add a conflict. */
369 || !objects_must_conflict_p (type_i, type_j)
370 /* In case the types do not conflict ensure that access
371 to elements will conflict. In case of unions we have
372 to be careful as type based aliasing rules may say
373 access to the same memory does not conflict. So play
374 safe and add a conflict in this case when
375 -fstrict-aliasing is used. */
376 || (contains_union && flag_strict_aliasing))
377 add_stack_var_conflict (i, j);
378 }
379 }
380 }
381
382 /* A subroutine of partition_stack_vars. A comparison function for qsort,
383 sorting an array of indices by the properties of the object. */
384
385 static int
386 stack_var_cmp (const void *a, const void *b)
387 {
388 size_t ia = *(const size_t *)a;
389 size_t ib = *(const size_t *)b;
390 unsigned int aligna = stack_vars[ia].alignb;
391 unsigned int alignb = stack_vars[ib].alignb;
392 HOST_WIDE_INT sizea = stack_vars[ia].size;
393 HOST_WIDE_INT sizeb = stack_vars[ib].size;
394 tree decla = stack_vars[ia].decl;
395 tree declb = stack_vars[ib].decl;
396 bool largea, largeb;
397 unsigned int uida, uidb;
398
399 /* Primary compare on "large" alignment. Large comes first. */
400 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
401 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
402 if (largea != largeb)
403 return (int)largeb - (int)largea;
404
405 /* Secondary compare on size, decreasing. */
406 if (sizea > sizeb)
407 return -1;
408 if (sizea < sizeb)
409 return 1;
410
411 /* Tertiary compare on true alignment. */
412 if (aligna < alignb)
413 return -1;
414 if (aligna > alignb)
415 return 1;
416
417 /* Final compare on ID for sort stability.
418 Two SSA names are compared by their version, SSA names come before
419 non-SSA names, and two normal decls are compared by their DECL_UID. */
420 if (TREE_CODE (decla) == SSA_NAME)
421 {
422 if (TREE_CODE (declb) == SSA_NAME)
423 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
424 else
425 return -1;
426 }
427 else if (TREE_CODE (declb) == SSA_NAME)
428 return 1;
429 else
430 uida = DECL_UID (decla), uidb = DECL_UID (declb);
431 if (uida < uidb)
432 return 1;
433 if (uida > uidb)
434 return -1;
435 return 0;
436 }
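/* An illustration of the resulting order (the entries are hypothetical): a
   decl whose required alignment exceeds MAX_SUPPORTED_STACK_ALIGNMENT sorts
   before everything else ("large" comes first); among the remaining decls a
   400-byte buffer precedes a 16-byte one, and ties on size fall back to the
   alignment and finally to the SSA version or DECL_UID, purely so that the
   qsort result is reproducible between runs.  */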
437
438
439 /* If the points-to solution *PT points to variables that are in a partition
440 together with other variables, add all partition members to the pointed-to
441 variables bitmap. */
442
443 static void
444 add_partitioned_vars_to_ptset (struct pt_solution *pt,
445 struct pointer_map_t *decls_to_partitions,
446 struct pointer_set_t *visited, bitmap temp)
447 {
448 bitmap_iterator bi;
449 unsigned i;
450 bitmap *part;
451
452 if (pt->anything
453 || pt->vars == NULL
454 /* The pointed-to vars bitmap is shared; it is enough to
455 visit it once. */
456 || pointer_set_insert(visited, pt->vars))
457 return;
458
459 bitmap_clear (temp);
460
461 /* By using a temporary bitmap to store all members of the partitions
462 we have to add, we make sure to visit each of the partitions only
463 once. */
464 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
465 if ((!temp
466 || !bitmap_bit_p (temp, i))
467 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
468 (void *)(size_t) i)))
469 bitmap_ior_into (temp, *part);
470 if (!bitmap_empty_p (temp))
471 bitmap_ior_into (pt->vars, temp);
472 }
473
474 /* Update points-to sets based on partition info, so we can use them on RTL.
475 The bitmaps representing stack partitions will be saved until expand,
476 where partitioned decls used as bases in memory expressions will be
477 rewritten. */
478
479 static void
480 update_alias_info_with_stack_vars (void)
481 {
482 struct pointer_map_t *decls_to_partitions = NULL;
483 size_t i, j;
484 tree var = NULL_TREE;
485
486 for (i = 0; i < stack_vars_num; i++)
487 {
488 bitmap part = NULL;
489 tree name;
490 struct ptr_info_def *pi;
491
492 /* Not interested in partitions with a single variable. */
493 if (stack_vars[i].representative != i
494 || stack_vars[i].next == EOC)
495 continue;
496
497 if (!decls_to_partitions)
498 {
499 decls_to_partitions = pointer_map_create ();
500 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
501 }
502
503 /* Create an SSA_NAME that points to the partition for use
504 as base during alias-oracle queries on RTL for bases that
505 have been partitioned. */
506 if (var == NULL_TREE)
507 var = create_tmp_var (ptr_type_node, NULL);
508 name = make_ssa_name (var, NULL);
509
510 /* Create bitmaps representing partitions. They will be used for
511 points-to sets later, so use GGC alloc. */
512 part = BITMAP_GGC_ALLOC ();
513 for (j = i; j != EOC; j = stack_vars[j].next)
514 {
515 tree decl = stack_vars[j].decl;
516 unsigned int uid = DECL_PT_UID (decl);
517 /* We should never end up partitioning SSA names (though they
518 may end up on the stack). Neither should we allocate stack
519 space to something that is unused and thus unreferenced, except
520 for -O0 where we are preserving even unreferenced variables. */
521 gcc_assert (DECL_P (decl)
522 && (!optimize
523 || referenced_var_lookup (cfun, DECL_UID (decl))));
524 bitmap_set_bit (part, uid);
525 *((bitmap *) pointer_map_insert (decls_to_partitions,
526 (void *)(size_t) uid)) = part;
527 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
528 decl)) = name;
529 }
530
531 /* Make the SSA name point to all partition members. */
532 pi = get_ptr_info (name);
533 pt_solution_set (&pi->pt, part, false);
534 }
535
536 /* Make all points-to sets that contain one member of a partition
537 contain all members of the partition. */
538 if (decls_to_partitions)
539 {
540 unsigned i;
541 struct pointer_set_t *visited = pointer_set_create ();
542 bitmap temp = BITMAP_ALLOC (NULL);
543
544 for (i = 1; i < num_ssa_names; i++)
545 {
546 tree name = ssa_name (i);
547 struct ptr_info_def *pi;
548
549 if (name
550 && POINTER_TYPE_P (TREE_TYPE (name))
551 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
552 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
553 visited, temp);
554 }
555
556 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
557 decls_to_partitions, visited, temp);
558
559 pointer_set_destroy (visited);
560 pointer_map_destroy (decls_to_partitions);
561 BITMAP_FREE (temp);
562 }
563 }
564
565 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
566 partitioning algorithm. Partitions A and B are known to be non-conflicting.
567 Merge them into a single partition A. */
568
569 static void
570 union_stack_vars (size_t a, size_t b)
571 {
572 struct stack_var *vb = &stack_vars[b];
573 bitmap_iterator bi;
574 unsigned u;
575
576 gcc_assert (stack_vars[b].next == EOC);
577 /* Add B to A's partition. */
578 stack_vars[b].next = stack_vars[a].next;
579 stack_vars[b].representative = a;
580 stack_vars[a].next = b;
581
582 /* Update the required alignment of partition A to account for B. */
583 if (stack_vars[a].alignb < stack_vars[b].alignb)
584 stack_vars[a].alignb = stack_vars[b].alignb;
585
586 /* Update the interference graph and merge the conflicts. */
587 if (vb->conflicts)
588 {
589 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
590 add_stack_var_conflict (a, stack_vars[u].representative);
591 BITMAP_FREE (vb->conflicts);
592 }
593 }
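/* To illustrate the representation used above (the indices are hypothetical):
   after union_stack_vars (0, 2) followed by union_stack_vars (0, 5),
   partition 0 is the singly-linked list 0 -> 5 -> 2 -> EOC:
     stack_vars[0].representative == 0, stack_vars[0].next == 5,
     stack_vars[5].representative == 0, stack_vars[5].next == 2,
     stack_vars[2].representative == 0, stack_vars[2].next == EOC.
   Because partition_stack_vars visits objects in decreasing size order, the
   representative's size already covers every member, its alignb is updated
   above, and expand_stack_vars later gives all members the same frame slot.  */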
594
595 /* A subroutine of expand_used_vars. Binpack the variables into
596 partitions constrained by the interference graph. The overall
597 algorithm used is as follows:
598
599 Sort the objects by size in descending order.
600 For each object A {
601 S = size(A)
602 O = 0
603 loop {
604 Look for the largest non-conflicting object B with size <= S.
605 UNION (A, B)
606 }
607 }
608 */
609
610 static void
611 partition_stack_vars (void)
612 {
613 size_t si, sj, n = stack_vars_num;
614
615 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
616 for (si = 0; si < n; ++si)
617 stack_vars_sorted[si] = si;
618
619 if (n == 1)
620 return;
621
622 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
623
624 for (si = 0; si < n; ++si)
625 {
626 size_t i = stack_vars_sorted[si];
627 unsigned int ialign = stack_vars[i].alignb;
628
629 /* Ignore objects that aren't partition representatives. If we
630 see a var that is not a partition representative, it must
631 have been merged earlier. */
632 if (stack_vars[i].representative != i)
633 continue;
634
635 for (sj = si + 1; sj < n; ++sj)
636 {
637 size_t j = stack_vars_sorted[sj];
638 unsigned int jalign = stack_vars[j].alignb;
639
640 /* Ignore objects that aren't partition representatives. */
641 if (stack_vars[j].representative != j)
642 continue;
643
644 /* Ignore conflicting objects. */
645 if (stack_var_conflict_p (i, j))
646 continue;
647
648 /* Do not mix objects of "small" (supported) alignment
649 and "large" (unsupported) alignment. */
650 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
651 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
652 continue;
653
654 /* UNION the objects; J joins I's partition. */
655 union_stack_vars (i, j);
656 }
657 }
658
659 update_alias_info_with_stack_vars ();
660 }
661
662 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
663
664 static void
665 dump_stack_var_partition (void)
666 {
667 size_t si, i, j, n = stack_vars_num;
668
669 for (si = 0; si < n; ++si)
670 {
671 i = stack_vars_sorted[si];
672
673 /* Skip variables that aren't partition representatives, for now. */
674 if (stack_vars[i].representative != i)
675 continue;
676
677 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
678 " align %u\n", (unsigned long) i, stack_vars[i].size,
679 stack_vars[i].alignb);
680
681 for (j = i; j != EOC; j = stack_vars[j].next)
682 {
683 fputc ('\t', dump_file);
684 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
685 }
686 fputc ('\n', dump_file);
687 }
688 }
689
690 /* Assign rtl to DECL at BASE + OFFSET. */
691
692 static void
693 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
694 HOST_WIDE_INT offset)
695 {
696 unsigned align;
697 rtx x;
698
699 /* If this fails, we've overflowed the stack frame. Error nicely? */
700 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
701
702 x = plus_constant (base, offset);
703 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
704
705 if (TREE_CODE (decl) != SSA_NAME)
706 {
707 /* Set the alignment we actually gave this decl if it isn't an SSA name.
708 If it is, we generate stack slots only accidentally, so the alignment
709 isn't as important; we'll simply use the alignment that is already set. */
710 if (base == virtual_stack_vars_rtx)
711 offset -= frame_phase;
712 align = offset & -offset;
713 align *= BITS_PER_UNIT;
714 if (align == 0 || align > base_align)
715 align = base_align;
716
717 /* One would think that we could assert that we're not decreasing
718 alignment here, but (at least) the i386 port does exactly this
719 via the MINIMUM_ALIGNMENT hook. */
720
721 DECL_ALIGN (decl) = align;
722 DECL_USER_ALIGN (decl) = 0;
723 }
724
725 set_mem_attributes (x, SSAVAR (decl), true);
726 set_rtl (decl, x);
727 }
728
729 /* A subroutine of expand_used_vars. Give each partition representative
730 a unique location within the stack frame. Update each partition member
731 with that location. */
732
733 static void
734 expand_stack_vars (bool (*pred) (tree))
735 {
736 size_t si, i, j, n = stack_vars_num;
737 HOST_WIDE_INT large_size = 0, large_alloc = 0;
738 rtx large_base = NULL;
739 unsigned large_align = 0;
740 tree decl;
741
742 /* Determine if there are any variables requiring "large" alignment.
743 Since these are dynamically allocated, we only process these if
744 no predicate is involved. */
745 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
746 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
747 {
748 /* Find the total size of these variables. */
749 for (si = 0; si < n; ++si)
750 {
751 unsigned alignb;
752
753 i = stack_vars_sorted[si];
754 alignb = stack_vars[i].alignb;
755
756 /* Stop when we get to the first decl with "small" alignment. */
757 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
758 break;
759
760 /* Skip variables that aren't partition representatives. */
761 if (stack_vars[i].representative != i)
762 continue;
763
764 /* Skip variables that have already had rtl assigned. See also
765 add_stack_var where we perpetrate this pc_rtx hack. */
766 decl = stack_vars[i].decl;
767 if ((TREE_CODE (decl) == SSA_NAME
768 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
769 : DECL_RTL (decl)) != pc_rtx)
770 continue;
771
772 large_size += alignb - 1;
773 large_size &= -(HOST_WIDE_INT)alignb;
774 large_size += stack_vars[i].size;
775 }
776
777 /* If there were any, allocate space. */
778 if (large_size > 0)
779 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
780 large_align, true);
781 }
782
783 for (si = 0; si < n; ++si)
784 {
785 rtx base;
786 unsigned base_align, alignb;
787 HOST_WIDE_INT offset;
788
789 i = stack_vars_sorted[si];
790
791 /* Skip variables that aren't partition representatives, for now. */
792 if (stack_vars[i].representative != i)
793 continue;
794
795 /* Skip variables that have already had rtl assigned. See also
796 add_stack_var where we perpetrate this pc_rtx hack. */
797 decl = stack_vars[i].decl;
798 if ((TREE_CODE (decl) == SSA_NAME
799 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
800 : DECL_RTL (decl)) != pc_rtx)
801 continue;
802
803 /* Check the predicate to see whether this variable should be
804 allocated in this pass. */
805 if (pred && !pred (decl))
806 continue;
807
808 alignb = stack_vars[i].alignb;
809 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
810 {
811 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
812 base = virtual_stack_vars_rtx;
813 base_align = crtl->max_used_stack_slot_alignment;
814 }
815 else
816 {
817 /* Large alignment is only processed in the last pass. */
818 if (pred)
819 continue;
820 gcc_assert (large_base != NULL);
821
822 large_alloc += alignb - 1;
823 large_alloc &= -(HOST_WIDE_INT)alignb;
824 offset = large_alloc;
825 large_alloc += stack_vars[i].size;
826
827 base = large_base;
828 base_align = large_align;
829 }
830
831 /* Create rtl for each variable based on its location within the
832 partition. */
833 for (j = i; j != EOC; j = stack_vars[j].next)
834 {
835 expand_one_stack_var_at (stack_vars[j].decl,
836 base, base_align,
837 offset);
838 }
839 }
840
841 gcc_assert (large_alloc == large_size);
842 }
843
844 /* Take into account the sizes of all partitions and reset DECL_RTLs. */
845 static HOST_WIDE_INT
846 account_stack_vars (void)
847 {
848 size_t si, j, i, n = stack_vars_num;
849 HOST_WIDE_INT size = 0;
850
851 for (si = 0; si < n; ++si)
852 {
853 i = stack_vars_sorted[si];
854
855 /* Skip variables that aren't partition representatives, for now. */
856 if (stack_vars[i].representative != i)
857 continue;
858
859 size += stack_vars[i].size;
860 for (j = i; j != EOC; j = stack_vars[j].next)
861 set_rtl (stack_vars[j].decl, NULL);
862 }
863 return size;
864 }
865
866 /* A subroutine of expand_one_var. Called to immediately assign rtl
867 to a variable to be allocated in the stack frame. */
868
869 static void
870 expand_one_stack_var (tree var)
871 {
872 HOST_WIDE_INT size, offset;
873 unsigned byte_align;
874
875 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
876 byte_align = align_local_variable (SSAVAR (var));
877
878 /* We handle highly aligned variables in expand_stack_vars. */
879 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
880
881 offset = alloc_stack_frame_space (size, byte_align);
882
883 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
884 crtl->max_used_stack_slot_alignment, offset);
885 }
886
887 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
888 that will reside in a hard register. */
889
890 static void
891 expand_one_hard_reg_var (tree var)
892 {
893 rest_of_decl_compilation (var, 0, 0);
894 }
895
896 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
897 that will reside in a pseudo register. */
898
899 static void
900 expand_one_register_var (tree var)
901 {
902 tree decl = SSAVAR (var);
903 tree type = TREE_TYPE (decl);
904 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
905 rtx x = gen_reg_rtx (reg_mode);
906
907 set_rtl (var, x);
908
909 /* Note if the object is a user variable. */
910 if (!DECL_ARTIFICIAL (decl))
911 mark_user_reg (x);
912
913 if (POINTER_TYPE_P (type))
914 mark_reg_pointer (x, get_pointer_alignment (var));
915 }
916
917 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
918 has some associated error, e.g. its type is error-mark. We just need
919 to pick something that won't crash the rest of the compiler. */
920
921 static void
922 expand_one_error_var (tree var)
923 {
924 enum machine_mode mode = DECL_MODE (var);
925 rtx x;
926
927 if (mode == BLKmode)
928 x = gen_rtx_MEM (BLKmode, const0_rtx);
929 else if (mode == VOIDmode)
930 x = const0_rtx;
931 else
932 x = gen_reg_rtx (mode);
933
934 SET_DECL_RTL (var, x);
935 }
936
937 /* A subroutine of expand_one_var. VAR is a variable that will be
938 allocated to the local stack frame. Return true if we wish to
939 add VAR to STACK_VARS so that it will be coalesced with other
940 variables. Return false to allocate VAR immediately.
941
942 This function is used to reduce the number of variables considered
943 for coalescing, which reduces the size of the quadratic problem. */
944
945 static bool
946 defer_stack_allocation (tree var, bool toplevel)
947 {
948 /* If stack protection is enabled, *all* stack variables must be deferred,
949 so that we can re-order the strings to the top of the frame. */
950 if (flag_stack_protect)
951 return true;
952
953 /* We handle "large" alignment via dynamic allocation. We want to handle
954 this extra complication in only one place, so defer them. */
955 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
956 return true;
957
958 /* Variables in the outermost scope automatically conflict with
959 every other variable. The only reason to want to defer them
960 at all is that, after sorting, we can more efficiently pack
961 small variables in the stack frame. Continue to defer at -O2. */
962 if (toplevel && optimize < 2)
963 return false;
964
965 /* Without optimization, *most* variables are allocated from the
966 stack, which makes the quadratic problem large exactly when we
967 want compilation to proceed as quickly as possible. On the
968 other hand, we don't want the function's stack frame size to
969 get completely out of hand. So we avoid adding scalars and
970 "small" aggregates to the list at all. */
971 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
972 return false;
973
974 return true;
975 }
976
977 /* A subroutine of expand_used_vars. Expand one variable according to
978 its flavor. Variables to be placed on the stack are not actually
979 expanded yet, merely recorded.
980 When REALLY_EXPAND is false, only add stack values to be allocated.
981 Return the stack usage this variable is supposed to take.
982 */
983
984 static HOST_WIDE_INT
985 expand_one_var (tree var, bool toplevel, bool really_expand)
986 {
987 unsigned int align = BITS_PER_UNIT;
988 tree origvar = var;
989
990 var = SSAVAR (var);
991
992 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
993 {
994 /* Because we don't know if VAR will be in a register or on the stack,
995 we conservatively assume it will be on the stack even if VAR is
996 eventually put into a register after the RA pass. For non-automatic
997 variables, which won't be on the stack, we collect the alignment of
998 the type and ignore user-specified alignment. */
999 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1000 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1001 TYPE_MODE (TREE_TYPE (var)),
1002 TYPE_ALIGN (TREE_TYPE (var)));
1003 else if (DECL_HAS_VALUE_EXPR_P (var)
1004 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1005 /* Don't consider debug-only variables with DECL_HAS_VALUE_EXPR_P set
1006 or variables which were already assigned a stack slot by
1007 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1008 changed to reflect the offset chosen for it. */
1009 align = crtl->stack_alignment_estimated;
1010 else
1011 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1012
1013 /* If the variable alignment is very large we'll dynamically allocate
1014 it, which means that the in-frame portion is just a pointer. */
1015 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1016 align = POINTER_SIZE;
1017 }
1018
1019 if (SUPPORTS_STACK_ALIGNMENT
1020 && crtl->stack_alignment_estimated < align)
1021 {
1022 /* stack_alignment_estimated shouldn't change after the stack
1023 realign decision has been made. */
1024 gcc_assert(!crtl->stack_realign_processed);
1025 crtl->stack_alignment_estimated = align;
1026 }
1027
1028 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1029 So here we only make sure stack_alignment_needed >= align. */
1030 if (crtl->stack_alignment_needed < align)
1031 crtl->stack_alignment_needed = align;
1032 if (crtl->max_used_stack_slot_alignment < align)
1033 crtl->max_used_stack_slot_alignment = align;
1034
1035 if (TREE_CODE (origvar) == SSA_NAME)
1036 {
1037 gcc_assert (TREE_CODE (var) != VAR_DECL
1038 || (!DECL_EXTERNAL (var)
1039 && !DECL_HAS_VALUE_EXPR_P (var)
1040 && !TREE_STATIC (var)
1041 && TREE_TYPE (var) != error_mark_node
1042 && !DECL_HARD_REGISTER (var)
1043 && really_expand));
1044 }
1045 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1046 ;
1047 else if (DECL_EXTERNAL (var))
1048 ;
1049 else if (DECL_HAS_VALUE_EXPR_P (var))
1050 ;
1051 else if (TREE_STATIC (var))
1052 ;
1053 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1054 ;
1055 else if (TREE_TYPE (var) == error_mark_node)
1056 {
1057 if (really_expand)
1058 expand_one_error_var (var);
1059 }
1060 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1061 {
1062 if (really_expand)
1063 expand_one_hard_reg_var (var);
1064 }
1065 else if (use_register_for_decl (var))
1066 {
1067 if (really_expand)
1068 expand_one_register_var (origvar);
1069 }
1070 else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
1071 {
1072 if (really_expand)
1073 {
1074 error ("size of variable %q+D is too large", var);
1075 expand_one_error_var (var);
1076 }
1077 }
1078 else if (defer_stack_allocation (var, toplevel))
1079 add_stack_var (origvar);
1080 else
1081 {
1082 if (really_expand)
1083 expand_one_stack_var (origvar);
1084 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1085 }
1086 return 0;
1087 }
1088
1089 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1090 expanding variables. Those variables that can be put into registers
1091 are allocated pseudos; those that can't are put on the stack.
1092
1093 TOPLEVEL is true if this is the outermost BLOCK. */
1094
1095 static void
1096 expand_used_vars_for_block (tree block, bool toplevel)
1097 {
1098 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1099 tree t;
1100
1101 old_sv_num = toplevel ? 0 : stack_vars_num;
1102
1103 /* Expand all variables at this level. */
1104 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1105 if (TREE_USED (t)
1106 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1107 || !DECL_NONSHAREABLE (t)))
1108 expand_one_var (t, toplevel, true);
1109
1110 this_sv_num = stack_vars_num;
1111
1112 /* Expand all variables at contained (sub-block) levels. */
1113 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1114 expand_used_vars_for_block (t, false);
1115
1116 /* Since we do not track exact variable lifetimes (which is not even
1117 possible for variables whose address escapes), we mirror the block
1118 tree in the interference graph. Here we cause all variables at this
1119 level, and all sublevels, to conflict. */
1120 if (old_sv_num < this_sv_num)
1121 {
1122 new_sv_num = stack_vars_num;
1123
1124 for (i = old_sv_num; i < new_sv_num; ++i)
1125 for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
1126 add_stack_var_conflict (i, j);
1127 }
1128 }
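/* An illustration of the conflict mirroring above, for a hypothetical nesting
     { int a;  { int b; }  { int c; } }
   a is made to conflict with both b and c (the outer level versus each
   sublevel), while b and c, coming from disjoint sibling sub-blocks, record
   no conflict here and so remain candidates for sharing one stack slot.  */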
1129
1130 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1131 and clear TREE_USED on all local variables. */
1132
1133 static void
1134 clear_tree_used (tree block)
1135 {
1136 tree t;
1137
1138 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1139 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1140 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1141 || !DECL_NONSHAREABLE (t))
1142 TREE_USED (t) = 0;
1143
1144 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1145 clear_tree_used (t);
1146 }
1147
1148 /* Examine TYPE and determine a bit mask of the following features. */
1149
1150 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1151 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1152 #define SPCT_HAS_ARRAY 4
1153 #define SPCT_HAS_AGGREGATE 8
1154
1155 static unsigned int
1156 stack_protect_classify_type (tree type)
1157 {
1158 unsigned int ret = 0;
1159 tree t;
1160
1161 switch (TREE_CODE (type))
1162 {
1163 case ARRAY_TYPE:
1164 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1165 if (t == char_type_node
1166 || t == signed_char_type_node
1167 || t == unsigned_char_type_node)
1168 {
1169 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1170 unsigned HOST_WIDE_INT len;
1171
1172 if (!TYPE_SIZE_UNIT (type)
1173 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1174 len = max;
1175 else
1176 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1177
1178 if (len < max)
1179 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1180 else
1181 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1182 }
1183 else
1184 ret = SPCT_HAS_ARRAY;
1185 break;
1186
1187 case UNION_TYPE:
1188 case QUAL_UNION_TYPE:
1189 case RECORD_TYPE:
1190 ret = SPCT_HAS_AGGREGATE;
1191 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1192 if (TREE_CODE (t) == FIELD_DECL)
1193 ret |= stack_protect_classify_type (TREE_TYPE (t));
1194 break;
1195
1196 default:
1197 break;
1198 }
1199
1200 return ret;
1201 }
1202
1203 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1204 part of the local stack frame. Remember if we ever return nonzero for
1205 any variable in this function. The return value is the phase number in
1206 which the variable should be allocated. */
1207
1208 static int
1209 stack_protect_decl_phase (tree decl)
1210 {
1211 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1212 int ret = 0;
1213
1214 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1215 has_short_buffer = true;
1216
1217 if (flag_stack_protect == 2)
1218 {
1219 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1220 && !(bits & SPCT_HAS_AGGREGATE))
1221 ret = 1;
1222 else if (bits & SPCT_HAS_ARRAY)
1223 ret = 2;
1224 }
1225 else
1226 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1227
1228 if (ret)
1229 has_protected_decls = true;
1230
1231 return ret;
1232 }
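/* For example (an illustrative reading of the logic above, assuming the
   default --param ssp-buffer-size of 8): with -fstack-protector-all
   (flag_stack_protect == 2) a plain "char buf[4]" is phase 1 even though it
   is below the cutoff, "int arr[16]" is phase 2, and with plain
   -fstack-protector only character arrays at least as large as the cutoff
   (SPCT_HAS_LARGE_CHAR_ARRAY) get a nonzero phase.  */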
1233
1234 /* Two helper routines that check for phase 1 and phase 2. These are used
1235 as callbacks for expand_stack_vars. */
1236
1237 static bool
1238 stack_protect_decl_phase_1 (tree decl)
1239 {
1240 return stack_protect_decl_phase (decl) == 1;
1241 }
1242
1243 static bool
1244 stack_protect_decl_phase_2 (tree decl)
1245 {
1246 return stack_protect_decl_phase (decl) == 2;
1247 }
1248
1249 /* Ensure that variables in different stack protection phases conflict
1250 so that they are not merged and share the same stack slot. */
1251
1252 static void
1253 add_stack_protection_conflicts (void)
1254 {
1255 size_t i, j, n = stack_vars_num;
1256 unsigned char *phase;
1257
1258 phase = XNEWVEC (unsigned char, n);
1259 for (i = 0; i < n; ++i)
1260 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1261
1262 for (i = 0; i < n; ++i)
1263 {
1264 unsigned char ph_i = phase[i];
1265 for (j = 0; j < i; ++j)
1266 if (ph_i != phase[j])
1267 add_stack_var_conflict (i, j);
1268 }
1269
1270 XDELETEVEC (phase);
1271 }
1272
1273 /* Create a decl for the guard at the top of the stack frame. */
1274
1275 static void
1276 create_stack_guard (void)
1277 {
1278 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1279 VAR_DECL, NULL, ptr_type_node);
1280 TREE_THIS_VOLATILE (guard) = 1;
1281 TREE_USED (guard) = 1;
1282 expand_one_stack_var (guard);
1283 crtl->stack_protect_guard = guard;
1284 }
1285
1286 /* Prepare for expanding variables. */
1287 static void
1288 init_vars_expansion (void)
1289 {
1290 tree t;
1291 unsigned ix;
1292 /* Set TREE_USED on all variables in the local_decls. */
1293 FOR_EACH_LOCAL_DECL (cfun, ix, t)
1294 TREE_USED (t) = 1;
1295
1296 /* Clear TREE_USED on all variables associated with a block scope. */
1297 clear_tree_used (DECL_INITIAL (current_function_decl));
1298
1299 /* Initialize local stack smashing state. */
1300 has_protected_decls = false;
1301 has_short_buffer = false;
1302 }
1303
1304 /* Free up stack variable graph data. */
1305 static void
1306 fini_vars_expansion (void)
1307 {
1308 size_t i, n = stack_vars_num;
1309 for (i = 0; i < n; i++)
1310 BITMAP_FREE (stack_vars[i].conflicts);
1311 XDELETEVEC (stack_vars);
1312 XDELETEVEC (stack_vars_sorted);
1313 stack_vars = NULL;
1314 stack_vars_alloc = stack_vars_num = 0;
1315 }
1316
1317 /* Make a fair guess for the size of the stack frame of the function
1318 in NODE. This doesn't have to be exact; the result is only used in
1319 the inline heuristics. So we don't want to run the full stack var
1320 packing algorithm (which is quadratic in the number of stack vars).
1321 Instead, we calculate the total size of all stack vars. This turns
1322 out to be a pretty fair estimate -- packing of stack vars doesn't
1323 happen very often. */
1324
1325 HOST_WIDE_INT
1326 estimated_stack_frame_size (struct cgraph_node *node)
1327 {
1328 HOST_WIDE_INT size = 0;
1329 size_t i;
1330 tree var;
1331 tree old_cur_fun_decl = current_function_decl;
1332 referenced_var_iterator rvi;
1333 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1334
1335 current_function_decl = node->decl;
1336 push_cfun (fn);
1337
1338 gcc_checking_assert (gimple_referenced_vars (fn));
1339 FOR_EACH_REFERENCED_VAR (fn, var, rvi)
1340 size += expand_one_var (var, true, false);
1341
1342 if (stack_vars_num > 0)
1343 {
1344 /* Fake sorting the stack vars for account_stack_vars (). */
1345 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1346 for (i = 0; i < stack_vars_num; ++i)
1347 stack_vars_sorted[i] = i;
1348 size += account_stack_vars ();
1349 fini_vars_expansion ();
1350 }
1351 pop_cfun ();
1352 current_function_decl = old_cur_fun_decl;
1353 return size;
1354 }
1355
1356 /* Expand all variables used in the function. */
1357
1358 static void
1359 expand_used_vars (void)
1360 {
1361 tree var, outer_block = DECL_INITIAL (current_function_decl);
1362 VEC(tree,heap) *maybe_local_decls = NULL;
1363 unsigned i;
1364 unsigned len;
1365
1366 /* Compute the phase of the stack frame for this function. */
1367 {
1368 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1369 int off = STARTING_FRAME_OFFSET % align;
1370 frame_phase = off ? align - off : 0;
1371 }
1372
1373 init_vars_expansion ();
1374
1375 for (i = 0; i < SA.map->num_partitions; i++)
1376 {
1377 tree var = partition_to_var (SA.map, i);
1378
1379 gcc_assert (is_gimple_reg (var));
1380 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1381 expand_one_var (var, true, true);
1382 else
1383 {
1384 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1385 contain the default def (representing the parm or result itself)
1386 we don't do anything here. But those which don't contain the
1387 default def (representing a temporary based on the parm/result)
1388 we need to allocate space just like for normal VAR_DECLs. */
1389 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1390 {
1391 expand_one_var (var, true, true);
1392 gcc_assert (SA.partition_to_pseudo[i]);
1393 }
1394 }
1395 }
1396
1397 /* At this point all variables on the local_decls with TREE_USED
1398 set are not associated with any block scope. Lay them out. */
1399
1400 len = VEC_length (tree, cfun->local_decls);
1401 FOR_EACH_LOCAL_DECL (cfun, i, var)
1402 {
1403 bool expand_now = false;
1404
1405 /* Expanded above already. */
1406 if (is_gimple_reg (var))
1407 {
1408 TREE_USED (var) = 0;
1409 goto next;
1410 }
1411 /* We didn't set a block for static or extern because it's hard
1412 to tell the difference between a global variable (re)declared
1413 in a local scope, and one that's really declared there to
1414 begin with. And it doesn't really matter much, since we're
1415 not giving them stack space. Expand them now. */
1416 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1417 expand_now = true;
1418
1419 /* If the variable is not associated with any block, then it
1420 was created by the optimizers, and could be live anywhere
1421 in the function. */
1422 else if (TREE_USED (var))
1423 expand_now = true;
1424
1425 /* Finally, mark all variables on the list as used. We'll use
1426 this in a moment when we expand those associated with scopes. */
1427 TREE_USED (var) = 1;
1428
1429 if (expand_now)
1430 expand_one_var (var, true, true);
1431
1432 next:
1433 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1434 {
1435 rtx rtl = DECL_RTL_IF_SET (var);
1436
1437 /* Keep artificial non-ignored vars in cfun->local_decls
1438 chain until instantiate_decls. */
1439 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1440 add_local_decl (cfun, var);
1441 else if (rtl == NULL_RTX)
1442 /* If rtl isn't set yet, which can happen e.g. with
1443 -fstack-protector, retry before returning from this
1444 function. */
1445 VEC_safe_push (tree, heap, maybe_local_decls, var);
1446 }
1447 }
1448
1449 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1450
1451 +-----------------+-----------------+
1452 | ...processed... | ...duplicates...|
1453 +-----------------+-----------------+
1454 ^
1455 +-- LEN points here.
1456
1457 We just want the duplicates, as those are the artificial
1458 non-ignored vars that we want to keep until instantiate_decls.
1459 Move them down and truncate the array. */
1460 if (!VEC_empty (tree, cfun->local_decls))
1461 VEC_block_remove (tree, cfun->local_decls, 0, len);
1462
1463 /* At this point, all variables within the block tree with TREE_USED
1464 set are actually used by the optimized function. Lay them out. */
1465 expand_used_vars_for_block (outer_block, true);
1466
1467 if (stack_vars_num > 0)
1468 {
1469 /* Due to the way alias sets work, no variables with non-conflicting
1470 alias sets may be assigned the same address. Add conflicts to
1471 reflect this. */
1472 add_alias_set_conflicts ();
1473
1474 /* If stack protection is enabled, we don't share space between
1475 vulnerable data and non-vulnerable data. */
1476 if (flag_stack_protect)
1477 add_stack_protection_conflicts ();
1478
1479 /* Now that we have collected all stack variables, and have computed a
1480 minimal interference graph, attempt to save some stack space. */
1481 partition_stack_vars ();
1482 if (dump_file)
1483 dump_stack_var_partition ();
1484 }
1485
1486 /* There are several conditions under which we should create a
1487 stack guard: protect-all, alloca used, protected decls present. */
1488 if (flag_stack_protect == 2
1489 || (flag_stack_protect
1490 && (cfun->calls_alloca || has_protected_decls)))
1491 create_stack_guard ();
1492
1493 /* Assign rtl to each variable based on these partitions. */
1494 if (stack_vars_num > 0)
1495 {
1496 /* Reorder decls to be protected by iterating over the variables
1497 array multiple times, and allocating out of each phase in turn. */
1498 /* ??? We could probably integrate this into the qsort we did
1499 earlier, such that we naturally see these variables first,
1500 and thus naturally allocate things in the right order. */
1501 if (has_protected_decls)
1502 {
1503 /* Phase 1 contains only character arrays. */
1504 expand_stack_vars (stack_protect_decl_phase_1);
1505
1506 /* Phase 2 contains other kinds of arrays. */
1507 if (flag_stack_protect == 2)
1508 expand_stack_vars (stack_protect_decl_phase_2);
1509 }
1510
1511 expand_stack_vars (NULL);
1512
1513 fini_vars_expansion ();
1514 }
1515
1516 /* If there were any artificial non-ignored vars without rtl
1517 found earlier, see if deferred stack allocation hasn't assigned
1518 rtl to them. */
1519 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
1520 {
1521 rtx rtl = DECL_RTL_IF_SET (var);
1522
1523 /* Keep artificial non-ignored vars in cfun->local_decls
1524 chain until instantiate_decls. */
1525 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1526 add_local_decl (cfun, var);
1527 }
1528 VEC_free (tree, heap, maybe_local_decls);
1529
1530 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1531 if (STACK_ALIGNMENT_NEEDED)
1532 {
1533 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1534 if (!FRAME_GROWS_DOWNWARD)
1535 frame_offset += align - 1;
1536 frame_offset &= -align;
1537 }
1538 }
1539
1540
1541 /* If we need to produce a detailed dump, print the tree representation
1542 for STMT to the dump file. SINCE is the last RTX after which the RTL
1543 generated for STMT should have been appended. */
1544
1545 static void
1546 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1547 {
1548 if (dump_file && (dump_flags & TDF_DETAILS))
1549 {
1550 fprintf (dump_file, "\n;; ");
1551 print_gimple_stmt (dump_file, stmt, 0,
1552 TDF_SLIM | (dump_flags & TDF_LINENO));
1553 fprintf (dump_file, "\n");
1554
1555 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1556 }
1557 }
1558
1559 /* Maps the blocks that do not contain tree labels to rtx labels. */
1560
1561 static struct pointer_map_t *lab_rtx_for_bb;
1562
1563 /* Returns the label_rtx expression for a label starting basic block BB. */
1564
1565 static rtx
1566 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1567 {
1568 gimple_stmt_iterator gsi;
1569 tree lab;
1570 gimple lab_stmt;
1571 void **elt;
1572
1573 if (bb->flags & BB_RTL)
1574 return block_label (bb);
1575
1576 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1577 if (elt)
1578 return (rtx) *elt;
1579
1580 /* Find the tree label if it is present. */
1581
1582 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1583 {
1584 lab_stmt = gsi_stmt (gsi);
1585 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1586 break;
1587
1588 lab = gimple_label_label (lab_stmt);
1589 if (DECL_NONLOCAL (lab))
1590 break;
1591
1592 return label_rtx (lab);
1593 }
1594
1595 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1596 *elt = gen_label_rtx ();
1597 return (rtx) *elt;
1598 }
1599
1600
1601 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1602 of a basic block where we just expanded the conditional at the end,
1603 possibly clean up the CFG and instruction sequence. LAST is the
1604 last instruction before the just emitted jump sequence. */
1605
1606 static void
1607 maybe_cleanup_end_of_block (edge e, rtx last)
1608 {
1609 /* Special case: when jumpif decides that the condition is
1610 trivial it emits an unconditional jump (and the necessary
1611 barrier). But we still have two edges, the fallthru one is
1612 wrong. purge_dead_edges would clean this up later. Unfortunately
1613 we have to insert insns (and split edges) before
1614 find_many_sub_basic_blocks and hence before purge_dead_edges.
1615 But splitting edges might create new blocks which depend on the
1616 fact that if there are two edges there's no barrier. So the
1617 barrier would get lost and verify_flow_info would ICE. Instead
1618 of auditing all edge splitters to care for the barrier (which
1619 normally isn't there in a cleaned CFG), fix it here. */
1620 if (BARRIER_P (get_last_insn ()))
1621 {
1622 rtx insn;
1623 remove_edge (e);
1624 /* Now, we have a single successor block, if we have insns to
1625 insert on the remaining edge we potentially will insert
1626 it at the end of this block (if the dest block isn't feasible)
1627 in order to avoid splitting the edge. This insertion will take
1628 place in front of the last jump. But we might have emitted
1629 multiple jumps (conditional and one unconditional) to the
1630 same destination. Inserting in front of the last one then
1631 is a problem. See PR 40021. We fix this by deleting all
1632 jumps except the last unconditional one. */
1633 insn = PREV_INSN (get_last_insn ());
1634 /* Make sure we have an unconditional jump. Otherwise we're
1635 confused. */
1636 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1637 for (insn = PREV_INSN (insn); insn != last;)
1638 {
1639 insn = PREV_INSN (insn);
1640 if (JUMP_P (NEXT_INSN (insn)))
1641 {
1642 if (!any_condjump_p (NEXT_INSN (insn)))
1643 {
1644 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1645 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1646 }
1647 delete_insn (NEXT_INSN (insn));
1648 }
1649 }
1650 }
1651 }
1652
1653 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1654 Returns a new basic block if we've terminated the current basic
1655 block and created a new one. */
1656
1657 static basic_block
1658 expand_gimple_cond (basic_block bb, gimple stmt)
1659 {
1660 basic_block new_bb, dest;
1661 edge new_edge;
1662 edge true_edge;
1663 edge false_edge;
1664 rtx last2, last;
1665 enum tree_code code;
1666 tree op0, op1;
1667
1668 code = gimple_cond_code (stmt);
1669 op0 = gimple_cond_lhs (stmt);
1670 op1 = gimple_cond_rhs (stmt);
1671 /* We're sometimes presented with such code:
1672 D.123_1 = x < y;
1673 if (D.123_1 != 0)
1674 ...
1675 This would expand to two comparisons which then later might
1676 be cleaned up by combine. But some pattern matchers like if-conversion
1677 work better when there's only one compare, so make up for this
1678 here as a special exception if TER would have made the same change. */
1679 if (gimple_cond_single_var_p (stmt)
1680 && SA.values
1681 && TREE_CODE (op0) == SSA_NAME
1682 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1683 {
1684 gimple second = SSA_NAME_DEF_STMT (op0);
1685 if (gimple_code (second) == GIMPLE_ASSIGN)
1686 {
1687 enum tree_code code2 = gimple_assign_rhs_code (second);
1688 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1689 {
1690 code = code2;
1691 op0 = gimple_assign_rhs1 (second);
1692 op1 = gimple_assign_rhs2 (second);
1693 }
1694 /* If jumps are cheap, turn some more codes into
1695 jumpy sequences. */
1696 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1697 {
1698 if ((code2 == BIT_AND_EXPR
1699 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1700 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1701 || code2 == TRUTH_AND_EXPR)
1702 {
1703 code = TRUTH_ANDIF_EXPR;
1704 op0 = gimple_assign_rhs1 (second);
1705 op1 = gimple_assign_rhs2 (second);
1706 }
1707 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1708 {
1709 code = TRUTH_ORIF_EXPR;
1710 op0 = gimple_assign_rhs1 (second);
1711 op1 = gimple_assign_rhs2 (second);
1712 }
1713 }
1714 }
1715 }
1716
1717 last2 = last = get_last_insn ();
1718
1719 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1720 set_curr_insn_source_location (gimple_location (stmt));
1721 set_curr_insn_block (gimple_block (stmt));
1722
1723 /* These flags have no purpose in RTL land. */
1724 true_edge->flags &= ~EDGE_TRUE_VALUE;
1725 false_edge->flags &= ~EDGE_FALSE_VALUE;
1726
1727 /* We can either have a pure conditional jump with one fallthru edge or
1728 a two-way jump that needs to be decomposed into two basic blocks. */
1729 if (false_edge->dest == bb->next_bb)
1730 {
1731 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1732 true_edge->probability);
1733 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1734 if (true_edge->goto_locus)
1735 {
1736 set_curr_insn_source_location (true_edge->goto_locus);
1737 set_curr_insn_block (true_edge->goto_block);
1738 true_edge->goto_locus = curr_insn_locator ();
1739 }
1740 true_edge->goto_block = NULL;
1741 false_edge->flags |= EDGE_FALLTHRU;
1742 maybe_cleanup_end_of_block (false_edge, last);
1743 return NULL;
1744 }
1745 if (true_edge->dest == bb->next_bb)
1746 {
1747 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1748 false_edge->probability);
1749 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1750 if (false_edge->goto_locus)
1751 {
1752 set_curr_insn_source_location (false_edge->goto_locus);
1753 set_curr_insn_block (false_edge->goto_block);
1754 false_edge->goto_locus = curr_insn_locator ();
1755 }
1756 false_edge->goto_block = NULL;
1757 true_edge->flags |= EDGE_FALLTHRU;
1758 maybe_cleanup_end_of_block (true_edge, last);
1759 return NULL;
1760 }
1761
1762 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1763 true_edge->probability);
1764 last = get_last_insn ();
1765 if (false_edge->goto_locus)
1766 {
1767 set_curr_insn_source_location (false_edge->goto_locus);
1768 set_curr_insn_block (false_edge->goto_block);
1769 false_edge->goto_locus = curr_insn_locator ();
1770 }
1771 false_edge->goto_block = NULL;
1772 emit_jump (label_rtx_for_bb (false_edge->dest));
1773
1774 BB_END (bb) = last;
1775 if (BARRIER_P (BB_END (bb)))
1776 BB_END (bb) = PREV_INSN (BB_END (bb));
1777 update_bb_for_insn (bb);
1778
1779 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1780 dest = false_edge->dest;
1781 redirect_edge_succ (false_edge, new_bb);
1782 false_edge->flags |= EDGE_FALLTHRU;
1783 new_bb->count = false_edge->count;
1784 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1785 new_edge = make_edge (new_bb, dest, 0);
1786 new_edge->probability = REG_BR_PROB_BASE;
1787 new_edge->count = new_bb->count;
1788 if (BARRIER_P (BB_END (new_bb)))
1789 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1790 update_bb_for_insn (new_bb);
1791
1792 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1793
1794 if (true_edge->goto_locus)
1795 {
1796 set_curr_insn_source_location (true_edge->goto_locus);
1797 set_curr_insn_block (true_edge->goto_block);
1798 true_edge->goto_locus = curr_insn_locator ();
1799 }
1800 true_edge->goto_block = NULL;
1801
1802 return new_bb;
1803 }
1804
1805 /* Mark all calls that can have a transaction restart. */
1806
1807 static void
1808 mark_transaction_restart_calls (gimple stmt)
1809 {
1810 struct tm_restart_node dummy;
1811 void **slot;
1812
1813 if (!cfun->gimple_df->tm_restart)
1814 return;
1815
1816 dummy.stmt = stmt;
1817 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
1818 if (slot)
1819 {
1820 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
1821 tree list = n->label_or_list;
1822 rtx insn;
1823
1824 for (insn = next_real_insn (get_last_insn ());
1825 !CALL_P (insn);
1826 insn = next_real_insn (insn))
1827 continue;
1828
1829 if (TREE_CODE (list) == LABEL_DECL)
1830 add_reg_note (insn, REG_TM, label_rtx (list));
1831 else
1832 for (; list ; list = TREE_CHAIN (list))
1833 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
1834 }
1835 }
1836
1837 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1838 statement STMT. */
1839
1840 static void
1841 expand_call_stmt (gimple stmt)
1842 {
1843 tree exp, decl, lhs;
1844 bool builtin_p;
1845 size_t i;
1846
1847 if (gimple_call_internal_p (stmt))
1848 {
1849 expand_internal_call (stmt);
1850 return;
1851 }
1852
1853 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1854
1855 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
1856 decl = gimple_call_fndecl (stmt);
1857 builtin_p = decl && DECL_BUILT_IN (decl);
1858
1859 /* If this is not a builtin function, the function type through which the
1860 call is made may be different from the type of the function. */
1861 if (!builtin_p)
1862 CALL_EXPR_FN (exp)
1863 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
1864 CALL_EXPR_FN (exp));
1865
1866 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1867 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1868
1869 for (i = 0; i < gimple_call_num_args (stmt); i++)
1870 {
1871 tree arg = gimple_call_arg (stmt, i);
1872 gimple def;
1873 /* TER substitutes addresses into arguments of builtin functions so we
1874 have a chance to infer more accurate alignment information. See PR39954. */
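/* For example, with "tmp_1 = &s.f; __builtin_memcpy (tmp_1, src, n);"
where tmp_1 is TERed, passing the ADDR_EXPR &s.f below lets the
expander see the alignment of s.f instead of that of an anonymous
SSA pointer (the names here are purely illustrative). */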
1875 if (builtin_p
1876 && TREE_CODE (arg) == SSA_NAME
1877 && (def = get_gimple_for_ssa_name (arg))
1878 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1879 arg = gimple_assign_rhs1 (def);
1880 CALL_EXPR_ARG (exp, i) = arg;
1881 }
1882
1883 if (gimple_has_side_effects (stmt))
1884 TREE_SIDE_EFFECTS (exp) = 1;
1885
1886 if (gimple_call_nothrow_p (stmt))
1887 TREE_NOTHROW (exp) = 1;
1888
1889 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1890 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1891 if (decl
1892 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1893 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
1894 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
1895 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
1896 else
1897 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1898 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1899 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1900 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1901 TREE_BLOCK (exp) = gimple_block (stmt);
1902
1903 /* Ensure RTL is created for debug args. */
1904 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
1905 {
1906 VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
1907 unsigned int ix;
1908 tree dtemp;
1909
1910 if (debug_args)
1911 for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
1912 {
1913 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
1914 expand_debug_expr (dtemp);
1915 }
1916 }
1917
1918 lhs = gimple_call_lhs (stmt);
1919 if (lhs)
1920 expand_assignment (lhs, exp, false);
1921 else
1922 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
1923
1924 mark_transaction_restart_calls (stmt);
1925 }
1926
1927 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
1928 STMT that doesn't require special handling for outgoing edges. That
1929 is, no tailcalls and no GIMPLE_COND. */
1930
1931 static void
1932 expand_gimple_stmt_1 (gimple stmt)
1933 {
1934 tree op0;
1935
1936 set_curr_insn_source_location (gimple_location (stmt));
1937 set_curr_insn_block (gimple_block (stmt));
1938
1939 switch (gimple_code (stmt))
1940 {
1941 case GIMPLE_GOTO:
1942 op0 = gimple_goto_dest (stmt);
1943 if (TREE_CODE (op0) == LABEL_DECL)
1944 expand_goto (op0);
1945 else
1946 expand_computed_goto (op0);
1947 break;
1948 case GIMPLE_LABEL:
1949 expand_label (gimple_label_label (stmt));
1950 break;
1951 case GIMPLE_NOP:
1952 case GIMPLE_PREDICT:
1953 break;
1954 case GIMPLE_SWITCH:
1955 expand_case (stmt);
1956 break;
1957 case GIMPLE_ASM:
1958 expand_asm_stmt (stmt);
1959 break;
1960 case GIMPLE_CALL:
1961 expand_call_stmt (stmt);
1962 break;
1963
1964 case GIMPLE_RETURN:
1965 op0 = gimple_return_retval (stmt);
1966
1967 if (op0 && op0 != error_mark_node)
1968 {
1969 tree result = DECL_RESULT (current_function_decl);
1970
1971 /* If we are not returning the current function's RESULT_DECL,
1972 build an assignment to it. */
1973 if (op0 != result)
1974 {
1975 /* I believe that a function's RESULT_DECL is unique. */
1976 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1977
1978 /* ??? We'd like to use simply expand_assignment here,
1979 but this fails if the value is of BLKmode but the return
1980 decl is a register. expand_return has special handling
1981 for this combination, which eventually should move
1982 to common code. See comments there. Until then, let's
1983 build a modify expression :-/ */
1984 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1985 result, op0);
1986 }
1987 }
1988 if (!op0)
1989 expand_null_return ();
1990 else
1991 expand_return (op0);
1992 break;
1993
1994 case GIMPLE_ASSIGN:
1995 {
1996 tree lhs = gimple_assign_lhs (stmt);
1997
1998 /* Tree expand used to fiddle with |= and &= of two bitfield
1999 COMPONENT_REFs here. This can't happen with gimple; the LHS
2000 of binary assigns must be a gimple reg. */
2001
2002 if (TREE_CODE (lhs) != SSA_NAME
2003 || get_gimple_rhs_class (gimple_expr_code (stmt))
2004 == GIMPLE_SINGLE_RHS)
2005 {
2006 tree rhs = gimple_assign_rhs1 (stmt);
2007 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2008 == GIMPLE_SINGLE_RHS);
2009 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2010 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2011 expand_assignment (lhs, rhs,
2012 gimple_assign_nontemporal_move_p (stmt));
2013 }
2014 else
2015 {
2016 rtx target, temp;
2017 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2018 struct separate_ops ops;
2019 bool promoted = false;
2020
2021 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2022 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2023 promoted = true;
2024
2025 ops.code = gimple_assign_rhs_code (stmt);
2026 ops.type = TREE_TYPE (lhs);
2027 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2028 {
2029 case GIMPLE_TERNARY_RHS:
2030 ops.op2 = gimple_assign_rhs3 (stmt);
2031 /* Fallthru */
2032 case GIMPLE_BINARY_RHS:
2033 ops.op1 = gimple_assign_rhs2 (stmt);
2034 /* Fallthru */
2035 case GIMPLE_UNARY_RHS:
2036 ops.op0 = gimple_assign_rhs1 (stmt);
2037 break;
2038 default:
2039 gcc_unreachable ();
2040 }
2041 ops.location = gimple_location (stmt);
2042
2043 /* If we want to use a nontemporal store, force the value into a
2044 register first. If we store into a promoted register,
2045 don't expand directly to the target. */
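/* E.g., if TARGET is (subreg:SI (reg:DI ...)) with SUBREG_PROMOTED_VAR_P
set, the RHS is computed into a scratch SImode value first and then
widened into the underlying DImode register by convert_move below
(the modes are only illustrative). */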
2046 temp = nontemporal || promoted ? NULL_RTX : target;
2047 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2048 EXPAND_NORMAL);
2049
2050 if (temp == target)
2051 ;
2052 else if (promoted)
2053 {
2054 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2055 /* If TEMP is a VOIDmode constant, use convert_modes to make
2056 sure that we properly convert it. */
2057 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2058 {
2059 temp = convert_modes (GET_MODE (target),
2060 TYPE_MODE (ops.type),
2061 temp, unsignedp);
2062 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2063 GET_MODE (target), temp, unsignedp);
2064 }
2065
2066 convert_move (SUBREG_REG (target), temp, unsignedp);
2067 }
2068 else if (nontemporal && emit_storent_insn (target, temp))
2069 ;
2070 else
2071 {
2072 temp = force_operand (temp, target);
2073 if (temp != target)
2074 emit_move_insn (target, temp);
2075 }
2076 }
2077 }
2078 break;
2079
2080 default:
2081 gcc_unreachable ();
2082 }
2083 }
2084
2085 /* Expand one gimple statement STMT and return the last RTL instruction
2086 before any of the newly generated ones.
2087
2088 In addition to generating the necessary RTL instructions this also
2089 sets REG_EH_REGION notes if necessary and sets the current source
2090 location for diagnostics. */
2091
2092 static rtx
2093 expand_gimple_stmt (gimple stmt)
2094 {
2095 location_t saved_location = input_location;
2096 rtx last = get_last_insn ();
2097 int lp_nr;
2098
2099 gcc_assert (cfun);
2100
2101 /* We need to save and restore the current source location so that errors
2102 discovered during expansion are emitted with the right location. But
2103 it would be better if the diagnostic routines used the source location
2104 embedded in the tree nodes rather than globals. */
2105 if (gimple_has_location (stmt))
2106 input_location = gimple_location (stmt);
2107
2108 expand_gimple_stmt_1 (stmt);
2109
2110 /* Free any temporaries used to evaluate this statement. */
2111 free_temp_slots ();
2112
2113 input_location = saved_location;
2114
2115 /* Mark all insns that may trap. */
2116 lp_nr = lookup_stmt_eh_lp (stmt);
2117 if (lp_nr)
2118 {
2119 rtx insn;
2120 for (insn = next_real_insn (last); insn;
2121 insn = next_real_insn (insn))
2122 {
2123 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2124 /* If we want exceptions for non-call insns, any
2125 may_trap_p instruction may throw. */
2126 && GET_CODE (PATTERN (insn)) != CLOBBER
2127 && GET_CODE (PATTERN (insn)) != USE
2128 && insn_could_throw_p (insn))
2129 make_reg_eh_region_note (insn, 0, lp_nr);
2130 }
2131 }
2132
2133 return last;
2134 }
2135
2136 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2137 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2138 generated a tail call (something that might be denied by the ABI
2139 rules governing the call; see calls.c).
2140
2141 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2142 execution can still reach the rest of BB. The case here is __builtin_sqrt,
2143 where the NaN result goes through the external function (with a
2144 tailcall) and the normal result happens via a sqrt instruction. */
2145
2146 static basic_block
2147 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2148 {
2149 rtx last2, last;
2150 edge e;
2151 edge_iterator ei;
2152 int probability;
2153 gcov_type count;
2154
2155 last2 = last = expand_gimple_stmt (stmt);
2156
2157 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2158 if (CALL_P (last) && SIBLING_CALL_P (last))
2159 goto found;
2160
2161 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2162
2163 *can_fallthru = true;
2164 return NULL;
2165
2166 found:
2167 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2168 Any instructions emitted here are about to be deleted. */
2169 do_pending_stack_adjust ();
2170
2171 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2172 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2173 EH or abnormal edges, we shouldn't have created a tail call in
2174 the first place. So it seems to me we should just be removing
2175 all edges here, or redirecting the existing fallthru edge to
2176 the exit block. */
2177
2178 probability = 0;
2179 count = 0;
2180
2181 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2182 {
2183 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2184 {
2185 if (e->dest != EXIT_BLOCK_PTR)
2186 {
2187 e->dest->count -= e->count;
2188 e->dest->frequency -= EDGE_FREQUENCY (e);
2189 if (e->dest->count < 0)
2190 e->dest->count = 0;
2191 if (e->dest->frequency < 0)
2192 e->dest->frequency = 0;
2193 }
2194 count += e->count;
2195 probability += e->probability;
2196 remove_edge (e);
2197 }
2198 else
2199 ei_next (&ei);
2200 }
2201
2202 /* This is somewhat ugly: the call_expr expander often emits instructions
2203 after the sibcall (to perform the function return). These confuse the
2204 find_many_sub_basic_blocks code, so we need to get rid of them. */
2205 last = NEXT_INSN (last);
2206 gcc_assert (BARRIER_P (last));
2207
2208 *can_fallthru = false;
2209 while (NEXT_INSN (last))
2210 {
2211 /* For instance, the sqrt builtin expander expands an if () with a
2212 sibcall in the `then` arm and a label for the `else` arm. */
2213 if (LABEL_P (NEXT_INSN (last)))
2214 {
2215 *can_fallthru = true;
2216 break;
2217 }
2218 delete_insn (NEXT_INSN (last));
2219 }
2220
2221 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2222 e->probability += probability;
2223 e->count += count;
2224 BB_END (bb) = last;
2225 update_bb_for_insn (bb);
2226
2227 if (NEXT_INSN (last))
2228 {
2229 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2230
2231 last = BB_END (bb);
2232 if (BARRIER_P (last))
2233 BB_END (bb) = PREV_INSN (last);
2234 }
2235
2236 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2237
2238 return bb;
2239 }
2240
2241 /* Return the difference between the floor and the truncated result of
2242 a signed division by OP1 with remainder MOD. */
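/* For example, -7 / 2 truncates to -3 while its floor is -4: MOD = -1
and OP1 / MOD is negative, so the adjustment below is -1. For 7 / 2
the remainder has the same sign as OP1 and the adjustment is 0. */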
2243 static rtx
2244 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2245 {
2246 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2247 return gen_rtx_IF_THEN_ELSE
2248 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2249 gen_rtx_IF_THEN_ELSE
2250 (mode, gen_rtx_LT (BImode,
2251 gen_rtx_DIV (mode, op1, mod),
2252 const0_rtx),
2253 constm1_rtx, const0_rtx),
2254 const0_rtx);
2255 }
2256
2257 /* Return the difference between the ceil and the truncated result of
2258 a signed division by OP1 with remainder MOD. */
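/* For example, 7 / 2 truncates to 3 while its ceiling is 4, so with
MOD = 1 and OP1 = 2 the adjustment below is 1; for -7 / 2 the
quotient OP1 / MOD is negative and the adjustment is 0. */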
2259 static rtx
2260 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2261 {
2262 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2263 return gen_rtx_IF_THEN_ELSE
2264 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2265 gen_rtx_IF_THEN_ELSE
2266 (mode, gen_rtx_GT (BImode,
2267 gen_rtx_DIV (mode, op1, mod),
2268 const0_rtx),
2269 const1_rtx, const0_rtx),
2270 const0_rtx);
2271 }
2272
2273 /* Return the difference between the ceil and the truncated result of
2274 an unsigned division by OP1 with remainder MOD. */
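/* For example, 7 / 2 truncates to 3 while its ceiling is 4; any nonzero
remainder makes the adjustment below 1. */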
2275 static rtx
2276 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2277 {
2278 /* (mod != 0 ? 1 : 0) */
2279 return gen_rtx_IF_THEN_ELSE
2280 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2281 const1_rtx, const0_rtx);
2282 }
2283
2284 /* Return the difference between the rounded and the truncated result
2285 of a signed division by OP1 with remainder MOD. Halfway cases are
2286 rounded away from zero, rather than to the nearest even number. */
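/* For example, 7 / 2 = 3.5 rounds away from zero to 4: abs (MOD) = 1
is >= abs (OP1) - abs (MOD) = 1 and OP1 / MOD is positive, so the
adjustment below is 1. Likewise -7 / 2 rounds to -4 with an
adjustment of -1. */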
2287 static rtx
2288 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2289 {
2290 /* (abs (mod) >= abs (op1) - abs (mod)
2291 ? (op1 / mod > 0 ? 1 : -1)
2292 : 0) */
2293 return gen_rtx_IF_THEN_ELSE
2294 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2295 gen_rtx_MINUS (mode,
2296 gen_rtx_ABS (mode, op1),
2297 gen_rtx_ABS (mode, mod))),
2298 gen_rtx_IF_THEN_ELSE
2299 (mode, gen_rtx_GT (BImode,
2300 gen_rtx_DIV (mode, op1, mod),
2301 const0_rtx),
2302 const1_rtx, constm1_rtx),
2303 const0_rtx);
2304 }
2305
2306 /* Return the difference between the rounded and the truncated result
2307 of an unsigned division by OP1 with remainder MOD. Halfway cases
2308 are rounded away from zero, rather than to the nearest even
2309 number. */
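/* For example, 7 / 2 = 3.5 rounds up to 4 (MOD = 1 >= OP1 - MOD = 1,
adjustment 1), while 7 / 3 = 2.33 rounds down to 2 (MOD = 1 is less
than OP1 - MOD = 2, adjustment 0). */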
2310 static rtx
2311 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2312 {
2313 /* (mod >= op1 - mod ? 1 : 0) */
2314 return gen_rtx_IF_THEN_ELSE
2315 (mode, gen_rtx_GE (BImode, mod,
2316 gen_rtx_MINUS (mode, op1, mod)),
2317 const1_rtx, const0_rtx);
2318 }
2319
2320 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2321 any rtl. */
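/* For instance, on a hypothetical target where ptr_mode is SImode but
Pmode is DImode, an SImode address X becomes (zero_extend:DI X) or
(sign_extend:DI X) depending on POINTERS_EXTEND_UNSIGNED, built
directly as an rtx so no conversion insns are emitted. */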
2322
2323 static rtx
2324 convert_debug_memory_address (enum machine_mode mode, rtx x,
2325 addr_space_t as)
2326 {
2327 enum machine_mode xmode = GET_MODE (x);
2328
2329 #ifndef POINTERS_EXTEND_UNSIGNED
2330 gcc_assert (mode == Pmode
2331 || mode == targetm.addr_space.address_mode (as));
2332 gcc_assert (xmode == mode || xmode == VOIDmode);
2333 #else
2334 rtx temp;
2335 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2336 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2337
2338 gcc_assert (mode == address_mode || mode == pointer_mode);
2339
2340 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2341 return x;
2342
2343 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2344 x = simplify_gen_subreg (mode, x, xmode,
2345 subreg_lowpart_offset
2346 (mode, xmode));
2347 else if (POINTERS_EXTEND_UNSIGNED > 0)
2348 x = gen_rtx_ZERO_EXTEND (mode, x);
2349 else if (!POINTERS_EXTEND_UNSIGNED)
2350 x = gen_rtx_SIGN_EXTEND (mode, x);
2351 else
2352 {
2353 switch (GET_CODE (x))
2354 {
2355 case SUBREG:
2356 if ((SUBREG_PROMOTED_VAR_P (x)
2357 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2358 || (GET_CODE (SUBREG_REG (x)) == PLUS
2359 && REG_P (XEXP (SUBREG_REG (x), 0))
2360 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2361 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2362 && GET_MODE (SUBREG_REG (x)) == mode)
2363 return SUBREG_REG (x);
2364 break;
2365 case LABEL_REF:
2366 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2367 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2368 return temp;
2369 case SYMBOL_REF:
2370 temp = shallow_copy_rtx (x);
2371 PUT_MODE (temp, mode);
2372 return temp;
2373 case CONST:
2374 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2375 if (temp)
2376 temp = gen_rtx_CONST (mode, temp);
2377 return temp;
2378 case PLUS:
2379 case MINUS:
2380 if (CONST_INT_P (XEXP (x, 1)))
2381 {
2382 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2383 if (temp)
2384 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2385 }
2386 break;
2387 default:
2388 break;
2389 }
2390 /* Don't know how to express ptr_extend as an operation in debug info. */
2391 return NULL;
2392 }
2393 #endif /* POINTERS_EXTEND_UNSIGNED */
2394
2395 return x;
2396 }
2397
2398 /* Return an RTX equivalent to the value of the parameter DECL. */
2399
2400 static rtx
2401 expand_debug_parm_decl (tree decl)
2402 {
2403 rtx incoming = DECL_INCOMING_RTL (decl);
2404
2405 if (incoming
2406 && GET_MODE (incoming) != BLKmode
2407 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2408 || (MEM_P (incoming)
2409 && REG_P (XEXP (incoming, 0))
2410 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2411 {
2412 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2413
2414 #ifdef HAVE_window_save
2415 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2416 If the target machine has an explicit window save instruction, the
2417 actual entry value is the corresponding OUTGOING_REGNO instead. */
2418 if (REG_P (incoming)
2419 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2420 incoming
2421 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2422 OUTGOING_REGNO (REGNO (incoming)), 0);
2423 else if (MEM_P (incoming))
2424 {
2425 rtx reg = XEXP (incoming, 0);
2426 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2427 {
2428 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2429 incoming = replace_equiv_address_nv (incoming, reg);
2430 }
2431 }
2432 #endif
2433
2434 ENTRY_VALUE_EXP (rtl) = incoming;
2435 return rtl;
2436 }
2437
2438 if (incoming
2439 && GET_MODE (incoming) != BLKmode
2440 && !TREE_ADDRESSABLE (decl)
2441 && MEM_P (incoming)
2442 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2443 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2444 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2445 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2446 return incoming;
2447
2448 return NULL_RTX;
2449 }
2450
2451 /* Return an RTX equivalent to the value of the tree expression EXP. */
2452
2453 static rtx
2454 expand_debug_expr (tree exp)
2455 {
2456 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2457 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2458 enum machine_mode inner_mode = VOIDmode;
2459 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2460 addr_space_t as;
2461
2462 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2463 {
2464 case tcc_expression:
2465 switch (TREE_CODE (exp))
2466 {
2467 case COND_EXPR:
2468 case DOT_PROD_EXPR:
2469 case WIDEN_MULT_PLUS_EXPR:
2470 case WIDEN_MULT_MINUS_EXPR:
2471 case FMA_EXPR:
2472 goto ternary;
2473
2474 case TRUTH_ANDIF_EXPR:
2475 case TRUTH_ORIF_EXPR:
2476 case TRUTH_AND_EXPR:
2477 case TRUTH_OR_EXPR:
2478 case TRUTH_XOR_EXPR:
2479 goto binary;
2480
2481 case TRUTH_NOT_EXPR:
2482 goto unary;
2483
2484 default:
2485 break;
2486 }
2487 break;
2488
2489 ternary:
2490 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2491 if (!op2)
2492 return NULL_RTX;
2493 /* Fall through. */
2494
2495 binary:
2496 case tcc_binary:
2497 case tcc_comparison:
2498 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2499 if (!op1)
2500 return NULL_RTX;
2501 /* Fall through. */
2502
2503 unary:
2504 case tcc_unary:
2505 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2506 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2507 if (!op0)
2508 return NULL_RTX;
2509 break;
2510
2511 case tcc_type:
2512 case tcc_statement:
2513 gcc_unreachable ();
2514
2515 case tcc_constant:
2516 case tcc_exceptional:
2517 case tcc_declaration:
2518 case tcc_reference:
2519 case tcc_vl_exp:
2520 break;
2521 }
2522
2523 switch (TREE_CODE (exp))
2524 {
2525 case STRING_CST:
2526 if (!lookup_constant_def (exp))
2527 {
2528 if (strlen (TREE_STRING_POINTER (exp)) + 1
2529 != (size_t) TREE_STRING_LENGTH (exp))
2530 return NULL_RTX;
2531 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2532 op0 = gen_rtx_MEM (BLKmode, op0);
2533 set_mem_attributes (op0, exp, 0);
2534 return op0;
2535 }
2536 /* Fall through... */
2537
2538 case INTEGER_CST:
2539 case REAL_CST:
2540 case FIXED_CST:
2541 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2542 return op0;
2543
2544 case COMPLEX_CST:
2545 gcc_assert (COMPLEX_MODE_P (mode));
2546 op0 = expand_debug_expr (TREE_REALPART (exp));
2547 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2548 return gen_rtx_CONCAT (mode, op0, op1);
2549
2550 case DEBUG_EXPR_DECL:
2551 op0 = DECL_RTL_IF_SET (exp);
2552
2553 if (op0)
2554 return op0;
2555
2556 op0 = gen_rtx_DEBUG_EXPR (mode);
2557 DEBUG_EXPR_TREE_DECL (op0) = exp;
2558 SET_DECL_RTL (exp, op0);
2559
2560 return op0;
2561
2562 case VAR_DECL:
2563 case PARM_DECL:
2564 case FUNCTION_DECL:
2565 case LABEL_DECL:
2566 case CONST_DECL:
2567 case RESULT_DECL:
2568 op0 = DECL_RTL_IF_SET (exp);
2569
2570 /* This decl was probably optimized away. */
2571 if (!op0)
2572 {
2573 if (TREE_CODE (exp) != VAR_DECL
2574 || DECL_EXTERNAL (exp)
2575 || !TREE_STATIC (exp)
2576 || !DECL_NAME (exp)
2577 || DECL_HARD_REGISTER (exp)
2578 || DECL_IN_CONSTANT_POOL (exp)
2579 || mode == VOIDmode)
2580 return NULL;
2581
2582 op0 = make_decl_rtl_for_debug (exp);
2583 if (!MEM_P (op0)
2584 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2585 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2586 return NULL;
2587 }
2588 else
2589 op0 = copy_rtx (op0);
2590
2591 if (GET_MODE (op0) == BLKmode
2592 /* If op0 is not BLKmode, but mode is BLKmode, adjust_mode
2593 below would ICE. While it is likely a FE bug,
2594 try to be robust here. See PR43166. */
2595 || mode == BLKmode
2596 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2597 {
2598 gcc_assert (MEM_P (op0));
2599 op0 = adjust_address_nv (op0, mode, 0);
2600 return op0;
2601 }
2602
2603 /* Fall through. */
2604
2605 adjust_mode:
2606 case PAREN_EXPR:
2607 case NOP_EXPR:
2608 case CONVERT_EXPR:
2609 {
2610 inner_mode = GET_MODE (op0);
2611
2612 if (mode == inner_mode)
2613 return op0;
2614
2615 if (inner_mode == VOIDmode)
2616 {
2617 if (TREE_CODE (exp) == SSA_NAME)
2618 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2619 else
2620 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2621 if (mode == inner_mode)
2622 return op0;
2623 }
2624
2625 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2626 {
2627 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2628 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2629 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2630 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2631 else
2632 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2633 }
2634 else if (FLOAT_MODE_P (mode))
2635 {
2636 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2637 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2638 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2639 else
2640 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2641 }
2642 else if (FLOAT_MODE_P (inner_mode))
2643 {
2644 if (unsignedp)
2645 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2646 else
2647 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2648 }
2649 else if (CONSTANT_P (op0)
2650 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2651 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2652 subreg_lowpart_offset (mode,
2653 inner_mode));
2654 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2655 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2656 : unsignedp)
2657 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2658 else
2659 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2660
2661 return op0;
2662 }
2663
2664 case MEM_REF:
2665 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2666 {
2667 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2668 TREE_OPERAND (exp, 0),
2669 TREE_OPERAND (exp, 1));
2670 if (newexp)
2671 return expand_debug_expr (newexp);
2672 }
2673 /* FALLTHROUGH */
2674 case INDIRECT_REF:
2675 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2676 if (!op0)
2677 return NULL;
2678
2679 if (TREE_CODE (exp) == MEM_REF)
2680 {
2681 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2682 || (GET_CODE (op0) == PLUS
2683 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2684 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2685 Instead just use get_inner_reference. */
2686 goto component_ref;
2687
2688 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2689 if (!op1 || !CONST_INT_P (op1))
2690 return NULL;
2691
2692 op0 = plus_constant (op0, INTVAL (op1));
2693 }
2694
2695 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2696 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2697 else
2698 as = ADDR_SPACE_GENERIC;
2699
2700 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2701 op0, as);
2702 if (op0 == NULL_RTX)
2703 return NULL;
2704
2705 op0 = gen_rtx_MEM (mode, op0);
2706 set_mem_attributes (op0, exp, 0);
2707 if (TREE_CODE (exp) == MEM_REF
2708 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2709 set_mem_expr (op0, NULL_TREE);
2710 set_mem_addr_space (op0, as);
2711
2712 return op0;
2713
2714 case TARGET_MEM_REF:
2715 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2716 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2717 return NULL;
2718
2719 op0 = expand_debug_expr
2720 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2721 if (!op0)
2722 return NULL;
2723
2724 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2725 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2726 else
2727 as = ADDR_SPACE_GENERIC;
2728
2729 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2730 op0, as);
2731 if (op0 == NULL_RTX)
2732 return NULL;
2733
2734 op0 = gen_rtx_MEM (mode, op0);
2735
2736 set_mem_attributes (op0, exp, 0);
2737 set_mem_addr_space (op0, as);
2738
2739 return op0;
2740
2741 component_ref:
2742 case ARRAY_REF:
2743 case ARRAY_RANGE_REF:
2744 case COMPONENT_REF:
2745 case BIT_FIELD_REF:
2746 case REALPART_EXPR:
2747 case IMAGPART_EXPR:
2748 case VIEW_CONVERT_EXPR:
2749 {
2750 enum machine_mode mode1;
2751 HOST_WIDE_INT bitsize, bitpos;
2752 tree offset;
2753 int volatilep = 0;
2754 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2755 &mode1, &unsignedp, &volatilep, false);
2756 rtx orig_op0;
2757
2758 if (bitsize == 0)
2759 return NULL;
2760
2761 orig_op0 = op0 = expand_debug_expr (tem);
2762
2763 if (!op0)
2764 return NULL;
2765
2766 if (offset)
2767 {
2768 enum machine_mode addrmode, offmode;
2769
2770 if (!MEM_P (op0))
2771 return NULL;
2772
2773 op0 = XEXP (op0, 0);
2774 addrmode = GET_MODE (op0);
2775 if (addrmode == VOIDmode)
2776 addrmode = Pmode;
2777
2778 op1 = expand_debug_expr (offset);
2779 if (!op1)
2780 return NULL;
2781
2782 offmode = GET_MODE (op1);
2783 if (offmode == VOIDmode)
2784 offmode = TYPE_MODE (TREE_TYPE (offset));
2785
2786 if (addrmode != offmode)
2787 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2788 subreg_lowpart_offset (addrmode,
2789 offmode));
2790
2791 /* Don't use offset_address here; we don't need a
2792 recognizable address, and we don't want to generate
2793 code. */
2794 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2795 op0, op1));
2796 }
2797
2798 if (MEM_P (op0))
2799 {
2800 if (mode1 == VOIDmode)
2801 /* Bitfield. */
2802 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2803 if (bitpos >= BITS_PER_UNIT)
2804 {
2805 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2806 bitpos %= BITS_PER_UNIT;
2807 }
2808 else if (bitpos < 0)
2809 {
2810 HOST_WIDE_INT units
2811 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2812 op0 = adjust_address_nv (op0, mode1, units);
2813 bitpos += units * BITS_PER_UNIT;
2814 }
2815 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2816 op0 = adjust_address_nv (op0, mode, 0);
2817 else if (GET_MODE (op0) != mode1)
2818 op0 = adjust_address_nv (op0, mode1, 0);
2819 else
2820 op0 = copy_rtx (op0);
2821 if (op0 == orig_op0)
2822 op0 = shallow_copy_rtx (op0);
2823 set_mem_attributes (op0, exp, 0);
2824 }
2825
2826 if (bitpos == 0 && mode == GET_MODE (op0))
2827 return op0;
2828
2829 if (bitpos < 0)
2830 return NULL;
2831
2832 if (GET_MODE (op0) == BLKmode)
2833 return NULL;
2834
2835 if ((bitpos % BITS_PER_UNIT) == 0
2836 && bitsize == GET_MODE_BITSIZE (mode1))
2837 {
2838 enum machine_mode opmode = GET_MODE (op0);
2839
2840 if (opmode == VOIDmode)
2841 opmode = TYPE_MODE (TREE_TYPE (tem));
2842
2843 /* This condition may hold if we're expanding the address
2844 right past the end of an array that turned out not to
2845 be addressable (i.e., the address was only computed in
2846 debug stmts). The gen_subreg below would rightfully
2847 crash, and the address doesn't really exist, so just
2848 drop it. */
2849 if (bitpos >= GET_MODE_BITSIZE (opmode))
2850 return NULL;
2851
2852 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2853 return simplify_gen_subreg (mode, op0, opmode,
2854 bitpos / BITS_PER_UNIT);
2855 }
2856
2857 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2858 && TYPE_UNSIGNED (TREE_TYPE (exp))
2859 ? SIGN_EXTRACT
2860 : ZERO_EXTRACT, mode,
2861 GET_MODE (op0) != VOIDmode
2862 ? GET_MODE (op0)
2863 : TYPE_MODE (TREE_TYPE (tem)),
2864 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2865 }
2866
2867 case ABS_EXPR:
2868 return simplify_gen_unary (ABS, mode, op0, mode);
2869
2870 case NEGATE_EXPR:
2871 return simplify_gen_unary (NEG, mode, op0, mode);
2872
2873 case BIT_NOT_EXPR:
2874 return simplify_gen_unary (NOT, mode, op0, mode);
2875
2876 case FLOAT_EXPR:
2877 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
2878 0)))
2879 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
2880 inner_mode);
2881
2882 case FIX_TRUNC_EXPR:
2883 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
2884 inner_mode);
2885
2886 case POINTER_PLUS_EXPR:
2887 /* For the rare target where pointers are not the same size as
2888 size_t, we need to check for mismatched modes and correct
2889 the addend. */
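/* E.g., if op0 is a DImode pointer and op1 an SImode offset, op1 is
sign-extended to DImode below; had op1 been wider than op0 it would
be truncated instead (the modes are only illustrative). */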
2890 if (op0 && op1
2891 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2892 && GET_MODE (op0) != GET_MODE (op1))
2893 {
2894 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2895 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
2896 GET_MODE (op1));
2897 else
2898 /* We always sign-extend, regardless of the signedness of
2899 the operand, because the operand is always unsigned
2900 here even if the original C expression is signed. */
2901 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
2902 GET_MODE (op1));
2903 }
2904 /* Fall through. */
2905 case PLUS_EXPR:
2906 return simplify_gen_binary (PLUS, mode, op0, op1);
2907
2908 case MINUS_EXPR:
2909 return simplify_gen_binary (MINUS, mode, op0, op1);
2910
2911 case MULT_EXPR:
2912 return simplify_gen_binary (MULT, mode, op0, op1);
2913
2914 case RDIV_EXPR:
2915 case TRUNC_DIV_EXPR:
2916 case EXACT_DIV_EXPR:
2917 if (unsignedp)
2918 return simplify_gen_binary (UDIV, mode, op0, op1);
2919 else
2920 return simplify_gen_binary (DIV, mode, op0, op1);
2921
2922 case TRUNC_MOD_EXPR:
2923 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
2924
2925 case FLOOR_DIV_EXPR:
2926 if (unsignedp)
2927 return simplify_gen_binary (UDIV, mode, op0, op1);
2928 else
2929 {
2930 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
2931 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2932 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2933 return simplify_gen_binary (PLUS, mode, div, adj);
2934 }
2935
2936 case FLOOR_MOD_EXPR:
2937 if (unsignedp)
2938 return simplify_gen_binary (UMOD, mode, op0, op1);
2939 else
2940 {
2941 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2942 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2943 adj = simplify_gen_unary (NEG, mode,
2944 simplify_gen_binary (MULT, mode, adj, op1),
2945 mode);
2946 return simplify_gen_binary (PLUS, mode, mod, adj);
2947 }
2948
2949 case CEIL_DIV_EXPR:
2950 if (unsignedp)
2951 {
2952 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
2953 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
2954 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2955 return simplify_gen_binary (PLUS, mode, div, adj);
2956 }
2957 else
2958 {
2959 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
2960 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2961 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2962 return simplify_gen_binary (PLUS, mode, div, adj);
2963 }
2964
2965 case CEIL_MOD_EXPR:
2966 if (unsignedp)
2967 {
2968 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
2969 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2970 adj = simplify_gen_unary (NEG, mode,
2971 simplify_gen_binary (MULT, mode, adj, op1),
2972 mode);
2973 return simplify_gen_binary (PLUS, mode, mod, adj);
2974 }
2975 else
2976 {
2977 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2978 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2979 adj = simplify_gen_unary (NEG, mode,
2980 simplify_gen_binary (MULT, mode, adj, op1),
2981 mode);
2982 return simplify_gen_binary (PLUS, mode, mod, adj);
2983 }
2984
2985 case ROUND_DIV_EXPR:
2986 if (unsignedp)
2987 {
2988 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
2989 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
2990 rtx adj = round_udiv_adjust (mode, mod, op1);
2991 return simplify_gen_binary (PLUS, mode, div, adj);
2992 }
2993 else
2994 {
2995 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
2996 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2997 rtx adj = round_sdiv_adjust (mode, mod, op1);
2998 return simplify_gen_binary (PLUS, mode, div, adj);
2999 }
3000
3001 case ROUND_MOD_EXPR:
3002 if (unsignedp)
3003 {
3004 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3005 rtx adj = round_udiv_adjust (mode, mod, op1);
3006 adj = simplify_gen_unary (NEG, mode,
3007 simplify_gen_binary (MULT, mode, adj, op1),
3008 mode);
3009 return simplify_gen_binary (PLUS, mode, mod, adj);
3010 }
3011 else
3012 {
3013 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3014 rtx adj = round_sdiv_adjust (mode, mod, op1);
3015 adj = simplify_gen_unary (NEG, mode,
3016 simplify_gen_binary (MULT, mode, adj, op1),
3017 mode);
3018 return simplify_gen_binary (PLUS, mode, mod, adj);
3019 }
3020
3021 case LSHIFT_EXPR:
3022 return simplify_gen_binary (ASHIFT, mode, op0, op1);
3023
3024 case RSHIFT_EXPR:
3025 if (unsignedp)
3026 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3027 else
3028 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3029
3030 case LROTATE_EXPR:
3031 return simplify_gen_binary (ROTATE, mode, op0, op1);
3032
3033 case RROTATE_EXPR:
3034 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3035
3036 case MIN_EXPR:
3037 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3038
3039 case MAX_EXPR:
3040 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3041
3042 case BIT_AND_EXPR:
3043 case TRUTH_AND_EXPR:
3044 return simplify_gen_binary (AND, mode, op0, op1);
3045
3046 case BIT_IOR_EXPR:
3047 case TRUTH_OR_EXPR:
3048 return simplify_gen_binary (IOR, mode, op0, op1);
3049
3050 case BIT_XOR_EXPR:
3051 case TRUTH_XOR_EXPR:
3052 return simplify_gen_binary (XOR, mode, op0, op1);
3053
3054 case TRUTH_ANDIF_EXPR:
3055 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3056
3057 case TRUTH_ORIF_EXPR:
3058 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3059
3060 case TRUTH_NOT_EXPR:
3061 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3062
3063 case LT_EXPR:
3064 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3065 op0, op1);
3066
3067 case LE_EXPR:
3068 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3069 op0, op1);
3070
3071 case GT_EXPR:
3072 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3073 op0, op1);
3074
3075 case GE_EXPR:
3076 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3077 op0, op1);
3078
3079 case EQ_EXPR:
3080 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3081
3082 case NE_EXPR:
3083 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3084
3085 case UNORDERED_EXPR:
3086 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3087
3088 case ORDERED_EXPR:
3089 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3090
3091 case UNLT_EXPR:
3092 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3093
3094 case UNLE_EXPR:
3095 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3096
3097 case UNGT_EXPR:
3098 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3099
3100 case UNGE_EXPR:
3101 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3102
3103 case UNEQ_EXPR:
3104 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3105
3106 case LTGT_EXPR:
3107 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3108
3109 case COND_EXPR:
3110 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3111
3112 case COMPLEX_EXPR:
3113 gcc_assert (COMPLEX_MODE_P (mode));
3114 if (GET_MODE (op0) == VOIDmode)
3115 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3116 if (GET_MODE (op1) == VOIDmode)
3117 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3118 return gen_rtx_CONCAT (mode, op0, op1);
3119
3120 case CONJ_EXPR:
3121 if (GET_CODE (op0) == CONCAT)
3122 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3123 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3124 XEXP (op0, 1),
3125 GET_MODE_INNER (mode)));
3126 else
3127 {
3128 enum machine_mode imode = GET_MODE_INNER (mode);
3129 rtx re, im;
3130
3131 if (MEM_P (op0))
3132 {
3133 re = adjust_address_nv (op0, imode, 0);
3134 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3135 }
3136 else
3137 {
3138 enum machine_mode ifmode = int_mode_for_mode (mode);
3139 enum machine_mode ihmode = int_mode_for_mode (imode);
3140 rtx halfsize;
3141 if (ifmode == BLKmode || ihmode == BLKmode)
3142 return NULL;
3143 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3144 re = op0;
3145 if (mode != ifmode)
3146 re = gen_rtx_SUBREG (ifmode, re, 0);
3147 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3148 if (imode != ihmode)
3149 re = gen_rtx_SUBREG (imode, re, 0);
3150 im = copy_rtx (op0);
3151 if (mode != ifmode)
3152 im = gen_rtx_SUBREG (ifmode, im, 0);
3153 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3154 if (imode != ihmode)
3155 im = gen_rtx_SUBREG (imode, im, 0);
3156 }
3157 im = gen_rtx_NEG (imode, im);
3158 return gen_rtx_CONCAT (mode, re, im);
3159 }
3160
3161 case ADDR_EXPR:
3162 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3163 if (!op0 || !MEM_P (op0))
3164 {
3165 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3166 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3167 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3168 && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)))
3169 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3170
3171 if (handled_component_p (TREE_OPERAND (exp, 0)))
3172 {
3173 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3174 tree decl
3175 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3176 &bitoffset, &bitsize, &maxsize);
3177 if ((TREE_CODE (decl) == VAR_DECL
3178 || TREE_CODE (decl) == PARM_DECL
3179 || TREE_CODE (decl) == RESULT_DECL)
3180 && !TREE_ADDRESSABLE (decl)
3181 && (bitoffset % BITS_PER_UNIT) == 0
3182 && bitsize > 0
3183 && bitsize == maxsize)
3184 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3185 bitoffset / BITS_PER_UNIT);
3186 }
3187
3188 return NULL;
3189 }
3190
3191 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3192 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3193
3194 return op0;
3195
3196 case VECTOR_CST:
3197 exp = build_constructor_from_list (TREE_TYPE (exp),
3198 TREE_VECTOR_CST_ELTS (exp));
3199 /* Fall through. */
3200
3201 case CONSTRUCTOR:
3202 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3203 {
3204 unsigned i;
3205 tree val;
3206
3207 op0 = gen_rtx_CONCATN
3208 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3209
3210 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3211 {
3212 op1 = expand_debug_expr (val);
3213 if (!op1)
3214 return NULL;
3215 XVECEXP (op0, 0, i) = op1;
3216 }
3217
3218 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3219 {
3220 op1 = expand_debug_expr
3221 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3222
3223 if (!op1)
3224 return NULL;
3225
3226 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3227 XVECEXP (op0, 0, i) = op1;
3228 }
3229
3230 return op0;
3231 }
3232 else
3233 goto flag_unsupported;
3234
3235 case CALL_EXPR:
3236 /* ??? Maybe handle some builtins? */
3237 return NULL;
3238
3239 case SSA_NAME:
3240 {
3241 gimple g = get_gimple_for_ssa_name (exp);
3242 if (g)
3243 {
3244 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3245 if (!op0)
3246 return NULL;
3247 }
3248 else
3249 {
3250 int part = var_to_partition (SA.map, exp);
3251
3252 if (part == NO_PARTITION)
3253 {
3254 /* If this is a reference to the incoming value of a parameter
3255 that is never used in the code, or whose incoming
3256 value is never used in the code, use the PARM_DECL's
3257 DECL_RTL if set. */
3258 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3259 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3260 {
3261 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3262 if (op0)
3263 goto adjust_mode;
3264 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3265 if (op0)
3266 goto adjust_mode;
3267 }
3268 return NULL;
3269 }
3270
3271 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3272
3273 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3274 }
3275 goto adjust_mode;
3276 }
3277
3278 case ERROR_MARK:
3279 return NULL;
3280
3281 /* Vector codes. For most of these we don't have corresponding rtl codes. */
3282 case REALIGN_LOAD_EXPR:
3283 case REDUC_MAX_EXPR:
3284 case REDUC_MIN_EXPR:
3285 case REDUC_PLUS_EXPR:
3286 case VEC_COND_EXPR:
3287 case VEC_EXTRACT_EVEN_EXPR:
3288 case VEC_EXTRACT_ODD_EXPR:
3289 case VEC_INTERLEAVE_HIGH_EXPR:
3290 case VEC_INTERLEAVE_LOW_EXPR:
3291 case VEC_LSHIFT_EXPR:
3292 case VEC_PACK_FIX_TRUNC_EXPR:
3293 case VEC_PACK_SAT_EXPR:
3294 case VEC_PACK_TRUNC_EXPR:
3295 case VEC_RSHIFT_EXPR:
3296 case VEC_UNPACK_FLOAT_HI_EXPR:
3297 case VEC_UNPACK_FLOAT_LO_EXPR:
3298 case VEC_UNPACK_HI_EXPR:
3299 case VEC_UNPACK_LO_EXPR:
3300 case VEC_WIDEN_MULT_HI_EXPR:
3301 case VEC_WIDEN_MULT_LO_EXPR:
3302 case VEC_WIDEN_LSHIFT_HI_EXPR:
3303 case VEC_WIDEN_LSHIFT_LO_EXPR:
3304 case VEC_PERM_EXPR:
3305 return NULL;
3306
3307 /* Misc codes. */
3308 case ADDR_SPACE_CONVERT_EXPR:
3309 case FIXED_CONVERT_EXPR:
3310 case OBJ_TYPE_REF:
3311 case WITH_SIZE_EXPR:
3312 return NULL;
3313
3314 case DOT_PROD_EXPR:
3315 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3316 && SCALAR_INT_MODE_P (mode))
3317 {
3318 op0
3319 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3320 0)))
3321 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3322 inner_mode);
3323 op1
3324 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3325 1)))
3326 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3327 inner_mode);
3328 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3329 return simplify_gen_binary (PLUS, mode, op0, op2);
3330 }
3331 return NULL;
3332
3333 case WIDEN_MULT_EXPR:
3334 case WIDEN_MULT_PLUS_EXPR:
3335 case WIDEN_MULT_MINUS_EXPR:
3336 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3337 && SCALAR_INT_MODE_P (mode))
3338 {
3339 inner_mode = GET_MODE (op0);
3340 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3341 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3342 else
3343 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3344 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3345 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3346 else
3347 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3348 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3349 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3350 return op0;
3351 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3352 return simplify_gen_binary (PLUS, mode, op0, op2);
3353 else
3354 return simplify_gen_binary (MINUS, mode, op2, op0);
3355 }
3356 return NULL;
3357
3358 case WIDEN_SUM_EXPR:
3359 case WIDEN_LSHIFT_EXPR:
3360 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3361 && SCALAR_INT_MODE_P (mode))
3362 {
3363 op0
3364 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3365 0)))
3366 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3367 inner_mode);
3368 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3369 ? ASHIFT : PLUS, mode, op0, op1);
3370 }
3371 return NULL;
3372
3373 case FMA_EXPR:
3374 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3375
3376 default:
3377 flag_unsupported:
3378 #ifdef ENABLE_CHECKING
3379 debug_tree (exp);
3380 gcc_unreachable ();
3381 #else
3382 return NULL;
3383 #endif
3384 }
3385 }
3386
3387 /* Return an RTX equivalent to the source bind value of the tree expression
3388 EXP. */
3389
3390 static rtx
3391 expand_debug_source_expr (tree exp)
3392 {
3393 rtx op0 = NULL_RTX;
3394 enum machine_mode mode = VOIDmode, inner_mode;
3395
3396 switch (TREE_CODE (exp))
3397 {
3398 case PARM_DECL:
3399 {
3400 mode = DECL_MODE (exp);
3401 op0 = expand_debug_parm_decl (exp);
3402 if (op0)
3403 break;
3404 /* See whether this is an argument that has been completely
3405 optimized out. */
3406 if (!DECL_RTL_SET_P (exp)
3407 && !DECL_INCOMING_RTL (exp)
3408 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3409 {
3410 tree aexp = exp;
3411 if (DECL_ABSTRACT_ORIGIN (exp))
3412 aexp = DECL_ABSTRACT_ORIGIN (exp);
3413 if (DECL_CONTEXT (aexp)
3414 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3415 {
3416 VEC(tree, gc) **debug_args;
3417 unsigned int ix;
3418 tree ddecl;
3419 #ifdef ENABLE_CHECKING
3420 tree parm;
3421 for (parm = DECL_ARGUMENTS (current_function_decl);
3422 parm; parm = DECL_CHAIN (parm))
3423 gcc_assert (parm != exp
3424 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3425 #endif
3426 debug_args = decl_debug_args_lookup (current_function_decl);
3427 if (debug_args != NULL)
3428 {
3429 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3430 ix += 2)
3431 if (ddecl == aexp)
3432 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3433 }
3434 }
3435 }
3436 break;
3437 }
3438 default:
3439 break;
3440 }
3441
3442 if (op0 == NULL_RTX)
3443 return NULL_RTX;
3444
3445 inner_mode = GET_MODE (op0);
3446 if (mode == inner_mode)
3447 return op0;
3448
3449 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3450 {
3451 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3452 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3453 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3454 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3455 else
3456 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3457 }
3458 else if (FLOAT_MODE_P (mode))
3459 gcc_unreachable ();
3460 else if (FLOAT_MODE_P (inner_mode))
3461 {
3462 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3463 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3464 else
3465 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3466 }
3467 else if (CONSTANT_P (op0)
3468 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3469 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3470 subreg_lowpart_offset (mode, inner_mode));
3471 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3472 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3473 else
3474 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3475
3476 return op0;
3477 }
3478
3479 /* Expand the _LOCs in debug insns. We run this after expanding all
3480 regular insns, so that any variables referenced in the function
3481 will have their DECL_RTLs set. */
3482
3483 static void
3484 expand_debug_locations (void)
3485 {
3486 rtx insn;
3487 rtx last = get_last_insn ();
3488 int save_strict_alias = flag_strict_aliasing;
3489
3490 /* New alias sets created while setting up memory attributes cause
3491 -fcompare-debug failures, even though they don't bring about any
3492 codegen changes. */
3493 flag_strict_aliasing = 0;
3494
3495 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3496 if (DEBUG_INSN_P (insn))
3497 {
3498 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3499 rtx val;
3500 enum machine_mode mode;
3501
3502 if (value == NULL_TREE)
3503 val = NULL_RTX;
3504 else
3505 {
3506 if (INSN_VAR_LOCATION_STATUS (insn)
3507 == VAR_INIT_STATUS_UNINITIALIZED)
3508 val = expand_debug_source_expr (value);
3509 else
3510 val = expand_debug_expr (value);
3511 gcc_assert (last == get_last_insn ());
3512 }
3513
3514 if (!val)
3515 val = gen_rtx_UNKNOWN_VAR_LOC ();
3516 else
3517 {
3518 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3519
3520 gcc_assert (mode == GET_MODE (val)
3521 || (GET_MODE (val) == VOIDmode
3522 && (CONST_INT_P (val)
3523 || GET_CODE (val) == CONST_FIXED
3524 || GET_CODE (val) == CONST_DOUBLE
3525 || GET_CODE (val) == LABEL_REF)));
3526 }
3527
3528 INSN_VAR_LOCATION_LOC (insn) = val;
3529 }
3530
3531 flag_strict_aliasing = save_strict_alias;
3532 }
3533
3534 /* Expand basic block BB from GIMPLE trees to RTL. */
3535
3536 static basic_block
3537 expand_gimple_basic_block (basic_block bb)
3538 {
3539 gimple_stmt_iterator gsi;
3540 gimple_seq stmts;
3541 gimple stmt = NULL;
3542 rtx note, last;
3543 edge e;
3544 edge_iterator ei;
3545 void **elt;
3546
3547 if (dump_file)
3548 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3549 bb->index);
3550
3551 /* Note that since we are now transitioning from GIMPLE to RTL, we
3552 cannot use the gsi_*_bb() routines because they expect the basic
3553 block to be in GIMPLE, instead of RTL. Therefore, we need to
3554 access the BB sequence directly. */
3555 stmts = bb_seq (bb);
3556 bb->il.gimple = NULL;
3557 rtl_profile_for_bb (bb);
3558 init_rtl_bb_info (bb);
3559 bb->flags |= BB_RTL;
3560
3561 /* Remove the RETURN_EXPR if we may fall through to the exit
3562 instead. */
3563 gsi = gsi_last (stmts);
3564 if (!gsi_end_p (gsi)
3565 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3566 {
3567 gimple ret_stmt = gsi_stmt (gsi);
3568
3569 gcc_assert (single_succ_p (bb));
3570 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3571
3572 if (bb->next_bb == EXIT_BLOCK_PTR
3573 && !gimple_return_retval (ret_stmt))
3574 {
3575 gsi_remove (&gsi, false);
3576 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3577 }
3578 }
3579
3580 gsi = gsi_start (stmts);
3581 if (!gsi_end_p (gsi))
3582 {
3583 stmt = gsi_stmt (gsi);
3584 if (gimple_code (stmt) != GIMPLE_LABEL)
3585 stmt = NULL;
3586 }
3587
3588 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3589
3590 if (stmt || elt)
3591 {
3592 last = get_last_insn ();
3593
3594 if (stmt)
3595 {
3596 expand_gimple_stmt (stmt);
3597 gsi_next (&gsi);
3598 }
3599
3600 if (elt)
3601 emit_label ((rtx) *elt);
3602
3603 /* Java emits line number notes at the top of labels.
3604 ??? Make this go away once line number notes are obsoleted. */
3605 BB_HEAD (bb) = NEXT_INSN (last);
3606 if (NOTE_P (BB_HEAD (bb)))
3607 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3608 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3609
3610 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3611 }
3612 else
3613 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3614
3615 NOTE_BASIC_BLOCK (note) = bb;
3616
3617 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3618 {
3619 basic_block new_bb;
3620
3621 stmt = gsi_stmt (gsi);
3622
3623 /* If this statement is a non-debug one, and we generate debug
3624 insns, then this one might be the last real use of a TERed
3625 SSA_NAME, but where there are still some debug uses further
3626 down. Expanding the current SSA name in such further debug
3627 uses by their RHS might lead to wrong debug info, as coalescing
3628 might make the operands of such RHS be placed into the same
3629 pseudo as something else. Like so:
3630 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3631 use(a_1);
3632 a_2 = ...
3633 #DEBUG ... => a_1
3634 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3635 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3636 the write to a_2 would actually have clobbered the place which
3637 formerly held a_0.
3638
3639 So, instead of that, we recognize the situation, and generate
3640 debug temporaries at the last real use of TERed SSA names:
3641 a_1 = a_0 + 1;
3642 #DEBUG #D1 => a_1
3643 use(a_1);
3644 a_2 = ...
3645 #DEBUG ... => #D1
3646 */
3647 if (MAY_HAVE_DEBUG_INSNS
3648 && SA.values
3649 && !is_gimple_debug (stmt))
3650 {
3651 ssa_op_iter iter;
3652 tree op;
3653 gimple def;
3654
3655 location_t sloc = get_curr_insn_source_location ();
3656 tree sblock = get_curr_insn_block ();
3657
3658 /* Look for SSA names that have their last use here (TERed
3659 names always have only one real use). */
3660 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3661 if ((def = get_gimple_for_ssa_name (op)))
3662 {
3663 imm_use_iterator imm_iter;
3664 use_operand_p use_p;
3665 bool have_debug_uses = false;
3666
3667 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3668 {
3669 if (gimple_debug_bind_p (USE_STMT (use_p)))
3670 {
3671 have_debug_uses = true;
3672 break;
3673 }
3674 }
3675
3676 if (have_debug_uses)
3677 {
3678 /* OP is a TERed SSA name, with DEF its defining
3679 statement, and where OP is used in further debug
3680 instructions. Generate a debug temporary, and
3681 replace all uses of OP in debug insns with that
3682 temporary. */
3683 gimple debugstmt;
3684 tree value = gimple_assign_rhs_to_tree (def);
3685 tree vexpr = make_node (DEBUG_EXPR_DECL);
3686 rtx val;
3687 enum machine_mode mode;
3688
3689 set_curr_insn_source_location (gimple_location (def));
3690 set_curr_insn_block (gimple_block (def));
3691
3692 DECL_ARTIFICIAL (vexpr) = 1;
3693 TREE_TYPE (vexpr) = TREE_TYPE (value);
3694 if (DECL_P (value))
3695 mode = DECL_MODE (value);
3696 else
3697 mode = TYPE_MODE (TREE_TYPE (value));
3698 DECL_MODE (vexpr) = mode;
3699
3700 val = gen_rtx_VAR_LOCATION
3701 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3702
3703 emit_debug_insn (val);
3704
3705 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3706 {
3707 if (!gimple_debug_bind_p (debugstmt))
3708 continue;
3709
3710 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3711 SET_USE (use_p, vexpr);
3712
3713 update_stmt (debugstmt);
3714 }
3715 }
3716 }
3717 set_curr_insn_source_location (sloc);
3718 set_curr_insn_block (sblock);
3719 }
3720
3721 currently_expanding_gimple_stmt = stmt;
3722
3723 /* Expand this statement, then evaluate the resulting RTL and
3724 fixup the CFG accordingly. */
3725 if (gimple_code (stmt) == GIMPLE_COND)
3726 {
3727 new_bb = expand_gimple_cond (bb, stmt);
3728 if (new_bb)
3729 return new_bb;
3730 }
3731 else if (gimple_debug_bind_p (stmt))
3732 {
3733 location_t sloc = get_curr_insn_source_location ();
3734 tree sblock = get_curr_insn_block ();
3735 gimple_stmt_iterator nsi = gsi;
3736
3737 for (;;)
3738 {
3739 tree var = gimple_debug_bind_get_var (stmt);
3740 tree value;
3741 rtx val;
3742 enum machine_mode mode;
3743
3744 if (gimple_debug_bind_has_value_p (stmt))
3745 value = gimple_debug_bind_get_value (stmt);
3746 else
3747 value = NULL_TREE;
3748
3749 last = get_last_insn ();
3750
3751 set_curr_insn_source_location (gimple_location (stmt));
3752 set_curr_insn_block (gimple_block (stmt));
3753
3754 if (DECL_P (var))
3755 mode = DECL_MODE (var);
3756 else
3757 mode = TYPE_MODE (TREE_TYPE (var));
3758
3759 val = gen_rtx_VAR_LOCATION
3760 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3761
3762 emit_debug_insn (val);
3763
3764 if (dump_file && (dump_flags & TDF_DETAILS))
3765 {
3766 /* We can't dump the insn with a TREE where an RTX
3767 is expected. */
3768 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3769 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3770 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3771 }
3772
3773 /* In order not to generate too many debug temporaries,
3774 we delink all uses of debug statements we already expanded.
3775 Therefore debug statements between definition and real
3776 use of TERed SSA names will continue to use the SSA name,
3777 and not be replaced with debug temps. */
3778 delink_stmt_imm_use (stmt);
3779
3780 gsi = nsi;
3781 gsi_next (&nsi);
3782 if (gsi_end_p (nsi))
3783 break;
3784 stmt = gsi_stmt (nsi);
3785 if (!gimple_debug_bind_p (stmt))
3786 break;
3787 }
3788
3789 set_curr_insn_source_location (sloc);
3790 set_curr_insn_block (sblock);
3791 }
3792 else if (gimple_debug_source_bind_p (stmt))
3793 {
3794 location_t sloc = get_curr_insn_source_location ();
3795 tree sblock = get_curr_insn_block ();
3796 tree var = gimple_debug_source_bind_get_var (stmt);
3797 tree value = gimple_debug_source_bind_get_value (stmt);
3798 rtx val;
3799 enum machine_mode mode;
3800
3801 last = get_last_insn ();
3802
3803 set_curr_insn_source_location (gimple_location (stmt));
3804 set_curr_insn_block (gimple_block (stmt));
3805
3806 mode = DECL_MODE (var);
3807
3808 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3809 VAR_INIT_STATUS_UNINITIALIZED);
3810
3811 emit_debug_insn (val);
3812
3813 if (dump_file && (dump_flags & TDF_DETAILS))
3814 {
3815 /* We can't dump the insn with a TREE where an RTX
3816 is expected. */
3817 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3818 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3819 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3820 }
3821
3822 set_curr_insn_source_location (sloc);
3823 set_curr_insn_block (sblock);
3824 }
3825 else
3826 {
3827 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3828 {
3829 bool can_fallthru;
3830 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3831 if (new_bb)
3832 {
3833 if (can_fallthru)
3834 bb = new_bb;
3835 else
3836 return new_bb;
3837 }
3838 }
3839 else
3840 {
3841 def_operand_p def_p;
3842 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3843
3844 if (def_p != NULL)
3845 {
3846 /* Ignore this stmt if it is in the list of
3847 replaceable expressions. */
3848 if (SA.values
3849 && bitmap_bit_p (SA.values,
3850 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3851 continue;
3852 }
3853 last = expand_gimple_stmt (stmt);
3854 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3855 }
3856 }
3857 }
3858
3859 currently_expanding_gimple_stmt = NULL;
3860
3861 /* Expand implicit goto and convert goto_locus. */
3862 FOR_EACH_EDGE (e, ei, bb->succs)
3863 {
3864 if (e->goto_locus && e->goto_block)
3865 {
3866 set_curr_insn_source_location (e->goto_locus);
3867 set_curr_insn_block (e->goto_block);
3868 e->goto_locus = curr_insn_locator ();
3869 }
3870 e->goto_block = NULL;
3871 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3872 {
3873 emit_jump (label_rtx_for_bb (e->dest));
3874 e->flags &= ~EDGE_FALLTHRU;
3875 }
3876 }
3877
3878 /* Expanded RTL can create a jump in the last instruction of the block.
3879 That jump might later be assumed to be a jump to the successor and
3880 break edge insertion. Insert a dummy move to prevent this. See PR41440. */
3881 if (single_succ_p (bb)
3882 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3883 && (last = get_last_insn ())
3884 && JUMP_P (last))
3885 {
3886 rtx dummy = gen_reg_rtx (SImode);
3887 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3888 }
3889
3890 do_pending_stack_adjust ();
3891
3892 /* Find the block tail. The last insn in the block is the insn
3893 before a barrier and/or table jump insn. */
3894 last = get_last_insn ();
3895 if (BARRIER_P (last))
3896 last = PREV_INSN (last);
3897 if (JUMP_TABLE_DATA_P (last))
3898 last = PREV_INSN (PREV_INSN (last));
3899 BB_END (bb) = last;
3900
3901 update_bb_for_insn (bb);
3902
3903 return bb;
3904 }
3905
3906
3907 /* Create a basic block for initialization code. */
3908
3909 static basic_block
3910 construct_init_block (void)
3911 {
3912 basic_block init_block, first_block;
3913 edge e = NULL;
3914 int flags;
3915
3916 /* Multiple entry points not supported yet. */
3917 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3918 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3919 init_rtl_bb_info (EXIT_BLOCK_PTR);
3920 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3921 EXIT_BLOCK_PTR->flags |= BB_RTL;
3922
3923 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3924
3925 /* When the entry edge points to the first basic block, we don't need a
3926 jump; otherwise we have to jump to the proper target. */
3927 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3928 {
3929 tree label = gimple_block_label (e->dest);
3930
3931 emit_jump (label_rtx (label));
3932 flags = 0;
3933 }
3934 else
3935 flags = EDGE_FALLTHRU;
3936
3937 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3938 get_last_insn (),
3939 ENTRY_BLOCK_PTR);
3940 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3941 init_block->count = ENTRY_BLOCK_PTR->count;
3942 if (e)
3943 {
3944 first_block = e->dest;
3945 redirect_edge_succ (e, init_block);
3946 e = make_edge (init_block, first_block, flags);
3947 }
3948 else
3949 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3950 e->probability = REG_BR_PROB_BASE;
3951 e->count = ENTRY_BLOCK_PTR->count;
3952
3953 update_bb_for_insn (init_block);
3954 return init_block;
3955 }
3956
3957 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3958 found in the block tree. */
3959
3960 static void
3961 set_block_levels (tree block, int level)
3962 {
3963 while (block)
3964 {
3965 BLOCK_NUMBER (block) = level;
3966 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3967 block = BLOCK_CHAIN (block);
3968 }
3969 }
3970
3971 /* Create a block containing landing pads and similar stuff. */
3972
3973 static void
3974 construct_exit_block (void)
3975 {
3976 rtx head = get_last_insn ();
3977 rtx end;
3978 basic_block exit_block;
3979 edge e, e2;
3980 unsigned ix;
3981 edge_iterator ei;
3982 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3983
3984 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3985
3986 /* Make sure the locus is set to the end of the function, so that
3987 epilogue line numbers and warnings are set properly. */
3988 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3989 input_location = cfun->function_end_locus;
3990
3991 /* The following insns belong to the top scope. */
3992 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3993
3994 /* Generate rtl for function exit. */
3995 expand_function_end ();
3996
3997 end = get_last_insn ();
3998 if (head == end)
3999 return;
4000 /* While emitting the function end we may have moved the end of the
4001 last basic block; restore it. */
4002 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4003 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4004 head = NEXT_INSN (head);
4005 exit_block = create_basic_block (NEXT_INSN (head), end,
4006 EXIT_BLOCK_PTR->prev_bb);
4007 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4008 exit_block->count = EXIT_BLOCK_PTR->count;
4009
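/* Redirect all non-abnormal edges that currently reach the exit into
   the new exit block.  Redirecting an edge removes it from
   EXIT_BLOCK_PTR->preds, so IX is only advanced for edges we keep.  */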
4010 ix = 0;
4011 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4012 {
4013 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4014 if (!(e->flags & EDGE_ABNORMAL))
4015 redirect_edge_succ (e, exit_block);
4016 else
4017 ix++;
4018 }
4019
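/* The new exit block inherits the whole exit count and frequency;
   subtract whatever still flows into the real exit block through the
   remaining (abnormal) predecessor edges.  */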
4020 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4021 e->probability = REG_BR_PROB_BASE;
4022 e->count = EXIT_BLOCK_PTR->count;
4023 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4024 if (e2 != e)
4025 {
4026 e->count -= e2->count;
4027 exit_block->count -= e2->count;
4028 exit_block->frequency -= EDGE_FREQUENCY (e2);
4029 }
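/* With inconsistent profile data the subtractions above can go
   negative; clamp the results at zero.  */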
4030 if (e->count < 0)
4031 e->count = 0;
4032 if (exit_block->count < 0)
4033 exit_block->count = 0;
4034 if (exit_block->frequency < 0)
4035 exit_block->frequency = 0;
4036 update_bb_for_insn (exit_block);
4037 }
4038
4039 /* Helper function for discover_nonconstant_array_refs.
4040 Look for ARRAY_REF nodes with non-constant indexes and mark their
4041 bases addressable. */
4042
4043 static tree
4044 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4045 void *data ATTRIBUTE_UNUSED)
4046 {
4047 tree t = *tp;
4048
4049 if (IS_TYPE_OR_DECL_P (t))
4050 *walk_subtrees = 0;
4051 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4052 {
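/* Strip handled components whose offsets are all gimple invariants.
   If what remains is still an ARRAY_REF or ARRAY_RANGE_REF, its index
   is non-constant, so the base object must be addressable.  */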
4053 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4054 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4055 && (!TREE_OPERAND (t, 2)
4056 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4057 || (TREE_CODE (t) == COMPONENT_REF
4058 && (!TREE_OPERAND (t, 2)
4059 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4060 || TREE_CODE (t) == BIT_FIELD_REF
4061 || TREE_CODE (t) == REALPART_EXPR
4062 || TREE_CODE (t) == IMAGPART_EXPR
4063 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4064 || CONVERT_EXPR_P (t))
4065 t = TREE_OPERAND (t, 0);
4066
4067 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4068 {
4069 t = get_base_address (t);
4070 if (t && DECL_P (t)
4071 && DECL_MODE (t) != BLKmode)
4072 TREE_ADDRESSABLE (t) = 1;
4073 }
4074
4075 *walk_subtrees = 0;
4076 }
4077
4078 return NULL_TREE;
4079 }
4080
4081 /* RTL expansion is not able to compile array references with variable
4082 offsets for arrays stored in a single register. Discover such
4083 expressions and mark the variables addressable to avoid this
4084 scenario. */
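/* For example, given a small array that could otherwise live in a
   single register:

     char a[4];
     ...
     x = a[i];

   the non-constant index I requires A to live in memory, so A is
   marked TREE_ADDRESSABLE here.  */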
4085
4086 static void
4087 discover_nonconstant_array_refs (void)
4088 {
4089 basic_block bb;
4090 gimple_stmt_iterator gsi;
4091
4092 FOR_EACH_BB (bb)
4093 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4094 {
4095 gimple stmt = gsi_stmt (gsi);
4096 if (!is_gimple_debug (stmt))
4097 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
4098 }
4099 }
4100
4101 /* This function sets crtl->args.internal_arg_pointer to a virtual
4102 register if DRAP is needed. The local register allocator will replace
4103 virtual_incoming_args_rtx with the virtual register. */
4104
4105 static void
4106 expand_stack_alignment (void)
4107 {
4108 rtx drap_rtx;
4109 unsigned int preferred_stack_boundary;
4110
4111 if (! SUPPORTS_STACK_ALIGNMENT)
4112 return;
4113
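/* Functions that call alloca or use nonlocal labels/gotos cannot
   realign the stack without a dynamic realign argument pointer
   (DRAP) to reach the incoming argument area.  */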
4114 if (cfun->calls_alloca
4115 || cfun->has_nonlocal_label
4116 || crtl->has_nonlocal_goto)
4117 crtl->need_drap = true;
4118
4119 /* Call update_stack_boundary here again to update incoming stack
4120 boundary. It may set incoming stack alignment to a different
4121 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4122 use the minimum incoming stack alignment to check if it is OK
4123 to perform sibcall optimization since sibcall optimization will
4124 only align the outgoing stack to incoming stack boundary. */
4125 if (targetm.calls.update_stack_boundary)
4126 targetm.calls.update_stack_boundary ();
4127
4128 /* The incoming stack frame has to be aligned at least at
4129 parm_stack_boundary. */
4130 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4131
4132 /* Update crtl->stack_alignment_estimated and use it later to align
4133 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4134 exceptions since callgraph doesn't collect incoming stack alignment
4135 in this case. */
4136 if (cfun->can_throw_non_call_exceptions
4137 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4138 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4139 else
4140 preferred_stack_boundary = crtl->preferred_stack_boundary;
4141 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4142 crtl->stack_alignment_estimated = preferred_stack_boundary;
4143 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4144 crtl->stack_alignment_needed = preferred_stack_boundary;
4145
4146 gcc_assert (crtl->stack_alignment_needed
4147 <= crtl->stack_alignment_estimated);
4148
4149 crtl->stack_realign_needed
4150 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4151 crtl->stack_realign_tried = crtl->stack_realign_needed;
4152
4153 crtl->stack_realign_processed = true;
4154
4155 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4156 alignment. */
4157 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4158 drap_rtx = targetm.calls.get_drap_rtx ();
4159
4160 /* stack_realign_drap and drap_rtx must match. */
4161 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4162
4163 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4164 if (NULL != drap_rtx)
4165 {
4166 crtl->args.internal_arg_pointer = drap_rtx;
4167
4168 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4169 needed. */
4170 fixup_tail_calls ();
4171 }
4172 }
4173
4174 /* Translate the intermediate representation contained in the CFG
4175 from GIMPLE trees to RTL.
4176
4177 We do conversion per basic block and preserve/update the tree CFG.
4178 This implies we have to do some magic as the CFG can simultaneously
4179 consist of basic blocks containing RTL and GIMPLE trees. This can
4180 confuse the CFG hooks, so be careful not to manipulate the CFG during
4181 the expansion. */
4182
4183 static unsigned int
4184 gimple_expand_cfg (void)
4185 {
4186 basic_block bb, init_block;
4187 sbitmap blocks;
4188 edge_iterator ei;
4189 edge e;
4190 rtx var_seq;
4191 unsigned i;
4192
4193 timevar_push (TV_OUT_OF_SSA);
4194 rewrite_out_of_ssa (&SA);
4195 timevar_pop (TV_OUT_OF_SSA);
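/* partition_to_pseudo maps each out-of-SSA partition to the RTL
   (pseudo register or stack slot) chosen for it; the entries are
   filled in while expanding variables and parameters below.  */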
4196 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4197 sizeof (rtx));
4198
4199 /* Some backends want to know that we are expanding to RTL. */
4200 currently_expanding_to_rtl = 1;
4201
4202 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4203
4204 insn_locators_alloc ();
4205 if (!DECL_IS_BUILTIN (current_function_decl))
4206 {
4207 /* Eventually, all FEs should explicitly set function_start_locus. */
4208 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4209 set_curr_insn_source_location
4210 (DECL_SOURCE_LOCATION (current_function_decl));
4211 else
4212 set_curr_insn_source_location (cfun->function_start_locus);
4213 }
4214 else
4215 set_curr_insn_source_location (UNKNOWN_LOCATION);
4216 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4217 prologue_locator = curr_insn_locator ();
4218
4219 #ifdef INSN_SCHEDULING
4220 init_sched_attrs ();
4221 #endif
4222
4223 /* Make sure the first insn is a note even if we don't want line numbers.
4224 This ensures the first insn will never be deleted.
4225 Also, final expects a note to appear there. */
4226 emit_note (NOTE_INSN_DELETED);
4227
4228 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4229 discover_nonconstant_array_refs ();
4230
4231 targetm.expand_to_rtl_hook ();
4232 crtl->stack_alignment_needed = STACK_BOUNDARY;
4233 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4234 crtl->stack_alignment_estimated = 0;
4235 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4236 cfun->cfg->max_jumptable_ents = 0;
4237
4238 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
4239 of the function section at expansion time to predict the distance of calls. */
4240 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4241
4242 /* Expand the variables recorded during gimple lowering. */
4243 timevar_push (TV_VAR_EXPAND);
4244 start_sequence ();
4245
4246 expand_used_vars ();
4247
4248 var_seq = get_insns ();
4249 end_sequence ();
4250 timevar_pop (TV_VAR_EXPAND);
4251
4252 /* Honor stack protection warnings. */
4253 if (warn_stack_protect)
4254 {
4255 if (cfun->calls_alloca)
4256 warning (OPT_Wstack_protector,
4257 "stack protector not protecting local variables: "
4258 "variable length buffer");
4259 if (has_short_buffer && !crtl->stack_protect_guard)
4260 warning (OPT_Wstack_protector,
4261 "stack protector not protecting function: "
4262 "all local arrays are less than %d bytes long",
4263 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4264 }
4265
4266 /* Set up parameters and prepare for return, for the function. */
4267 expand_function_start (current_function_decl);
4268
4269 /* If we emitted any instructions for setting up the variables,
4270 emit them before the FUNCTION_START note. */
4271 if (var_seq)
4272 {
4273 emit_insn_before (var_seq, parm_birth_insn);
4274
4275 /* In expand_function_end we'll insert the alloca save/restore
4276 before parm_birth_insn. We've just inserted an alloca call.
4277 Adjust the pointer to match. */
4278 parm_birth_insn = var_seq;
4279 }
4280
4281 /* Now that we also have the parameter RTXs, copy them over to our
4282 partitions. */
4283 for (i = 0; i < SA.map->num_partitions; i++)
4284 {
4285 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4286
4287 if (TREE_CODE (var) != VAR_DECL
4288 && !SA.partition_to_pseudo[i])
4289 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4290 gcc_assert (SA.partition_to_pseudo[i]);
4291
4292 /* If this decl was marked as living in multiple places, reset
4293 this now to NULL. */
4294 if (DECL_RTL_IF_SET (var) == pc_rtx)
4295 SET_DECL_RTL (var, NULL);
4296
4297 /* Some RTL parts really want to look at DECL_RTL(x) when x
4298 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4299 SET_DECL_RTL here to make this available, but that would mean
4300 selecting one of the potentially many RTLs for one DECL. Instead
4301 of doing that we simply reset the MEM_EXPR of the RTL in question,
4302 so nobody can get at it and hence nobody can call DECL_RTL on it. */
4303 if (!DECL_RTL_SET_P (var))
4304 {
4305 if (MEM_P (SA.partition_to_pseudo[i]))
4306 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4307 }
4308 }
4309
4310 /* If we have a class containing differently aligned pointers
4311 we need to merge those into the corresponding RTL pointer
4312 alignment. */
4313 for (i = 1; i < num_ssa_names; i++)
4314 {
4315 tree name = ssa_name (i);
4316 int part;
4317 rtx r;
4318
4319 if (!name
4320 || !POINTER_TYPE_P (TREE_TYPE (name))
4321 /* We might have generated new SSA names in
4322 update_alias_info_with_stack_vars. They will have a NULL
4323 defining statement and won't be part of the partitioning,
4324 so ignore them. */
4325 || !SSA_NAME_DEF_STMT (name))
4326 continue;
4327 part = var_to_partition (SA.map, name);
4328 if (part == NO_PARTITION)
4329 continue;
4330 r = SA.partition_to_pseudo[part];
4331 if (REG_P (r))
4332 mark_reg_pointer (r, get_pointer_alignment (name));
4333 }
4334
4335 /* If this function is `main', emit a call to `__main'
4336 to run global initializers, etc. */
4337 if (DECL_NAME (current_function_decl)
4338 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4339 && DECL_FILE_SCOPE_P (current_function_decl))
4340 expand_main_function ();
4341
4342 /* Initialize the stack_protect_guard field. This must happen after the
4343 call to __main (if any) so that the external decl is initialized. */
4344 if (crtl->stack_protect_guard)
4345 stack_protect_prologue ();
4346
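/* Eliminate the remaining PHI nodes by inserting the necessary copies
   on the incoming edges for arguments that were not coalesced into the
   same partition as the PHI result.  */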
4347 expand_phi_nodes (&SA);
4348
4349 /* Register rtl specific functions for cfg. */
4350 rtl_register_cfg_hooks ();
4351
4352 init_block = construct_init_block ();
4353
4354 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleared from the
4355 remaining edges later. */
4356 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4357 e->flags &= ~EDGE_EXECUTABLE;
4358
4359 lab_rtx_for_bb = pointer_map_create ();
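/* Expand each GIMPLE basic block into RTL.  expand_gimple_basic_block
   may split the block (e.g. for conditionals and tail calls) and
   returns the block from which expansion of the remaining blocks
   should continue.  */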
4360 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4361 bb = expand_gimple_basic_block (bb);
4362
4363 if (MAY_HAVE_DEBUG_INSNS)
4364 expand_debug_locations ();
4365
4366 execute_free_datastructures ();
4367 timevar_push (TV_OUT_OF_SSA);
4368 finish_out_of_ssa (&SA);
4369 timevar_pop (TV_OUT_OF_SSA);
4370
4371 timevar_push (TV_POST_EXPAND);
4372 /* We are no longer in SSA form. */
4373 cfun->gimple_df->in_ssa_p = false;
4374
4375 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4376 conservatively to true until they are all profile aware. */
4377 pointer_map_destroy (lab_rtx_for_bb);
4378 free_histograms ();
4379
4380 construct_exit_block ();
4381 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4382 insn_locators_finalize ();
4383
4384 /* Zap the tree EH table. */
4385 set_eh_throw_stmt_table (cfun, NULL);
4386
4387 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4388 to split edges, which edge insertions might do. */
4389 rebuild_jump_labels (get_insns ());
4390
4391 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4392 {
4393 edge e;
4394 edge_iterator ei;
4395 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4396 {
4397 if (e->insns.r)
4398 {
4399 rebuild_jump_labels_chain (e->insns.r);
4400 /* Avoid putting insns before parm_birth_insn. */
4401 if (e->src == ENTRY_BLOCK_PTR
4402 && single_succ_p (ENTRY_BLOCK_PTR)
4403 && parm_birth_insn)
4404 {
4405 rtx insns = e->insns.r;
4406 e->insns.r = NULL_RTX;
4407 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4408 }
4409 else
4410 commit_one_edge_insertion (e);
4411 }
4412 else
4413 ei_next (&ei);
4414 }
4415 }
4416
4417 /* We're done expanding trees to RTL. */
4418 currently_expanding_to_rtl = 0;
4419
4420 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4421 {
4422 edge e;
4423 edge_iterator ei;
4424 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4425 {
4426 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4427 e->flags &= ~EDGE_EXECUTABLE;
4428
4429 /* At the moment not all abnormal edges match the RTL
4430 representation. It is safe to remove them here as
4431 find_many_sub_basic_blocks will rediscover them.
4432 In the future we should get this fixed properly. */
4433 if ((e->flags & EDGE_ABNORMAL)
4434 && !(e->flags & EDGE_SIBCALL))
4435 remove_edge (e);
4436 else
4437 ei_next (&ei);
4438 }
4439 }
4440
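/* Expansion has emitted new jumps, labels and barriers inside what
   used to be single GIMPLE basic blocks; scan all blocks and split
   them into proper RTL basic blocks.  */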
4441 blocks = sbitmap_alloc (last_basic_block);
4442 sbitmap_ones (blocks);
4443 find_many_sub_basic_blocks (blocks);
4444 sbitmap_free (blocks);
4445 purge_all_dead_edges ();
4446
4447 compact_blocks ();
4448
4449 expand_stack_alignment ();
4450
4451 #ifdef ENABLE_CHECKING
4452 verify_flow_info ();
4453 #endif
4454
4455 /* There's no need to defer outputting this function any more; we
4456 know we want to output it. */
4457 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4458
4459 /* Now that we're done expanding trees to RTL, we shouldn't have any
4460 more CONCATs anywhere. */
4461 generating_concat_p = 0;
4462
4463 if (dump_file)
4464 {
4465 fprintf (dump_file,
4466 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4467 /* And the pass manager will dump RTL for us. */
4468 }
4469
4470 /* If we're emitting a nested function, make sure its parent gets
4471 emitted as well. Doing otherwise confuses debug info. */
4472 {
4473 tree parent;
4474 for (parent = DECL_CONTEXT (current_function_decl);
4475 parent != NULL_TREE;
4476 parent = get_containing_scope (parent))
4477 if (TREE_CODE (parent) == FUNCTION_DECL)
4478 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4479 }
4480
4481 /* We are now committed to emitting code for this function. Do any
4482 preparation, such as emitting abstract debug info for the inline
4483 function before it gets mangled by optimization. */
4484 if (cgraph_function_possibly_inlined_p (current_function_decl))
4485 (*debug_hooks->outlining_inline_function) (current_function_decl);
4486
4487 TREE_ASM_WRITTEN (current_function_decl) = 1;
4488
4489 /* After expanding, the return labels are no longer needed. */
4490 return_label = NULL;
4491 naked_return_label = NULL;
4492
4493 /* After expanding, the tm_restart map is no longer needed. */
4494 if (cfun->gimple_df->tm_restart)
4495 {
4496 htab_delete (cfun->gimple_df->tm_restart);
4497 cfun->gimple_df->tm_restart = NULL;
4498 }
4499
4500 /* Tag the blocks with a depth number so that change_scope can find
4501 the common parent easily. */
4502 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4503 default_rtl_profile ();
4504 timevar_pop (TV_POST_EXPAND);
4505 return 0;
4506 }
4507
4508 struct rtl_opt_pass pass_expand =
4509 {
4510 {
4511 RTL_PASS,
4512 "expand", /* name */
4513 NULL, /* gate */
4514 gimple_expand_cfg, /* execute */
4515 NULL, /* sub */
4516 NULL, /* next */
4517 0, /* static_pass_number */
4518 TV_EXPAND, /* tv_id */
4519 PROP_ssa | PROP_gimple_leh | PROP_cfg
4520 | PROP_gimple_lcx, /* properties_required */
4521 PROP_rtl, /* properties_provided */
4522 PROP_ssa | PROP_trees, /* properties_destroyed */
4523 TODO_verify_ssa | TODO_verify_flow
4524 | TODO_verify_stmts, /* todo_flags_start */
4525 TODO_ggc_collect /* todo_flags_finish */
4526 }
4527 };