/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"

/* Verify that there is exactly one jump instruction emitted since LAST
   and attach to it a REG_BR_PROB note specifying PROBABILITY.
   ??? We really ought to pass the probability down to RTL expanders and let it
   re-distribute it when the conditional expands into multiple conditionals.
   This is however difficult to do.  */
void
add_reg_br_prob_note (rtx last, int probability)
{
  if (profile_status == PROFILE_ABSENT)
    return;
  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
    if (JUMP_P (last))
      {
        /* It is common to emit a condjump-around-jump sequence when we don't
           know how to reverse the conditional.  Special case this.  */
        if (!any_condjump_p (last)
            || !JUMP_P (NEXT_INSN (last))
            || !simplejump_p (NEXT_INSN (last))
            || !NEXT_INSN (NEXT_INSN (last))
            || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
            || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
            || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
            || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
          goto failed;
        gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
        REG_NOTES (last)
          = gen_rtx_EXPR_LIST (REG_BR_PROB,
                               GEN_INT (REG_BR_PROB_BASE - probability),
                               REG_NOTES (last));
        return;
      }
  if (!last || !JUMP_P (last) || !any_condjump_p (last))
    goto failed;
  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
  REG_NOTES (last)
    = gen_rtx_EXPR_LIST (REG_BR_PROB,
                         GEN_INT (probability), REG_NOTES (last));
  return;
 failed:
  if (dump_file)
    fprintf (dump_file, "Failed to add probability note\n");
}

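/* Added illustrative note (editorial, not part of the original sources):
   the "condjump-around-jump" shape special-cased above is the four-insn
   stream

	conditional jump  ->  unconditional jump  ->  barrier  ->  label

   that expanders emit when they cannot invert the condition directly.  In
   that shape the conditional branch is taken when the original condition
   is *false*, which is why the note stores REG_BR_PROB_BASE - probability
   rather than the probability itself.  */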

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* We have an interference graph between such objects.  This graph
   is lower triangular.  */
static bool *stack_vars_conflict;
static size_t stack_vars_conflict_alloc;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't honor given the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = DECL_ALIGN (decl);
  align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align);
  if (align > PREFERRED_STACK_BOUNDARY)
    align = PREFERRED_STACK_BOUNDARY;
  if (cfun->stack_alignment_needed < align)
    cfun->stack_alignment_needed = align;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
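
/* Added illustrative note (editorial, not part of the original sources):
   with a downward growing frame, frame_phase == 0, frame_offset == -13,
   SIZE == 4 and ALIGN == 8, the running offset becomes -13 - 4 = -17 and
   the mask rounds it down to -24, so the new object occupies bytes
   [-24, -20) and the next allocation continues below it.  Upward growing
   frames instead round the running offset up and bump it by SIZE after
   recording the object's offset.  */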

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  SET_DECL_RTL (decl, pc_rtx);

  stack_vars_num++;
}

/* Compute the linear index of a lower-triangular coordinate (I, J).  */

static size_t
triangular_index (size_t i, size_t j)
{
  if (i < j)
    {
      size_t t;
      t = i, i = j, j = t;
    }
  return (i * (i + 1)) / 2 + j;
}
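
/* Added illustrative note (editorial, not part of the original sources):
   after swapping so that I >= J, the coordinates map as (0,0) -> 0,
   (1,0) -> 1, (1,1) -> 2, (2,0) -> 3, (2,1) -> 4, (2,2) -> 5, and so on.
   A symmetric relation over N objects therefore needs only
   N * (N + 1) / 2 entries, which is exactly what
   resize_stack_vars_conflict allocates below.  */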

/* Ensure that STACK_VARS_CONFLICT is large enough for N objects.  */

static void
resize_stack_vars_conflict (size_t n)
{
  size_t size = triangular_index (n-1, n-1) + 1;

  if (size <= stack_vars_conflict_alloc)
    return;

  stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
  memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
          (size - stack_vars_conflict_alloc) * sizeof (bool));
  stack_vars_conflict_alloc = size;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  stack_vars_conflict[index] = true;
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  return stack_vars_conflict[index];
}
d239ed56
SB
281
282/* Returns true if TYPE is or contains a union type. */
283
284static bool
285aggregate_contains_union_type (tree type)
286{
287 tree field;
288
289 if (TREE_CODE (type) == UNION_TYPE
290 || TREE_CODE (type) == QUAL_UNION_TYPE)
291 return true;
292 if (TREE_CODE (type) == ARRAY_TYPE)
293 return aggregate_contains_union_type (TREE_TYPE (type));
294 if (TREE_CODE (type) != RECORD_TYPE)
295 return false;
296
297 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
298 if (TREE_CODE (field) == FIELD_DECL)
299 if (aggregate_contains_union_type (TREE_TYPE (field)))
300 return true;
301
302 return false;
303}
304
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for structures that contain unions.  Otherwise RTL alias analysis comes
   along and, due to type based aliasing rules, decides that for two
   overlapping union temporaries { short s; int i; } accesses to the same
   mem through different types may not alias and happily reorders stores
   across life-time boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */
1f6d3a08
RH
314
315static void
316add_alias_set_conflicts (void)
317{
318 size_t i, j, n = stack_vars_num;
319
320 for (i = 0; i < n; ++i)
321 {
a4d25453
RH
322 tree type_i = TREE_TYPE (stack_vars[i].decl);
323 bool aggr_i = AGGREGATE_TYPE_P (type_i);
d239ed56 324 bool contains_union;
1f6d3a08 325
d239ed56 326 contains_union = aggregate_contains_union_type (type_i);
1f6d3a08
RH
327 for (j = 0; j < i; ++j)
328 {
a4d25453
RH
329 tree type_j = TREE_TYPE (stack_vars[j].decl);
330 bool aggr_j = AGGREGATE_TYPE_P (type_j);
d239ed56
SB
331 if (aggr_i != aggr_j
332 /* Either the objects conflict by means of type based
333 aliasing rules, or we need to add a conflict. */
334 || !objects_must_conflict_p (type_i, type_j)
335 /* In case the types do not conflict ensure that access
336 to elements will conflict. In case of unions we have
337 to be careful as type based aliasing rules may say
338 access to the same memory does not conflict. So play
339 safe and add a conflict in this case. */
340 || contains_union)
1f6d3a08
RH
341 add_stack_var_conflict (i, j);
342 }
343 }
344}
345
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size of the object.  */
348
349static int
350stack_var_size_cmp (const void *a, const void *b)
351{
352 HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
353 HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
79f802f5
RG
354 unsigned int uida = DECL_UID (stack_vars[*(const size_t *)a].decl);
355 unsigned int uidb = DECL_UID (stack_vars[*(const size_t *)b].decl);
1f6d3a08
RH
356
357 if (sa < sb)
358 return -1;
359 if (sa > sb)
360 return 1;
79f802f5
RG
361 /* For stack variables of the same size use the uid of the decl
362 to make the sort stable. */
363 if (uida < uidb)
364 return -1;
365 if (uida > uidb)
366 return 1;
1f6d3a08
RH
367 return 0;
368}
369
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block that is easy to lay
   out within the stack frame.  */
377
378static void
379union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
380{
381 size_t i, last;
382
383 /* Update each element of partition B with the given offset,
384 and merge them into partition A. */
385 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
386 {
387 stack_vars[i].offset += offset;
388 stack_vars[i].representative = a;
389 }
390 stack_vars[last].next = stack_vars[a].next;
391 stack_vars[a].next = b;
392
393 /* Update the required alignment of partition A to account for B. */
394 if (stack_vars[a].alignb < stack_vars[b].alignb)
395 stack_vars[a].alignb = stack_vars[b].alignb;
396
397 /* Update the interference graph and merge the conflicts. */
398 for (last = stack_vars_num, i = 0; i < last; ++i)
399 if (stack_var_conflict_p (b, i))
400 add_stack_var_conflict (a, i);
401}
402
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size.
	For each object A {
	  S = size(A)
	  O = 0
	  loop {
	    Look for the largest non-conflicting object B with size <= S.
	    UNION (A, B)
	    offset(B) = O
	    O += size(B)
	    S -= size(B)
	  }
	}
*/
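
/* Added illustrative note (editorial, not part of the original sources):
   in the simplest case, two 16-byte locals whose lifetimes never overlap,
   and which the alias-set rules above do not force to conflict, end up
   unioned with offset 0, so expand_stack_vars below assigns them the same
   16-byte slot instead of 32 bytes of frame space.  Objects that conflict
   keep their own partitions and are laid out separately.  */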
420
421static void
422partition_stack_vars (void)
423{
424 size_t si, sj, n = stack_vars_num;
425
426 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
427 for (si = 0; si < n; ++si)
428 stack_vars_sorted[si] = si;
429
430 if (n == 1)
431 return;
432
433 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);
434
435 /* Special case: detect when all variables conflict, and thus we can't
436 do anything during the partitioning loop. It isn't uncommon (with
437 C code at least) to declare all variables at the top of the function,
438 and if we're not inlining, then all variables will be in the same scope.
439 Take advantage of very fast libc routines for this scan. */
440 gcc_assert (sizeof(bool) == sizeof(char));
441 if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
442 return;
443
444 for (si = 0; si < n; ++si)
445 {
446 size_t i = stack_vars_sorted[si];
447 HOST_WIDE_INT isize = stack_vars[i].size;
448 HOST_WIDE_INT offset = 0;
449
450 for (sj = si; sj-- > 0; )
451 {
452 size_t j = stack_vars_sorted[sj];
453 HOST_WIDE_INT jsize = stack_vars[j].size;
454 unsigned int jalign = stack_vars[j].alignb;
455
456 /* Ignore objects that aren't partition representatives. */
457 if (stack_vars[j].representative != j)
458 continue;
459
460 /* Ignore objects too large for the remaining space. */
461 if (isize < jsize)
462 continue;
463
464 /* Ignore conflicting objects. */
465 if (stack_var_conflict_p (i, j))
466 continue;
467
468 /* Refine the remaining space check to include alignment. */
469 if (offset & (jalign - 1))
470 {
471 HOST_WIDE_INT toff = offset;
472 toff += jalign - 1;
473 toff &= -(HOST_WIDE_INT)jalign;
474 if (isize - (toff - offset) < jsize)
475 continue;
476
477 isize -= toff - offset;
478 offset = toff;
479 }
480
481 /* UNION the objects, placing J at OFFSET. */
482 union_stack_vars (i, j, offset);
483
484 isize -= jsize;
485 if (isize == 0)
486 break;
487 }
488 }
489}
490
491/* A debugging aid for expand_used_vars. Dump the generated partitions. */
492
493static void
494dump_stack_var_partition (void)
495{
496 size_t si, i, j, n = stack_vars_num;
497
498 for (si = 0; si < n; ++si)
499 {
500 i = stack_vars_sorted[si];
501
502 /* Skip variables that aren't partition representatives, for now. */
503 if (stack_vars[i].representative != i)
504 continue;
505
506 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
507 " align %u\n", (unsigned long) i, stack_vars[i].size,
508 stack_vars[i].alignb);
509
510 for (j = i; j != EOC; j = stack_vars[j].next)
511 {
512 fputc ('\t', dump_file);
513 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
516 }
517 }
518}
519
520/* Assign rtl to DECL at frame offset OFFSET. */
521
522static void
523expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
524{
525 HOST_WIDE_INT align;
526 rtx x;
c22cacf3 527
1f6d3a08
RH
528 /* If this fails, we've overflowed the stack frame. Error nicely? */
529 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
530
531 x = plus_constant (virtual_stack_vars_rtx, offset);
532 x = gen_rtx_MEM (DECL_MODE (decl), x);
533
534 /* Set alignment we actually gave this decl. */
535 offset -= frame_phase;
536 align = offset & -offset;
537 align *= BITS_PER_UNIT;
538 if (align > STACK_BOUNDARY || align == 0)
539 align = STACK_BOUNDARY;
540 DECL_ALIGN (decl) = align;
541 DECL_USER_ALIGN (decl) = 0;
542
543 set_mem_attributes (x, decl, true);
544 SET_DECL_RTL (decl, x);
545}
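
/* Added illustrative note (editorial, not part of the original sources):
   the alignment recorded above is simply the largest power of two dividing
   the phase-corrected offset, converted to bits.  E.g. an offset of -24
   gives (-24) & 24 = 8 bytes, i.e. 64-bit alignment; an offset of 0 makes
   "offset & -offset" zero, which is why the align == 0 case, like any
   deduced value above STACK_BOUNDARY, falls back to STACK_BOUNDARY.  */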
546
547/* A subroutine of expand_used_vars. Give each partition representative
548 a unique location within the stack frame. Update each partition member
549 with that location. */
550
551static void
7d69de61 552expand_stack_vars (bool (*pred) (tree))
1f6d3a08
RH
553{
554 size_t si, i, j, n = stack_vars_num;
555
556 for (si = 0; si < n; ++si)
557 {
558 HOST_WIDE_INT offset;
559
560 i = stack_vars_sorted[si];
561
562 /* Skip variables that aren't partition representatives, for now. */
563 if (stack_vars[i].representative != i)
564 continue;
565
7d69de61
RH
566 /* Skip variables that have already had rtl assigned. See also
567 add_stack_var where we perpetrate this pc_rtx hack. */
568 if (DECL_RTL (stack_vars[i].decl) != pc_rtx)
569 continue;
570
c22cacf3 571 /* Check the predicate to see whether this variable should be
7d69de61
RH
572 allocated in this pass. */
573 if (pred && !pred (stack_vars[i].decl))
574 continue;
575
1f6d3a08
RH
576 offset = alloc_stack_frame_space (stack_vars[i].size,
577 stack_vars[i].alignb);
578
579 /* Create rtl for each variable based on their location within the
580 partition. */
581 for (j = i; j != EOC; j = stack_vars[j].next)
582 expand_one_stack_var_at (stack_vars[j].decl,
583 stack_vars[j].offset + offset);
584 }
585}
586
ff28a94d
JH
587/* Take into account all sizes of partitions and reset DECL_RTLs. */
588static HOST_WIDE_INT
589account_stack_vars (void)
590{
591 size_t si, j, i, n = stack_vars_num;
592 HOST_WIDE_INT size = 0;
593
594 for (si = 0; si < n; ++si)
595 {
596 i = stack_vars_sorted[si];
597
598 /* Skip variables that aren't partition representatives, for now. */
599 if (stack_vars[i].representative != i)
600 continue;
601
602 size += stack_vars[i].size;
603 for (j = i; j != EOC; j = stack_vars[j].next)
604 SET_DECL_RTL (stack_vars[j].decl, NULL);
605 }
606 return size;
607}
608
1f6d3a08
RH
609/* A subroutine of expand_one_var. Called to immediately assign rtl
610 to a variable to be allocated in the stack frame. */
611
612static void
613expand_one_stack_var (tree var)
614{
615 HOST_WIDE_INT size, offset, align;
616
617 size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
618 align = get_decl_align_unit (var);
619 offset = alloc_stack_frame_space (size, align);
620
621 expand_one_stack_var_at (var, offset);
622}
623
624/* A subroutine of expand_one_var. Called to assign rtl
625 to a TREE_STATIC VAR_DECL. */
626
627static void
628expand_one_static_var (tree var)
629{
3b9ade75
JH
  /* In unit-at-a-time mode all the static variables are expanded at the
     end of the compilation process.  */
632 if (flag_unit_at_a_time)
633 return;
1f6d3a08
RH
634 /* If this is an inlined copy of a static local variable,
635 look up the original. */
636 var = DECL_ORIGIN (var);
637
638 /* If we've already processed this variable because of that, do nothing. */
639 if (TREE_ASM_WRITTEN (var))
640 return;
641
642 /* Give the front end a chance to do whatever. In practice, this is
643 resolving duplicate names for IMA in C. */
644 if (lang_hooks.expand_decl (var))
645 return;
646
647 /* Otherwise, just emit the variable. */
648 rest_of_decl_compilation (var, 0, 0);
649}
650
651/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
652 that will reside in a hard register. */
653
654static void
655expand_one_hard_reg_var (tree var)
656{
657 rest_of_decl_compilation (var, 0, 0);
658}
659
660/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
661 that will reside in a pseudo register. */
662
663static void
664expand_one_register_var (tree var)
665{
666 tree type = TREE_TYPE (var);
667 int unsignedp = TYPE_UNSIGNED (type);
668 enum machine_mode reg_mode
669 = promote_mode (type, DECL_MODE (var), &unsignedp, 0);
670 rtx x = gen_reg_rtx (reg_mode);
671
672 SET_DECL_RTL (var, x);
673
674 /* Note if the object is a user variable. */
675 if (!DECL_ARTIFICIAL (var))
676 {
677 mark_user_reg (x);
678
679 /* Trust user variables which have a pointer type to really
680 be pointers. Do not trust compiler generated temporaries
681 as our type system is totally busted as it relates to
682 pointer arithmetic which translates into lots of compiler
683 generated objects with pointer types, but which are not really
684 pointers. */
685 if (POINTER_TYPE_P (type))
686 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));
687 }
688}
689
690/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
128a79fb 691 has some associated error, e.g. its type is error-mark. We just need
1f6d3a08
RH
692 to pick something that won't crash the rest of the compiler. */
693
694static void
695expand_one_error_var (tree var)
696{
697 enum machine_mode mode = DECL_MODE (var);
698 rtx x;
699
700 if (mode == BLKmode)
701 x = gen_rtx_MEM (BLKmode, const0_rtx);
702 else if (mode == VOIDmode)
703 x = const0_rtx;
704 else
705 x = gen_reg_rtx (mode);
706
707 SET_DECL_RTL (var, x);
708}
709
c22cacf3 710/* A subroutine of expand_one_var. VAR is a variable that will be
1f6d3a08
RH
711 allocated to the local stack frame. Return true if we wish to
712 add VAR to STACK_VARS so that it will be coalesced with other
713 variables. Return false to allocate VAR immediately.
714
715 This function is used to reduce the number of variables considered
716 for coalescing, which reduces the size of the quadratic problem. */
717
718static bool
719defer_stack_allocation (tree var, bool toplevel)
720{
7d69de61
RH
721 /* If stack protection is enabled, *all* stack variables must be deferred,
722 so that we can re-order the strings to the top of the frame. */
723 if (flag_stack_protect)
724 return true;
725
1f6d3a08
RH
726 /* Variables in the outermost scope automatically conflict with
727 every other variable. The only reason to want to defer them
728 at all is that, after sorting, we can more efficiently pack
729 small variables in the stack frame. Continue to defer at -O2. */
730 if (toplevel && optimize < 2)
731 return false;
732
733 /* Without optimization, *most* variables are allocated from the
734 stack, which makes the quadratic problem large exactly when we
c22cacf3 735 want compilation to proceed as quickly as possible. On the
1f6d3a08
RH
736 other hand, we don't want the function's stack frame size to
737 get completely out of hand. So we avoid adding scalars and
738 "small" aggregates to the list at all. */
739 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
740 return false;
741
742 return true;
743}
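
/* Added illustrative note (editorial, not part of the original sources):
   at -O0 a plain "int x" or any other local smaller than 32 bytes is
   given frame space immediately, while a block-scope 64-byte buffer is
   still deferred and considered for packing; with -fstack-protector
   everything is deferred so that vulnerable arrays can be reordered next
   to the guard.  */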
744
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */
1f6d3a08 751
ff28a94d
JH
752static HOST_WIDE_INT
753expand_one_var (tree var, bool toplevel, bool really_expand)
1f6d3a08
RH
754{
755 if (TREE_CODE (var) != VAR_DECL)
ff28a94d
JH
756 {
757 if (really_expand)
758 lang_hooks.expand_decl (var);
759 }
1f6d3a08
RH
760 else if (DECL_EXTERNAL (var))
761 ;
833b3afe 762 else if (DECL_HAS_VALUE_EXPR_P (var))
1f6d3a08
RH
763 ;
764 else if (TREE_STATIC (var))
ff28a94d
JH
765 {
766 if (really_expand)
767 expand_one_static_var (var);
768 }
1f6d3a08
RH
769 else if (DECL_RTL_SET_P (var))
770 ;
771 else if (TREE_TYPE (var) == error_mark_node)
ff28a94d
JH
772 {
773 if (really_expand)
774 expand_one_error_var (var);
775 }
1f6d3a08 776 else if (DECL_HARD_REGISTER (var))
ff28a94d
JH
777 {
778 if (really_expand)
779 expand_one_hard_reg_var (var);
780 }
1f6d3a08 781 else if (use_register_for_decl (var))
ff28a94d
JH
782 {
783 if (really_expand)
784 expand_one_register_var (var);
785 }
1f6d3a08
RH
786 else if (defer_stack_allocation (var, toplevel))
787 add_stack_var (var);
788 else
ff28a94d
JH
789 {
790 expand_one_stack_var (var);
791 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
792 }
793 return 0;
1f6d3a08
RH
794}
795
796/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
797 expanding variables. Those variables that can be put into registers
798 are allocated pseudos; those that can't are put on the stack.
799
800 TOPLEVEL is true if this is the outermost BLOCK. */
801
802static void
803expand_used_vars_for_block (tree block, bool toplevel)
804{
805 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
806 tree t;
807
808 old_sv_num = toplevel ? 0 : stack_vars_num;
809
810 /* Expand all variables at this level. */
811 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1368453c
JH
812 if (TREE_USED (t)
813 /* Force local static variables to be output when marked by
814 used attribute. For unit-at-a-time, cgraph code already takes
815 care of this. */
816 || (!flag_unit_at_a_time && TREE_STATIC (t)
817 && DECL_PRESERVE_P (t)))
ff28a94d 818 expand_one_var (t, toplevel, true);
1f6d3a08
RH
819
820 this_sv_num = stack_vars_num;
821
822 /* Expand all variables at containing levels. */
823 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
824 expand_used_vars_for_block (t, false);
825
826 /* Since we do not track exact variable lifetimes (which is not even
6fc0bb99 827 possible for variables whose address escapes), we mirror the block
1f6d3a08
RH
828 tree in the interference graph. Here we cause all variables at this
829 level, and all sublevels, to conflict. Do make certain that a
830 variable conflicts with itself. */
831 if (old_sv_num < this_sv_num)
832 {
833 new_sv_num = stack_vars_num;
834 resize_stack_vars_conflict (new_sv_num);
835
836 for (i = old_sv_num; i < new_sv_num; ++i)
f4a6d54e
RH
837 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
838 add_stack_var_conflict (i, j);
1f6d3a08
RH
839 }
840}
841
842/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
843 and clear TREE_USED on all local variables. */
844
845static void
846clear_tree_used (tree block)
847{
848 tree t;
849
850 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
851 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
852 TREE_USED (t) = 0;
853
854 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
855 clear_tree_used (t);
856}
857
7d69de61
RH
858/* Examine TYPE and determine a bit mask of the following features. */
859
860#define SPCT_HAS_LARGE_CHAR_ARRAY 1
861#define SPCT_HAS_SMALL_CHAR_ARRAY 2
862#define SPCT_HAS_ARRAY 4
863#define SPCT_HAS_AGGREGATE 8
864
865static unsigned int
866stack_protect_classify_type (tree type)
867{
868 unsigned int ret = 0;
869 tree t;
870
871 switch (TREE_CODE (type))
872 {
873 case ARRAY_TYPE:
874 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
875 if (t == char_type_node
876 || t == signed_char_type_node
877 || t == unsigned_char_type_node)
878 {
15362b89
JJ
879 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
880 unsigned HOST_WIDE_INT len;
7d69de61 881
15362b89
JJ
882 if (!TYPE_SIZE_UNIT (type)
883 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
884 len = max;
7d69de61 885 else
15362b89 886 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
7d69de61
RH
887
888 if (len < max)
889 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
890 else
891 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
892 }
893 else
894 ret = SPCT_HAS_ARRAY;
895 break;
896
897 case UNION_TYPE:
898 case QUAL_UNION_TYPE:
899 case RECORD_TYPE:
900 ret = SPCT_HAS_AGGREGATE;
901 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
902 if (TREE_CODE (t) == FIELD_DECL)
903 ret |= stack_protect_classify_type (TREE_TYPE (t));
904 break;
905
906 default:
907 break;
908 }
909
910 return ret;
911}
912
a4d05547
KH
913/* Return nonzero if DECL should be segregated into the "vulnerable" upper
914 part of the local stack frame. Remember if we ever return nonzero for
7d69de61
RH
915 any variable in this function. The return value is the phase number in
916 which the variable should be allocated. */
917
918static int
919stack_protect_decl_phase (tree decl)
920{
921 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
922 int ret = 0;
923
924 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
925 has_short_buffer = true;
926
927 if (flag_stack_protect == 2)
928 {
929 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
930 && !(bits & SPCT_HAS_AGGREGATE))
931 ret = 1;
932 else if (bits & SPCT_HAS_ARRAY)
933 ret = 2;
934 }
935 else
936 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
937
938 if (ret)
939 has_protected_decls = true;
940
941 return ret;
942}
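
/* Added illustrative note (editorial, not part of the original sources):
   with the default --param ssp-buffer-size=8, "char buf[64]" classifies
   as a large character array and lands in phase 1, adjacent to the guard;
   "int v[16]" only reaches phase 2 and is segregated there only under
   -fstack-protector-all (flag_stack_protect == 2).  */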
943
944/* Two helper routines that check for phase 1 and phase 2. These are used
945 as callbacks for expand_stack_vars. */
946
947static bool
948stack_protect_decl_phase_1 (tree decl)
949{
950 return stack_protect_decl_phase (decl) == 1;
951}
952
953static bool
954stack_protect_decl_phase_2 (tree decl)
955{
956 return stack_protect_decl_phase (decl) == 2;
957}
958
959/* Ensure that variables in different stack protection phases conflict
960 so that they are not merged and share the same stack slot. */
961
962static void
963add_stack_protection_conflicts (void)
964{
965 size_t i, j, n = stack_vars_num;
966 unsigned char *phase;
967
968 phase = XNEWVEC (unsigned char, n);
969 for (i = 0; i < n; ++i)
970 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
971
972 for (i = 0; i < n; ++i)
973 {
974 unsigned char ph_i = phase[i];
975 for (j = 0; j < i; ++j)
976 if (ph_i != phase[j])
977 add_stack_var_conflict (i, j);
978 }
979
980 XDELETEVEC (phase);
981}
982
983/* Create a decl for the guard at the top of the stack frame. */
984
985static void
986create_stack_guard (void)
987{
988 tree guard = build_decl (VAR_DECL, NULL, ptr_type_node);
989 TREE_THIS_VOLATILE (guard) = 1;
990 TREE_USED (guard) = 1;
991 expand_one_stack_var (guard);
992 cfun->stack_protect_guard = guard;
993}
994
ff28a94d
JH
995/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
996 expanding variables. Those variables that can be put into registers
997 are allocated pseudos; those that can't are put on the stack.
998
999 TOPLEVEL is true if this is the outermost BLOCK. */
1000
1001static HOST_WIDE_INT
1002account_used_vars_for_block (tree block, bool toplevel)
1003{
1004 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1005 tree t;
1006 HOST_WIDE_INT size = 0;
1007
1008 old_sv_num = toplevel ? 0 : stack_vars_num;
1009
1010 /* Expand all variables at this level. */
1011 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1012 if (TREE_USED (t))
1013 size += expand_one_var (t, toplevel, false);
1014
1015 this_sv_num = stack_vars_num;
1016
1017 /* Expand all variables at containing levels. */
1018 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1019 size += account_used_vars_for_block (t, false);
1020
1021 /* Since we do not track exact variable lifetimes (which is not even
1022 possible for variables whose address escapes), we mirror the block
1023 tree in the interference graph. Here we cause all variables at this
1024 level, and all sublevels, to conflict. Do make certain that a
1025 variable conflicts with itself. */
1026 if (old_sv_num < this_sv_num)
1027 {
1028 new_sv_num = stack_vars_num;
1029 resize_stack_vars_conflict (new_sv_num);
1030
1031 for (i = old_sv_num; i < new_sv_num; ++i)
1032 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
1033 add_stack_var_conflict (i, j);
1034 }
1035 return size;
1036}
1037
1038/* Prepare for expanding variables. */
1039static void
1040init_vars_expansion (void)
1041{
1042 tree t;
1043 /* Set TREE_USED on all variables in the unexpanded_var_list. */
1044 for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
1045 TREE_USED (TREE_VALUE (t)) = 1;
1046
1047 /* Clear TREE_USED on all variables associated with a block scope. */
1048 clear_tree_used (DECL_INITIAL (current_function_decl));
1049
1050 /* Initialize local stack smashing state. */
1051 has_protected_decls = false;
1052 has_short_buffer = false;
1053}
1054
1055/* Free up stack variable graph data. */
1056static void
1057fini_vars_expansion (void)
1058{
1059 XDELETEVEC (stack_vars);
1060 XDELETEVEC (stack_vars_sorted);
1061 XDELETEVEC (stack_vars_conflict);
1062 stack_vars = NULL;
1063 stack_vars_alloc = stack_vars_num = 0;
1064 stack_vars_conflict = NULL;
1065 stack_vars_conflict_alloc = 0;
1066}
1067
1068HOST_WIDE_INT
1069estimated_stack_frame_size (void)
1070{
1071 HOST_WIDE_INT size = 0;
1072 tree t, outer_block = DECL_INITIAL (current_function_decl);
1073
1074 init_vars_expansion ();
1075
1076 /* At this point all variables on the unexpanded_var_list with TREE_USED
1077 set are not associated with any block scope. Lay them out. */
1078 for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
1079 {
1080 tree var = TREE_VALUE (t);
1081
1082 if (TREE_USED (var))
1083 size += expand_one_var (var, true, false);
1084 TREE_USED (var) = 1;
1085 }
1086 size += account_used_vars_for_block (outer_block, true);
1087 if (stack_vars_num > 0)
1088 {
1089 /* Due to the way alias sets work, no variables with non-conflicting
1090 alias sets may be assigned the same address. Add conflicts to
1091 reflect this. */
1092 add_alias_set_conflicts ();
1093
1094 /* If stack protection is enabled, we don't share space between
1095 vulnerable data and non-vulnerable data. */
1096 if (flag_stack_protect)
1097 add_stack_protection_conflicts ();
1098
1099 /* Now that we have collected all stack variables, and have computed a
1100 minimal interference graph, attempt to save some stack space. */
1101 partition_stack_vars ();
1102 if (dump_file)
1103 dump_stack_var_partition ();
1104
1105 size += account_stack_vars ();
1106 fini_vars_expansion ();
1107 }
1108 return size;
1109}
1110
1f6d3a08 1111/* Expand all variables used in the function. */
727a31fa
RH
1112
1113static void
1114expand_used_vars (void)
1115{
1f6d3a08 1116 tree t, outer_block = DECL_INITIAL (current_function_decl);
727a31fa 1117
1f6d3a08
RH
1118 /* Compute the phase of the stack frame for this function. */
1119 {
1120 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1121 int off = STARTING_FRAME_OFFSET % align;
1122 frame_phase = off ? align - off : 0;
1123 }
727a31fa 1124
ff28a94d 1125 init_vars_expansion ();
7d69de61 1126
1f6d3a08
RH
1127 /* At this point all variables on the unexpanded_var_list with TREE_USED
1128 set are not associated with any block scope. Lay them out. */
1129 for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
1130 {
1131 tree var = TREE_VALUE (t);
1132 bool expand_now = false;
1133
1134 /* We didn't set a block for static or extern because it's hard
1135 to tell the difference between a global variable (re)declared
1136 in a local scope, and one that's really declared there to
1137 begin with. And it doesn't really matter much, since we're
1138 not giving them stack space. Expand them now. */
1139 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1140 expand_now = true;
1141
1142 /* Any variable that could have been hoisted into an SSA_NAME
1143 will have been propagated anywhere the optimizers chose,
1144 i.e. not confined to their original block. Allocate them
1145 as if they were defined in the outermost scope. */
1146 else if (is_gimple_reg (var))
1147 expand_now = true;
1148
1149 /* If the variable is not associated with any block, then it
1150 was created by the optimizers, and could be live anywhere
1151 in the function. */
1152 else if (TREE_USED (var))
1153 expand_now = true;
1154
1155 /* Finally, mark all variables on the list as used. We'll use
1156 this in a moment when we expand those associated with scopes. */
1157 TREE_USED (var) = 1;
1158
1159 if (expand_now)
ff28a94d 1160 expand_one_var (var, true, true);
1f6d3a08 1161 }
727a31fa 1162 cfun->unexpanded_var_list = NULL_TREE;
1f6d3a08
RH
1163
1164 /* At this point, all variables within the block tree with TREE_USED
1165 set are actually used by the optimized function. Lay them out. */
1166 expand_used_vars_for_block (outer_block, true);
1167
1168 if (stack_vars_num > 0)
1169 {
1170 /* Due to the way alias sets work, no variables with non-conflicting
c22cacf3 1171 alias sets may be assigned the same address. Add conflicts to
1f6d3a08
RH
1172 reflect this. */
1173 add_alias_set_conflicts ();
1174
c22cacf3 1175 /* If stack protection is enabled, we don't share space between
7d69de61
RH
1176 vulnerable data and non-vulnerable data. */
1177 if (flag_stack_protect)
1178 add_stack_protection_conflicts ();
1179
c22cacf3 1180 /* Now that we have collected all stack variables, and have computed a
1f6d3a08
RH
1181 minimal interference graph, attempt to save some stack space. */
1182 partition_stack_vars ();
1183 if (dump_file)
1184 dump_stack_var_partition ();
7d69de61
RH
1185 }
1186
1187 /* There are several conditions under which we should create a
1188 stack guard: protect-all, alloca used, protected decls present. */
1189 if (flag_stack_protect == 2
1190 || (flag_stack_protect
1191 && (current_function_calls_alloca || has_protected_decls)))
1192 create_stack_guard ();
1f6d3a08 1193
7d69de61
RH
1194 /* Assign rtl to each variable based on these partitions. */
1195 if (stack_vars_num > 0)
1196 {
1197 /* Reorder decls to be protected by iterating over the variables
1198 array multiple times, and allocating out of each phase in turn. */
c22cacf3 1199 /* ??? We could probably integrate this into the qsort we did
7d69de61
RH
1200 earlier, such that we naturally see these variables first,
1201 and thus naturally allocate things in the right order. */
1202 if (has_protected_decls)
1203 {
1204 /* Phase 1 contains only character arrays. */
1205 expand_stack_vars (stack_protect_decl_phase_1);
1206
1207 /* Phase 2 contains other kinds of arrays. */
1208 if (flag_stack_protect == 2)
1209 expand_stack_vars (stack_protect_decl_phase_2);
1210 }
1211
1212 expand_stack_vars (NULL);
1f6d3a08 1213
ff28a94d 1214 fini_vars_expansion ();
1f6d3a08
RH
1215 }
1216
1217 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1218 if (STACK_ALIGNMENT_NEEDED)
1219 {
1220 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1221 if (!FRAME_GROWS_DOWNWARD)
1222 frame_offset += align - 1;
1223 frame_offset &= -align;
1224 }
727a31fa
RH
1225}
1226
1227
b7211528
SB
1228/* If we need to produce a detailed dump, print the tree representation
1229 for STMT to the dump file. SINCE is the last RTX after which the RTL
1230 generated for STMT should have been appended. */
1231
1232static void
1233maybe_dump_rtl_for_tree_stmt (tree stmt, rtx since)
1234{
1235 if (dump_file && (dump_flags & TDF_DETAILS))
1236 {
1237 fprintf (dump_file, "\n;; ");
1238 print_generic_expr (dump_file, stmt, TDF_SLIM);
1239 fprintf (dump_file, "\n");
1240
1241 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1242 }
1243}
1244
80c7a9eb
RH
1245/* A subroutine of expand_gimple_basic_block. Expand one COND_EXPR.
1246 Returns a new basic block if we've terminated the current basic
1247 block and created a new one. */
1248
1249static basic_block
1250expand_gimple_cond_expr (basic_block bb, tree stmt)
1251{
1252 basic_block new_bb, dest;
1253 edge new_edge;
1254 edge true_edge;
1255 edge false_edge;
1256 tree pred = COND_EXPR_COND (stmt);
1257 tree then_exp = COND_EXPR_THEN (stmt);
1258 tree else_exp = COND_EXPR_ELSE (stmt);
b7211528
SB
1259 rtx last2, last;
1260
1261 last2 = last = get_last_insn ();
80c7a9eb
RH
1262
1263 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1264 if (EXPR_LOCUS (stmt))
1265 {
1266 emit_line_note (*(EXPR_LOCUS (stmt)));
1267 record_block_change (TREE_BLOCK (stmt));
1268 }
1269
1270 /* These flags have no purpose in RTL land. */
1271 true_edge->flags &= ~EDGE_TRUE_VALUE;
1272 false_edge->flags &= ~EDGE_FALSE_VALUE;
1273
1274 /* We can either have a pure conditional jump with one fallthru edge or
1275 two-way jump that needs to be decomposed into two basic blocks. */
1276 if (TREE_CODE (then_exp) == GOTO_EXPR && IS_EMPTY_STMT (else_exp))
1277 {
1278 jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
10d22567 1279 add_reg_br_prob_note (last, true_edge->probability);
b7211528 1280 maybe_dump_rtl_for_tree_stmt (stmt, last);
ac8245fa
AP
1281 if (EXPR_LOCUS (then_exp))
1282 emit_line_note (*(EXPR_LOCUS (then_exp)));
80c7a9eb
RH
1283 return NULL;
1284 }
1285 if (TREE_CODE (else_exp) == GOTO_EXPR && IS_EMPTY_STMT (then_exp))
1286 {
1287 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_exp)));
10d22567 1288 add_reg_br_prob_note (last, false_edge->probability);
b7211528 1289 maybe_dump_rtl_for_tree_stmt (stmt, last);
ac8245fa
AP
1290 if (EXPR_LOCUS (else_exp))
1291 emit_line_note (*(EXPR_LOCUS (else_exp)));
80c7a9eb
RH
1292 return NULL;
1293 }
341c100f
NS
1294 gcc_assert (TREE_CODE (then_exp) == GOTO_EXPR
1295 && TREE_CODE (else_exp) == GOTO_EXPR);
80c7a9eb
RH
1296
1297 jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
10d22567 1298 add_reg_br_prob_note (last, true_edge->probability);
80c7a9eb
RH
1299 last = get_last_insn ();
1300 expand_expr (else_exp, const0_rtx, VOIDmode, 0);
1301
1302 BB_END (bb) = last;
1303 if (BARRIER_P (BB_END (bb)))
1304 BB_END (bb) = PREV_INSN (BB_END (bb));
1305 update_bb_for_insn (bb);
1306
1307 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1308 dest = false_edge->dest;
1309 redirect_edge_succ (false_edge, new_bb);
1310 false_edge->flags |= EDGE_FALLTHRU;
1311 new_bb->count = false_edge->count;
1312 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1313 new_edge = make_edge (new_bb, dest, 0);
1314 new_edge->probability = REG_BR_PROB_BASE;
1315 new_edge->count = new_bb->count;
1316 if (BARRIER_P (BB_END (new_bb)))
1317 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1318 update_bb_for_insn (new_bb);
1319
b7211528 1320 maybe_dump_rtl_for_tree_stmt (stmt, last2);
c22cacf3 1321
ac8245fa
AP
1322 if (EXPR_LOCUS (else_exp))
1323 emit_line_note (*(EXPR_LOCUS (else_exp)));
80c7a9eb
RH
1324
1325 return new_bb;
1326}
1327
1328/* A subroutine of expand_gimple_basic_block. Expand one CALL_EXPR
224e770b
RH
1329 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
1330 generated a tail call (something that might be denied by the ABI
cea49550
RH
1331 rules governing the call; see calls.c).
1332
1333 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
1334 can still reach the rest of BB. The case here is __builtin_sqrt,
1335 where the NaN result goes through the external function (with a
1336 tailcall) and the normal result happens via a sqrt instruction. */
80c7a9eb
RH
1337
1338static basic_block
cea49550 1339expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru)
80c7a9eb 1340{
b7211528 1341 rtx last2, last;
224e770b 1342 edge e;
628f6a4e 1343 edge_iterator ei;
224e770b
RH
1344 int probability;
1345 gcov_type count;
80c7a9eb 1346
b7211528
SB
1347 last2 = last = get_last_insn ();
1348
80c7a9eb
RH
1349 expand_expr_stmt (stmt);
1350
1351 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
1352 if (CALL_P (last) && SIBLING_CALL_P (last))
1353 goto found;
80c7a9eb 1354
7315a949 1355 maybe_dump_rtl_for_tree_stmt (stmt, last2);
b7211528 1356
cea49550 1357 *can_fallthru = true;
224e770b 1358 return NULL;
80c7a9eb 1359
224e770b
RH
1360 found:
1361 /* ??? Wouldn't it be better to just reset any pending stack adjust?
1362 Any instructions emitted here are about to be deleted. */
1363 do_pending_stack_adjust ();
1364
1365 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
1366 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
1367 EH or abnormal edges, we shouldn't have created a tail call in
1368 the first place. So it seems to me we should just be removing
1369 all edges here, or redirecting the existing fallthru edge to
1370 the exit block. */
1371
224e770b
RH
1372 probability = 0;
1373 count = 0;
224e770b 1374
628f6a4e
BE
1375 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1376 {
224e770b
RH
1377 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
1378 {
1379 if (e->dest != EXIT_BLOCK_PTR)
80c7a9eb 1380 {
224e770b
RH
1381 e->dest->count -= e->count;
1382 e->dest->frequency -= EDGE_FREQUENCY (e);
1383 if (e->dest->count < 0)
c22cacf3 1384 e->dest->count = 0;
224e770b 1385 if (e->dest->frequency < 0)
c22cacf3 1386 e->dest->frequency = 0;
80c7a9eb 1387 }
224e770b
RH
1388 count += e->count;
1389 probability += e->probability;
1390 remove_edge (e);
80c7a9eb 1391 }
628f6a4e
BE
1392 else
1393 ei_next (&ei);
80c7a9eb
RH
1394 }
1395
224e770b
RH
1396 /* This is somewhat ugly: the call_expr expander often emits instructions
1397 after the sibcall (to perform the function return). These confuse the
12eff7b7 1398 find_many_sub_basic_blocks code, so we need to get rid of these. */
224e770b 1399 last = NEXT_INSN (last);
341c100f 1400 gcc_assert (BARRIER_P (last));
cea49550
RH
1401
1402 *can_fallthru = false;
224e770b
RH
1403 while (NEXT_INSN (last))
1404 {
      /* For instance, an sqrt builtin expander expands an if with a
	 sibcall in the then arm and a label for the else arm.  */
1407 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
1408 {
1409 *can_fallthru = true;
1410 break;
1411 }
224e770b
RH
1412 delete_insn (NEXT_INSN (last));
1413 }
1414
1415 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
1416 e->probability += probability;
1417 e->count += count;
1418 BB_END (bb) = last;
1419 update_bb_for_insn (bb);
1420
1421 if (NEXT_INSN (last))
1422 {
1423 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1424
1425 last = BB_END (bb);
1426 if (BARRIER_P (last))
1427 BB_END (bb) = PREV_INSN (last);
1428 }
1429
b7211528
SB
1430 maybe_dump_rtl_for_tree_stmt (stmt, last2);
1431
224e770b 1432 return bb;
80c7a9eb
RH
1433}
1434
242229bb
JH
1435/* Expand basic block BB from GIMPLE trees to RTL. */
1436
1437static basic_block
10d22567 1438expand_gimple_basic_block (basic_block bb)
242229bb
JH
1439{
1440 block_stmt_iterator bsi = bsi_start (bb);
1441 tree stmt = NULL;
1442 rtx note, last;
1443 edge e;
628f6a4e 1444 edge_iterator ei;
242229bb
JH
1445
1446 if (dump_file)
1447 {
b7211528
SB
1448 fprintf (dump_file,
1449 "\n;; Generating RTL for tree basic block %d\n",
1450 bb->index);
242229bb
JH
1451 }
1452
5e2d947c
JH
1453 init_rtl_bb_info (bb);
1454 bb->flags |= BB_RTL;
1455
242229bb
JH
1456 if (!bsi_end_p (bsi))
1457 stmt = bsi_stmt (bsi);
1458
1459 if (stmt && TREE_CODE (stmt) == LABEL_EXPR)
1460 {
1461 last = get_last_insn ();
1462
4dfa0342 1463 expand_expr_stmt (stmt);
242229bb 1464
caf93cb0 1465 /* Java emits line number notes in the top of labels.
c22cacf3 1466 ??? Make this go away once line number notes are obsoleted. */
242229bb 1467 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 1468 if (NOTE_P (BB_HEAD (bb)))
242229bb
JH
1469 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
1470 bsi_next (&bsi);
1471 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528
SB
1472
1473 maybe_dump_rtl_for_tree_stmt (stmt, last);
242229bb
JH
1474 }
1475 else
1476 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
1477
1478 NOTE_BASIC_BLOCK (note) = bb;
1479
628f6a4e 1480 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
242229bb 1481 {
242229bb
JH
1482 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
1483 e->flags &= ~EDGE_EXECUTABLE;
1484
1485 /* At the moment not all abnormal edges match the RTL representation.
c22cacf3
MS
1486 It is safe to remove them here as find_many_sub_basic_blocks will
1487 rediscover them. In the future we should get this fixed properly. */
242229bb
JH
1488 if (e->flags & EDGE_ABNORMAL)
1489 remove_edge (e);
628f6a4e
BE
1490 else
1491 ei_next (&ei);
242229bb
JH
1492 }
1493
1494 for (; !bsi_end_p (bsi); bsi_next (&bsi))
1495 {
1496 tree stmt = bsi_stmt (bsi);
cea49550 1497 basic_block new_bb;
242229bb
JH
1498
1499 if (!stmt)
1500 continue;
1501
1502 /* Expand this statement, then evaluate the resulting RTL and
1503 fixup the CFG accordingly. */
80c7a9eb 1504 if (TREE_CODE (stmt) == COND_EXPR)
cea49550
RH
1505 {
1506 new_bb = expand_gimple_cond_expr (bb, stmt);
1507 if (new_bb)
1508 return new_bb;
1509 }
80c7a9eb 1510 else
242229bb 1511 {
80c7a9eb
RH
1512 tree call = get_call_expr_in (stmt);
1513 if (call && CALL_EXPR_TAILCALL (call))
cea49550
RH
1514 {
1515 bool can_fallthru;
1516 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
1517 if (new_bb)
1518 {
1519 if (can_fallthru)
1520 bb = new_bb;
1521 else
1522 return new_bb;
1523 }
1524 }
80c7a9eb 1525 else
b7211528
SB
1526 {
1527 last = get_last_insn ();
1528 expand_expr_stmt (stmt);
1529 maybe_dump_rtl_for_tree_stmt (stmt, last);
1530 }
242229bb
JH
1531 }
1532 }
1533
1534 do_pending_stack_adjust ();
1535
3f117656 1536 /* Find the block tail. The last insn in the block is the insn
242229bb
JH
1537 before a barrier and/or table jump insn. */
1538 last = get_last_insn ();
4b4bf941 1539 if (BARRIER_P (last))
242229bb
JH
1540 last = PREV_INSN (last);
1541 if (JUMP_TABLE_DATA_P (last))
1542 last = PREV_INSN (PREV_INSN (last));
1543 BB_END (bb) = last;
caf93cb0 1544
242229bb 1545 update_bb_for_insn (bb);
80c7a9eb 1546
242229bb
JH
1547 return bb;
1548}
1549
1550
1551/* Create a basic block for initialization code. */
1552
1553static basic_block
1554construct_init_block (void)
1555{
1556 basic_block init_block, first_block;
fd44f634
JH
1557 edge e = NULL;
1558 int flags;
275a4187 1559
fd44f634
JH
1560 /* Multiple entry points not supported yet. */
1561 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
5e2d947c
JH
1562 init_rtl_bb_info (ENTRY_BLOCK_PTR);
1563 init_rtl_bb_info (EXIT_BLOCK_PTR);
1564 ENTRY_BLOCK_PTR->flags |= BB_RTL;
1565 EXIT_BLOCK_PTR->flags |= BB_RTL;
242229bb 1566
fd44f634 1567 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
275a4187 1568
fd44f634
JH
  /* When the entry edge points to the first basic block, we don't need a
     jump; otherwise we have to jump to the proper target.  */
1571 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
1572 {
1573 tree label = tree_block_label (e->dest);
1574
1575 emit_jump (label_rtx (label));
1576 flags = 0;
275a4187 1577 }
fd44f634
JH
1578 else
1579 flags = EDGE_FALLTHRU;
242229bb
JH
1580
1581 init_block = create_basic_block (NEXT_INSN (get_insns ()),
1582 get_last_insn (),
1583 ENTRY_BLOCK_PTR);
1584 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
1585 init_block->count = ENTRY_BLOCK_PTR->count;
1586 if (e)
1587 {
1588 first_block = e->dest;
1589 redirect_edge_succ (e, init_block);
fd44f634 1590 e = make_edge (init_block, first_block, flags);
242229bb
JH
1591 }
1592 else
1593 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1594 e->probability = REG_BR_PROB_BASE;
1595 e->count = ENTRY_BLOCK_PTR->count;
1596
1597 update_bb_for_insn (init_block);
1598 return init_block;
1599}
1600
1601
1602/* Create a block containing landing pads and similar stuff. */
1603
1604static void
1605construct_exit_block (void)
1606{
1607 rtx head = get_last_insn ();
1608 rtx end;
1609 basic_block exit_block;
628f6a4e
BE
1610 edge e, e2;
1611 unsigned ix;
1612 edge_iterator ei;
071a42f9 1613 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
242229bb 1614
caf93cb0 1615 /* Make sure the locus is set to the end of the function, so that
242229bb 1616 epilogue line numbers and warnings are set properly. */
6773e15f
PB
1617#ifdef USE_MAPPED_LOCATION
1618 if (cfun->function_end_locus != UNKNOWN_LOCATION)
1619#else
242229bb 1620 if (cfun->function_end_locus.file)
6773e15f 1621#endif
242229bb
JH
1622 input_location = cfun->function_end_locus;
1623
1624 /* The following insns belong to the top scope. */
1625 record_block_change (DECL_INITIAL (current_function_decl));
1626
242229bb
JH
1627 /* Generate rtl for function exit. */
1628 expand_function_end ();
1629
1630 end = get_last_insn ();
1631 if (head == end)
1632 return;
071a42f9
JH
  /* While emitting the function end we could have moved the end of the
     last basic block.  */
1635 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4b4bf941 1636 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 1637 head = NEXT_INSN (head);
80c7a9eb
RH
1638 exit_block = create_basic_block (NEXT_INSN (head), end,
1639 EXIT_BLOCK_PTR->prev_bb);
242229bb
JH
1640 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
1641 exit_block->count = EXIT_BLOCK_PTR->count;
628f6a4e
BE
1642
1643 ix = 0;
1644 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
242229bb 1645 {
8fb790fd 1646 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
242229bb 1647 if (!(e->flags & EDGE_ABNORMAL))
628f6a4e
BE
1648 redirect_edge_succ (e, exit_block);
1649 else
1650 ix++;
242229bb 1651 }
628f6a4e 1652
242229bb
JH
1653 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1654 e->probability = REG_BR_PROB_BASE;
1655 e->count = EXIT_BLOCK_PTR->count;
628f6a4e 1656 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
242229bb
JH
1657 if (e2 != e)
1658 {
c22cacf3 1659 e->count -= e2->count;
242229bb
JH
1660 exit_block->count -= e2->count;
1661 exit_block->frequency -= EDGE_FREQUENCY (e2);
1662 }
1663 if (e->count < 0)
1664 e->count = 0;
1665 if (exit_block->count < 0)
1666 exit_block->count = 0;
1667 if (exit_block->frequency < 0)
1668 exit_block->frequency = 0;
1669 update_bb_for_insn (exit_block);
1670}
1671
c22cacf3 1672/* Helper function for discover_nonconstant_array_refs.
a1b23b2f
UW
1673 Look for ARRAY_REF nodes with non-constant indexes and mark them
1674 addressable. */
1675
1676static tree
1677discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
1678 void *data ATTRIBUTE_UNUSED)
1679{
1680 tree t = *tp;
1681
1682 if (IS_TYPE_OR_DECL_P (t))
1683 *walk_subtrees = 0;
1684 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1685 {
1686 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1687 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
1688 && (!TREE_OPERAND (t, 2)
1689 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
1690 || (TREE_CODE (t) == COMPONENT_REF
1691 && (!TREE_OPERAND (t,2)
1692 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
1693 || TREE_CODE (t) == BIT_FIELD_REF
1694 || TREE_CODE (t) == REALPART_EXPR
1695 || TREE_CODE (t) == IMAGPART_EXPR
1696 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1697 || TREE_CODE (t) == NOP_EXPR
1698 || TREE_CODE (t) == CONVERT_EXPR)
1699 t = TREE_OPERAND (t, 0);
1700
1701 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1702 {
1703 t = get_base_address (t);
1704 if (t && DECL_P (t))
1705 TREE_ADDRESSABLE (t) = 1;
1706 }
1707
1708 *walk_subtrees = 0;
1709 }
1710
1711 return NULL_TREE;
1712}
1713
/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark the variables as addressable to avoid this
   scenario.  */
1718
1719static void
1720discover_nonconstant_array_refs (void)
1721{
1722 basic_block bb;
1723 block_stmt_iterator bsi;
1724
1725 FOR_EACH_BB (bb)
1726 {
1727 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1728 walk_tree (bsi_stmt_ptr (bsi), discover_nonconstant_array_refs_r,
1729 NULL , NULL);
1730 }
1731}
1732
242229bb
JH
1733/* Translate the intermediate representation contained in the CFG
1734 from GIMPLE trees to RTL.
1735
1736 We do conversion per basic block and preserve/update the tree CFG.
1737 This implies we have to do some magic as the CFG can simultaneously
1738 consist of basic blocks containing RTL and GIMPLE trees. This can
61ada8ae 1739 confuse the CFG hooks, so be careful to not manipulate CFG during
242229bb
JH
1740 the expansion. */
1741
c2924966 1742static unsigned int
242229bb
JH
1743tree_expand_cfg (void)
1744{
1745 basic_block bb, init_block;
1746 sbitmap blocks;
0ef90296
ZD
1747 edge_iterator ei;
1748 edge e;
242229bb 1749
4586b4ca
SB
1750 /* Some backends want to know that we are expanding to RTL. */
1751 currently_expanding_to_rtl = 1;
1752
6429e3be
RH
1753 /* Prepare the rtl middle end to start recording block changes. */
1754 reset_block_changes ();
1755
a1b23b2f
UW
1756 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
1757 discover_nonconstant_array_refs ();
1758
727a31fa 1759 /* Expand the variables recorded during gimple lowering. */
242229bb
JH
1760 expand_used_vars ();
1761
7d69de61
RH
1762 /* Honor stack protection warnings. */
1763 if (warn_stack_protect)
1764 {
1765 if (current_function_calls_alloca)
1766 warning (0, "not protecting local variables: variable length buffer");
1767 if (has_short_buffer && !cfun->stack_protect_guard)
1768 warning (0, "not protecting function: no buffer at least %d bytes long",
1769 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
1770 }
1771
242229bb 1772 /* Set up parameters and prepare for return, for the function. */
b79c5284 1773 expand_function_start (current_function_decl);
242229bb
JH
1774
1775 /* If this function is `main', emit a call to `__main'
1776 to run global initializers, etc. */
1777 if (DECL_NAME (current_function_decl)
1778 && MAIN_NAME_P (DECL_NAME (current_function_decl))
1779 && DECL_FILE_SCOPE_P (current_function_decl))
1780 expand_main_function ();
1781
7d69de61
RH
1782 /* Initialize the stack_protect_guard field. This must happen after the
1783 call to __main (if any) so that the external decl is initialized. */
1784 if (cfun->stack_protect_guard)
1785 stack_protect_prologue ();
1786
3fbd86b1 1787 /* Register rtl specific functions for cfg. */
242229bb
JH
1788 rtl_register_cfg_hooks ();
1789
1790 init_block = construct_init_block ();
1791
0ef90296 1792 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
2a8a8292 1793 remaining edges in expand_gimple_basic_block. */
0ef90296
ZD
1794 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
1795 e->flags &= ~EDGE_EXECUTABLE;
1796
242229bb 1797 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
10d22567 1798 bb = expand_gimple_basic_block (bb);
242229bb
JH
1799
1800 construct_exit_block ();
1801
4586b4ca
SB
1802 /* We're done expanding trees to RTL. */
1803 currently_expanding_to_rtl = 0;
1804
f39e46ba
SB
1805 /* Convert tree EH labels to RTL EH labels, and clean out any unreachable
1806 EH regions. */
242229bb
JH
1807 convert_from_eh_region_ranges ();
1808
1809 rebuild_jump_labels (get_insns ());
1810 find_exception_handler_labels ();
1811
1812 blocks = sbitmap_alloc (last_basic_block);
1813 sbitmap_ones (blocks);
1814 find_many_sub_basic_blocks (blocks);
25cd19de 1815 purge_all_dead_edges ();
242229bb
JH
1816 sbitmap_free (blocks);
1817
1818 compact_blocks ();
1819#ifdef ENABLE_CHECKING
1820 verify_flow_info();
1821#endif
9f8628ba
PB
1822
1823 /* There's no need to defer outputting this function any more; we
1824 know we want to output it. */
1825 DECL_DEFER_OUTPUT (current_function_decl) = 0;
1826
1827 /* Now that we're done expanding trees to RTL, we shouldn't have any
1828 more CONCATs anywhere. */
1829 generating_concat_p = 0;
1830
1831 finalize_block_changes ();
b7211528
SB
1832
1833 if (dump_file)
1834 {
1835 fprintf (dump_file,
1836 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
1837 /* And the pass manager will dump RTL for us. */
1838 }
ef330312
PB
1839
1840 /* If we're emitting a nested function, make sure its parent gets
1841 emitted as well. Doing otherwise confuses debug info. */
c22cacf3 1842 {
ef330312
PB
1843 tree parent;
1844 for (parent = DECL_CONTEXT (current_function_decl);
c22cacf3
MS
1845 parent != NULL_TREE;
1846 parent = get_containing_scope (parent))
ef330312 1847 if (TREE_CODE (parent) == FUNCTION_DECL)
c22cacf3 1848 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
ef330312 1849 }
c22cacf3 1850
ef330312
PB
1851 /* We are now committed to emitting code for this function. Do any
1852 preparation, such as emitting abstract debug info for the inline
1853 before it gets mangled by optimization. */
1854 if (cgraph_function_possibly_inlined_p (current_function_decl))
1855 (*debug_hooks->outlining_inline_function) (current_function_decl);
1856
1857 TREE_ASM_WRITTEN (current_function_decl) = 1;
4bb1e037
AP
1858
1859 /* After expanding, the return labels are no longer needed. */
1860 return_label = NULL;
1861 naked_return_label = NULL;
c2924966 1862 return 0;
242229bb
JH
1863}
1864
1865struct tree_opt_pass pass_expand =
1866{
c22cacf3 1867 "expand", /* name */
242229bb 1868 NULL, /* gate */
c22cacf3 1869 tree_expand_cfg, /* execute */
242229bb
JH
1870 NULL, /* sub */
1871 NULL, /* next */
1872 0, /* static_pass_number */
c22cacf3 1873 TV_EXPAND, /* tv_id */
242229bb
JH
1874 /* ??? If TER is enabled, we actually receive GENERIC. */
1875 PROP_gimple_leh | PROP_cfg, /* properties_required */
1876 PROP_rtl, /* properties_provided */
bbbe4e7b 1877 PROP_trees, /* properties_destroyed */
242229bb 1878 0, /* todo_flags_start */
ef330312 1879 TODO_dump_func, /* todo_flags_finish */
9f8628ba 1880 'r' /* letter */
242229bb 1881};