/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "langhooks.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "bitmap.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "gimplify.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "sbitmap.h"
#include "tree-cfg.h"
#include "tree-pass.h"
#include "tree-stdarg.h"
/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will be
   used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
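/* An illustrative example (not taken from GCC itself): on such a target, a
   function like

     int
     sum_ints (int count, ...)
     {
       va_list ap;
       int i, sum = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
         sum += va_arg (ap, int);
       va_end (ap);
       return sum;
     }

   only reads integral values through va_arg, so only the general purpose
   argument registers have to be spilled by the prologue; the floating
   point argument registers can be left untouched.  */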


/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */
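/* The walk below is a simple reverse traversal over predecessor edges
   starting from VA_ARG_BB: it stops at VA_START_BB, and gives up (returns
   false) when it meets an EDGE_COMPLEX edge or reaches VA_ARG_BB again,
   since the latter means VA_ARG_BB can be executed more often than
   VA_START_BB, e.g. inside a loop that doesn't contain VA_START_BB.  */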

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
        {
          ret = false;
          break;
        }

      if (src == va_start_bb)
        continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
        {
          ret = false;
          break;
        }

      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
        {
          bitmap_set_bit (visited, src->index);
          FOR_EACH_EDGE (e, ei, src->preds)
            stack.safe_push (e);
        }
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}


/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is a GPR counter.  */
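/* For example (an illustrative sketch using a hypothetical gp_offset
   counter field), for the GIMPLE sequence

     tmp_1 = ap.gp_offset;
     tmp_2 = tmp_1 + 8;
     ap.gp_offset = tmp_2;

   a call with COUNTER == ap.gp_offset and RHS == tmp_2 returns 8.  */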

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
                      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
        si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        {
          if (counter_val >= max_size)
            {
              ret = max_size;
              break;
            }

          ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
          break;
        }

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
        return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
        {
          ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
        {
          ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
        return HOST_WIDE_INT_M1U;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
        return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
        {
          if (get_base_address (counter) != get_base_address (rhs)
              || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
              || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
            return HOST_WIDE_INT_M1U;
        }
      else if (counter != rhs)
        return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        break;

      if (val >= max_size)
        si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
        si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
        {
          val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
        {
          val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      lhs = NULL;
    }

  return ret;
}


/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    {
      if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
        return var;
    }
  else if (TREE_CODE (var) == VAR_DECL)
    {
      if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
        return var;
    }

  return NULL_TREE;
}


/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter;
   if WRITE_P is true, it was seen in an AP = VAR statement, otherwise
   in a VAR = AP statement.  GPR_P is true if AP is a GPR counter,
   false if it is an FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
                    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
        {
          cfun->va_list_gpr_size += increment;
          return;
        }

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
        {
          cfun->va_list_fpr_size += increment;
          return;
        }
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
        cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}


/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in an AP = VAR assignment; if WRITE_P
   is false, AP has been seen in a VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   the current function.  */

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
                           bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}


/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}


/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */
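/* Illustrative note (the exact sequence is target and type dependent):
   for a void * or char * va_list this is roughly the shape a single
   va_arg (ap, type) takes after gimplification; the pointer is copied
   into a temporary, bumped by a constant CST (roughly the size of the
   type) and stored back, so CST tells how many more GPR units may be
   read.  */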

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}


/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing the value of some va_list variable plus optionally some
   constant, either set si->va_list_escapes or add LHS to
   si->va_list_escape_vars, depending on whether LHS is a function local
   temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
        return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
           && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
        return;
    }
  else
    return;

  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}


/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          tree lhs;
          use_operand_p uop;
          ssa_op_iter soi;
          gphi *phi = i.phi ();

          lhs = PHI_RESULT (phi);
          if (virtual_operand_p (lhs)
              || bitmap_bit_p (si->va_list_escape_vars,
                               SSA_NAME_VERSION (lhs)))
            continue;

          FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
            {
              tree rhs = USE_FROM_PTR (uop);
              if (TREE_CODE (rhs) == SSA_NAME
                  && bitmap_bit_p (si->va_list_escape_vars,
                                   SSA_NAME_VERSION (rhs)))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fputs ("va_list escapes in ", dump_file);
                      print_gimple_stmt (dump_file, phi, 0, dump_flags);
                      fputc ('\n', dump_file);
                    }
                  return true;
                }
            }
        }

      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          tree use;
          ssa_op_iter iter;

          if (is_gimple_debug (stmt))
            continue;

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
            {
              if (! bitmap_bit_p (si->va_list_escape_vars,
                                  SSA_NAME_VERSION (use)))
                continue;

              if (is_gimple_assign (stmt))
                {
                  tree rhs = gimple_assign_rhs1 (stmt);
                  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

                  /* x = *ap_temp;  */
                  if (rhs_code == MEM_REF
                      && TREE_OPERAND (rhs, 0) == use
                      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
                      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
                      && si->offsets[SSA_NAME_VERSION (use)] != -1)
                    {
                      unsigned HOST_WIDE_INT gpr_size;
                      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

                      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
                                 + tree_to_shwi (TREE_OPERAND (rhs, 1))
                                 + tree_to_uhwi (access_size);
                      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
                        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
                      else if (gpr_size > cfun->va_list_gpr_size)
                        cfun->va_list_gpr_size = gpr_size;
                      continue;
                    }

                  /* va_arg sequences may contain
                     other_ap_temp = ap_temp;
                     other_ap_temp = ap_temp + constant;
                     other_ap_temp = (some_type *) ap_temp;
                     ap = ap_temp;
                     statements.  */
                  if (rhs == use
                      && ((rhs_code == POINTER_PLUS_EXPR
                           && (TREE_CODE (gimple_assign_rhs2 (stmt))
                               == INTEGER_CST))
                          || gimple_assign_cast_p (stmt)
                          || (get_gimple_rhs_class (rhs_code)
                              == GIMPLE_SINGLE_RHS)))
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (TREE_CODE (lhs) == SSA_NAME
                          && bitmap_bit_p (si->va_list_escape_vars,
                                           SSA_NAME_VERSION (lhs)))
                        continue;

                      if (TREE_CODE (lhs) == VAR_DECL
                          && bitmap_bit_p (si->va_list_vars,
                                           DECL_UID (lhs) + num_ssa_names))
                        continue;
                    }
                  else if (rhs_code == ADDR_EXPR
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
                           && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (bitmap_bit_p (si->va_list_escape_vars,
                                        SSA_NAME_VERSION (lhs)))
                        continue;
                    }
                }

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              return true;
            }
        }
    }

  return false;
}

/* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size.  */
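/* In outline (a descriptive summary of the code below): find all va_start
   calls and record their va_list variables; if a va_list may escape the
   function, give up and request saving all VA_LIST_MAX_{GPR,FPR}_SIZE
   units; otherwise walk the statements and PHI nodes, recognize the
   counter bumps / pointer increments produced by va_arg, and accumulate
   how many GPR and FPR units can actually be read.  */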

static void
optimize_va_list_gpr_fpr_size (function *fun)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  fun->va_list_gpr_size = 0;
  fun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  cfun_va_list = targetm.fn_abi_va_list (fun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          tree callee, ap;

          if (!is_gimple_call (stmt))
            continue;

          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_VA_START:
              break;
              /* If old style builtins are used, don't optimize anything.  */
            case BUILT_IN_SAVEREGS:
            case BUILT_IN_NEXT_ARG:
              va_list_escapes = true;
              continue;
            default:
              continue;
            }

          si.va_start_count++;
          ap = gimple_call_arg (stmt, 0);

          if (TREE_CODE (ap) != ADDR_EXPR)
            {
              va_list_escapes = true;
              break;
            }
          ap = TREE_OPERAND (ap, 0);
          if (TREE_CODE (ap) == ARRAY_REF)
            {
              if (! integer_zerop (TREE_OPERAND (ap, 1)))
                {
                  va_list_escapes = true;
                  break;
                }
              ap = TREE_OPERAND (ap, 0);
            }
          if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
              != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
              || TREE_CODE (ap) != VAR_DECL)
            {
              va_list_escapes = true;
              break;
            }

          if (is_global_var (ap))
            {
              va_list_escapes = true;
              break;
            }

          bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

          /* VA_START_BB and VA_START_AP will only be used if there is just
             one va_start in the function.  */
          si.va_start_bb = bb;
          si.va_start_ap = ap;
        }

      if (va_list_escapes)
        break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB_FN (bb, fun)
    {
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
         them as assignments for the purpose of escape analysis.  This is
         not needed for non-simple va_list because virtual phis don't perform
         any real data movement.  Also, check PHI nodes for taking the
         address of the va_list vars.  */
      tree lhs, rhs;
      use_operand_p uop;
      ssa_op_iter soi;

      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          gphi *phi = i.phi ();
          lhs = PHI_RESULT (phi);

          if (virtual_operand_p (lhs))
            continue;

          if (va_list_simple_ptr)
            {
              FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
                {
                  rhs = USE_FROM_PTR (uop);
                  if (va_list_ptr_read (&si, rhs, lhs))
                    continue;
                  else if (va_list_ptr_write (&si, lhs, rhs))
                    continue;
                  else
                    check_va_list_escapes (&si, lhs, rhs);

                  if (si.va_list_escapes)
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fputs ("va_list escapes in ", dump_file);
                          print_gimple_stmt (dump_file, phi, 0, dump_flags);
                          fputc ('\n', dump_file);
                        }
                      va_list_escapes = true;
                    }
                }
            }

          for (unsigned j = 0; !va_list_escapes
               && j < gimple_phi_num_args (phi); ++j)
            if ((!va_list_simple_ptr
                 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
                && walk_tree (gimple_phi_arg_def_ptr (phi, j),
                              find_va_list_reference, &wi, NULL))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  {
                    fputs ("va_list escapes in ", dump_file);
                    print_gimple_stmt (dump_file, phi, 0, dump_flags);
                    fputc ('\n', dump_file);
                  }
                va_list_escapes = true;
              }
        }

      for (gimple_stmt_iterator i = gsi_start_bb (bb);
           !gsi_end_p (i) && !va_list_escapes;
           gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);

          /* Don't look at __builtin_va_{start,end}, they are ok.  */
          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);

              if (callee
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
                continue;
            }

          if (is_gimple_assign (stmt))
            {
              lhs = gimple_assign_lhs (stmt);
              rhs = gimple_assign_rhs1 (stmt);

              if (va_list_simple_ptr)
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for tem = ap.  */
                      else if (va_list_ptr_read (&si, rhs, lhs))
                        continue;

                      /* Check for the last insn in:
                         tem1 = ap;
                         tem2 = tem1 + CST;
                         ap = tem2;
                         sequence.  */
                      else if (va_list_ptr_write (&si, lhs, rhs))
                        continue;
                    }

                  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
                      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                          == GIMPLE_SINGLE_RHS))
                    check_va_list_escapes (&si, lhs, rhs);
                }
              else
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for ap[0].field = temp.  */
                      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
                        continue;

                      /* Check for temp = ap[0].field.  */
                      else if (va_list_counter_struct_op (&si, rhs, lhs,
                                                          false))
                        continue;
                    }

                  /* Do any architecture specific checking.  */
                  if (targetm.stdarg_optimize_hook
                      && targetm.stdarg_optimize_hook (&si, stmt))
                    continue;
                }
            }
          else if (is_gimple_debug (stmt))
            continue;

          /* All other uses of va_list are either va_copy (that is not handled
             in this optimization), taking the address of a va_list variable
             or passing a va_list to other functions (in that case va_list
             might escape the function and therefore va_start needs to set it
             up fully), or some unexpected use of va_list.  None of these
             should happen in a gimplified VA_ARG_EXPR.  */
          if (si.va_list_escapes
              || walk_gimple_op (stmt, find_va_list_reference, &wi))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              va_list_escapes = true;
            }
        }

      if (va_list_escapes)
        break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
               funcname, (int) va_list_escapes);
      if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
}

/* Return true if STMT is IFN_VA_ARG.  */

static bool
gimple_call_ifn_va_arg_p (gimple stmt)
{
  return (is_gimple_call (stmt)
          && gimple_call_internal_p (stmt)
          && gimple_call_internal_fn (stmt) == IFN_VA_ARG);
}

/* Expand IFN_VA_ARGs in FUN.  */
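/* A note on the IFN_VA_ARG calls handled below: argument 0 is the address
   of the va_list, argument 1 has pointer-to-TYPE type from which the
   requested TYPE is recovered, and an optional argument 2 carries the size
   taken from a WITH_SIZE_EXPR for variable-sized types.  */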

static void
expand_ifn_va_arg_1 (function *fun)
{
  bool modified = false;
  basic_block bb;
  gimple_stmt_iterator i;

  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
        gimple stmt = gsi_stmt (i);
        tree ap, expr, lhs, type;
        gimple_seq pre = NULL, post = NULL;

        if (!gimple_call_ifn_va_arg_p (stmt))
          continue;

        modified = true;

        type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
        ap = gimple_call_arg (stmt, 0);
        ap = build_fold_indirect_ref (ap);

        push_gimplify_context (false);

        expr = gimplify_va_arg_internal (ap, type, gimple_location (stmt),
                                         &pre, &post);

        lhs = gimple_call_lhs (stmt);
        if (lhs != NULL_TREE)
          {
            gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));

            if (gimple_call_num_args (stmt) == 3)
              {
                /* We've transported the size from the WITH_SIZE_EXPR here as
                   the 3rd argument of the internal fn call.  Now reinstate
                   it.  */
                tree size = gimple_call_arg (stmt, 2);
                expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
              }

            /* We use gimplify_assign here, rather than gimple_build_assign,
               because gimplify_assign knows how to deal with variable-sized
               types.  */
            gimplify_assign (lhs, expr, &pre);
          }

        pop_gimplify_context (NULL);

        gimple_seq_add_seq (&pre, post);
        update_modified_stmts (pre);

        /* Add the sequence after IFN_VA_ARG.  This splits the bb right
           after IFN_VA_ARG, and adds the sequence in one or more new bbs
           in between.  */
        gimple_find_sub_bbs (pre, &i);

        /* Remove the IFN_VA_ARG gimple_call.  It's the last stmt in the
           bb.  */
        gsi_remove (&i, true);
        gcc_assert (gsi_end_p (i));

        /* We're still walking here into the bbs which contain the expansion
           of IFN_VA_ARG, even though they will not contain another
           IFN_VA_ARG that needs expanding.  We could try to skip walking
           these bbs, perhaps by walking backwards over gimples and bbs.  */
        break;
      }

  if (!modified)
    return;

  free_dominance_info (CDI_DOMINATORS);
  update_ssa (TODO_update_ssa);
}

/* Expand IFN_VA_ARGs in FUN, if necessary.  */

static void
expand_ifn_va_arg (function *fun)
{
  if ((fun->curr_properties & PROP_gimple_lva) == 0)
    expand_ifn_va_arg_1 (fun);

#if ENABLE_CHECKING
  basic_block bb;
  gimple_stmt_iterator i;
  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      gcc_assert (!gimple_call_ifn_va_arg_p (gsi_stmt (i)));
#endif
}

namespace {

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* Always run this pass, in order to expand va_arg internal_fns.  We
         also need to do that if fun->stdarg == 0, because a va_arg may also
         occur in a function without varargs, e.g. when passing a va_list to
         another function.  */
      return true;
    }

  virtual unsigned int execute (function *);

}; // class pass_stdarg

unsigned int
pass_stdarg::execute (function *fun)
{
  /* TODO: Postpone expand_ifn_va_arg till after
     optimize_va_list_gpr_fpr_size.  */
  expand_ifn_va_arg (fun);

  if (flag_stdarg_opt
      /* This optimization is only for stdarg functions.  */
      && fun->stdarg != 0)
    optimize_va_list_gpr_fpr_size (fun);

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}

namespace {

const pass_data pass_data_lower_vaarg =
{
  GIMPLE_PASS, /* type */
  "lower_vaarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_vaarg : public gimple_opt_pass
{
public:
  pass_lower_vaarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (cfun->curr_properties & PROP_gimple_lva) == 0;
    }

  virtual unsigned int execute (function *);

}; // class pass_lower_vaarg

unsigned int
pass_lower_vaarg::execute (function *fun)
{
  expand_ifn_va_arg (fun);
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_vaarg (gcc::context *ctxt)
{
  return new pass_lower_vaarg (ctxt);
}