]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-stdarg.c
fix PR68343: disable fuse-*.c tests for isl 0.14 or earlier
[thirdparty/gcc.git] / gcc / tree-stdarg.c
CommitLineData
9d30f3c1 1/* Pass computing data for optimizing stdarg functions.
818ab71a 2 Copyright (C) 2004-2016 Free Software Foundation, Inc.
9d30f3c1
JJ
3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9dcd6f09 9the Free Software Foundation; either version 3, or (at your option)
9d30f3c1
JJ
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
9dcd6f09
NC
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
9d30f3c1
JJ
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5 25#include "target.h"
40e23961 26#include "tree.h"
c7131fb2 27#include "gimple.h"
957060b5 28#include "tree-pass.h"
c7131fb2 29#include "ssa.h"
957060b5 30#include "gimple-pretty-print.h"
c7131fb2 31#include "fold-const.h"
9d30f3c1 32#include "langhooks.h"
5be5c238
AM
33#include "gimple-iterator.h"
34#include "gimple-walk.h"
f8e89441 35#include "gimplify.h"
f8e89441 36#include "tree-into-ssa.h"
f8e89441 37#include "tree-cfg.h"
9d30f3c1 38#include "tree-stdarg.h"
64a948e9 39#include "tree-chkp.h"
9d30f3c1
JJ
40
41/* A simple pass that attempts to optimize stdarg functions on architectures
42 that need to save register arguments to stack on entry to stdarg functions.
43 If the function doesn't use any va_start macros, no registers need to
44 be saved. If va_start macros are used, the va_list variables don't escape
45 the function, it is only necessary to save registers that will be used
46 in va_arg macros. E.g. if va_arg is only used with integral types
47 in the function, floating point registers don't need to be saved, etc. */
48
49
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  /* Dominance is a necessary condition; without it VA_ARG_BB could be
     reached on a path that bypasses VA_START_BB entirely.  */
  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  /* Depth-first walk backwards over predecessor edges, starting from
     VA_ARG_BB, stopping at VA_START_BB.  */
  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      /* Abnormal/EH edges make the execution count unanalyzable.  */
      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      if (src == va_start_bb)
	continue;

      /* Reaching VA_ARG_BB again before VA_START_BB means a cycle:
	 va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      /* Dominance guarantees every backwards path hits VA_START_BB
	 before the entry block.  */
      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
	{
	  bitmap_set_bit (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    stack.safe_push (e);
	}
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}
112
113
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple *stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the per-SSA-name offset cache; -1 means "not yet
     computed".  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;

  /* First pass: walk the SSA def chain from RHS back towards a read of
     COUNTER, accumulating the constant increments into RET.  Any form we
     do not recognize makes the bump unknown (HOST_WIDE_INT_M1U).  */
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  /* Cached offset available: combine it with the running total.
	     Note RET may wrap below zero here; callers compare with
	     "+ 1 > 1" to detect both 0 and M1U.  */
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      /* Plain copy or cast: follow through without changing the bump.  */
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      /* x = y + CST: accumulate CST.  */
      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      /* x = &MEM_REF[y + CST]: same as an addition of CST.  */
      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      /* The chain must terminate in a read of COUNTER itself (for
	 COMPONENT_REF counters, the same base and field).  */
      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  /* Second pass: re-walk the same chain and record in si->offsets the
     counter value each intermediate SSA name corresponds to, so later
     queries can stop at the cache check above.  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
263
264
/* Called by walk_tree to look for references to va_list variables.
   DATA is a walk_stmt_info whose info field holds the bitmap of tracked
   va_list variables: SSA names are keyed by version, VAR_DECLs by
   DECL_UID biased by num_ssa_names so the two key spaces don't collide.
   Returns the offending tree (stopping the walk) or NULL_TREE.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
			void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    {
      if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
	return var;
    }
  else if (TREE_CODE (var) == VAR_DECL)
    {
      if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
	return var;
    }

  return NULL_TREE;
}
287
288
/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  /* Lazily decide (once per basic block; -1 means undecided) whether
     precise size computation is possible: it requires a single va_start
     that dominates this block and executes at least as often.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* "increment + 1 > 1" filters out both 0 and HOST_WIDE_INT_M1U
     (unknown) in a single unsigned comparison.  */
  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  /* Otherwise be conservative: assume all registers of this kind are
     needed.  Reads with compute_sizes set are harmless and skipped.  */
  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
340
341
/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
   is false, AP has been seen in VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   current function.  */

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
			   bool write_p)
{
  tree base;

  /* AP must be a field access (the counter lives inside the va_list
     struct).  */
  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  /* VAR must be an SSA temporary that is not itself a tracked va_list.  */
  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  /* The access must be based on one of the tracked va_list VAR_DECLs.  */
  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  /* Only the target-identified GPR/FPR counter fields matter; other
     fields are recognized but contribute nothing.  */
  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}
375
376
/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  Used only for simple
   (void * / char *) va_list types, where the pointer itself is the
   single counter.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  /* Same lazy once-per-bb decision as in va_list_counter_op.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}
420
421
/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  /* compute_sizes must already have been decided (>0) by an earlier
     va_list_ptr_read of the same sequence.  */
  if (si->compute_sizes <= 0)
    return false;

  /* "increment + 1 <= 1" rejects both 0 and HOST_WIDE_INT_M1U.  */
  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  /* Saturate at VA_LIST_MAX_GPR_SIZE.  */
  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}
455
456
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending whether LHS is a function local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  /* RHS must derive from a tracked escape temporary, either directly
     or through &MEM_REF[ptr ...].  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  /* Storing a va_list-derived value into anything other than an SSA
     temporary counts as an escape.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  /* Same lazy once-per-bb decision as in va_list_counter_op.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  /* LHS is a new local temporary derived from the va_list; keep
     tracking it.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
522
523
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* A tracked temporary flowing into a PHI whose result is not
	 itself tracked is an escape.  */
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gphi *phi = i.phi ();

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp; — a load through the tracked pointer:
		     record how far into the save area it reaches.  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      /* Any other use of a tracked temporary is an escape.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
658
302f0807 659/* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size. */
9d30f3c1 660
302f0807
TV
661static void
662optimize_va_list_gpr_fpr_size (function *fun)
9d30f3c1
JJ
663{
664 basic_block bb;
665 bool va_list_escapes = false;
666 bool va_list_simple_ptr;
667 struct stdarg_info si;
726a989a 668 struct walk_stmt_info wi;
9d30f3c1 669 const char *funcname = NULL;
35cbb299 670 tree cfun_va_list;
9d30f3c1 671
be55bfe6
TS
672 fun->va_list_gpr_size = 0;
673 fun->va_list_fpr_size = 0;
9d30f3c1
JJ
674 memset (&si, 0, sizeof (si));
675 si.va_list_vars = BITMAP_ALLOC (NULL);
676 si.va_list_escape_vars = BITMAP_ALLOC (NULL);
677
678 if (dump_file)
679 funcname = lang_hooks.decl_printable_name (current_function_decl, 2);
680
be55bfe6 681 cfun_va_list = targetm.fn_abi_va_list (fun->decl);
35cbb299
KT
682 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
683 && (TREE_TYPE (cfun_va_list) == void_type_node
684 || TREE_TYPE (cfun_va_list) == char_type_node);
685 gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);
9d30f3c1 686
be55bfe6 687 FOR_EACH_BB_FN (bb, fun)
9d30f3c1 688 {
726a989a 689 gimple_stmt_iterator i;
9d30f3c1 690
726a989a 691 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
9d30f3c1 692 {
355fe088 693 gimple *stmt = gsi_stmt (i);
726a989a 694 tree callee, ap;
9d30f3c1 695
726a989a 696 if (!is_gimple_call (stmt))
9d30f3c1
JJ
697 continue;
698
726a989a 699 callee = gimple_call_fndecl (stmt);
9d30f3c1
JJ
700 if (!callee
701 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
702 continue;
703
704 switch (DECL_FUNCTION_CODE (callee))
705 {
706 case BUILT_IN_VA_START:
707 break;
708 /* If old style builtins are used, don't optimize anything. */
709 case BUILT_IN_SAVEREGS:
9d30f3c1
JJ
710 case BUILT_IN_NEXT_ARG:
711 va_list_escapes = true;
712 continue;
713 default:
714 continue;
715 }
716
717 si.va_start_count++;
726a989a 718 ap = gimple_call_arg (stmt, 0);
bb673b41
RG
719
720 if (TREE_CODE (ap) != ADDR_EXPR)
9d30f3c1
JJ
721 {
722 va_list_escapes = true;
723 break;
724 }
9d30f3c1 725 ap = TREE_OPERAND (ap, 0);
bb673b41
RG
726 if (TREE_CODE (ap) == ARRAY_REF)
727 {
728 if (! integer_zerop (TREE_OPERAND (ap, 1)))
729 {
730 va_list_escapes = true;
731 break;
732 }
733 ap = TREE_OPERAND (ap, 0);
734 }
735 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
be55bfe6 736 != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
bb673b41
RG
737 || TREE_CODE (ap) != VAR_DECL)
738 {
739 va_list_escapes = true;
740 break;
741 }
742
9d30f3c1
JJ
743 if (is_global_var (ap))
744 {
745 va_list_escapes = true;
746 break;
747 }
748
6b4a85ad 749 bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);
9d30f3c1
JJ
750
751 /* VA_START_BB and VA_START_AP will be only used if there is just
752 one va_start in the function. */
753 si.va_start_bb = bb;
754 si.va_start_ap = ap;
755 }
756
757 if (va_list_escapes)
758 break;
759 }
760
761 /* If there were no va_start uses in the function, there is no need to
762 save anything. */
763 if (si.va_start_count == 0)
764 goto finish;
765
766 /* If some va_list arguments weren't local, we can't optimize. */
767 if (va_list_escapes)
768 goto finish;
769
770 /* For void * or char * va_list, something useful can be done only
771 if there is just one va_start. */
772 if (va_list_simple_ptr && si.va_start_count > 1)
773 {
774 va_list_escapes = true;
775 goto finish;
776 }
777
778 /* For struct * va_list, if the backend didn't tell us what the counter fields
779 are, there is nothing more we can do. */
780 if (!va_list_simple_ptr
781 && va_list_gpr_counter_field == NULL_TREE
782 && va_list_fpr_counter_field == NULL_TREE)
783 {
784 va_list_escapes = true;
785 goto finish;
786 }
787
788 /* For void * or char * va_list there is just one counter
789 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
790 if (va_list_simple_ptr)
be55bfe6 791 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
9d30f3c1 792
18c57f2c 793 calculate_dominance_info (CDI_DOMINATORS);
726a989a
RB
794 memset (&wi, 0, sizeof (wi));
795 wi.info = si.va_list_vars;
18c57f2c 796
be55bfe6 797 FOR_EACH_BB_FN (bb, fun)
9d30f3c1 798 {
9d30f3c1
JJ
799 si.compute_sizes = -1;
800 si.bb = bb;
746077ff
RH
801
802 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
803 them as assignments for the purpose of escape analysis. This is
804 not needed for non-simple va_list because virtual phis don't perform
21c0137a
JJ
805 any real data movement. Also, check PHI nodes for taking address of
806 the va_list vars. */
807 tree lhs, rhs;
808 use_operand_p uop;
809 ssa_op_iter soi;
746077ff 810
21c0137a
JJ
811 for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
812 gsi_next (&i))
813 {
814 gphi *phi = i.phi ();
815 lhs = PHI_RESULT (phi);
746077ff 816
21c0137a
JJ
817 if (virtual_operand_p (lhs))
818 continue;
746077ff 819
21c0137a
JJ
820 if (va_list_simple_ptr)
821 {
746077ff
RH
822 FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
823 {
824 rhs = USE_FROM_PTR (uop);
825 if (va_list_ptr_read (&si, rhs, lhs))
826 continue;
827 else if (va_list_ptr_write (&si, lhs, rhs))
828 continue;
829 else
830 check_va_list_escapes (&si, lhs, rhs);
831
726a989a 832 if (si.va_list_escapes)
746077ff
RH
833 {
834 if (dump_file && (dump_flags & TDF_DETAILS))
835 {
836 fputs ("va_list escapes in ", dump_file);
726a989a 837 print_gimple_stmt (dump_file, phi, 0, dump_flags);
746077ff
RH
838 fputc ('\n', dump_file);
839 }
840 va_list_escapes = true;
841 }
842 }
843 }
21c0137a
JJ
844
845 for (unsigned j = 0; !va_list_escapes
846 && j < gimple_phi_num_args (phi); ++j)
847 if ((!va_list_simple_ptr
848 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
849 && walk_tree (gimple_phi_arg_def_ptr (phi, j),
850 find_va_list_reference, &wi, NULL))
851 {
852 if (dump_file && (dump_flags & TDF_DETAILS))
853 {
854 fputs ("va_list escapes in ", dump_file);
855 print_gimple_stmt (dump_file, phi, 0, dump_flags);
856 fputc ('\n', dump_file);
857 }
858 va_list_escapes = true;
859 }
746077ff
RH
860 }
861
538dd0b7 862 for (gimple_stmt_iterator i = gsi_start_bb (bb);
726a989a
RB
863 !gsi_end_p (i) && !va_list_escapes;
864 gsi_next (&i))
9d30f3c1 865 {
355fe088 866 gimple *stmt = gsi_stmt (i);
9d30f3c1
JJ
867
868 /* Don't look at __builtin_va_{start,end}, they are ok. */
726a989a 869 if (is_gimple_call (stmt))
9d30f3c1 870 {
726a989a 871 tree callee = gimple_call_fndecl (stmt);
9d30f3c1
JJ
872
873 if (callee
874 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
875 && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
876 || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
877 continue;
878 }
879
726a989a 880 if (is_gimple_assign (stmt))
9d30f3c1 881 {
21c0137a
JJ
882 lhs = gimple_assign_lhs (stmt);
883 rhs = gimple_assign_rhs1 (stmt);
9d30f3c1
JJ
884
885 if (va_list_simple_ptr)
886 {
726a989a
RB
887 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
888 == GIMPLE_SINGLE_RHS)
889 {
bad25453
JJ
890 /* Check for ap ={v} {}. */
891 if (TREE_CLOBBER_P (rhs))
892 continue;
893
726a989a 894 /* Check for tem = ap. */
bad25453 895 else if (va_list_ptr_read (&si, rhs, lhs))
726a989a 896 continue;
9d30f3c1 897
726a989a
RB
898 /* Check for the last insn in:
899 tem1 = ap;
900 tem2 = tem1 + CST;
901 ap = tem2;
902 sequence. */
903 else if (va_list_ptr_write (&si, lhs, rhs))
904 continue;
905 }
9d30f3c1 906
726a989a
RB
907 if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
908 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
1a87cf0c 909 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
726a989a
RB
910 || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
911 == GIMPLE_SINGLE_RHS))
9d30f3c1
JJ
912 check_va_list_escapes (&si, lhs, rhs);
913 }
914 else
915 {
726a989a
RB
916 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
917 == GIMPLE_SINGLE_RHS)
918 {
47598145
MM
919 /* Check for ap ={v} {}. */
920 if (TREE_CLOBBER_P (rhs))
921 continue;
bad25453 922
726a989a 923 /* Check for ap[0].field = temp. */
47598145 924 else if (va_list_counter_struct_op (&si, lhs, rhs, true))
726a989a 925 continue;
9d30f3c1 926
726a989a
RB
927 /* Check for temp = ap[0].field. */
928 else if (va_list_counter_struct_op (&si, rhs, lhs,
929 false))
930 continue;
931 }
9d30f3c1
JJ
932
933 /* Do any architecture specific checking. */
726a989a
RB
934 if (targetm.stdarg_optimize_hook
935 && targetm.stdarg_optimize_hook (&si, stmt))
9d30f3c1
JJ
936 continue;
937 }
938 }
b5b8b0ac
AO
939 else if (is_gimple_debug (stmt))
940 continue;
9d30f3c1
JJ
941
942 /* All other uses of va_list are either va_copy (that is not handled
943 in this optimization), taking address of va_list variable or
944 passing va_list to other functions (in that case va_list might
945 escape the function and therefore va_start needs to set it up
946 fully), or some unexpected use of va_list. None of these should
947 happen in a gimplified VA_ARG_EXPR. */
948 if (si.va_list_escapes
726a989a 949 || walk_gimple_op (stmt, find_va_list_reference, &wi))
9d30f3c1
JJ
950 {
951 if (dump_file && (dump_flags & TDF_DETAILS))
952 {
953 fputs ("va_list escapes in ", dump_file);
726a989a 954 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
9d30f3c1
JJ
955 fputc ('\n', dump_file);
956 }
957 va_list_escapes = true;
958 }
959 }
960
961 if (va_list_escapes)
962 break;
963 }
964
965 if (! va_list_escapes
966 && va_list_simple_ptr
967 && ! bitmap_empty_p (si.va_list_escape_vars)
968 && check_all_va_list_escapes (&si))
969 va_list_escapes = true;
970
971finish:
972 if (va_list_escapes)
973 {
be55bfe6
TS
974 fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
975 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
9d30f3c1
JJ
976 }
977 BITMAP_FREE (si.va_list_vars);
978 BITMAP_FREE (si.va_list_escape_vars);
979 free (si.offsets);
980 if (dump_file)
981 {
982 fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
983 funcname, (int) va_list_escapes);
be55bfe6 984 if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
9d30f3c1
JJ
985 fputs ("all", dump_file);
986 else
987 fprintf (dump_file, "%d", cfun->va_list_gpr_size);
988 fputs (" GPR units and ", dump_file);
be55bfe6 989 if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
9d30f3c1
JJ
990 fputs ("all", dump_file);
991 else
992 fprintf (dump_file, "%d", cfun->va_list_fpr_size);
993 fputs (" FPR units.\n", dump_file);
994 }
302f0807
TV
995}
996
f8e89441
TV
997/* Return true if STMT is IFN_VA_ARG. */
998
999static bool
355fe088 1000gimple_call_ifn_va_arg_p (gimple *stmt)
f8e89441
TV
1001{
1002 return (is_gimple_call (stmt)
1003 && gimple_call_internal_p (stmt)
1004 && gimple_call_internal_fn (stmt) == IFN_VA_ARG);
1005}
1006
/* Expand IFN_VA_ARGs in FUN.  Each IFN_VA_ARG call is replaced by the
   target-specific gimplified expansion of va_arg, spliced into the CFG
   right after the call; the call itself is then removed.  After any
   expansion, dominators are freed and SSA form is updated.  */

static void
expand_ifn_va_arg_1 (function *fun)
{
  bool modified = false;
  basic_block bb;
  gimple_stmt_iterator i;
  location_t saved_location;

  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple *stmt = gsi_stmt (i);
	tree ap, aptype, expr, lhs, type;
	gimple_seq pre = NULL, post = NULL;

	if (!gimple_call_ifn_va_arg_p (stmt))
	  continue;

	modified = true;

	/* Arg 1 carries a pointer to the requested type; arg 0 is the
	   va_list (or its address), arg 2 records the type &ap had.  */
	type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
	ap = gimple_call_arg (stmt, 0);
	aptype = TREE_TYPE (gimple_call_arg (stmt, 2));
	gcc_assert (POINTER_TYPE_P (aptype));

	/* Balanced out the &ap, usually added by build_va_arg.  */
	ap = build2 (MEM_REF, TREE_TYPE (aptype), ap,
		     build_int_cst (aptype, 0));

	push_gimplify_context (false);
	/* Gimplify with the location of the IFN_VA_ARG call, restoring
	   input_location afterwards.  */
	saved_location = input_location;
	input_location = gimple_location (stmt);

	/* Make it easier for the backends by protecting the valist argument
	   from multiple evaluations.  */
	gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue);

	expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post);

	lhs = gimple_call_lhs (stmt);
	if (lhs != NULL_TREE)
	  {
	    unsigned int nargs = gimple_call_num_args (stmt);
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));

	    /* We replace call with a new expr.  This may require
	       corresponding bndret call fixup.  */
	    if (chkp_function_instrumented_p (fun->decl))
	      chkp_fixup_inlined_call (lhs, expr);

	    if (nargs == 4)
	      {
		/* We've transported the size of with WITH_SIZE_EXPR here as
		   the last argument of the internal fn call.  Now reinstate
		   it.  */
		tree size = gimple_call_arg (stmt, nargs - 1);
		expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
	      }

	    /* We use gimplify_assign here, rather than gimple_build_assign,
	       because gimple_assign knows how to deal with variable-sized
	       types.  */
	    gimplify_assign (lhs, expr, &pre);
	  }
	else
	  /* No lhs: still gimplify EXPR for its side effects.  */
	  gimplify_expr (&expr, &pre, &post, is_gimple_lvalue, fb_lvalue);

	input_location = saved_location;
	pop_gimplify_context (NULL);

	gimple_seq_add_seq (&pre, post);
	update_modified_stmts (pre);

	/* Add the sequence after IFN_VA_ARG.  This splits the bb right
	   after IFN_VA_ARG, and adds the sequence in one or more new bbs
	   inbetween.  */
	gimple_find_sub_bbs (pre, &i);

	/* Remove the IFN_VA_ARG gimple_call.  It's the last stmt in the
	   bb.  */
	unlink_stmt_vdef (stmt);
	release_ssa_name_fn (fun, gimple_vdef (stmt));
	gsi_remove (&i, true);
	gcc_assert (gsi_end_p (i));

	/* We're walking here into the bbs which contain the expansion of
	   IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
	   expanding.  We could try to skip walking these bbs, perhaps by
	   walking backwards over gimples and bbs.  */
	break;
      }

  if (!modified)
    return;

  free_dominance_info (CDI_DOMINATORS);
  update_ssa (TODO_update_ssa);
}
1107
1108/* Expand IFN_VA_ARGs in FUN, if necessary. */
1109
1110static void
1111expand_ifn_va_arg (function *fun)
1112{
1113 if ((fun->curr_properties & PROP_gimple_lva) == 0)
1114 expand_ifn_va_arg_1 (fun);
1115
b2b29377
MM
1116 if (flag_checking)
1117 {
1118 basic_block bb;
1119 gimple_stmt_iterator i;
1120 FOR_EACH_BB_FN (bb, fun)
1121 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1122 gcc_assert (!gimple_call_ifn_va_arg_p (gsi_stmt (i)));
1123 }
f8e89441
TV
1124}
1125
17795822
TS
1126namespace {
1127
/* Pass registration data for the "stdarg" gimple pass; it requires CFG
   and SSA form and guarantees that IFN_VA_ARG is expanded on exit
   (PROP_gimple_lva).  */

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1140
/* The stdarg pass: expands IFN_VA_ARG calls and, for stdarg functions,
   computes how many GPR/FPR units need saving in the prologue.  */

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    /* Always run this pass, in order to expand va_arg internal_fns.  We
       also need to do that if fun->stdarg == 0, because a va_arg may also
       occur in a function without varargs, e.g. when passing a va_list to
       another function.  */
    return true;
  }

  virtual unsigned int execute (function *);

}; // class pass_stdarg
1161
1162unsigned int
1163pass_stdarg::execute (function *fun)
1164{
f8e89441
TV
1165 /* TODO: Postpone expand_ifn_va_arg till after
1166 optimize_va_list_gpr_fpr_size. */
1167 expand_ifn_va_arg (fun);
1168
1169 if (flag_stdarg_opt
1170 /* This optimization is only for stdarg functions. */
1171 && fun->stdarg != 0)
1172 optimize_va_list_gpr_fpr_size (fun);
302f0807 1173
c2924966 1174 return 0;
9d30f3c1
JJ
1175}
1176
17795822
TS
1177} // anon namespace
1178
27a4cd48
DM
1179gimple_opt_pass *
1180make_pass_stdarg (gcc::context *ctxt)
1181{
1182 return new pass_stdarg (ctxt);
1183}
f8e89441 1184
17795822
TS
1185namespace {
1186
/* Pass registration data for the "lower_vaarg" gimple pass; like
   "stdarg" it provides PROP_gimple_lva, so whichever of the two runs
   first expands IFN_VA_ARG.  */

const pass_data pass_data_lower_vaarg =
{
  GIMPLE_PASS, /* type */
  "lower_vaarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1199
/* The lower_vaarg pass: expands IFN_VA_ARG calls when that has not
   already been done (i.e. when PROP_gimple_lva is not yet set).  */

class pass_lower_vaarg : public gimple_opt_pass
{
public:
  pass_lower_vaarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    /* Run only if IFN_VA_ARG has not been expanded yet.  */
    return (cfun->curr_properties & PROP_gimple_lva) == 0;
  }

  virtual unsigned int execute (function *);

}; // class pass_lower_vaarg
1216
/* Main entry point of the lower_vaarg pass: just expand IFN_VA_ARGs.  */

unsigned int
pass_lower_vaarg::execute (function *fun)
{
  expand_ifn_va_arg (fun);
  return 0;
}
1223
17795822
TS
1224} // anon namespace
1225
f8e89441
TV
1226gimple_opt_pass *
1227make_pass_lower_vaarg (gcc::context *ctxt)
1228{
1229 return new pass_lower_vaarg (ctxt);
1230}