/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "langhooks.h"
#include "gimplify.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-stdarg.h"
42 /* A simple pass that attempts to optimize stdarg functions on architectures
43 that need to save register arguments to stack on entry to stdarg functions.
44 If the function doesn't use any va_start macros, no registers need to
45 be saved. If va_start macros are used, the va_list variables don't escape
46 the function, it is only necessary to save registers that will be used
47 in va_arg macros. E.g. if va_arg is only used with integral types
48 in the function, floating point registers don't need to be saved, etc. */
51 /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
52 is executed at most as many times as VA_START_BB. */
55 reachable_at_most_once (basic_block va_arg_bb
, basic_block va_start_bb
)
57 vec
<edge
> stack
= vNULL
;
63 if (va_arg_bb
== va_start_bb
)
66 if (! dominated_by_p (CDI_DOMINATORS
, va_arg_bb
, va_start_bb
))
69 visited
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
70 bitmap_clear (visited
);
73 FOR_EACH_EDGE (e
, ei
, va_arg_bb
->preds
)
76 while (! stack
.is_empty ())
83 if (e
->flags
& EDGE_COMPLEX
)
89 if (src
== va_start_bb
)
92 /* va_arg_bb can be executed more times than va_start_bb. */
99 gcc_assert (src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
));
101 if (! bitmap_bit_p (visited
, src
->index
))
103 bitmap_set_bit (visited
, src
->index
);
104 FOR_EACH_EDGE (e
, ei
, src
->preds
)
110 sbitmap_free (visited
);
115 /* For statement COUNTER = RHS, if RHS is COUNTER + constant,
116 return constant, otherwise return HOST_WIDE_INT_M1U.
117 GPR_P is true if this is GPR counter. */
119 static unsigned HOST_WIDE_INT
120 va_list_counter_bump (struct stdarg_info
*si
, tree counter
, tree rhs
,
125 unsigned HOST_WIDE_INT ret
= 0, val
, counter_val
;
126 unsigned int max_size
;
128 if (si
->offsets
== NULL
)
132 si
->offsets
= XNEWVEC (int, num_ssa_names
);
133 for (i
= 0; i
< num_ssa_names
; ++i
)
137 counter_val
= gpr_p
? cfun
->va_list_gpr_size
: cfun
->va_list_fpr_size
;
138 max_size
= gpr_p
? VA_LIST_MAX_GPR_SIZE
: VA_LIST_MAX_FPR_SIZE
;
139 orig_lhs
= lhs
= rhs
;
142 enum tree_code rhs_code
;
145 if (si
->offsets
[SSA_NAME_VERSION (lhs
)] != -1)
147 if (counter_val
>= max_size
)
153 ret
-= counter_val
- si
->offsets
[SSA_NAME_VERSION (lhs
)];
157 stmt
= SSA_NAME_DEF_STMT (lhs
);
159 if (!is_gimple_assign (stmt
) || gimple_assign_lhs (stmt
) != lhs
)
160 return HOST_WIDE_INT_M1U
;
162 rhs_code
= gimple_assign_rhs_code (stmt
);
163 rhs1
= gimple_assign_rhs1 (stmt
);
164 if ((get_gimple_rhs_class (rhs_code
) == GIMPLE_SINGLE_RHS
165 || gimple_assign_cast_p (stmt
))
166 && TREE_CODE (rhs1
) == SSA_NAME
)
172 if ((rhs_code
== POINTER_PLUS_EXPR
173 || rhs_code
== PLUS_EXPR
)
174 && TREE_CODE (rhs1
) == SSA_NAME
175 && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt
)))
177 ret
+= tree_to_uhwi (gimple_assign_rhs2 (stmt
));
182 if (rhs_code
== ADDR_EXPR
183 && TREE_CODE (TREE_OPERAND (rhs1
, 0)) == MEM_REF
184 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 0)) == SSA_NAME
185 && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 1)))
187 ret
+= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 1));
188 lhs
= TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 0);
192 if (get_gimple_rhs_class (rhs_code
) != GIMPLE_SINGLE_RHS
)
193 return HOST_WIDE_INT_M1U
;
195 rhs
= gimple_assign_rhs1 (stmt
);
196 if (TREE_CODE (counter
) != TREE_CODE (rhs
))
197 return HOST_WIDE_INT_M1U
;
199 if (TREE_CODE (counter
) == COMPONENT_REF
)
201 if (get_base_address (counter
) != get_base_address (rhs
)
202 || TREE_CODE (TREE_OPERAND (rhs
, 1)) != FIELD_DECL
203 || TREE_OPERAND (counter
, 1) != TREE_OPERAND (rhs
, 1))
204 return HOST_WIDE_INT_M1U
;
206 else if (counter
!= rhs
)
207 return HOST_WIDE_INT_M1U
;
213 val
= ret
+ counter_val
;
216 enum tree_code rhs_code
;
219 if (si
->offsets
[SSA_NAME_VERSION (lhs
)] != -1)
223 si
->offsets
[SSA_NAME_VERSION (lhs
)] = max_size
;
225 si
->offsets
[SSA_NAME_VERSION (lhs
)] = val
;
227 stmt
= SSA_NAME_DEF_STMT (lhs
);
229 rhs_code
= gimple_assign_rhs_code (stmt
);
230 rhs1
= gimple_assign_rhs1 (stmt
);
231 if ((get_gimple_rhs_class (rhs_code
) == GIMPLE_SINGLE_RHS
232 || gimple_assign_cast_p (stmt
))
233 && TREE_CODE (rhs1
) == SSA_NAME
)
239 if ((rhs_code
== POINTER_PLUS_EXPR
240 || rhs_code
== PLUS_EXPR
)
241 && TREE_CODE (rhs1
) == SSA_NAME
242 && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt
)))
244 val
-= tree_to_uhwi (gimple_assign_rhs2 (stmt
));
249 if (rhs_code
== ADDR_EXPR
250 && TREE_CODE (TREE_OPERAND (rhs1
, 0)) == MEM_REF
251 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 0)) == SSA_NAME
252 && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 1)))
254 val
-= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 1));
255 lhs
= TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 0);
266 /* Called by walk_tree to look for references to va_list variables. */
269 find_va_list_reference (tree
*tp
, int *walk_subtrees ATTRIBUTE_UNUSED
,
272 bitmap va_list_vars
= (bitmap
) ((struct walk_stmt_info
*) data
)->info
;
275 if (TREE_CODE (var
) == SSA_NAME
)
277 if (bitmap_bit_p (va_list_vars
, SSA_NAME_VERSION (var
)))
280 else if (TREE_CODE (var
) == VAR_DECL
)
282 if (bitmap_bit_p (va_list_vars
, DECL_UID (var
) + num_ssa_names
))
290 /* Helper function of va_list_counter_struct_op. Compute
291 cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
292 if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
293 statement. GPR_P is true if AP is a GPR counter, false if it is
297 va_list_counter_op (struct stdarg_info
*si
, tree ap
, tree var
, bool gpr_p
,
300 unsigned HOST_WIDE_INT increment
;
302 if (si
->compute_sizes
< 0)
304 si
->compute_sizes
= 0;
305 if (si
->va_start_count
== 1
306 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
307 si
->compute_sizes
= 1;
309 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
311 "bb%d will %sbe executed at most once for each va_start "
312 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
313 si
->va_start_bb
->index
);
318 && (increment
= va_list_counter_bump (si
, ap
, var
, gpr_p
)) + 1 > 1)
320 if (gpr_p
&& cfun
->va_list_gpr_size
+ increment
< VA_LIST_MAX_GPR_SIZE
)
322 cfun
->va_list_gpr_size
+= increment
;
326 if (!gpr_p
&& cfun
->va_list_fpr_size
+ increment
< VA_LIST_MAX_FPR_SIZE
)
328 cfun
->va_list_fpr_size
+= increment
;
333 if (write_p
|| !si
->compute_sizes
)
336 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
338 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
343 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
344 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
345 is false, AP has been seen in VAR = AP assignment.
346 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
347 va_arg operation that doesn't cause the va_list variable to escape
351 va_list_counter_struct_op (struct stdarg_info
*si
, tree ap
, tree var
,
356 if (TREE_CODE (ap
) != COMPONENT_REF
357 || TREE_CODE (TREE_OPERAND (ap
, 1)) != FIELD_DECL
)
360 if (TREE_CODE (var
) != SSA_NAME
361 || bitmap_bit_p (si
->va_list_vars
, SSA_NAME_VERSION (var
)))
364 base
= get_base_address (ap
);
365 if (TREE_CODE (base
) != VAR_DECL
366 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (base
) + num_ssa_names
))
369 if (TREE_OPERAND (ap
, 1) == va_list_gpr_counter_field
)
370 va_list_counter_op (si
, ap
, var
, true, write_p
);
371 else if (TREE_OPERAND (ap
, 1) == va_list_fpr_counter_field
)
372 va_list_counter_op (si
, ap
, var
, false, write_p
);
378 /* Check for TEM = AP. Return true if found and the caller shouldn't
379 search for va_list references in the statement. */
382 va_list_ptr_read (struct stdarg_info
*si
, tree ap
, tree tem
)
384 if (TREE_CODE (ap
) != VAR_DECL
385 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (ap
) + num_ssa_names
))
388 if (TREE_CODE (tem
) != SSA_NAME
389 || bitmap_bit_p (si
->va_list_vars
, SSA_NAME_VERSION (tem
)))
392 if (si
->compute_sizes
< 0)
394 si
->compute_sizes
= 0;
395 if (si
->va_start_count
== 1
396 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
397 si
->compute_sizes
= 1;
399 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
401 "bb%d will %sbe executed at most once for each va_start "
402 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
403 si
->va_start_bb
->index
);
406 /* For void * or char * va_list types, there is just one counter.
407 If va_arg is used in a loop, we don't know how many registers need
409 if (! si
->compute_sizes
)
412 if (va_list_counter_bump (si
, ap
, tem
, true) == HOST_WIDE_INT_M1U
)
415 /* Note the temporary, as we need to track whether it doesn't escape
416 the current function. */
417 bitmap_set_bit (si
->va_list_escape_vars
, SSA_NAME_VERSION (tem
));
427 sequence and update cfun->va_list_gpr_size. Return true if found. */
430 va_list_ptr_write (struct stdarg_info
*si
, tree ap
, tree tem2
)
432 unsigned HOST_WIDE_INT increment
;
434 if (TREE_CODE (ap
) != VAR_DECL
435 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (ap
) + num_ssa_names
))
438 if (TREE_CODE (tem2
) != SSA_NAME
439 || bitmap_bit_p (si
->va_list_vars
, SSA_NAME_VERSION (tem2
)))
442 if (si
->compute_sizes
<= 0)
445 increment
= va_list_counter_bump (si
, ap
, tem2
, true);
446 if (increment
+ 1 <= 1)
449 if (cfun
->va_list_gpr_size
+ increment
< VA_LIST_MAX_GPR_SIZE
)
450 cfun
->va_list_gpr_size
+= increment
;
452 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
458 /* If RHS is X, (some type *) X or X + CST for X a temporary variable
459 containing value of some va_list variable plus optionally some constant,
460 either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
461 depending whether LHS is a function local temporary. */
464 check_va_list_escapes (struct stdarg_info
*si
, tree lhs
, tree rhs
)
466 if (! POINTER_TYPE_P (TREE_TYPE (rhs
)))
469 if (TREE_CODE (rhs
) == SSA_NAME
)
471 if (! bitmap_bit_p (si
->va_list_escape_vars
, SSA_NAME_VERSION (rhs
)))
474 else if (TREE_CODE (rhs
) == ADDR_EXPR
475 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == MEM_REF
476 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs
, 0), 0)) == SSA_NAME
)
478 tree ptr
= TREE_OPERAND (TREE_OPERAND (rhs
, 0), 0);
479 if (! bitmap_bit_p (si
->va_list_escape_vars
, SSA_NAME_VERSION (ptr
)))
485 if (TREE_CODE (lhs
) != SSA_NAME
)
487 si
->va_list_escapes
= true;
491 if (si
->compute_sizes
< 0)
493 si
->compute_sizes
= 0;
494 if (si
->va_start_count
== 1
495 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
496 si
->compute_sizes
= 1;
498 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
500 "bb%d will %sbe executed at most once for each va_start "
501 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
502 si
->va_start_bb
->index
);
505 /* For void * or char * va_list types, there is just one counter.
506 If va_arg is used in a loop, we don't know how many registers need
508 if (! si
->compute_sizes
)
510 si
->va_list_escapes
= true;
514 if (va_list_counter_bump (si
, si
->va_start_ap
, lhs
, true)
515 == HOST_WIDE_INT_M1U
)
517 si
->va_list_escapes
= true;
521 bitmap_set_bit (si
->va_list_escape_vars
, SSA_NAME_VERSION (lhs
));
525 /* Check all uses of temporaries from si->va_list_escape_vars bitmap.
526 Return true if va_list might be escaping. */
529 check_all_va_list_escapes (struct stdarg_info
*si
)
533 FOR_EACH_BB_FN (bb
, cfun
)
535 for (gphi_iterator i
= gsi_start_phis (bb
); !gsi_end_p (i
);
541 gphi
*phi
= i
.phi ();
543 lhs
= PHI_RESULT (phi
);
544 if (virtual_operand_p (lhs
)
545 || bitmap_bit_p (si
->va_list_escape_vars
,
546 SSA_NAME_VERSION (lhs
)))
549 FOR_EACH_PHI_ARG (uop
, phi
, soi
, SSA_OP_USE
)
551 tree rhs
= USE_FROM_PTR (uop
);
552 if (TREE_CODE (rhs
) == SSA_NAME
553 && bitmap_bit_p (si
->va_list_escape_vars
,
554 SSA_NAME_VERSION (rhs
)))
556 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
558 fputs ("va_list escapes in ", dump_file
);
559 print_gimple_stmt (dump_file
, phi
, 0, dump_flags
);
560 fputc ('\n', dump_file
);
567 for (gimple_stmt_iterator i
= gsi_start_bb (bb
); !gsi_end_p (i
);
570 gimple
*stmt
= gsi_stmt (i
);
574 if (is_gimple_debug (stmt
))
577 FOR_EACH_SSA_TREE_OPERAND (use
, stmt
, iter
, SSA_OP_ALL_USES
)
579 if (! bitmap_bit_p (si
->va_list_escape_vars
,
580 SSA_NAME_VERSION (use
)))
583 if (is_gimple_assign (stmt
))
585 tree rhs
= gimple_assign_rhs1 (stmt
);
586 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
589 if (rhs_code
== MEM_REF
590 && TREE_OPERAND (rhs
, 0) == use
591 && TYPE_SIZE_UNIT (TREE_TYPE (rhs
))
592 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs
)))
593 && si
->offsets
[SSA_NAME_VERSION (use
)] != -1)
595 unsigned HOST_WIDE_INT gpr_size
;
596 tree access_size
= TYPE_SIZE_UNIT (TREE_TYPE (rhs
));
598 gpr_size
= si
->offsets
[SSA_NAME_VERSION (use
)]
599 + tree_to_shwi (TREE_OPERAND (rhs
, 1))
600 + tree_to_uhwi (access_size
);
601 if (gpr_size
>= VA_LIST_MAX_GPR_SIZE
)
602 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
603 else if (gpr_size
> cfun
->va_list_gpr_size
)
604 cfun
->va_list_gpr_size
= gpr_size
;
608 /* va_arg sequences may contain
609 other_ap_temp = ap_temp;
610 other_ap_temp = ap_temp + constant;
611 other_ap_temp = (some_type *) ap_temp;
615 && ((rhs_code
== POINTER_PLUS_EXPR
616 && (TREE_CODE (gimple_assign_rhs2 (stmt
))
618 || gimple_assign_cast_p (stmt
)
619 || (get_gimple_rhs_class (rhs_code
)
620 == GIMPLE_SINGLE_RHS
)))
622 tree lhs
= gimple_assign_lhs (stmt
);
624 if (TREE_CODE (lhs
) == SSA_NAME
625 && bitmap_bit_p (si
->va_list_escape_vars
,
626 SSA_NAME_VERSION (lhs
)))
629 if (TREE_CODE (lhs
) == VAR_DECL
630 && bitmap_bit_p (si
->va_list_vars
,
631 DECL_UID (lhs
) + num_ssa_names
))
634 else if (rhs_code
== ADDR_EXPR
635 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == MEM_REF
636 && TREE_OPERAND (TREE_OPERAND (rhs
, 0), 0) == use
)
638 tree lhs
= gimple_assign_lhs (stmt
);
640 if (bitmap_bit_p (si
->va_list_escape_vars
,
641 SSA_NAME_VERSION (lhs
)))
646 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
648 fputs ("va_list escapes in ", dump_file
);
649 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
650 fputc ('\n', dump_file
);
660 /* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size. */
663 optimize_va_list_gpr_fpr_size (function
*fun
)
666 bool va_list_escapes
= false;
667 bool va_list_simple_ptr
;
668 struct stdarg_info si
;
669 struct walk_stmt_info wi
;
670 const char *funcname
= NULL
;
673 fun
->va_list_gpr_size
= 0;
674 fun
->va_list_fpr_size
= 0;
675 memset (&si
, 0, sizeof (si
));
676 si
.va_list_vars
= BITMAP_ALLOC (NULL
);
677 si
.va_list_escape_vars
= BITMAP_ALLOC (NULL
);
680 funcname
= lang_hooks
.decl_printable_name (current_function_decl
, 2);
682 cfun_va_list
= targetm
.fn_abi_va_list (fun
->decl
);
683 va_list_simple_ptr
= POINTER_TYPE_P (cfun_va_list
)
684 && (TREE_TYPE (cfun_va_list
) == void_type_node
685 || TREE_TYPE (cfun_va_list
) == char_type_node
);
686 gcc_assert (is_gimple_reg_type (cfun_va_list
) == va_list_simple_ptr
);
688 FOR_EACH_BB_FN (bb
, fun
)
690 gimple_stmt_iterator i
;
692 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
694 gimple
*stmt
= gsi_stmt (i
);
697 if (!is_gimple_call (stmt
))
700 callee
= gimple_call_fndecl (stmt
);
702 || DECL_BUILT_IN_CLASS (callee
) != BUILT_IN_NORMAL
)
705 switch (DECL_FUNCTION_CODE (callee
))
707 case BUILT_IN_VA_START
:
709 /* If old style builtins are used, don't optimize anything. */
710 case BUILT_IN_SAVEREGS
:
711 case BUILT_IN_NEXT_ARG
:
712 va_list_escapes
= true;
719 ap
= gimple_call_arg (stmt
, 0);
721 if (TREE_CODE (ap
) != ADDR_EXPR
)
723 va_list_escapes
= true;
726 ap
= TREE_OPERAND (ap
, 0);
727 if (TREE_CODE (ap
) == ARRAY_REF
)
729 if (! integer_zerop (TREE_OPERAND (ap
, 1)))
731 va_list_escapes
= true;
734 ap
= TREE_OPERAND (ap
, 0);
736 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap
))
737 != TYPE_MAIN_VARIANT (targetm
.fn_abi_va_list (fun
->decl
))
738 || TREE_CODE (ap
) != VAR_DECL
)
740 va_list_escapes
= true;
744 if (is_global_var (ap
))
746 va_list_escapes
= true;
750 bitmap_set_bit (si
.va_list_vars
, DECL_UID (ap
) + num_ssa_names
);
752 /* VA_START_BB and VA_START_AP will be only used if there is just
753 one va_start in the function. */
762 /* If there were no va_start uses in the function, there is no need to
764 if (si
.va_start_count
== 0)
767 /* If some va_list arguments weren't local, we can't optimize. */
771 /* For void * or char * va_list, something useful can be done only
772 if there is just one va_start. */
773 if (va_list_simple_ptr
&& si
.va_start_count
> 1)
775 va_list_escapes
= true;
779 /* For struct * va_list, if the backend didn't tell us what the counter fields
780 are, there is nothing more we can do. */
781 if (!va_list_simple_ptr
782 && va_list_gpr_counter_field
== NULL_TREE
783 && va_list_fpr_counter_field
== NULL_TREE
)
785 va_list_escapes
= true;
789 /* For void * or char * va_list there is just one counter
790 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
791 if (va_list_simple_ptr
)
792 fun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
794 calculate_dominance_info (CDI_DOMINATORS
);
795 memset (&wi
, 0, sizeof (wi
));
796 wi
.info
= si
.va_list_vars
;
798 FOR_EACH_BB_FN (bb
, fun
)
800 si
.compute_sizes
= -1;
803 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
804 them as assignments for the purpose of escape analysis. This is
805 not needed for non-simple va_list because virtual phis don't perform
806 any real data movement. Also, check PHI nodes for taking address of
812 for (gphi_iterator i
= gsi_start_phis (bb
); !gsi_end_p (i
);
815 gphi
*phi
= i
.phi ();
816 lhs
= PHI_RESULT (phi
);
818 if (virtual_operand_p (lhs
))
821 if (va_list_simple_ptr
)
823 FOR_EACH_PHI_ARG (uop
, phi
, soi
, SSA_OP_USE
)
825 rhs
= USE_FROM_PTR (uop
);
826 if (va_list_ptr_read (&si
, rhs
, lhs
))
828 else if (va_list_ptr_write (&si
, lhs
, rhs
))
831 check_va_list_escapes (&si
, lhs
, rhs
);
833 if (si
.va_list_escapes
)
835 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
837 fputs ("va_list escapes in ", dump_file
);
838 print_gimple_stmt (dump_file
, phi
, 0, dump_flags
);
839 fputc ('\n', dump_file
);
841 va_list_escapes
= true;
846 for (unsigned j
= 0; !va_list_escapes
847 && j
< gimple_phi_num_args (phi
); ++j
)
848 if ((!va_list_simple_ptr
849 || TREE_CODE (gimple_phi_arg_def (phi
, j
)) != SSA_NAME
)
850 && walk_tree (gimple_phi_arg_def_ptr (phi
, j
),
851 find_va_list_reference
, &wi
, NULL
))
853 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
855 fputs ("va_list escapes in ", dump_file
);
856 print_gimple_stmt (dump_file
, phi
, 0, dump_flags
);
857 fputc ('\n', dump_file
);
859 va_list_escapes
= true;
863 for (gimple_stmt_iterator i
= gsi_start_bb (bb
);
864 !gsi_end_p (i
) && !va_list_escapes
;
867 gimple
*stmt
= gsi_stmt (i
);
869 /* Don't look at __builtin_va_{start,end}, they are ok. */
870 if (is_gimple_call (stmt
))
872 tree callee
= gimple_call_fndecl (stmt
);
875 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
876 && (DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_START
877 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_END
))
881 if (is_gimple_assign (stmt
))
883 lhs
= gimple_assign_lhs (stmt
);
884 rhs
= gimple_assign_rhs1 (stmt
);
886 if (va_list_simple_ptr
)
888 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
889 == GIMPLE_SINGLE_RHS
)
891 /* Check for ap ={v} {}. */
892 if (TREE_CLOBBER_P (rhs
))
895 /* Check for tem = ap. */
896 else if (va_list_ptr_read (&si
, rhs
, lhs
))
899 /* Check for the last insn in:
904 else if (va_list_ptr_write (&si
, lhs
, rhs
))
908 if ((gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
909 && TREE_CODE (gimple_assign_rhs2 (stmt
)) == INTEGER_CST
)
910 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
))
911 || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
912 == GIMPLE_SINGLE_RHS
))
913 check_va_list_escapes (&si
, lhs
, rhs
);
917 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
918 == GIMPLE_SINGLE_RHS
)
920 /* Check for ap ={v} {}. */
921 if (TREE_CLOBBER_P (rhs
))
924 /* Check for ap[0].field = temp. */
925 else if (va_list_counter_struct_op (&si
, lhs
, rhs
, true))
928 /* Check for temp = ap[0].field. */
929 else if (va_list_counter_struct_op (&si
, rhs
, lhs
,
934 /* Do any architecture specific checking. */
935 if (targetm
.stdarg_optimize_hook
936 && targetm
.stdarg_optimize_hook (&si
, stmt
))
940 else if (is_gimple_debug (stmt
))
943 /* All other uses of va_list are either va_copy (that is not handled
944 in this optimization), taking address of va_list variable or
945 passing va_list to other functions (in that case va_list might
946 escape the function and therefore va_start needs to set it up
947 fully), or some unexpected use of va_list. None of these should
948 happen in a gimplified VA_ARG_EXPR. */
949 if (si
.va_list_escapes
950 || walk_gimple_op (stmt
, find_va_list_reference
, &wi
))
952 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
954 fputs ("va_list escapes in ", dump_file
);
955 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
956 fputc ('\n', dump_file
);
958 va_list_escapes
= true;
966 if (! va_list_escapes
967 && va_list_simple_ptr
968 && ! bitmap_empty_p (si
.va_list_escape_vars
)
969 && check_all_va_list_escapes (&si
))
970 va_list_escapes
= true;
975 fun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
976 fun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
978 BITMAP_FREE (si
.va_list_vars
);
979 BITMAP_FREE (si
.va_list_escape_vars
);
983 fprintf (dump_file
, "%s: va_list escapes %d, needs to save ",
984 funcname
, (int) va_list_escapes
);
985 if (fun
->va_list_gpr_size
>= VA_LIST_MAX_GPR_SIZE
)
986 fputs ("all", dump_file
);
988 fprintf (dump_file
, "%d", cfun
->va_list_gpr_size
);
989 fputs (" GPR units and ", dump_file
);
990 if (fun
->va_list_fpr_size
>= VA_LIST_MAX_FPR_SIZE
)
991 fputs ("all", dump_file
);
993 fprintf (dump_file
, "%d", cfun
->va_list_fpr_size
);
994 fputs (" FPR units.\n", dump_file
);
998 /* Return true if STMT is IFN_VA_ARG. */
1001 gimple_call_ifn_va_arg_p (gimple
*stmt
)
1003 return (is_gimple_call (stmt
)
1004 && gimple_call_internal_p (stmt
)
1005 && gimple_call_internal_fn (stmt
) == IFN_VA_ARG
);
1008 /* Expand IFN_VA_ARGs in FUN. */
1011 expand_ifn_va_arg_1 (function
*fun
)
1013 bool modified
= false;
1015 gimple_stmt_iterator i
;
1016 location_t saved_location
;
1018 FOR_EACH_BB_FN (bb
, fun
)
1019 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1021 gimple
*stmt
= gsi_stmt (i
);
1022 tree ap
, expr
, lhs
, type
;
1023 gimple_seq pre
= NULL
, post
= NULL
;
1025 if (!gimple_call_ifn_va_arg_p (stmt
))
1030 type
= TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt
, 1)));
1031 ap
= gimple_call_arg (stmt
, 0);
1033 /* Balanced out the &ap, usually added by build_va_arg. */
1034 ap
= build_fold_indirect_ref (ap
);
1036 push_gimplify_context (false);
1037 saved_location
= input_location
;
1038 input_location
= gimple_location (stmt
);
1040 /* Make it easier for the backends by protecting the valist argument
1041 from multiple evaluations. */
1042 gimplify_expr (&ap
, &pre
, &post
, is_gimple_min_lval
, fb_lvalue
);
1044 expr
= targetm
.gimplify_va_arg_expr (ap
, type
, &pre
, &post
);
1046 lhs
= gimple_call_lhs (stmt
);
1047 if (lhs
!= NULL_TREE
)
1049 unsigned int nargs
= gimple_call_num_args (stmt
);
1050 gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs
), type
));
1054 /* We've transported the size of with WITH_SIZE_EXPR here as
1055 the last argument of the internal fn call. Now reinstate
1057 tree size
= gimple_call_arg (stmt
, nargs
- 1);
1058 expr
= build2 (WITH_SIZE_EXPR
, TREE_TYPE (expr
), expr
, size
);
1061 /* We use gimplify_assign here, rather than gimple_build_assign,
1062 because gimple_assign knows how to deal with variable-sized
1064 gimplify_assign (lhs
, expr
, &pre
);
1067 gimplify_expr (&expr
, &pre
, &post
, is_gimple_lvalue
, fb_lvalue
);
1069 input_location
= saved_location
;
1070 pop_gimplify_context (NULL
);
1072 gimple_seq_add_seq (&pre
, post
);
1073 update_modified_stmts (pre
);
1075 /* Add the sequence after IFN_VA_ARG. This splits the bb right
1076 after IFN_VA_ARG, and adds the sequence in one or more new bbs
1078 gimple_find_sub_bbs (pre
, &i
);
1080 /* Remove the IFN_VA_ARG gimple_call. It's the last stmt in the
1082 unlink_stmt_vdef (stmt
);
1083 release_ssa_name_fn (fun
, gimple_vdef (stmt
));
1084 gsi_remove (&i
, true);
1085 gcc_assert (gsi_end_p (i
));
1087 /* We're walking here into the bbs which contain the expansion of
1088 IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
1089 expanding. We could try to skip walking these bbs, perhaps by
1090 walking backwards over gimples and bbs. */
1097 free_dominance_info (CDI_DOMINATORS
);
1098 update_ssa (TODO_update_ssa
);
1101 /* Expand IFN_VA_ARGs in FUN, if necessary. */
1104 expand_ifn_va_arg (function
*fun
)
1106 if ((fun
->curr_properties
& PROP_gimple_lva
) == 0)
1107 expand_ifn_va_arg_1 (fun
);
1112 gimple_stmt_iterator i
;
1113 FOR_EACH_BB_FN (bb
, fun
)
1114 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1115 gcc_assert (!gimple_call_ifn_va_arg_p (gsi_stmt (i
)));
1121 const pass_data pass_data_stdarg
=
1123 GIMPLE_PASS
, /* type */
1124 "stdarg", /* name */
1125 OPTGROUP_NONE
, /* optinfo_flags */
1126 TV_NONE
, /* tv_id */
1127 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1128 PROP_gimple_lva
, /* properties_provided */
1129 0, /* properties_destroyed */
1130 0, /* todo_flags_start */
1131 0, /* todo_flags_finish */
1134 class pass_stdarg
: public gimple_opt_pass
1137 pass_stdarg (gcc::context
*ctxt
)
1138 : gimple_opt_pass (pass_data_stdarg
, ctxt
)
1141 /* opt_pass methods: */
1142 virtual bool gate (function
*)
1144 /* Always run this pass, in order to expand va_arg internal_fns. We
1145 also need to do that if fun->stdarg == 0, because a va_arg may also
1146 occur in a function without varargs, f.i. if when passing a va_list to
1147 another function. */
1151 virtual unsigned int execute (function
*);
1153 }; // class pass_stdarg
1156 pass_stdarg::execute (function
*fun
)
1158 /* TODO: Postpone expand_ifn_va_arg till after
1159 optimize_va_list_gpr_fpr_size. */
1160 expand_ifn_va_arg (fun
);
1163 /* This optimization is only for stdarg functions. */
1164 && fun
->stdarg
!= 0)
1165 optimize_va_list_gpr_fpr_size (fun
);
1173 make_pass_stdarg (gcc::context
*ctxt
)
1175 return new pass_stdarg (ctxt
);
1180 const pass_data pass_data_lower_vaarg
=
1182 GIMPLE_PASS
, /* type */
1183 "lower_vaarg", /* name */
1184 OPTGROUP_NONE
, /* optinfo_flags */
1185 TV_NONE
, /* tv_id */
1186 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1187 PROP_gimple_lva
, /* properties_provided */
1188 0, /* properties_destroyed */
1189 0, /* todo_flags_start */
1190 0, /* todo_flags_finish */
1193 class pass_lower_vaarg
: public gimple_opt_pass
1196 pass_lower_vaarg (gcc::context
*ctxt
)
1197 : gimple_opt_pass (pass_data_lower_vaarg
, ctxt
)
1200 /* opt_pass methods: */
1201 virtual bool gate (function
*)
1203 return (cfun
->curr_properties
& PROP_gimple_lva
) == 0;
1206 virtual unsigned int execute (function
*);
1208 }; // class pass_lower_vaarg
1211 pass_lower_vaarg::execute (function
*fun
)
1213 expand_ifn_va_arg (fun
);
1220 make_pass_lower_vaarg (gcc::context
*ctxt
)
1222 return new pass_lower_vaarg (ctxt
);