]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-stdarg.c
fortran/
[thirdparty/gcc.git] / gcc / tree-stdarg.c
CommitLineData
a6c787e5 1/* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to
67ce556b 19the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20Boston, MA 02110-1301, USA. */
a6c787e5 21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "tree.h"
27#include "function.h"
28#include "langhooks.h"
29#include "diagnostic.h"
30#include "target.h"
31#include "tree-flow.h"
32#include "tree-pass.h"
33#include "tree-stdarg.h"
34
35/* A simple pass that attempts to optimize stdarg functions on architectures
36 that need to save register arguments to stack on entry to stdarg functions.
37 If the function doesn't use any va_start macros, no registers need to
38 be saved. If va_start macros are used, the va_list variables don't escape
39 the function, it is only necessary to save registers that will be used
40 in va_arg macros. E.g. if va_arg is only used with integral types
41 in the function, floating point registers don't need to be saved, etc. */
42
43
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  edge *stack, e;
  edge_iterator ei;
  int sp;
  sbitmap visited;
  bool ret;

  /* Same block: trivially executed equally often.  */
  if (va_arg_bb == va_start_bb)
    return true;

  /* If VA_START_BB doesn't dominate VA_ARG_BB, some path reaches
     VA_ARG_BB without going through va_start at all.  */
  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  /* Worklist of predecessor edges still to examine.
     NOTE(review): sized by block count, but the total number of edges
     pushed is bounded by the CFG edge count, which can exceed
     n_basic_blocks + 1 -- confirm the bound or grow dynamically.  */
  stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  visited = sbitmap_alloc (last_basic_block);
  sbitmap_zero (visited);
  ret = true;

  /* Seed the backward walk with all edges entering VA_ARG_BB.  */
  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack[sp++] = e;

  /* Walk backwards; every path must terminate at VA_START_BB without
     revisiting VA_ARG_BB or crossing a complex edge.  */
  while (sp)
    {
      basic_block src;

      --sp;
      e = stack[sp];
      src = e->src;

      /* Abnormal/EH edges make execution counts unpredictable.  */
      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      /* This path stops at the va_start block -- fine.  */
      if (src == va_start_bb)
	continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      /* Dominance guarantees we hit VA_START_BB before the entry block.  */
      gcc_assert (src != ENTRY_BLOCK_PTR);

      if (! TEST_BIT (visited, src->index))
	{
	  SET_BIT (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    stack[sp++] = e;
	}
    }

  free (stack);
  sbitmap_free (visited);
  return ret;
}
110
111
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return (unsigned HOST_WIDE_INT) -1.
   GPR_P is true if this is GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree stmt, lhs, orig_lhs;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the per-SSA-name offset cache; -1 means unknown.  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = xmalloc (num_ssa_names * sizeof (int));
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  /* First pass: walk the SSA def chain backwards from RHS, summing the
     constant additions into RET, until we either reach a read of COUNTER
     itself or an SSA name whose offset is already cached.  */
  while (lhs)
    {
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  /* Cached: if the counter has already saturated, so does RET;
	     otherwise subtract the portion already accounted for.  */
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      /* Anything other than a plain assignment to LHS ends the analysis.  */
      if (TREE_CODE (stmt) != MODIFY_EXPR
	  || TREE_OPERAND (stmt, 0) != lhs)
	return (unsigned HOST_WIDE_INT) -1;

      rhs = TREE_OPERAND (stmt, 1);
      if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
	rhs = TREE_OPERAND (rhs, 0);

      /* Plain copy: follow the source.  */
      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  lhs = rhs;
	  continue;
	}

      /* Look through conversions.  */
      if ((TREE_CODE (rhs) == NOP_EXPR
	   || TREE_CODE (rhs) == CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
	{
	  lhs = TREE_OPERAND (rhs, 0);
	  continue;
	}

      /* X + CST: accumulate the bump and keep walking.  */
      if (TREE_CODE (rhs) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
	  && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
	  && host_integerp (TREE_OPERAND (rhs, 1), 1))
	{
	  ret += tree_low_cst (TREE_OPERAND (rhs, 1), 1);
	  lhs = TREE_OPERAND (rhs, 0);
	  continue;
	}

      /* The chain must terminate in a read of COUNTER itself.  */
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return (unsigned HOST_WIDE_INT) -1;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  /* Same base object and same field means the same counter.  */
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return (unsigned HOST_WIDE_INT) -1;
	}
      else if (counter != rhs)
	return (unsigned HOST_WIDE_INT) -1;

      lhs = NULL;
    }

  /* Second pass: retrace the same chain and cache, for each SSA name,
     the counter value it corresponds to (saturated at MAX_SIZE).  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs = TREE_OPERAND (stmt, 1);
      if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
	rhs = TREE_OPERAND (rhs, 0);

      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  lhs = rhs;
	  continue;
	}

      if ((TREE_CODE (rhs) == NOP_EXPR
	   || TREE_CODE (rhs) == CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
	{
	  lhs = TREE_OPERAND (rhs, 0);
	  continue;
	}

      /* Walking backwards, the value before a bump is smaller.  */
      if (TREE_CODE (rhs) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
	  && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
	  && host_integerp (TREE_OPERAND (rhs, 1), 1))
	{
	  val -= tree_low_cst (TREE_OPERAND (rhs, 1), 1);
	  lhs = TREE_OPERAND (rhs, 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
247
248
249/* Called by walk_tree to look for references to va_list variables. */
250
251static tree
252find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
253 void *data)
254{
255 bitmap va_list_vars = (bitmap) data;
256 tree var = *tp;
257
258 if (TREE_CODE (var) == SSA_NAME)
259 var = SSA_NAME_VAR (var);
260
261 if (TREE_CODE (var) == VAR_DECL
a55dc2cd 262 && bitmap_bit_p (va_list_vars, DECL_UID (var)))
a6c787e5 263 return var;
264
265 return NULL_TREE;
266}
267
268
269/* Helper function of va_list_counter_struct_op. Compute
270 cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
271 if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
272 statement. GPR_P is true if AP is a GPR counter, false if it is
273 a FPR counter. */
274
275static void
276va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
277 bool write_p)
278{
279 unsigned HOST_WIDE_INT increment;
280
281 if (si->compute_sizes < 0)
282 {
283 si->compute_sizes = 0;
284 if (si->va_start_count == 1
285 && reachable_at_most_once (si->bb, si->va_start_bb))
286 si->compute_sizes = 1;
287
288 if (dump_file && (dump_flags & TDF_DETAILS))
289 fprintf (dump_file,
290 "bb%d will %sbe executed at most once for each va_start "
291 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
292 si->va_start_bb->index);
293 }
294
295 if (write_p
296 && si->compute_sizes
297 && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
298 {
299 if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
300 {
301 cfun->va_list_gpr_size += increment;
302 return;
303 }
304
305 if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
306 {
307 cfun->va_list_fpr_size += increment;
308 return;
309 }
310 }
311
312 if (write_p || !si->compute_sizes)
313 {
314 if (gpr_p)
315 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
316 else
317 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
318 }
319}
320
321
322/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
323 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
324 is false, AP has been seen in VAR = AP assignment.
325 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
326 va_arg operation that doesn't cause the va_list variable to escape
327 current function. */
328
329static bool
330va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
331 bool write_p)
332{
333 tree base;
334
335 if (TREE_CODE (ap) != COMPONENT_REF
336 || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
337 return false;
338
339 if (TREE_CODE (var) != SSA_NAME
a55dc2cd 340 || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (var))))
a6c787e5 341 return false;
342
343 base = get_base_address (ap);
344 if (TREE_CODE (base) != VAR_DECL
a55dc2cd 345 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base)))
a6c787e5 346 return false;
347
348 if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
349 va_list_counter_op (si, ap, var, true, write_p);
350 else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
351 va_list_counter_op (si, ap, var, false, write_p);
352
353 return true;
354}
355
356
357/* Check for TEM = AP. Return true if found and the caller shouldn't
358 search for va_list references in the statement. */
359
360static bool
361va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
362{
363 if (TREE_CODE (ap) != VAR_DECL
a55dc2cd 364 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
a6c787e5 365 return false;
366
367 if (TREE_CODE (tem) != SSA_NAME
368 || bitmap_bit_p (si->va_list_vars,
a55dc2cd 369 DECL_UID (SSA_NAME_VAR (tem)))
a6c787e5 370 || is_global_var (SSA_NAME_VAR (tem)))
371 return false;
372
373 if (si->compute_sizes < 0)
374 {
375 si->compute_sizes = 0;
376 if (si->va_start_count == 1
377 && reachable_at_most_once (si->bb, si->va_start_bb))
378 si->compute_sizes = 1;
379
380 if (dump_file && (dump_flags & TDF_DETAILS))
381 fprintf (dump_file,
382 "bb%d will %sbe executed at most once for each va_start "
383 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
384 si->va_start_bb->index);
385 }
386
387 /* For void * or char * va_list types, there is just one counter.
388 If va_arg is used in a loop, we don't know how many registers need
389 saving. */
390 if (! si->compute_sizes)
391 return false;
392
393 if (va_list_counter_bump (si, ap, tem, true) == (unsigned HOST_WIDE_INT) -1)
394 return false;
395
396 /* Note the temporary, as we need to track whether it doesn't escape
397 the current function. */
398 bitmap_set_bit (si->va_list_escape_vars,
a55dc2cd 399 DECL_UID (SSA_NAME_VAR (tem)));
a6c787e5 400 return true;
401}
402
403
404/* Check for:
405 tem1 = AP;
406 TEM2 = tem1 + CST;
407 AP = TEM2;
408 sequence and update cfun->va_list_gpr_size. Return true if found. */
409
410static bool
411va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
412{
413 unsigned HOST_WIDE_INT increment;
414
415 if (TREE_CODE (ap) != VAR_DECL
a55dc2cd 416 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
a6c787e5 417 return false;
418
419 if (TREE_CODE (tem2) != SSA_NAME
a55dc2cd 420 || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (tem2))))
a6c787e5 421 return false;
422
423 if (si->compute_sizes <= 0)
424 return false;
425
426 increment = va_list_counter_bump (si, ap, tem2, true);
427 if (increment + 1 <= 1)
428 return false;
429
430 if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
431 cfun->va_list_gpr_size += increment;
432 else
433 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
434
435 return true;
436}
437
438
439/* If RHS is X, (some type *) X or X + CST for X a temporary variable
440 containing value of some va_list variable plus optionally some constant,
441 either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
442 depending whether LHS is a function local temporary. */
443
444static void
445check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
446{
447 if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
448 return;
449
450 if ((TREE_CODE (rhs) == PLUS_EXPR
451 && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
452 || TREE_CODE (rhs) == NOP_EXPR
453 || TREE_CODE (rhs) == CONVERT_EXPR)
454 rhs = TREE_OPERAND (rhs, 0);
455
456 if (TREE_CODE (rhs) != SSA_NAME
457 || ! bitmap_bit_p (si->va_list_escape_vars,
a55dc2cd 458 DECL_UID (SSA_NAME_VAR (rhs))))
a6c787e5 459 return;
460
461 if (TREE_CODE (lhs) != SSA_NAME || is_global_var (SSA_NAME_VAR (lhs)))
462 {
463 si->va_list_escapes = true;
464 return;
465 }
466
467 if (si->compute_sizes < 0)
468 {
469 si->compute_sizes = 0;
470 if (si->va_start_count == 1
471 && reachable_at_most_once (si->bb, si->va_start_bb))
472 si->compute_sizes = 1;
473
474 if (dump_file && (dump_flags & TDF_DETAILS))
475 fprintf (dump_file,
476 "bb%d will %sbe executed at most once for each va_start "
477 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
478 si->va_start_bb->index);
479 }
480
481 /* For void * or char * va_list types, there is just one counter.
482 If va_arg is used in a loop, we don't know how many registers need
483 saving. */
484 if (! si->compute_sizes)
485 {
486 si->va_list_escapes = true;
487 return;
488 }
489
490 if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
491 == (unsigned HOST_WIDE_INT) -1)
492 {
493 si->va_list_escapes = true;
494 return;
495 }
496
497 bitmap_set_bit (si->va_list_escape_vars,
a55dc2cd 498 DECL_UID (SSA_NAME_VAR (lhs)));
a6c787e5 499}
500
501
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  tree stmt = bsi_stmt (i), use;
	  ssa_op_iter iter;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      /* Only uses of tracked va_list-derived temporaries matter.  */
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  DECL_UID (SSA_NAME_VAR (use))))
		continue;

	      if (TREE_CODE (stmt) == MODIFY_EXPR)
		{
		  tree lhs = TREE_OPERAND (stmt, 0);
		  tree rhs = TREE_OPERAND (stmt, 1);

		  if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
		    rhs = TREE_OPERAND (rhs, 0);

		  /* x = *ap_temp;  */
		  if (TREE_CODE (rhs) == INDIRECT_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      /* A load through the temporary reads up to
			 offset + access size bytes of the save area;
			 widen va_list_gpr_size accordingly, saturating
			 at the maximum.  */
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_low_cst (access_size, 1);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if ((TREE_CODE (rhs) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
		      || TREE_CODE (rhs) == NOP_EXPR
		      || TREE_CODE (rhs) == CONVERT_EXPR)
		    rhs = TREE_OPERAND (rhs, 0);

		  if (rhs == use)
		    {
		      /* Copying into another tracked temporary is safe.  */
		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   DECL_UID (SSA_NAME_VAR (lhs))))
			continue;

		      /* Storing back into the va_list itself is safe.  */
		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs)))
			continue;
		    }
		}

	      /* Any other use means the va_list may escape.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_generic_expr (dump_file, stmt, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
591
592
593/* Return true if this optimization pass should be done.
594 It makes only sense for stdarg functions. */
595
596static bool
597gate_optimize_stdarg (void)
598{
599 /* This optimization is only for stdarg functions. */
600 return current_function_stdarg != 0;
601}
602
603
/* Entry point to the stdarg optimization pass.  */

static void
execute_optimize_stdarg (void)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  const char *funcname = NULL;

  /* Start optimistically: assume nothing needs saving, then grow the
     sizes as uses of the va_list are discovered.  */
  cfun->va_list_gpr_size = 0;
  cfun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* True when va_list is a plain void */
  /* or char * pointer, i.e. the pointer itself is the only counter.  */
  va_list_simple_ptr = POINTER_TYPE_P (va_list_type_node)
		       && (TREE_TYPE (va_list_type_node) == void_type_node
			   || TREE_TYPE (va_list_type_node) == char_type_node);

  /* First walk: find every va_start call and collect the va_list
     variables it initializes into si.va_list_vars.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  tree stmt = bsi_stmt (i);
	  tree call = get_call_expr_in (stmt), callee;
	  tree ap;

	  if (!call)
	    continue;

	  callee = get_callee_fndecl (call);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    continue;

	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_VA_START:
	      break;
	      /* If old style builtins are used, don't optimize anything.  */
	    case BUILT_IN_SAVEREGS:
	    case BUILT_IN_STDARG_START:
	    case BUILT_IN_ARGS_INFO:
	    case BUILT_IN_NEXT_ARG:
	      va_list_escapes = true;
	      continue;
	    default:
	      continue;
	    }

	  si.va_start_count++;
	  ap = TREE_VALUE (TREE_OPERAND (call, 1));

	  /* The argument of va_start must be &ap (or &ap[0] for array
	     va_list types); anything else defeats the analysis.  */
	  if (TREE_CODE (ap) != ADDR_EXPR)
	    {
	      va_list_escapes = true;
	      break;
	    }
	  ap = TREE_OPERAND (ap, 0);
	  if (TREE_CODE (ap) == ARRAY_REF)
	    {
	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
		{
		  va_list_escapes = true;
		  break;
		}
	      ap = TREE_OPERAND (ap, 0);
	    }
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
	      != TYPE_MAIN_VARIANT (va_list_type_node)
	      || TREE_CODE (ap) != VAR_DECL)
	    {
	      va_list_escapes = true;
	      break;
	    }

	  /* A global va_list can be modified from anywhere.  */
	  if (is_global_var (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap));

	  /* VA_START_BB and VA_START_AP will be only used if there is just
	     one va_start in the function.  */
	  si.va_start_bb = bb;
	  si.va_start_ap = ap;
	}

      if (va_list_escapes)
	break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter fields
     are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  /* Second walk: classify every statement that mentions a va_list as a
     recognized va_arg-style operation or an escape.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      si.compute_sizes = -1;
      si.bb = bb;
      for (i = bsi_start (bb);
	   !bsi_end_p (i) && !va_list_escapes;
	   bsi_next (&i))
	{
	  tree stmt = bsi_stmt (i);
	  tree call;

	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
	  call = get_call_expr_in (stmt);
	  if (call)
	    {
	      tree callee = get_callee_fndecl (call);

	      if (callee
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
		continue;
	    }

	  if (TREE_CODE (stmt) == MODIFY_EXPR)
	    {
	      tree lhs = TREE_OPERAND (stmt, 0);
	      tree rhs = TREE_OPERAND (stmt, 1);

	      if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
		rhs = TREE_OPERAND (rhs, 0);

	      if (va_list_simple_ptr)
		{
		  /* Check for tem = ap.  */
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;

		  /* Check for the last insn in:
		     tem1 = ap;
		     tem2 = tem1 + CST;
		     ap = tem2;
		     sequence.  */
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;

		  else
		    check_va_list_escapes (&si, lhs, rhs);
		}
	      else
		{
		  /* Check for ap[0].field = temp.  */
		  if (va_list_counter_struct_op (&si, lhs, rhs, true))
		    continue;

		  /* Check for temp = ap[0].field.  */
		  else if (va_list_counter_struct_op (&si, rhs, lhs, false))
		    continue;

		  /* Do any architecture specific checking.  */
		  else if (targetm.stdarg_optimize_hook
			   && targetm.stdarg_optimize_hook (&si, lhs, rhs))
		    continue;
		}
	    }

	  /* All other uses of va_list are either va_copy (that is not handled
	     in this optimization), taking address of va_list variable or
	     passing va_list to other functions (in that case va_list might
	     escape the function and therefore va_start needs to set it up
	     fully), or some unexpected use of va_list.  None of these should
	     happen in a gimplified VA_ARG_EXPR.  */
	  if (si.va_list_escapes
	      || walk_tree (&stmt, find_va_list_reference,
			    si.va_list_vars, NULL))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_generic_expr (dump_file, stmt, dump_flags);
		  fputc ('\n', dump_file);
		}
	      va_list_escapes = true;
	    }
	}

      if (va_list_escapes)
	break;
    }

  /* Finally verify none of the derived pointer temporaries escapes.  */
  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      /* Something escaped: conservatively save everything.  */
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
	       funcname, (int) va_list_escapes);
      if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
}
860
861
/* Pass descriptor for the stdarg optimization pass.  */

struct tree_opt_pass pass_stdarg =
{
  "stdarg",				/* name */
  gate_optimize_stdarg,			/* gate */
  execute_optimize_stdarg,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};