/* gcc/tree-stdarg.c -- scraped from thirdparty/gcc.git (gitweb blob view);
   the commit fixes PR tree-optimization/26264 (extraneous warning with
   __builtin_stdarg_start).  Navigation residue removed.  */
/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "function.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "target.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-stdarg.h"
33
/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will be
   used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
41
42
43 /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
44 is executed at most as many times as VA_START_BB. */
45
46 static bool
47 reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
48 {
49 VEC (edge, heap) *stack = NULL;
50 edge e;
51 edge_iterator ei;
52 sbitmap visited;
53 bool ret;
54
55 if (va_arg_bb == va_start_bb)
56 return true;
57
58 if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
59 return false;
60
61 visited = sbitmap_alloc (last_basic_block);
62 sbitmap_zero (visited);
63 ret = true;
64
65 FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
66 VEC_safe_push (edge, heap, stack, e);
67
68 while (! VEC_empty (edge, stack))
69 {
70 basic_block src;
71
72 e = VEC_pop (edge, stack);
73 src = e->src;
74
75 if (e->flags & EDGE_COMPLEX)
76 {
77 ret = false;
78 break;
79 }
80
81 if (src == va_start_bb)
82 continue;
83
84 /* va_arg_bb can be executed more times than va_start_bb. */
85 if (src == va_arg_bb)
86 {
87 ret = false;
88 break;
89 }
90
91 gcc_assert (src != ENTRY_BLOCK_PTR);
92
93 if (! TEST_BIT (visited, src->index))
94 {
95 SET_BIT (visited, src->index);
96 FOR_EACH_EDGE (e, ei, src->preds)
97 VEC_safe_push (edge, heap, stack, e);
98 }
99 }
100
101 VEC_free (edge, heap, stack);
102 sbitmap_free (visited);
103 return ret;
104 }
105
106
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return (unsigned HOST_WIDE_INT) -1.
   GPR_P is true if this is GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree stmt, lhs, orig_lhs;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the per-SSA-name offset cache; -1 marks an SSA name
     whose counter offset has not been determined yet.  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  /* First pass: walk the use-def chain from RHS back towards a load of
     COUNTER, accumulating constant additions into RET.  */
  while (lhs)
    {
      /* An SSA name with a cached offset was already resolved against the
	 counter on an earlier call; reuse the cached value instead of
	 walking further.  */
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  /* Once the saved size has reached the maximum, the exact value
	     no longer matters.  */
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  /* The cached offset was recorded relative to the counter value
	     at caching time; adjust RET by the difference.  */
	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      /* Anything but a plain assignment defining LHS is not part of a
	 recognizable counter bump.  */
      if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
	  || GIMPLE_STMT_OPERAND (stmt, 0) != lhs)
	return (unsigned HOST_WIDE_INT) -1;

      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
      if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
	rhs = TREE_OPERAND (rhs, 0);

      /* Plain SSA copy: continue with the copied name.  */
      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  lhs = rhs;
	  continue;
	}

      /* Look through casts of SSA names.  */
      if ((TREE_CODE (rhs) == NOP_EXPR
	   || TREE_CODE (rhs) == CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
	{
	  lhs = TREE_OPERAND (rhs, 0);
	  continue;
	}

      /* X + CST: add the constant to RET and continue with X.  */
      if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
	   || TREE_CODE (rhs) == PLUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
	  && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
	  && host_integerp (TREE_OPERAND (rhs, 1), 1))
	{
	  ret += tree_low_cst (TREE_OPERAND (rhs, 1), 1);
	  lhs = TREE_OPERAND (rhs, 0);
	  continue;
	}

      /* Otherwise the chain must terminate in a load of COUNTER itself.  */
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return (unsigned HOST_WIDE_INT) -1;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  /* Same base object and same counter field required.  */
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return (unsigned HOST_WIDE_INT) -1;
	}
      else if (counter != rhs)
	return (unsigned HOST_WIDE_INT) -1;

      lhs = NULL;
    }

  /* Second pass: cache in SI->offsets the counter offset each SSA name on
     the walked chain corresponds to, so a later query on any of them can
     stop early in the loop above.  VAL starts at the offset of ORIG_LHS
     and decreases by each constant bump as we walk back.  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      /* Stop once we reach an already-cached name.  */
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      /* Clamp cached offsets at MAX_SIZE.  */
      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      /* The first pass already validated this chain, so the same pattern
	 matching can be repeated without the error checks.  */
      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
      if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
	rhs = TREE_OPERAND (rhs, 0);

      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  lhs = rhs;
	  continue;
	}

      if ((TREE_CODE (rhs) == NOP_EXPR
	   || TREE_CODE (rhs) == CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
	{
	  lhs = TREE_OPERAND (rhs, 0);
	  continue;
	}

      if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
	   || TREE_CODE (rhs) == PLUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
	  && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
	  && host_integerp (TREE_OPERAND (rhs, 1), 1))
	{
	  val -= tree_low_cst (TREE_OPERAND (rhs, 1), 1);
	  lhs = TREE_OPERAND (rhs, 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
244
245
246 /* Called by walk_tree to look for references to va_list variables. */
247
248 static tree
249 find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
250 void *data)
251 {
252 bitmap va_list_vars = (bitmap) data;
253 tree var = *tp;
254
255 if (TREE_CODE (var) == SSA_NAME)
256 var = SSA_NAME_VAR (var);
257
258 if (TREE_CODE (var) == VAR_DECL
259 && bitmap_bit_p (va_list_vars, DECL_UID (var)))
260 return var;
261
262 return NULL_TREE;
263 }
264
265
266 /* Helper function of va_list_counter_struct_op. Compute
267 cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
268 if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
269 statement. GPR_P is true if AP is a GPR counter, false if it is
270 a FPR counter. */
271
272 static void
273 va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
274 bool write_p)
275 {
276 unsigned HOST_WIDE_INT increment;
277
278 if (si->compute_sizes < 0)
279 {
280 si->compute_sizes = 0;
281 if (si->va_start_count == 1
282 && reachable_at_most_once (si->bb, si->va_start_bb))
283 si->compute_sizes = 1;
284
285 if (dump_file && (dump_flags & TDF_DETAILS))
286 fprintf (dump_file,
287 "bb%d will %sbe executed at most once for each va_start "
288 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
289 si->va_start_bb->index);
290 }
291
292 if (write_p
293 && si->compute_sizes
294 && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
295 {
296 if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
297 {
298 cfun->va_list_gpr_size += increment;
299 return;
300 }
301
302 if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
303 {
304 cfun->va_list_fpr_size += increment;
305 return;
306 }
307 }
308
309 if (write_p || !si->compute_sizes)
310 {
311 if (gpr_p)
312 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
313 else
314 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
315 }
316 }
317
318
319 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
320 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
321 is false, AP has been seen in VAR = AP assignment.
322 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
323 va_arg operation that doesn't cause the va_list variable to escape
324 current function. */
325
326 static bool
327 va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
328 bool write_p)
329 {
330 tree base;
331
332 if (TREE_CODE (ap) != COMPONENT_REF
333 || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
334 return false;
335
336 if (TREE_CODE (var) != SSA_NAME
337 || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (var))))
338 return false;
339
340 base = get_base_address (ap);
341 if (TREE_CODE (base) != VAR_DECL
342 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base)))
343 return false;
344
345 if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
346 va_list_counter_op (si, ap, var, true, write_p);
347 else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
348 va_list_counter_op (si, ap, var, false, write_p);
349
350 return true;
351 }
352
353
354 /* Check for TEM = AP. Return true if found and the caller shouldn't
355 search for va_list references in the statement. */
356
357 static bool
358 va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
359 {
360 if (TREE_CODE (ap) != VAR_DECL
361 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
362 return false;
363
364 if (TREE_CODE (tem) != SSA_NAME
365 || bitmap_bit_p (si->va_list_vars,
366 DECL_UID (SSA_NAME_VAR (tem)))
367 || is_global_var (SSA_NAME_VAR (tem)))
368 return false;
369
370 if (si->compute_sizes < 0)
371 {
372 si->compute_sizes = 0;
373 if (si->va_start_count == 1
374 && reachable_at_most_once (si->bb, si->va_start_bb))
375 si->compute_sizes = 1;
376
377 if (dump_file && (dump_flags & TDF_DETAILS))
378 fprintf (dump_file,
379 "bb%d will %sbe executed at most once for each va_start "
380 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
381 si->va_start_bb->index);
382 }
383
384 /* For void * or char * va_list types, there is just one counter.
385 If va_arg is used in a loop, we don't know how many registers need
386 saving. */
387 if (! si->compute_sizes)
388 return false;
389
390 if (va_list_counter_bump (si, ap, tem, true) == (unsigned HOST_WIDE_INT) -1)
391 return false;
392
393 /* Note the temporary, as we need to track whether it doesn't escape
394 the current function. */
395 bitmap_set_bit (si->va_list_escape_vars,
396 DECL_UID (SSA_NAME_VAR (tem)));
397 return true;
398 }
399
400
401 /* Check for:
402 tem1 = AP;
403 TEM2 = tem1 + CST;
404 AP = TEM2;
405 sequence and update cfun->va_list_gpr_size. Return true if found. */
406
407 static bool
408 va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
409 {
410 unsigned HOST_WIDE_INT increment;
411
412 if (TREE_CODE (ap) != VAR_DECL
413 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
414 return false;
415
416 if (TREE_CODE (tem2) != SSA_NAME
417 || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (tem2))))
418 return false;
419
420 if (si->compute_sizes <= 0)
421 return false;
422
423 increment = va_list_counter_bump (si, ap, tem2, true);
424 if (increment + 1 <= 1)
425 return false;
426
427 if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
428 cfun->va_list_gpr_size += increment;
429 else
430 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
431
432 return true;
433 }
434
435
436 /* If RHS is X, (some type *) X or X + CST for X a temporary variable
437 containing value of some va_list variable plus optionally some constant,
438 either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
439 depending whether LHS is a function local temporary. */
440
441 static void
442 check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
443 {
444 if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
445 return;
446
447 if (((TREE_CODE (rhs) == POINTER_PLUS_EXPR
448 || TREE_CODE (rhs) == PLUS_EXPR)
449 && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
450 || TREE_CODE (rhs) == NOP_EXPR
451 || TREE_CODE (rhs) == CONVERT_EXPR)
452 rhs = TREE_OPERAND (rhs, 0);
453
454 if (TREE_CODE (rhs) != SSA_NAME
455 || ! bitmap_bit_p (si->va_list_escape_vars,
456 DECL_UID (SSA_NAME_VAR (rhs))))
457 return;
458
459 if (TREE_CODE (lhs) != SSA_NAME || is_global_var (SSA_NAME_VAR (lhs)))
460 {
461 si->va_list_escapes = true;
462 return;
463 }
464
465 if (si->compute_sizes < 0)
466 {
467 si->compute_sizes = 0;
468 if (si->va_start_count == 1
469 && reachable_at_most_once (si->bb, si->va_start_bb))
470 si->compute_sizes = 1;
471
472 if (dump_file && (dump_flags & TDF_DETAILS))
473 fprintf (dump_file,
474 "bb%d will %sbe executed at most once for each va_start "
475 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
476 si->va_start_bb->index);
477 }
478
479 /* For void * or char * va_list types, there is just one counter.
480 If va_arg is used in a loop, we don't know how many registers need
481 saving. */
482 if (! si->compute_sizes)
483 {
484 si->va_list_escapes = true;
485 return;
486 }
487
488 if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
489 == (unsigned HOST_WIDE_INT) -1)
490 {
491 si->va_list_escapes = true;
492 return;
493 }
494
495 bitmap_set_bit (si->va_list_escape_vars,
496 DECL_UID (SSA_NAME_VAR (lhs)));
497 }
498
499
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  tree stmt = bsi_stmt (i), use;
	  ssa_op_iter iter;

	  /* Examine every SSA use in the statement; only uses of tracked
	     escape temporaries need scrutiny.  */
	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  DECL_UID (SSA_NAME_VAR (use))))
		continue;

	      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
		{
		  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
		  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

		  if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
		    rhs = TREE_OPERAND (rhs, 0);

		  /* x = *ap_temp;  A dereference of a tracked temporary
		     with a known offset only consumes registers up to
		     offset + access size; account for that and move on.  */
		  if (TREE_CODE (rhs) == INDIRECT_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_low_cst (access_size, 1);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
		      || TREE_CODE (rhs) == NOP_EXPR
		      || TREE_CODE (rhs) == CONVERT_EXPR)
		    rhs = TREE_OPERAND (rhs, 0);

		  if (rhs == use)
		    {
		      /* Copying into another tracked escape temporary, or
			 back into a tracked va_list variable, is part of a
			 normal va_arg expansion and is not an escape.  */
		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   DECL_UID (SSA_NAME_VAR (lhs))))
			continue;

		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs)))
			continue;
		    }
		}

	      /* Any other use of a tracked temporary counts as an escape.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_generic_expr (dump_file, stmt, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
589
590
591 /* Return true if this optimization pass should be done.
592 It makes only sense for stdarg functions. */
593
594 static bool
595 gate_optimize_stdarg (void)
596 {
597 /* This optimization is only for stdarg functions. */
598 return current_function_stdarg != 0;
599 }
600
601
/* Entry point to the stdarg optimization pass.  Computes
   cfun->va_list_gpr_size and cfun->va_list_fpr_size: the number of
   GPR/FPR save area units the backend actually needs to spill on entry.
   Whenever analysis fails, both are conservatively set to their maxima.  */

static unsigned int
execute_optimize_stdarg (void)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  const char *funcname = NULL;

  /* Start optimistically with zero saved registers; analysis below only
     ever increases these.  */
  cfun->va_list_gpr_size = 0;
  cfun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* The target's va_list may be a bare void */* or char */* pointer rather
     than a structure with separate counters.  */
  va_list_simple_ptr = POINTER_TYPE_P (va_list_type_node)
		       && (TREE_TYPE (va_list_type_node) == void_type_node
			   || TREE_TYPE (va_list_type_node) == char_type_node);
  gcc_assert (is_gimple_reg_type (va_list_type_node) == va_list_simple_ptr);

  /* First walk over all statements: find every va_start call and record
     the va_list variables it initializes.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  tree stmt = bsi_stmt (i);
	  tree call = get_call_expr_in (stmt), callee;
	  tree ap;

	  if (!call)
	    continue;

	  callee = get_callee_fndecl (call);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    continue;

	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_VA_START:
	      break;
	      /* If old style builtins are used, don't optimize anything.  */
	    case BUILT_IN_SAVEREGS:
	    case BUILT_IN_ARGS_INFO:
	    case BUILT_IN_NEXT_ARG:
	      va_list_escapes = true;
	      continue;
	    default:
	      continue;
	    }

	  si.va_start_count++;
	  ap = CALL_EXPR_ARG (call, 0);

	  /* The argument of va_start must be the address of a local
	     va_list variable (possibly element 0 of a va_list array);
	     anything else defeats the analysis.  */
	  if (TREE_CODE (ap) != ADDR_EXPR)
	    {
	      va_list_escapes = true;
	      break;
	    }
	  ap = TREE_OPERAND (ap, 0);
	  if (TREE_CODE (ap) == ARRAY_REF)
	    {
	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
		{
		  va_list_escapes = true;
		  break;
		}
	      ap = TREE_OPERAND (ap, 0);
	    }
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
	      != TYPE_MAIN_VARIANT (va_list_type_node)
	      || TREE_CODE (ap) != VAR_DECL)
	    {
	      va_list_escapes = true;
	      break;
	    }

	  if (is_global_var (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap));

	  /* VA_START_BB and VA_START_AP will be only used if there is just
	     one va_start in the function.  */
	  si.va_start_bb = bb;
	  si.va_start_ap = ap;
	}

      if (va_list_escapes)
	break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter fields
     are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  /* reachable_at_most_once needs dominator information.  */
  calculate_dominance_info (CDI_DOMINATORS);

  /* Second walk: classify every statement that touches a va_list or a
     derived temporary, accumulating the register save sizes.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
	 them as assignments for the purpose of escape analysis.  This is
	 not needed for non-simple va_list because virtual phis don't perform
	 any real data movement.  */
      if (va_list_simple_ptr)
	{
	  tree phi, lhs, rhs;
	  use_operand_p uop;
	  ssa_op_iter soi;

	  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	    {
	      lhs = PHI_RESULT (phi);

	      if (!is_gimple_reg (lhs))
		continue;

	      FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
		{
		  rhs = USE_FROM_PTR (uop);
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;
		  else
		    check_va_list_escapes (&si, lhs, rhs);

		  if (si.va_list_escapes
		      || walk_tree (&phi, find_va_list_reference,
				    si.va_list_vars, NULL))
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fputs ("va_list escapes in ", dump_file);
			  print_generic_expr (dump_file, phi, dump_flags);
			  fputc ('\n', dump_file);
			}
		      va_list_escapes = true;
		    }
		}
	    }
	}

      for (i = bsi_start (bb);
	   !bsi_end_p (i) && !va_list_escapes;
	   bsi_next (&i))
	{
	  tree stmt = bsi_stmt (i);
	  tree call;

	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
	  call = get_call_expr_in (stmt);
	  if (call)
	    {
	      tree callee = get_callee_fndecl (call);

	      if (callee
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
		continue;
	    }

	  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
	    {
	      tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
	      tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

	      if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
		rhs = TREE_OPERAND (rhs, 0);

	      if (va_list_simple_ptr)
		{
		  /* Check for tem = ap.  */
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;

		  /* Check for the last insn in:
		     tem1 = ap;
		     tem2 = tem1 + CST;
		     ap = tem2;
		     sequence.  */
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;

		  else
		    check_va_list_escapes (&si, lhs, rhs);
		}
	      else
		{
		  /* Check for ap[0].field = temp.  */
		  if (va_list_counter_struct_op (&si, lhs, rhs, true))
		    continue;

		  /* Check for temp = ap[0].field.  */
		  else if (va_list_counter_struct_op (&si, rhs, lhs, false))
		    continue;

		  /* Do any architecture specific checking.  */
		  else if (targetm.stdarg_optimize_hook
			   && targetm.stdarg_optimize_hook (&si, lhs, rhs))
		    continue;
		}
	    }

	  /* All other uses of va_list are either va_copy (that is not handled
	     in this optimization), taking address of va_list variable or
	     passing va_list to other functions (in that case va_list might
	     escape the function and therefore va_start needs to set it up
	     fully), or some unexpected use of va_list.  None of these should
	     happen in a gimplified VA_ARG_EXPR.  */
	  if (si.va_list_escapes
	      || walk_tree (&stmt, find_va_list_reference,
			    si.va_list_vars, NULL))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_generic_expr (dump_file, stmt, dump_flags);
		  fputc ('\n', dump_file);
		}
	      va_list_escapes = true;
	    }
	}

      if (va_list_escapes)
	break;
    }

  /* Finally verify that none of the temporaries derived from the
     va_lists escapes the function either.  */
  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

 finish:
  /* Common exit: on escape fall back to saving everything, then release
     the analysis data.  */
  if (va_list_escapes)
    {
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
	       funcname, (int) va_list_escapes);
      if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
  return 0;
}
905
906
/* Pass descriptor for the stdarg optimization: gated on variadic
   functions and requiring CFG, SSA form and alias information.  */

struct tree_opt_pass pass_stdarg =
{
  "stdarg",				/* name */
  gate_optimize_stdarg,			/* gate */
  execute_optimize_stdarg,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};