/* Source: gcc/tree-stdarg.c, from the thirdparty/gcc.git mirror at
   git.ipfire.org (revision that updated the copyright header to refer to
   version 3 of the GNU General Public License).  */
1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004, 2005, 2007 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "function.h"
27 #include "langhooks.h"
28 #include "diagnostic.h"
29 #include "target.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-stdarg.h"
33
34 /* A simple pass that attempts to optimize stdarg functions on architectures
35 that need to save register arguments to stack on entry to stdarg functions.
36 If the function doesn't use any va_start macros, no registers need to
37 be saved. If va_start macros are used, the va_list variables don't escape
38 the function, it is only necessary to save registers that will be used
39 in va_arg macros. E.g. if va_arg is only used with integral types
40 in the function, floating point registers don't need to be saved, etc. */
41
42
43 /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
44 is executed at most as many times as VA_START_BB. */
45
46 static bool
47 reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
48 {
49 edge *stack, e;
50 edge_iterator ei;
51 int sp;
52 sbitmap visited;
53 bool ret;
54
55 if (va_arg_bb == va_start_bb)
56 return true;
57
58 if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
59 return false;
60
61 stack = XNEWVEC (edge, n_basic_blocks + 1);
62 sp = 0;
63
64 visited = sbitmap_alloc (last_basic_block);
65 sbitmap_zero (visited);
66 ret = true;
67
68 FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
69 stack[sp++] = e;
70
71 while (sp)
72 {
73 basic_block src;
74
75 --sp;
76 e = stack[sp];
77 src = e->src;
78
79 if (e->flags & EDGE_COMPLEX)
80 {
81 ret = false;
82 break;
83 }
84
85 if (src == va_start_bb)
86 continue;
87
88 /* va_arg_bb can be executed more times than va_start_bb. */
89 if (src == va_arg_bb)
90 {
91 ret = false;
92 break;
93 }
94
95 gcc_assert (src != ENTRY_BLOCK_PTR);
96
97 if (! TEST_BIT (visited, src->index))
98 {
99 SET_BIT (visited, src->index);
100 FOR_EACH_EDGE (e, ei, src->preds)
101 stack[sp++] = e;
102 }
103 }
104
105 free (stack);
106 sbitmap_free (visited);
107 return ret;
108 }
109
110
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return (unsigned HOST_WIDE_INT) -1.
   GPR_P is true if this is GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree stmt, lhs, orig_lhs;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the per-SSA-name offset cache; -1 marks entries
     whose counter offset has not been computed yet.  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  /* First pass: follow the SSA use-def chain backwards from RHS,
     accumulating constant increments into RET, until either COUNTER
     itself or an SSA name with a cached offset is reached.  */
  while (lhs)
    {
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  /* Cache hit: derive the bump from the difference between the
	     current counter value and the cached offset, saturating at
	     MAX_SIZE.  */
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      /* Give up on anything but a plain assignment defining LHS.  */
      if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
	  || GIMPLE_STMT_OPERAND (stmt, 0) != lhs)
	return (unsigned HOST_WIDE_INT) -1;

      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
      if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
	rhs = TREE_OPERAND (rhs, 0);

      /* Plain SSA copy: keep walking from the source.  */
      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  lhs = rhs;
	  continue;
	}

      /* Look through casts of SSA names.  */
      if ((TREE_CODE (rhs) == NOP_EXPR
	   || TREE_CODE (rhs) == CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
	{
	  lhs = TREE_OPERAND (rhs, 0);
	  continue;
	}

      /* Accumulate a constant (unsigned host-representable) increment.  */
      if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
	   || TREE_CODE (rhs) == PLUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
	  && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
	  && host_integerp (TREE_OPERAND (rhs, 1), 1))
	{
	  ret += tree_low_cst (TREE_OPERAND (rhs, 1), 1);
	  lhs = TREE_OPERAND (rhs, 0);
	  continue;
	}

      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return (unsigned HOST_WIDE_INT) -1;

      /* The chain must terminate in a load of COUNTER itself: either
	 the same COMPONENT_REF (same base and field), or the identical
	 tree otherwise.  */
      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return (unsigned HOST_WIDE_INT) -1;
	}
      else if (counter != rhs)
	return (unsigned HOST_WIDE_INT) -1;

      lhs = NULL;
    }

  /* Second pass: walk the same chain again and record in the cache the
     counter offset that each SSA name corresponds to (saturated at
     MAX_SIZE), so later queries stop early.  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
      if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
	rhs = TREE_OPERAND (rhs, 0);

      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  lhs = rhs;
	  continue;
	}

      if ((TREE_CODE (rhs) == NOP_EXPR
	   || TREE_CODE (rhs) == CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
	{
	  lhs = TREE_OPERAND (rhs, 0);
	  continue;
	}

      /* Walking backwards, each constant increment decreases the
	 offset the earlier SSA name stands for.  */
      if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
	   || TREE_CODE (rhs) == PLUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
	  && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
	  && host_integerp (TREE_OPERAND (rhs, 1), 1))
	{
	  val -= tree_low_cst (TREE_OPERAND (rhs, 1), 1);
	  lhs = TREE_OPERAND (rhs, 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
248
249
250 /* Called by walk_tree to look for references to va_list variables. */
251
252 static tree
253 find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
254 void *data)
255 {
256 bitmap va_list_vars = (bitmap) data;
257 tree var = *tp;
258
259 if (TREE_CODE (var) == SSA_NAME)
260 var = SSA_NAME_VAR (var);
261
262 if (TREE_CODE (var) == VAR_DECL
263 && bitmap_bit_p (va_list_vars, DECL_UID (var)))
264 return var;
265
266 return NULL_TREE;
267 }
268
269
270 /* Helper function of va_list_counter_struct_op. Compute
271 cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
272 if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
273 statement. GPR_P is true if AP is a GPR counter, false if it is
274 a FPR counter. */
275
276 static void
277 va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
278 bool write_p)
279 {
280 unsigned HOST_WIDE_INT increment;
281
282 if (si->compute_sizes < 0)
283 {
284 si->compute_sizes = 0;
285 if (si->va_start_count == 1
286 && reachable_at_most_once (si->bb, si->va_start_bb))
287 si->compute_sizes = 1;
288
289 if (dump_file && (dump_flags & TDF_DETAILS))
290 fprintf (dump_file,
291 "bb%d will %sbe executed at most once for each va_start "
292 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
293 si->va_start_bb->index);
294 }
295
296 if (write_p
297 && si->compute_sizes
298 && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
299 {
300 if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
301 {
302 cfun->va_list_gpr_size += increment;
303 return;
304 }
305
306 if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
307 {
308 cfun->va_list_fpr_size += increment;
309 return;
310 }
311 }
312
313 if (write_p || !si->compute_sizes)
314 {
315 if (gpr_p)
316 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
317 else
318 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
319 }
320 }
321
322
323 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
324 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
325 is false, AP has been seen in VAR = AP assignment.
326 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
327 va_arg operation that doesn't cause the va_list variable to escape
328 current function. */
329
330 static bool
331 va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
332 bool write_p)
333 {
334 tree base;
335
336 if (TREE_CODE (ap) != COMPONENT_REF
337 || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
338 return false;
339
340 if (TREE_CODE (var) != SSA_NAME
341 || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (var))))
342 return false;
343
344 base = get_base_address (ap);
345 if (TREE_CODE (base) != VAR_DECL
346 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base)))
347 return false;
348
349 if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
350 va_list_counter_op (si, ap, var, true, write_p);
351 else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
352 va_list_counter_op (si, ap, var, false, write_p);
353
354 return true;
355 }
356
357
358 /* Check for TEM = AP. Return true if found and the caller shouldn't
359 search for va_list references in the statement. */
360
361 static bool
362 va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
363 {
364 if (TREE_CODE (ap) != VAR_DECL
365 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
366 return false;
367
368 if (TREE_CODE (tem) != SSA_NAME
369 || bitmap_bit_p (si->va_list_vars,
370 DECL_UID (SSA_NAME_VAR (tem)))
371 || is_global_var (SSA_NAME_VAR (tem)))
372 return false;
373
374 if (si->compute_sizes < 0)
375 {
376 si->compute_sizes = 0;
377 if (si->va_start_count == 1
378 && reachable_at_most_once (si->bb, si->va_start_bb))
379 si->compute_sizes = 1;
380
381 if (dump_file && (dump_flags & TDF_DETAILS))
382 fprintf (dump_file,
383 "bb%d will %sbe executed at most once for each va_start "
384 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
385 si->va_start_bb->index);
386 }
387
388 /* For void * or char * va_list types, there is just one counter.
389 If va_arg is used in a loop, we don't know how many registers need
390 saving. */
391 if (! si->compute_sizes)
392 return false;
393
394 if (va_list_counter_bump (si, ap, tem, true) == (unsigned HOST_WIDE_INT) -1)
395 return false;
396
397 /* Note the temporary, as we need to track whether it doesn't escape
398 the current function. */
399 bitmap_set_bit (si->va_list_escape_vars,
400 DECL_UID (SSA_NAME_VAR (tem)));
401 return true;
402 }
403
404
405 /* Check for:
406 tem1 = AP;
407 TEM2 = tem1 + CST;
408 AP = TEM2;
409 sequence and update cfun->va_list_gpr_size. Return true if found. */
410
411 static bool
412 va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
413 {
414 unsigned HOST_WIDE_INT increment;
415
416 if (TREE_CODE (ap) != VAR_DECL
417 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
418 return false;
419
420 if (TREE_CODE (tem2) != SSA_NAME
421 || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (tem2))))
422 return false;
423
424 if (si->compute_sizes <= 0)
425 return false;
426
427 increment = va_list_counter_bump (si, ap, tem2, true);
428 if (increment + 1 <= 1)
429 return false;
430
431 if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
432 cfun->va_list_gpr_size += increment;
433 else
434 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
435
436 return true;
437 }
438
439
440 /* If RHS is X, (some type *) X or X + CST for X a temporary variable
441 containing value of some va_list variable plus optionally some constant,
442 either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
443 depending whether LHS is a function local temporary. */
444
445 static void
446 check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
447 {
448 if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
449 return;
450
451 if (((TREE_CODE (rhs) == POINTER_PLUS_EXPR
452 || TREE_CODE (rhs) == PLUS_EXPR)
453 && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
454 || TREE_CODE (rhs) == NOP_EXPR
455 || TREE_CODE (rhs) == CONVERT_EXPR)
456 rhs = TREE_OPERAND (rhs, 0);
457
458 if (TREE_CODE (rhs) != SSA_NAME
459 || ! bitmap_bit_p (si->va_list_escape_vars,
460 DECL_UID (SSA_NAME_VAR (rhs))))
461 return;
462
463 if (TREE_CODE (lhs) != SSA_NAME || is_global_var (SSA_NAME_VAR (lhs)))
464 {
465 si->va_list_escapes = true;
466 return;
467 }
468
469 if (si->compute_sizes < 0)
470 {
471 si->compute_sizes = 0;
472 if (si->va_start_count == 1
473 && reachable_at_most_once (si->bb, si->va_start_bb))
474 si->compute_sizes = 1;
475
476 if (dump_file && (dump_flags & TDF_DETAILS))
477 fprintf (dump_file,
478 "bb%d will %sbe executed at most once for each va_start "
479 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
480 si->va_start_bb->index);
481 }
482
483 /* For void * or char * va_list types, there is just one counter.
484 If va_arg is used in a loop, we don't know how many registers need
485 saving. */
486 if (! si->compute_sizes)
487 {
488 si->va_list_escapes = true;
489 return;
490 }
491
492 if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
493 == (unsigned HOST_WIDE_INT) -1)
494 {
495 si->va_list_escapes = true;
496 return;
497 }
498
499 bitmap_set_bit (si->va_list_escape_vars,
500 DECL_UID (SSA_NAME_VAR (lhs)));
501 }
502
503
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  tree stmt = bsi_stmt (i), use;
	  ssa_op_iter iter;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      /* Only uses of the tracked ap temporaries matter.  */
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  DECL_UID (SSA_NAME_VAR (use))))
		continue;

	      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
		{
		  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
		  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

		  if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
		    rhs = TREE_OPERAND (rhs, 0);

		  /* x = *ap_temp;  This is the actual argument fetch:
		     enlarge va_list_gpr_size by the size of the access,
		     saturating at VA_LIST_MAX_GPR_SIZE.  */
		  if (TREE_CODE (rhs) == INDIRECT_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_low_cst (access_size, 1);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
		      || TREE_CODE (rhs) == NOP_EXPR
		      || TREE_CODE (rhs) == CONVERT_EXPR)
		    rhs = TREE_OPERAND (rhs, 0);

		  if (rhs == use)
		    {
		      /* Copying into another tracked temporary, or back
			 into a tracked va_list variable, is fine.  */
		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   DECL_UID (SSA_NAME_VAR (lhs))))
			continue;

		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs)))
			continue;
		    }
		}

	      /* Any other use means the va_list value may escape.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_generic_expr (dump_file, stmt, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
593
594
/* Return true if this optimization pass should be done.
   It makes only sense for stdarg functions.  */

static bool
gate_optimize_stdarg (void)
{
  /* This optimization is only for stdarg functions;
     current_function_stdarg is nonzero when the current function takes
     variable arguments.  */
  return current_function_stdarg != 0;
}
604
605
/* Entry point to the stdarg optimization pass.  */

static unsigned int
execute_optimize_stdarg (void)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  const char *funcname = NULL;

  cfun->va_list_gpr_size = 0;
  cfun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* A "simple" va_list is a plain void * or char * pointer; otherwise it
     is a structure with backend-provided counter fields.  */
  va_list_simple_ptr = POINTER_TYPE_P (va_list_type_node)
		       && (TREE_TYPE (va_list_type_node) == void_type_node
			   || TREE_TYPE (va_list_type_node) == char_type_node);
  gcc_assert (is_gimple_reg_type (va_list_type_node) == va_list_simple_ptr);

  /* First walk: locate every va_start call and record the local va_list
     variables it initializes.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  tree stmt = bsi_stmt (i);
	  tree call = get_call_expr_in (stmt), callee;
	  tree ap;

	  if (!call)
	    continue;

	  callee = get_callee_fndecl (call);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    continue;

	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_VA_START:
	      break;
	      /* If old style builtins are used, don't optimize anything.  */
	    case BUILT_IN_SAVEREGS:
	    case BUILT_IN_STDARG_START:
	    case BUILT_IN_ARGS_INFO:
	    case BUILT_IN_NEXT_ARG:
	      va_list_escapes = true;
	      continue;
	    default:
	      continue;
	    }

	  si.va_start_count++;
	  ap = CALL_EXPR_ARG (call, 0);

	  /* The va_start argument must be &ap or &ap[0] of a local
	     variable of va_list type; anything else counts as an
	     escape.  */
	  if (TREE_CODE (ap) != ADDR_EXPR)
	    {
	      va_list_escapes = true;
	      break;
	    }
	  ap = TREE_OPERAND (ap, 0);
	  if (TREE_CODE (ap) == ARRAY_REF)
	    {
	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
		{
		  va_list_escapes = true;
		  break;
		}
	      ap = TREE_OPERAND (ap, 0);
	    }
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
	      != TYPE_MAIN_VARIANT (va_list_type_node)
	      || TREE_CODE (ap) != VAR_DECL)
	    {
	      va_list_escapes = true;
	      break;
	    }

	  if (is_global_var (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap));

	  /* VA_START_BB and VA_START_AP will be only used if there is just
	     one va_start in the function.  */
	  si.va_start_bb = bb;
	  si.va_start_ap = ap;
	}

      if (va_list_escapes)
	break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter fields
     are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Second walk: classify every statement that touches a tracked
     va_list variable or one of its temporaries, accumulating the
     register-save sizes and detecting escapes.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      /* -1 means "not yet decided" for this basic block; the helpers
	 compute it lazily on first need.  */
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
	 them as assignments for the purpose of escape analysis.  This is
	 not needed for non-simple va_list because virtual phis don't perform
	 any real data movement.  */
      if (va_list_simple_ptr)
	{
	  tree phi, lhs, rhs;
	  use_operand_p uop;
	  ssa_op_iter soi;

	  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	    {
	      lhs = PHI_RESULT (phi);

	      if (!is_gimple_reg (lhs))
		continue;

	      FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
		{
		  rhs = USE_FROM_PTR (uop);
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;
		  else
		    check_va_list_escapes (&si, lhs, rhs);

		  if (si.va_list_escapes
		      || walk_tree (&phi, find_va_list_reference,
				    si.va_list_vars, NULL))
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fputs ("va_list escapes in ", dump_file);
			  print_generic_expr (dump_file, phi, dump_flags);
			  fputc ('\n', dump_file);
			}
		      va_list_escapes = true;
		    }
		}
	    }
	}

      for (i = bsi_start (bb);
	   !bsi_end_p (i) && !va_list_escapes;
	   bsi_next (&i))
	{
	  tree stmt = bsi_stmt (i);
	  tree call;

	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
	  call = get_call_expr_in (stmt);
	  if (call)
	    {
	      tree callee = get_callee_fndecl (call);

	      if (callee
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
		continue;
	    }

	  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
	    {
	      tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
	      tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

	      if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
		rhs = TREE_OPERAND (rhs, 0);

	      if (va_list_simple_ptr)
		{
		  /* Check for tem = ap.  */
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;

		  /* Check for the last insn in:
		     tem1 = ap;
		     tem2 = tem1 + CST;
		     ap = tem2;
		     sequence.  */
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;

		  else
		    check_va_list_escapes (&si, lhs, rhs);
		}
	      else
		{
		  /* Check for ap[0].field = temp.  */
		  if (va_list_counter_struct_op (&si, lhs, rhs, true))
		    continue;

		  /* Check for temp = ap[0].field.  */
		  else if (va_list_counter_struct_op (&si, rhs, lhs, false))
		    continue;

		  /* Do any architecture specific checking.  */
		  else if (targetm.stdarg_optimize_hook
			   && targetm.stdarg_optimize_hook (&si, lhs, rhs))
		    continue;
		}
	    }

	  /* All other uses of va_list are either va_copy (that is not handled
	     in this optimization), taking address of va_list variable or
	     passing va_list to other functions (in that case va_list might
	     escape the function and therefore va_start needs to set it up
	     fully), or some unexpected use of va_list.  None of these should
	     happen in a gimplified VA_ARG_EXPR.  */
	  if (si.va_list_escapes
	      || walk_tree (&stmt, find_va_list_reference,
			    si.va_list_vars, NULL))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_generic_expr (dump_file, stmt, dump_flags);
		  fputc ('\n', dump_file);
		}
	      va_list_escapes = true;
	    }
	}

      if (va_list_escapes)
	break;
    }

  /* Finally verify that none of the tracked ap temporaries escape.  */
  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  /* On any escape, fall back to saving all registers.  */
  if (va_list_escapes)
    {
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
	       funcname, (int) va_list_escapes);
      if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
  return 0;
}
910
911
/* Pass descriptor for the stdarg optimization pass.  */

struct tree_opt_pass pass_stdarg =
{
  "stdarg",				/* name */
  gate_optimize_stdarg,			/* gate */
  execute_optimize_stdarg,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};