/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2014 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "function.h"
#include "langhooks.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "bitmap.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "sbitmap.h"
#include "tree-pass.h"
#include "tree-stdarg.h"
/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will
   be used in va_arg macros.  E.g. if va_arg is only used with integral
   types in the function, floating point registers don't need to be
   saved, etc.  */
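
/* As an illustrative sketch (not part of this file's logic), consider a
   hypothetical variadic function that only ever fetches integral arguments:

     int
     sum_ints (int count, ...)
     {
       va_list ap;
       int i, total = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
         total += va_arg (ap, int);
       va_end (ap);
       return total;
     }

   Every va_arg here uses an integral type, so on targets such as x86_64
   the prologue only needs to dump the general purpose argument registers
   into the register save area; the floating point registers can be left
   alone.  */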


/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
        {
          ret = false;
          break;
        }

      if (src == va_start_bb)
        continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
        {
          ret = false;
          break;
        }

      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
        {
          bitmap_set_bit (visited, src->index);
          FOR_EACH_EDGE (e, ei, src->preds)
            stack.safe_push (e);
        }
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}
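
/* A hedged example of when the check above fails: in a hypothetical

     void
     f (int count, ...)
     {
       va_list ap;
       va_start (ap, count);
       while (count-- > 0)
         consume (va_arg (ap, int));
       va_end (ap);
     }

   the basic block expanding va_arg is its own predecessor through the
   loop back edge, so the reverse walk reaches va_arg_bb again and the
   function returns false; the counters are then pessimistically maxed
   out.  (consume stands in for any use of the fetched argument.)  */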


/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return the constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is a GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
                      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
        si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        {
          if (counter_val >= max_size)
            {
              ret = max_size;
              break;
            }

          ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
          break;
        }

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
        return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
        {
          ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
        {
          ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
        return HOST_WIDE_INT_M1U;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
        return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
        {
          if (get_base_address (counter) != get_base_address (rhs)
              || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
              || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
            return HOST_WIDE_INT_M1U;
        }
      else if (counter != rhs)
        return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        break;

      if (val >= max_size)
        si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
        si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
        {
          val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
        {
          val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      lhs = NULL;
    }

  return ret;
}
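
/* A hedged illustration of the pattern the walk above recognizes: on a
   target whose GP register slots are 8 bytes wide, the gimplified
   va_arg (ap, int) may bump the GPR counter roughly as

     gp_1 = ap.gp_offset;
     gp_2 = gp_1 + 8;
     ap.gp_offset = gp_2;

   Called on the store to ap.gp_offset with RHS gp_2, the first loop
   walks gp_2 -> gp_1 -> the COMPONENT_REF read of ap.gp_offset,
   accumulates the constant 8 into RET, and returns it.  (gp_offset is
   the x86_64 field name; the SSA temporaries are made up for the
   example.)  */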


/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    {
      if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
        return var;
    }
  else if (TREE_CODE (var) == VAR_DECL)
    {
      if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
        return var;
    }

  return NULL_TREE;
}


/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   seen in an AP = VAR statement if WRITE_P is true, otherwise in a
   VAR = AP statement.  GPR_P is true if AP is a GPR counter, false
   if it is an FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
                    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
        {
          cfun->va_list_gpr_size += increment;
          return;
        }

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
        {
          cfun->va_list_fpr_size += increment;
          return;
        }
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
        cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}


/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in an AP = VAR assignment; if
   WRITE_P is false, AP has been seen in a VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape the
   current function.  */

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
                           bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}


/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}


/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}
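
/* A hedged sketch of the simple-pointer case the two functions above
   handle together: with a char * va_list and an assumed 8-byte argument
   slot, the gimplified va_arg (ap, int) looks roughly like

     tem1_3 = ap;
     tem2_4 = tem1_3 + 8;
     ap = tem2_4;

   va_list_ptr_read notes tem1_3 in va_list_escape_vars, and
   va_list_ptr_write then adds the constant 8 to cfun->va_list_gpr_size
   when it sees the store back to ap.  (The SSA names are made up for
   the example.)  */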


/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing the value of some va_list variable plus optionally some
   constant, either set si->va_list_escapes or add LHS to
   si->va_list_escape_vars, depending on whether LHS is a function
   local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
        return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
           && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
        return;
    }
  else
    return;

  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
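
/* An illustrative escape (hypothetical names): once tem1_3 = ap has been
   recorded in va_list_escape_vars, a later statement such as

     global_p = tem1_3;

   has a non-SSA left-hand side, so the code above sets
   si->va_list_escapes and the pass gives up on counting registers.  */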


/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree lhs;
          use_operand_p uop;
          ssa_op_iter soi;
          gimple phi = gsi_stmt (i);

          lhs = PHI_RESULT (phi);
          if (virtual_operand_p (lhs)
              || bitmap_bit_p (si->va_list_escape_vars,
                               SSA_NAME_VERSION (lhs)))
            continue;

          FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
            {
              tree rhs = USE_FROM_PTR (uop);
              if (TREE_CODE (rhs) == SSA_NAME
                  && bitmap_bit_p (si->va_list_escape_vars,
                                   SSA_NAME_VERSION (rhs)))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fputs ("va_list escapes in ", dump_file);
                      print_gimple_stmt (dump_file, phi, 0, dump_flags);
                      fputc ('\n', dump_file);
                    }
                  return true;
                }
            }
        }

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          tree use;
          ssa_op_iter iter;

          if (is_gimple_debug (stmt))
            continue;

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
            {
              if (! bitmap_bit_p (si->va_list_escape_vars,
                                  SSA_NAME_VERSION (use)))
                continue;

              if (is_gimple_assign (stmt))
                {
                  tree rhs = gimple_assign_rhs1 (stmt);
                  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

                  /* x = *ap_temp;  */
                  if (rhs_code == MEM_REF
                      && TREE_OPERAND (rhs, 0) == use
                      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
                      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
                      && si->offsets[SSA_NAME_VERSION (use)] != -1)
                    {
                      unsigned HOST_WIDE_INT gpr_size;
                      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

                      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
                                 + tree_to_shwi (TREE_OPERAND (rhs, 1))
                                 + tree_to_uhwi (access_size);
                      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
                        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
                      else if (gpr_size > cfun->va_list_gpr_size)
                        cfun->va_list_gpr_size = gpr_size;
                      continue;
                    }

                  /* va_arg sequences may contain
                       other_ap_temp = ap_temp;
                       other_ap_temp = ap_temp + constant;
                       other_ap_temp = (some_type *) ap_temp;
                       ap = ap_temp;
                     statements.  */
                  if (rhs == use
                      && ((rhs_code == POINTER_PLUS_EXPR
                           && (TREE_CODE (gimple_assign_rhs2 (stmt))
                               == INTEGER_CST))
                          || gimple_assign_cast_p (stmt)
                          || (get_gimple_rhs_class (rhs_code)
                              == GIMPLE_SINGLE_RHS)))
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (TREE_CODE (lhs) == SSA_NAME
                          && bitmap_bit_p (si->va_list_escape_vars,
                                           SSA_NAME_VERSION (lhs)))
                        continue;

                      if (TREE_CODE (lhs) == VAR_DECL
                          && bitmap_bit_p (si->va_list_vars,
                                           DECL_UID (lhs) + num_ssa_names))
                        continue;
                    }
                  else if (rhs_code == ADDR_EXPR
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
                           && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (bitmap_bit_p (si->va_list_escape_vars,
                                        SSA_NAME_VERSION (lhs)))
                        continue;
                    }
                }

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              return true;
            }
        }
    }

  return false;
}


/* Return true if this optimization pass should be done.
   It only makes sense for stdarg functions.  */

static bool
gate_optimize_stdarg (void)
{
  /* This optimization is only for stdarg functions.  */
  return cfun->stdarg != 0;
}


/* Entry point to the stdarg optimization pass.  */

static unsigned int
execute_optimize_stdarg (void)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  cfun->va_list_gpr_size = 0;
  cfun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  cfun_va_list = targetm.fn_abi_va_list (cfun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          tree callee, ap;

          if (!is_gimple_call (stmt))
            continue;

          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_VA_START:
              break;
              /* If old style builtins are used, don't optimize anything.  */
            case BUILT_IN_SAVEREGS:
            case BUILT_IN_NEXT_ARG:
              va_list_escapes = true;
              continue;
            default:
              continue;
            }

          si.va_start_count++;
          ap = gimple_call_arg (stmt, 0);

          if (TREE_CODE (ap) != ADDR_EXPR)
            {
              va_list_escapes = true;
              break;
            }
          ap = TREE_OPERAND (ap, 0);
          if (TREE_CODE (ap) == ARRAY_REF)
            {
              if (! integer_zerop (TREE_OPERAND (ap, 1)))
                {
                  va_list_escapes = true;
                  break;
                }
              ap = TREE_OPERAND (ap, 0);
            }
          if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
              != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (cfun->decl))
              || TREE_CODE (ap) != VAR_DECL)
            {
              va_list_escapes = true;
              break;
            }

          if (is_global_var (ap))
            {
              va_list_escapes = true;
              break;
            }

          bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

          /* VA_START_BB and VA_START_AP will only be used if there is just
             one va_start in the function.  */
          si.va_start_bb = bb;
          si.va_start_ap = ap;
        }

      if (va_list_escapes)
        break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
         them as assignments for the purpose of escape analysis.  This is
         not needed for non-simple va_list because virtual phis don't perform
         any real data movement.  */
      if (va_list_simple_ptr)
        {
          tree lhs, rhs;
          use_operand_p uop;
          ssa_op_iter soi;

          for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
            {
              gimple phi = gsi_stmt (i);
              lhs = PHI_RESULT (phi);

              if (virtual_operand_p (lhs))
                continue;

              FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
                {
                  rhs = USE_FROM_PTR (uop);
                  if (va_list_ptr_read (&si, rhs, lhs))
                    continue;
                  else if (va_list_ptr_write (&si, lhs, rhs))
                    continue;
                  else
                    check_va_list_escapes (&si, lhs, rhs);

                  if (si.va_list_escapes)
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fputs ("va_list escapes in ", dump_file);
                          print_gimple_stmt (dump_file, phi, 0, dump_flags);
                          fputc ('\n', dump_file);
                        }
                      va_list_escapes = true;
                    }
                }
            }
        }

      for (i = gsi_start_bb (bb);
           !gsi_end_p (i) && !va_list_escapes;
           gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);

          /* Don't look at __builtin_va_{start,end}, they are ok.  */
          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);

              if (callee
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
                continue;
            }

          if (is_gimple_assign (stmt))
            {
              tree lhs = gimple_assign_lhs (stmt);
              tree rhs = gimple_assign_rhs1 (stmt);

              if (va_list_simple_ptr)
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for tem = ap.  */
                      else if (va_list_ptr_read (&si, rhs, lhs))
                        continue;

                      /* Check for the last insn in:
                           tem1 = ap;
                           tem2 = tem1 + CST;
                           ap = tem2;
                         sequence.  */
                      else if (va_list_ptr_write (&si, lhs, rhs))
                        continue;
                    }

                  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
                      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                          == GIMPLE_SINGLE_RHS))
                    check_va_list_escapes (&si, lhs, rhs);
                }
              else
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for ap[0].field = temp.  */
                      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
                        continue;

                      /* Check for temp = ap[0].field.  */
                      else if (va_list_counter_struct_op (&si, rhs, lhs,
                                                          false))
                        continue;
                    }

                  /* Do any architecture specific checking.  */
                  if (targetm.stdarg_optimize_hook
                      && targetm.stdarg_optimize_hook (&si, stmt))
                    continue;
                }
            }
          else if (is_gimple_debug (stmt))
            continue;

          /* All other uses of va_list are either va_copy (which is not
             handled in this optimization), taking the address of a va_list
             variable, or passing a va_list to other functions (in which case
             the va_list might escape the function and therefore va_start
             needs to set it up fully), or some unexpected use of va_list.
             None of these should happen in a gimplified VA_ARG_EXPR.  */
          if (si.va_list_escapes
              || walk_gimple_op (stmt, find_va_list_reference, &wi))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              va_list_escapes = true;
            }
        }

      if (va_list_escapes)
        break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

 finish:
  if (va_list_escapes)
    {
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
               funcname, (int) va_list_escapes);
      if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
  return 0;
}
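
/* Illustrative dump output (hypothetical, but following the fprintf calls
   above when -fdump-tree-stdarg is in effect): for the sum_ints sketch near
   the top of this file one would expect something like

     sum_ints: va_list escapes 0, needs to save all GPR units and 0 FPR units.

   "all" for the GPRs because the va_arg sits in a loop, and 0 FPR units
   because no floating point argument is ever fetched.  */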


namespace {

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_optimize_stdarg (); }
  unsigned int execute () { return execute_optimize_stdarg (); }

}; // class pass_stdarg

} // anon namespace

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}