/* gcc/tree-stdarg.c — extracted from a git blame view; web-page and
   blame-annotation residue removed.  */
a6c787e5 1/* Pass computing data for optimizing stdarg functions.
711789cc 2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
a6c787e5 3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
8c4c00c1 9the Free Software Foundation; either version 3, or (at your option)
a6c787e5 10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
a6c787e5 20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
26#include "function.h"
27#include "langhooks.h"
ce084dfc 28#include "gimple-pretty-print.h"
a6c787e5 29#include "target.h"
073c1fd5 30#include "bitmap.h"
bc61cadb 31#include "basic-block.h"
32#include "tree-ssa-alias.h"
33#include "internal-fn.h"
34#include "gimple-expr.h"
35#include "is-a.h"
073c1fd5 36#include "gimple.h"
dcf1a1ec 37#include "gimple-iterator.h"
38#include "gimple-walk.h"
073c1fd5 39#include "gimple-ssa.h"
40#include "tree-phinodes.h"
41#include "ssa-iterators.h"
9ed99284 42#include "stringpool.h"
073c1fd5 43#include "tree-ssanames.h"
424a4a92 44#include "sbitmap.h"
a6c787e5 45#include "tree-pass.h"
46#include "tree-stdarg.h"
47
48/* A simple pass that attempts to optimize stdarg functions on architectures
49 that need to save register arguments to stack on entry to stdarg functions.
50 If the function doesn't use any va_start macros, no registers need to
51 be saved. If va_start macros are used, the va_list variables don't escape
52 the function, it is only necessary to save registers that will be used
53 in va_arg macros. E.g. if va_arg is only used with integral types
54 in the function, floating point registers don't need to be saved, etc. */
55
56
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.

   Implemented as a depth-first walk backwards over predecessor edges
   starting from VA_ARG_BB.  Because VA_START_BB dominates VA_ARG_BB,
   every backwards path must eventually reach VA_START_BB; the walk fails
   (returns false) if it finds a complex edge or re-reaches VA_ARG_BB,
   which would mean VA_ARG_BB can execute more often than VA_START_BB
   (e.g. inside a loop).  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  /* Trivially true when both macros are in the same block.  */
  if (va_arg_bb == va_start_bb)
    return true;

  /* If VA_START_BB doesn't dominate VA_ARG_BB, the va_arg may execute
     on a path that never saw the va_start.  */
  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  /* Seed the work list with all incoming edges of VA_ARG_BB.  */
  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      /* Abnormal/EH edges make the execution count unpredictable.  */
      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      /* Paths that reach the va_start block are fine; stop there.  */
      if (src == va_start_bb)
	continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      /* Dominance guarantees we cannot walk past the entry block
	 without first meeting VA_START_BB.  */
      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
	{
	  bitmap_set_bit (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    stack.safe_push (e);
	}
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}
119
120
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is GPR counter.

   Works in two passes over the SSA def chain that produced RHS:
   the first pass walks backwards through copies, casts and
   pointer/integer additions, summing the constant increments into RET,
   and verifies the chain bottoms out at COUNTER itself (or at a name
   whose offset was already cached in SI->offsets).  The second pass
   re-walks the same chain and records each intermediate SSA name's
   counter offset in SI->offsets, saturating at MAX_SIZE.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the per-SSA-name offset cache; -1 means "not yet
     computed".  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  /* First pass: accumulate the total constant bump into RET.  */
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      /* A cached offset lets us stop early: adjust RET by the
	 difference between the current counter value and the cached
	 offset of this name.  */
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      /* Anything but a plain assignment defining LHS is unanalyzable.  */
      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      /* Copies and casts: step through without changing RET.  */
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      /* name + CST (pointer or integer form): add CST to RET.  */
      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      /* &MEM_REF[name + CST]: equivalent to name + CST.  */
      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      /* Chain must terminate at a read of COUNTER itself.  */
      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  /* Same base object and same FIELD_DECL means the same
	     counter field.  */
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  /* Second pass: record each intermediate name's offset from the
     counter start, walking the same chain and subtracting the bumps.  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      /* Saturate cached offsets at MAX_SIZE.  */
      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
270
271
272/* Called by walk_tree to look for references to va_list variables. */
273
274static tree
275find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
276 void *data)
277{
75a70cf9 278 bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
a6c787e5 279 tree var = *tp;
280
281 if (TREE_CODE (var) == SSA_NAME)
7ecda5e8 282 {
283 if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
284 return var;
285 }
286 else if (TREE_CODE (var) == VAR_DECL)
287 {
288 if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
289 return var;
290 }
a6c787e5 291
292 return NULL_TREE;
293}
294
295
/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  /* Lazily decide (once per basic block; -1 means undecided) whether
     precise size computation is possible: it requires a single va_start
     whose block executes at least as often as this one.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* "increment + 1 > 1" filters out both 0 (no bump) and
     HOST_WIDE_INT_M1U (unanalyzable) in one unsigned comparison.  */
  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  /* Couldn't account for the counter update precisely: assume the worst
     and require all registers of this class to be saved.  */
  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
347
348
349/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
350 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
351 is false, AP has been seen in VAR = AP assignment.
352 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
353 va_arg operation that doesn't cause the va_list variable to escape
354 current function. */
355
356static bool
357va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
358 bool write_p)
359{
360 tree base;
361
362 if (TREE_CODE (ap) != COMPONENT_REF
363 || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
364 return false;
365
366 if (TREE_CODE (var) != SSA_NAME
7ecda5e8 367 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
a6c787e5 368 return false;
369
370 base = get_base_address (ap);
371 if (TREE_CODE (base) != VAR_DECL
7ecda5e8 372 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
a6c787e5 373 return false;
374
375 if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
376 va_list_counter_op (si, ap, var, true, write_p);
377 else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
378 va_list_counter_op (si, ap, var, false, write_p);
379
380 return true;
381}
382
383
/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.

   On success, TEM is registered in si->va_list_escape_vars so later
   passes can verify it never escapes the function.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  /* AP must be one of the tracked va_list VAR_DECLs.  */
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  /* TEM must be an SSA temporary that isn't itself a tracked
     va_list variable.  */
  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  /* Lazily decide whether precise tracking is possible in this block
     (same test as in va_list_counter_op).  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  /* The read must be expressible as AP plus a known constant.  */
  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}
427
428
429/* Check for:
430 tem1 = AP;
431 TEM2 = tem1 + CST;
432 AP = TEM2;
433 sequence and update cfun->va_list_gpr_size. Return true if found. */
434
435static bool
436va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
437{
438 unsigned HOST_WIDE_INT increment;
439
440 if (TREE_CODE (ap) != VAR_DECL
7ecda5e8 441 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
a6c787e5 442 return false;
443
444 if (TREE_CODE (tem2) != SSA_NAME
7ecda5e8 445 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
a6c787e5 446 return false;
447
448 if (si->compute_sizes <= 0)
449 return false;
450
451 increment = va_list_counter_bump (si, ap, tem2, true);
452 if (increment + 1 <= 1)
453 return false;
454
455 if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
456 cfun->va_list_gpr_size += increment;
457 else
458 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
459
460 return true;
461}
462
463
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending whether LHS is a function local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  /* Only pointer-valued assignments can propagate a va_list pointer.  */
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  /* RHS must derive from an already-tracked escape candidate, either
     directly or through &MEM_REF[ptr].  Anything else is irrelevant.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  /* A tracked value stored into anything but an SSA temporary (memory,
     a global, ...) escapes the function.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  /* Lazily decide whether precise tracking is possible in this block
     (same test as in va_list_counter_op).  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  /* LHS must be expressible as the va_start pointer plus a known
     constant; otherwise we can't account for it.  */
  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  /* LHS now also carries a va_list-derived value; track it too.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
529
530
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.

   Scans every PHI node and statement in the function.  A use of a
   tracked temporary is harmless only if it matches one of the known
   va_arg access patterns below; any other use is reported (with a dump
   message) as a potential escape.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      /* A PHI merging a tracked temporary into an untracked name means
	 the value flows somewhere we didn't account for.  */
      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gimple phi = gsi_stmt (i);

	  lhs = PHI_RESULT (phi);
	  /* Virtual PHIs and PHIs whose result is itself tracked are
	     fine.  */
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  /* Debug statements never cause real escapes.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp; */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      /* A load through the pointer is the va_arg read
			 itself; widen the needed GPR save size to cover
			 the farthest byte accessed.  */
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      /* Copy into another tracked temporary: fine.  */
		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      /* Store back into the va_list variable itself:
			 also fine.  */
		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      /* &MEM_REF[ap_temp + CST] into a tracked name:
			 fine.  */
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      /* Any use not matched above may let the va_list escape.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
665
666
/* Return true if this optimization pass should be done.
   It makes only sense for stdarg functions.  */

static bool
gate_optimize_stdarg (void)
{
  /* This optimization is only for stdarg functions.  */
  return cfun->stdarg != 0;
}
676
677
/* Entry point to the stdarg optimization pass.

   Overall strategy:
     1. Find every va_start call and record the local va_list variables
	it initializes (bailing out if any va_list can escape).
     2. Walk all statements, recognizing the known va_arg access
	patterns and accumulating cfun->va_list_{g,f}pr_size.
     3. If anything unrecognized touches a va_list (or a derived
	temporary), fall back to saving all registers.  */

static unsigned int
execute_optimize_stdarg (void)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  /* Start optimistically: no registers need saving until proven
     otherwise.  */
  cfun->va_list_gpr_size = 0;
  cfun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* "Simple pointer" va_lists (void */char *) are handled differently
     from struct-based ones (e.g. x86-64).  */
  cfun_va_list = targetm.fn_abi_va_list (cfun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  /* Pass 1: locate va_start calls and collect their va_list
     variables.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree callee, ap;

	  if (!is_gimple_call (stmt))
	    continue;

	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    continue;

	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_VA_START:
	      break;
	      /* If old style builtins are used, don't optimize anything.  */
	    case BUILT_IN_SAVEREGS:
	    case BUILT_IN_NEXT_ARG:
	      va_list_escapes = true;
	      continue;
	    default:
	      continue;
	    }

	  si.va_start_count++;
	  ap = gimple_call_arg (stmt, 0);

	  /* va_start's argument must be &ap (optionally &ap[0] for
	     array-typed va_lists) of a local VAR_DECL of the ABI
	     va_list type; anything else is treated as an escape.  */
	  if (TREE_CODE (ap) != ADDR_EXPR)
	    {
	      va_list_escapes = true;
	      break;
	    }
	  ap = TREE_OPERAND (ap, 0);
	  if (TREE_CODE (ap) == ARRAY_REF)
	    {
	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
		{
		  va_list_escapes = true;
		  break;
		}
	      ap = TREE_OPERAND (ap, 0);
	    }
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
	      != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (cfun->decl))
	      || TREE_CODE (ap) != VAR_DECL)
	    {
	      va_list_escapes = true;
	      break;
	    }

	  if (is_global_var (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

	  /* VA_START_BB and VA_START_AP will be only used if there is just
	     one va_start in the function.  */
	  si.va_start_bb = bb;
	  si.va_start_ap = ap;
	}

      if (va_list_escapes)
	break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter fields
     are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  /* Dominance info is needed by reachable_at_most_once.  */
  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  /* Pass 2: classify every statement that touches a va_list.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      /* compute_sizes is re-decided lazily per block.  */
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
	 them as assignments for the purpose of escape analysis.  This is
	 not needed for non-simple va_list because virtual phis don't perform
	 any real data movement.  */
      if (va_list_simple_ptr)
	{
	  tree lhs, rhs;
	  use_operand_p uop;
	  ssa_op_iter soi;

	  for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	    {
	      gimple phi = gsi_stmt (i);
	      lhs = PHI_RESULT (phi);

	      if (virtual_operand_p (lhs))
		continue;

	      FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
		{
		  rhs = USE_FROM_PTR (uop);
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;
		  else
		    check_va_list_escapes (&si, lhs, rhs);

		  if (si.va_list_escapes)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fputs ("va_list escapes in ", dump_file);
			  print_gimple_stmt (dump_file, phi, 0, dump_flags);
			  fputc ('\n', dump_file);
			}
		      va_list_escapes = true;
		    }
		}
	    }
	}

      for (i = gsi_start_bb (bb);
	   !gsi_end_p (i) && !va_list_escapes;
	   gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);

	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);

	      if (callee
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
		continue;
	    }

	  if (is_gimple_assign (stmt))
	    {
	      tree lhs = gimple_assign_lhs (stmt);
	      tree rhs = gimple_assign_rhs1 (stmt);

	      if (va_list_simple_ptr)
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for tem = ap.  */
		      else if (va_list_ptr_read (&si, rhs, lhs))
			continue;

		      /* Check for the last insn in:
			 tem1 = ap;
			 tem2 = tem1 + CST;
			 ap = tem2;
			 sequence.  */
		      else if (va_list_ptr_write (&si, lhs, rhs))
			continue;
		    }

		  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
		      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
			  == GIMPLE_SINGLE_RHS))
		    check_va_list_escapes (&si, lhs, rhs);
		}
	      else
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for ap[0].field = temp.  */
		      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
			continue;

		      /* Check for temp = ap[0].field.  */
		      else if (va_list_counter_struct_op (&si, rhs, lhs,
							  false))
			continue;
		    }

		  /* Do any architecture specific checking.  */
		  if (targetm.stdarg_optimize_hook
		      && targetm.stdarg_optimize_hook (&si, stmt))
		    continue;
		}
	    }
	  else if (is_gimple_debug (stmt))
	    continue;

	  /* All other uses of va_list are either va_copy (that is not handled
	     in this optimization), taking address of va_list variable or
	     passing va_list to other functions (in that case va_list might
	     escape the function and therefore va_start needs to set it up
	     fully), or some unexpected use of va_list.  None of these should
	     happen in a gimplified VA_ARG_EXPR.  */
	  if (si.va_list_escapes
	      || walk_gimple_op (stmt, find_va_list_reference, &wi))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      va_list_escapes = true;
	    }
	}

      if (va_list_escapes)
	break;
    }

  /* Pass 3: for simple-pointer va_lists, verify none of the tracked
     derived temporaries escapes either.  */
  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  /* On any escape, fall back to saving everything.  */
  if (va_list_escapes)
    {
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
	       funcname, (int) va_list_escapes);
      if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
  return 0;
}
1000
1001
/* Pass-manager registration: metadata and pass class wrapping the
   gate/execute functions above.  */

namespace {

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_optimize_stdarg (); }
  unsigned int execute () { return execute_optimize_stdarg (); }

}; // class pass_stdarg

} // anon namespace
1033
/* Factory function used by the pass manager to instantiate the stdarg
   pass; the caller owns the returned object.  */

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}