]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-stdarg.c
Merger of git branch "gimple-classes-v2-option-3"
[thirdparty/gcc.git] / gcc / tree-stdarg.c
CommitLineData
a6c787e5 1/* Pass computing data for optimizing stdarg functions.
3aea1f79 2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
a6c787e5 3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
8c4c00c1 9the Free Software Foundation; either version 3, or (at your option)
a6c787e5 10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
a6c787e5 20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
a3020f2f 26#include "hashtab.h"
27#include "hash-set.h"
28#include "vec.h"
29#include "machmode.h"
30#include "hard-reg-set.h"
31#include "input.h"
a6c787e5 32#include "function.h"
33#include "langhooks.h"
ce084dfc 34#include "gimple-pretty-print.h"
a6c787e5 35#include "target.h"
073c1fd5 36#include "bitmap.h"
94ea8568 37#include "predict.h"
38#include "dominance.h"
39#include "cfg.h"
bc61cadb 40#include "basic-block.h"
41#include "tree-ssa-alias.h"
42#include "internal-fn.h"
43#include "gimple-expr.h"
44#include "is-a.h"
073c1fd5 45#include "gimple.h"
dcf1a1ec 46#include "gimple-iterator.h"
47#include "gimple-walk.h"
073c1fd5 48#include "gimple-ssa.h"
49#include "tree-phinodes.h"
50#include "ssa-iterators.h"
9ed99284 51#include "stringpool.h"
073c1fd5 52#include "tree-ssanames.h"
424a4a92 53#include "sbitmap.h"
a6c787e5 54#include "tree-pass.h"
55#include "tree-stdarg.h"
56
57/* A simple pass that attempts to optimize stdarg functions on architectures
58 that need to save register arguments to stack on entry to stdarg functions.
59 If the function doesn't use any va_start macros, no registers need to
60 be saved. If va_start macros are used, the va_list variables don't escape
61 the function, it is only necessary to save registers that will be used
62 in va_arg macros. E.g. if va_arg is only used with integral types
63 in the function, floating point registers don't need to be saved, etc. */
64
65
66/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
67 is executed at most as many times as VA_START_BB. */
68
69static bool
70reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
71{
1e094109 72 vec<edge> stack = vNULL;
4b53147e 73 edge e;
a6c787e5 74 edge_iterator ei;
a6c787e5 75 sbitmap visited;
76 bool ret;
77
78 if (va_arg_bb == va_start_bb)
79 return true;
80
81 if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
82 return false;
83
fe672ac0 84 visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
53c5d9d4 85 bitmap_clear (visited);
a6c787e5 86 ret = true;
87
88 FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
f1f41a6c 89 stack.safe_push (e);
a6c787e5 90
f1f41a6c 91 while (! stack.is_empty ())
a6c787e5 92 {
93 basic_block src;
94
f1f41a6c 95 e = stack.pop ();
a6c787e5 96 src = e->src;
97
98 if (e->flags & EDGE_COMPLEX)
99 {
100 ret = false;
101 break;
102 }
103
104 if (src == va_start_bb)
105 continue;
106
107 /* va_arg_bb can be executed more times than va_start_bb. */
108 if (src == va_arg_bb)
109 {
110 ret = false;
111 break;
112 }
113
34154e27 114 gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));
a6c787e5 115
08b7917c 116 if (! bitmap_bit_p (visited, src->index))
a6c787e5 117 {
08b7917c 118 bitmap_set_bit (visited, src->index);
a6c787e5 119 FOR_EACH_EDGE (e, ei, src->preds)
f1f41a6c 120 stack.safe_push (e);
a6c787e5 121 }
122 }
123
f1f41a6c 124 stack.release ();
a6c787e5 125 sbitmap_free (visited);
126 return ret;
127}
128
129
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is GPR counter.

   The function walks the SSA def-use chain of RHS backwards, summing up
   the constants added along the way, until it reaches a read of COUNTER
   itself (or an SSA name with a previously cached offset in
   SI->offsets).  On the first call it lazily allocates SI->offsets, a
   per-SSA-name cache of counter offsets, where -1 marks "unknown".
   A second pass over the same chain records the computed offsets into
   the cache for later calls.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the offset cache; -1 means "no offset known yet".  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  /* First pass: walk def stmts backwards, accumulating the constant
     increments into RET until we hit COUNTER or a cached SSA name.  */
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  /* Reuse a previously computed offset for this SSA name.  */
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      /* Plain copies and casts between SSA names don't change the
	 offset; just follow the chain.  */
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      /* lhs = name + CST or lhs = name p+ CST: accumulate CST.  */
      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      /* lhs = &MEM[name + CST]: same as an addition of CST.  */
      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      /* End of the chain: the definition must be a read of COUNTER
	 itself, otherwise the bump is unrecognizable.  */
      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  /* Same base object and same FIELD_DECL means the same
	     counter field.  */
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  /* Second pass: re-walk the same chain and record the offset of each
     SSA name in the cache (clamped at MAX_SIZE).  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  /* Walking backwards, so undo the increment for the
	     predecessor's offset.  */
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
279
280
281/* Called by walk_tree to look for references to va_list variables. */
282
283static tree
284find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
285 void *data)
286{
75a70cf9 287 bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
a6c787e5 288 tree var = *tp;
289
290 if (TREE_CODE (var) == SSA_NAME)
7ecda5e8 291 {
292 if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
293 return var;
294 }
295 else if (TREE_CODE (var) == VAR_DECL)
296 {
297 if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
298 return var;
299 }
a6c787e5 300
301 return NULL_TREE;
302}
303
304
/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  /* Lazily decide (once per basic block; -1 means undecided) whether
     this block runs at most once per va_start, which is required to
     accumulate counter increments precisely.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* On a counter write, try to recognize the increment.  The "+ 1 > 1"
     test rejects both 0 and HOST_WIDE_INT_M1U ("unrecognized") via
     unsigned wraparound.  */
  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  /* Otherwise be conservative: an unrecognized write, or any access in
     a block that may repeat, forces saving all registers of the kind.  */
  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
356
357
358/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
359 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
360 is false, AP has been seen in VAR = AP assignment.
361 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
362 va_arg operation that doesn't cause the va_list variable to escape
363 current function. */
364
365static bool
366va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
367 bool write_p)
368{
369 tree base;
370
371 if (TREE_CODE (ap) != COMPONENT_REF
372 || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
373 return false;
374
375 if (TREE_CODE (var) != SSA_NAME
7ecda5e8 376 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
a6c787e5 377 return false;
378
379 base = get_base_address (ap);
380 if (TREE_CODE (base) != VAR_DECL
7ecda5e8 381 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
a6c787e5 382 return false;
383
384 if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
385 va_list_counter_op (si, ap, var, true, write_p);
386 else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
387 va_list_counter_op (si, ap, var, false, write_p);
388
389 return true;
390}
391
392
/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  /* AP must be a tracked va_list declaration ...  */
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  /* ... and TEM an SSA temporary that is not itself a va_list.  */
  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  /* Lazily decide (once per basic block; -1 means undecided) whether
     this block runs at most once per va_start.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  /* Side effect: this also caches TEM's offset in si->offsets.  */
  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}
436
437
438/* Check for:
439 tem1 = AP;
440 TEM2 = tem1 + CST;
441 AP = TEM2;
442 sequence and update cfun->va_list_gpr_size. Return true if found. */
443
444static bool
445va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
446{
447 unsigned HOST_WIDE_INT increment;
448
449 if (TREE_CODE (ap) != VAR_DECL
7ecda5e8 450 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
a6c787e5 451 return false;
452
453 if (TREE_CODE (tem2) != SSA_NAME
7ecda5e8 454 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
a6c787e5 455 return false;
456
457 if (si->compute_sizes <= 0)
458 return false;
459
460 increment = va_list_counter_bump (si, ap, tem2, true);
461 if (increment + 1 <= 1)
462 return false;
463
464 if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
465 cfun->va_list_gpr_size += increment;
466 else
467 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
468
469 return true;
470}
471
472
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending whether LHS is a function local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  /* Only pointer-valued RHS can carry a va_list value here.  */
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  /* RHS must be (or be based on) a temporary already known to hold a
     va_list value; otherwise this statement is of no interest.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      /* &MEM[ptr + CST] form — look at the base pointer.  */
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  /* A copy into anything but an SSA temporary escapes.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  /* Lazily decide (once per basic block; -1 means undecided) whether
     this block runs at most once per va_start.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  /* If LHS's offset from the va_list cannot be determined, treat it as
     an escape.  */
  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  /* LHS is another local temporary holding a va_list value; track it.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
538
539
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* A tracked temporary feeding a PHI whose result is not itself
	 tracked counts as an escape.  */
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gphi *phi = i.phi ();

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      /* Examine every real statement that uses a tracked temporary;
	 anything not matching a known-safe va_arg pattern escapes.  */
      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  /* Debug stmts never cause escapes.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp; — a load through the temporary; fold
		     the access extent into va_list_gpr_size.  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      /* Copy into another tracked temporary is safe.  */
		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      /* Copy back into a tracked va_list decl is safe.  */
		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      /* lhs = &MEM[ap_temp + CST] into a tracked
			 temporary is safe too.  */
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      /* Any other use of a tracked temporary is an escape.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
674
a6c787e5 675
65b0537f 676namespace {
677
/* Metadata for the stdarg pass; see pass_data in tree-pass.h for the
   meaning of each field.  The pass requires CFG and SSA form and has
   no dedicated timevar.  */

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
690
/* GIMPLE pass wrapper computing cfun->va_list_{g,f}pr_size for stdarg
   functions (see the file comment above).  */

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *fun)
  {
    /* This optimization is only for stdarg functions.  */
    return fun->stdarg != 0;
  }

  virtual unsigned int execute (function *);

}; // class pass_stdarg
708
/* Main entry point of the pass.  Scan FUN for va_start/va_arg usage and
   compute fun->va_list_gpr_size and fun->va_list_fpr_size: the number
   of GPR/FPR save-area bytes va_start really needs to populate.  If any
   va_list escapes (or analysis fails), both sizes are set to their
   maxima so that all argument registers get saved.  Always returns 0
   (no TODO flags).  */

unsigned int
pass_stdarg::execute (function *fun)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  /* Start from "nothing needs saving" and grow the sizes as uses are
     discovered.  */
  fun->va_list_gpr_size = 0;
  fun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* Distinguish the "simple pointer" va_list ABI (void */char *) from
     the structure-based one; they are analyzed differently below.  */
  cfun_va_list = targetm.fn_abi_va_list (fun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  /* Phase 1: find all va_start calls and record the local va_list
     variables they initialize in si.va_list_vars.  */
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree callee, ap;

	  if (!is_gimple_call (stmt))
	    continue;

	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    continue;

	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_VA_START:
	      break;
	      /* If old style builtins are used, don't optimize anything.  */
	    case BUILT_IN_SAVEREGS:
	    case BUILT_IN_NEXT_ARG:
	      va_list_escapes = true;
	      continue;
	    default:
	      continue;
	    }

	  si.va_start_count++;
	  ap = gimple_call_arg (stmt, 0);

	  /* The va_start argument must be the address of a local
	     va_list variable (optionally via element 0 of an array
	     va_list type); anything else defeats the analysis.  */
	  if (TREE_CODE (ap) != ADDR_EXPR)
	    {
	      va_list_escapes = true;
	      break;
	    }
	  ap = TREE_OPERAND (ap, 0);
	  if (TREE_CODE (ap) == ARRAY_REF)
	    {
	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
		{
		  va_list_escapes = true;
		  break;
		}
	      ap = TREE_OPERAND (ap, 0);
	    }
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
	      != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
	      || TREE_CODE (ap) != VAR_DECL)
	    {
	      va_list_escapes = true;
	      break;
	    }

	  if (is_global_var (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

	  /* VA_START_BB and VA_START_AP will be only used if there is just
	     one va_start in the function.  */
	  si.va_start_bb = bb;
	  si.va_start_ap = ap;
	}

      if (va_list_escapes)
	break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter fields
     are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  /* Phase 2: walk every statement, classifying accesses to the tracked
     va_list variables and flagging anything that escapes.  Dominance
     info is needed by reachable_at_most_once.  */
  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB_FN (bb, fun)
    {
      /* compute_sizes is decided lazily per block; -1 = undecided.  */
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
	 them as assignments for the purpose of escape analysis.  This is
	 not needed for non-simple va_list because virtual phis don't perform
	 any real data movement.  */
      if (va_list_simple_ptr)
	{
	  tree lhs, rhs;
	  use_operand_p uop;
	  ssa_op_iter soi;

	  for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	       gsi_next (&i))
	    {
	      gphi *phi = i.phi ();
	      lhs = PHI_RESULT (phi);

	      if (virtual_operand_p (lhs))
		continue;

	      FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
		{
		  rhs = USE_FROM_PTR (uop);
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;
		  else
		    check_va_list_escapes (&si, lhs, rhs);

		  if (si.va_list_escapes)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fputs ("va_list escapes in ", dump_file);
			  print_gimple_stmt (dump_file, phi, 0, dump_flags);
			  fputc ('\n', dump_file);
			}
		      va_list_escapes = true;
		    }
		}
	    }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb);
	   !gsi_end_p (i) && !va_list_escapes;
	   gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);

	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);

	      if (callee
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
		continue;
	    }

	  if (is_gimple_assign (stmt))
	    {
	      tree lhs = gimple_assign_lhs (stmt);
	      tree rhs = gimple_assign_rhs1 (stmt);

	      if (va_list_simple_ptr)
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for tem = ap.  */
		      else if (va_list_ptr_read (&si, rhs, lhs))
			continue;

		      /* Check for the last insn in:
			 tem1 = ap;
			 tem2 = tem1 + CST;
			 ap = tem2;
			 sequence.  */
		      else if (va_list_ptr_write (&si, lhs, rhs))
			continue;
		    }

		  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
		      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
			  == GIMPLE_SINGLE_RHS))
		    check_va_list_escapes (&si, lhs, rhs);
		}
	      else
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for ap[0].field = temp.  */
		      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
			continue;

		      /* Check for temp = ap[0].field.  */
		      else if (va_list_counter_struct_op (&si, rhs, lhs,
							  false))
			continue;
		    }

		  /* Do any architecture specific checking.  */
		  if (targetm.stdarg_optimize_hook
		      && targetm.stdarg_optimize_hook (&si, stmt))
		    continue;
		}
	    }
	  else if (is_gimple_debug (stmt))
	    continue;

	  /* All other uses of va_list are either va_copy (that is not handled
	     in this optimization), taking address of va_list variable or
	     passing va_list to other functions (in that case va_list might
	     escape the function and therefore va_start needs to set it up
	     fully), or some unexpected use of va_list.  None of these should
	     happen in a gimplified VA_ARG_EXPR.  */
	  if (si.va_list_escapes
	      || walk_gimple_op (stmt, find_va_list_reference, &wi))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      va_list_escapes = true;
	    }
	}

      if (va_list_escapes)
	break;
    }

  /* Phase 3: for simple-pointer va_lists, verify none of the tracked
     temporaries escapes through other statements.  */
  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
	       funcname, (int) va_list_escapes);
      if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
  return 0;
}
1028
cbe8bda8 1029} // anon namespace
1030
/* Factory used by the pass manager to create the stdarg pass instance
   for compiler context CTXT.  Caller owns the returned pass.  */

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}