]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-stdarg.c
libgo: update to Go 1.12.2
[thirdparty/gcc.git] / gcc / tree-stdarg.c
CommitLineData
a6c787e5 1/* Pass computing data for optimizing stdarg functions.
fbd26352 2 Copyright (C) 2004-2019 Free Software Foundation, Inc.
a6c787e5 3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
8c4c00c1 9the Free Software Foundation; either version 3, or (at your option)
a6c787e5 10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
a6c787e5 20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
9ef16211 24#include "backend.h"
7c29e30e 25#include "target.h"
b20a8bb4 26#include "tree.h"
9ef16211 27#include "gimple.h"
7c29e30e 28#include "tree-pass.h"
9ef16211 29#include "ssa.h"
7c29e30e 30#include "gimple-pretty-print.h"
9ef16211 31#include "fold-const.h"
a6c787e5 32#include "langhooks.h"
dcf1a1ec 33#include "gimple-iterator.h"
34#include "gimple-walk.h"
82fc0e0a 35#include "gimplify.h"
82fc0e0a 36#include "tree-into-ssa.h"
82fc0e0a 37#include "tree-cfg.h"
a6c787e5 38#include "tree-stdarg.h"
39
40/* A simple pass that attempts to optimize stdarg functions on architectures
41 that need to save register arguments to stack on entry to stdarg functions.
42 If the function doesn't use any va_start macros, no registers need to
43 be saved. If va_start macros are used, the va_list variables don't escape
44 the function, it is only necessary to save registers that will be used
45 in va_arg macros. E.g. if va_arg is only used with integral types
46 in the function, floating point registers don't need to be saved, etc. */
47
48
49/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
50 is executed at most as many times as VA_START_BB. */
51
52static bool
53reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
54{
e9258aee 55 auto_vec<edge, 10> stack;
4b53147e 56 edge e;
a6c787e5 57 edge_iterator ei;
a6c787e5 58 bool ret;
59
60 if (va_arg_bb == va_start_bb)
61 return true;
62
63 if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
64 return false;
65
3c6549f8 66 auto_sbitmap visited (last_basic_block_for_fn (cfun));
53c5d9d4 67 bitmap_clear (visited);
a6c787e5 68 ret = true;
69
70 FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
f1f41a6c 71 stack.safe_push (e);
a6c787e5 72
f1f41a6c 73 while (! stack.is_empty ())
a6c787e5 74 {
75 basic_block src;
76
f1f41a6c 77 e = stack.pop ();
a6c787e5 78 src = e->src;
79
80 if (e->flags & EDGE_COMPLEX)
81 {
82 ret = false;
83 break;
84 }
85
86 if (src == va_start_bb)
87 continue;
88
89 /* va_arg_bb can be executed more times than va_start_bb. */
90 if (src == va_arg_bb)
91 {
92 ret = false;
93 break;
94 }
95
34154e27 96 gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));
a6c787e5 97
08b7917c 98 if (! bitmap_bit_p (visited, src->index))
a6c787e5 99 {
08b7917c 100 bitmap_set_bit (visited, src->index);
a6c787e5 101 FOR_EACH_EDGE (e, ei, src->preds)
f1f41a6c 102 stack.safe_push (e);
a6c787e5 103 }
104 }
105
a6c787e5 106 return ret;
107}
108
109
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple *stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the per-SSA-name offset cache; -1 marks "unknown".  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  /* First pass: walk the SSA definition chain of RHS backwards,
     accumulating constant additions into RET, until we either hit an
     SSA name whose offset is already cached, or a load of COUNTER
     itself.  Any unrecognized statement shape makes the bump
     unknown (HOST_WIDE_INT_M1U).  */
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  /* Cached: express RET relative to the current counter value,
	     saturating at MAX_SIZE.  */
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      /* Plain copy or cast from another SSA name: keep walking.  */
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      /* SSA name plus constant: accumulate the constant.  */
      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      /* &MEM[ssa_name + constant]: same as an addition.  */
      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      /* The chain must bottom out in a load of COUNTER itself.  */
      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  /* COMPONENT_REFs match if they read the same field of the
	     same base object.  */
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  /* Second pass: walk the same chain again, this time recording in
     si->offsets the counter value each intermediate SSA name
     corresponds to (capped at MAX_SIZE).  Every statement on this
     chain was validated by the first pass.  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
259
260
261/* Called by walk_tree to look for references to va_list variables. */
262
263static tree
264find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
265 void *data)
266{
75a70cf9 267 bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
a6c787e5 268 tree var = *tp;
269
270 if (TREE_CODE (var) == SSA_NAME)
7ecda5e8 271 {
272 if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
273 return var;
274 }
53e9c5c4 275 else if (VAR_P (var))
7ecda5e8 276 {
277 if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
278 return var;
279 }
a6c787e5 280
281 return NULL_TREE;
282}
283
284
285/* Helper function of va_list_counter_struct_op. Compute
286 cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
287 if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
288 statement. GPR_P is true if AP is a GPR counter, false if it is
289 a FPR counter. */
290
291static void
292va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
293 bool write_p)
294{
295 unsigned HOST_WIDE_INT increment;
296
297 if (si->compute_sizes < 0)
298 {
299 si->compute_sizes = 0;
300 if (si->va_start_count == 1
301 && reachable_at_most_once (si->bb, si->va_start_bb))
302 si->compute_sizes = 1;
303
304 if (dump_file && (dump_flags & TDF_DETAILS))
305 fprintf (dump_file,
306 "bb%d will %sbe executed at most once for each va_start "
307 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
308 si->va_start_bb->index);
309 }
310
311 if (write_p
312 && si->compute_sizes
313 && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
314 {
315 if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
316 {
317 cfun->va_list_gpr_size += increment;
318 return;
319 }
320
321 if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
322 {
323 cfun->va_list_fpr_size += increment;
324 return;
325 }
326 }
327
328 if (write_p || !si->compute_sizes)
329 {
330 if (gpr_p)
331 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
332 else
333 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
334 }
335}
336
337
338/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
339 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
340 is false, AP has been seen in VAR = AP assignment.
341 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
342 va_arg operation that doesn't cause the va_list variable to escape
343 current function. */
344
345static bool
346va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
347 bool write_p)
348{
349 tree base;
350
351 if (TREE_CODE (ap) != COMPONENT_REF
352 || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
353 return false;
354
355 if (TREE_CODE (var) != SSA_NAME
7ecda5e8 356 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
a6c787e5 357 return false;
358
359 base = get_base_address (ap);
53e9c5c4 360 if (!VAR_P (base)
7ecda5e8 361 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
a6c787e5 362 return false;
363
364 if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
365 va_list_counter_op (si, ap, var, true, write_p);
366 else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
367 va_list_counter_op (si, ap, var, false, write_p);
368
369 return true;
370}
371
372
373/* Check for TEM = AP. Return true if found and the caller shouldn't
374 search for va_list references in the statement. */
375
376static bool
377va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
378{
53e9c5c4 379 if (!VAR_P (ap)
7ecda5e8 380 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
a6c787e5 381 return false;
382
383 if (TREE_CODE (tem) != SSA_NAME
7ecda5e8 384 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
a6c787e5 385 return false;
386
387 if (si->compute_sizes < 0)
388 {
389 si->compute_sizes = 0;
390 if (si->va_start_count == 1
391 && reachable_at_most_once (si->bb, si->va_start_bb))
392 si->compute_sizes = 1;
393
394 if (dump_file && (dump_flags & TDF_DETAILS))
395 fprintf (dump_file,
396 "bb%d will %sbe executed at most once for each va_start "
397 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
398 si->va_start_bb->index);
399 }
400
401 /* For void * or char * va_list types, there is just one counter.
402 If va_arg is used in a loop, we don't know how many registers need
403 saving. */
404 if (! si->compute_sizes)
405 return false;
406
561f0ec8 407 if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
a6c787e5 408 return false;
409
410 /* Note the temporary, as we need to track whether it doesn't escape
411 the current function. */
7ecda5e8 412 bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));
413
a6c787e5 414 return true;
415}
416
417
418/* Check for:
419 tem1 = AP;
420 TEM2 = tem1 + CST;
421 AP = TEM2;
422 sequence and update cfun->va_list_gpr_size. Return true if found. */
423
424static bool
425va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
426{
427 unsigned HOST_WIDE_INT increment;
428
53e9c5c4 429 if (!VAR_P (ap)
7ecda5e8 430 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
a6c787e5 431 return false;
432
433 if (TREE_CODE (tem2) != SSA_NAME
7ecda5e8 434 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
a6c787e5 435 return false;
436
437 if (si->compute_sizes <= 0)
438 return false;
439
440 increment = va_list_counter_bump (si, ap, tem2, true);
441 if (increment + 1 <= 1)
442 return false;
443
444 if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
445 cfun->va_list_gpr_size += increment;
446 else
447 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
448
449 return true;
450}
451
452
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending whether LHS is a function local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  /* Only RHS forms derived from a tracked escape temporary are of
     interest: the SSA name itself, or &MEM[ssa_name ...].  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  /* Storing a tracked temporary into anything other than another SSA
     temporary (e.g. into memory) counts as an escape.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  /* Lazily decide whether precise size computation is possible for
     this basic block (it must run at most once per va_start).  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  /* If the new value's offset from the va_start value cannot be
     determined, give up.  */
  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  /* LHS is a local temporary carrying a va_list-derived value;
     track it too.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
518
519
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* A tracked temporary flowing into a PHI whose result is not
	 itself tracked may escape through the PHI result.  */
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gphi *phi = i.phi ();

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      /* Examine every ordinary statement that uses a tracked
	 temporary; anything but the recognized va_arg shapes below
	 is treated as an escape.  */
      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp;  A load through the temporary: grow
		     cfun->va_list_gpr_size to cover the bytes accessed
		     (offset of the temporary plus MEM_REF offset plus
		     access size), saturating at VA_LIST_MAX_GPR_SIZE.  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      /* Copying into another tracked temporary is ok.  */
		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      /* Storing back into a tracked va_list var is ok.  */
		      if (VAR_P (lhs)
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      /* &MEM[ap_temp ...] assigned to another tracked
			 temporary is ok too.  */
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      /* Anything else is a potential escape.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
654
0de0b987 655/* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size. */
a6c787e5 656
0de0b987 657static void
658optimize_va_list_gpr_fpr_size (function *fun)
a6c787e5 659{
660 basic_block bb;
661 bool va_list_escapes = false;
662 bool va_list_simple_ptr;
663 struct stdarg_info si;
75a70cf9 664 struct walk_stmt_info wi;
a6c787e5 665 const char *funcname = NULL;
5f57a8b1 666 tree cfun_va_list;
a6c787e5 667
65b0537f 668 fun->va_list_gpr_size = 0;
669 fun->va_list_fpr_size = 0;
a6c787e5 670 memset (&si, 0, sizeof (si));
671 si.va_list_vars = BITMAP_ALLOC (NULL);
672 si.va_list_escape_vars = BITMAP_ALLOC (NULL);
673
674 if (dump_file)
675 funcname = lang_hooks.decl_printable_name (current_function_decl, 2);
676
65b0537f 677 cfun_va_list = targetm.fn_abi_va_list (fun->decl);
5f57a8b1 678 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
679 && (TREE_TYPE (cfun_va_list) == void_type_node
680 || TREE_TYPE (cfun_va_list) == char_type_node);
681 gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);
a6c787e5 682
65b0537f 683 FOR_EACH_BB_FN (bb, fun)
a6c787e5 684 {
75a70cf9 685 gimple_stmt_iterator i;
a6c787e5 686
75a70cf9 687 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
a6c787e5 688 {
42acab1c 689 gimple *stmt = gsi_stmt (i);
75a70cf9 690 tree callee, ap;
a6c787e5 691
75a70cf9 692 if (!is_gimple_call (stmt))
a6c787e5 693 continue;
694
75a70cf9 695 callee = gimple_call_fndecl (stmt);
a6c787e5 696 if (!callee
a0e9bfbb 697 || !fndecl_built_in_p (callee, BUILT_IN_NORMAL))
a6c787e5 698 continue;
699
700 switch (DECL_FUNCTION_CODE (callee))
701 {
702 case BUILT_IN_VA_START:
703 break;
704 /* If old style builtins are used, don't optimize anything. */
705 case BUILT_IN_SAVEREGS:
a6c787e5 706 case BUILT_IN_NEXT_ARG:
707 va_list_escapes = true;
708 continue;
709 default:
710 continue;
711 }
712
713 si.va_start_count++;
75a70cf9 714 ap = gimple_call_arg (stmt, 0);
8b151587 715
716 if (TREE_CODE (ap) != ADDR_EXPR)
a6c787e5 717 {
718 va_list_escapes = true;
719 break;
720 }
a6c787e5 721 ap = TREE_OPERAND (ap, 0);
8b151587 722 if (TREE_CODE (ap) == ARRAY_REF)
723 {
724 if (! integer_zerop (TREE_OPERAND (ap, 1)))
725 {
726 va_list_escapes = true;
727 break;
728 }
729 ap = TREE_OPERAND (ap, 0);
730 }
731 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
65b0537f 732 != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
53e9c5c4 733 || !VAR_P (ap))
8b151587 734 {
735 va_list_escapes = true;
736 break;
737 }
738
a6c787e5 739 if (is_global_var (ap))
740 {
741 va_list_escapes = true;
742 break;
743 }
744
7ecda5e8 745 bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);
a6c787e5 746
747 /* VA_START_BB and VA_START_AP will be only used if there is just
748 one va_start in the function. */
749 si.va_start_bb = bb;
750 si.va_start_ap = ap;
751 }
752
753 if (va_list_escapes)
754 break;
755 }
756
757 /* If there were no va_start uses in the function, there is no need to
758 save anything. */
759 if (si.va_start_count == 0)
760 goto finish;
761
762 /* If some va_list arguments weren't local, we can't optimize. */
763 if (va_list_escapes)
764 goto finish;
765
766 /* For void * or char * va_list, something useful can be done only
767 if there is just one va_start. */
768 if (va_list_simple_ptr && si.va_start_count > 1)
769 {
770 va_list_escapes = true;
771 goto finish;
772 }
773
774 /* For struct * va_list, if the backend didn't tell us what the counter fields
775 are, there is nothing more we can do. */
776 if (!va_list_simple_ptr
777 && va_list_gpr_counter_field == NULL_TREE
778 && va_list_fpr_counter_field == NULL_TREE)
779 {
780 va_list_escapes = true;
781 goto finish;
782 }
783
784 /* For void * or char * va_list there is just one counter
785 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
786 if (va_list_simple_ptr)
65b0537f 787 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
a6c787e5 788
ccc5d6f1 789 calculate_dominance_info (CDI_DOMINATORS);
75a70cf9 790 memset (&wi, 0, sizeof (wi));
791 wi.info = si.va_list_vars;
ccc5d6f1 792
65b0537f 793 FOR_EACH_BB_FN (bb, fun)
a6c787e5 794 {
a6c787e5 795 si.compute_sizes = -1;
796 si.bb = bb;
35bf4ce8 797
798 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
799 them as assignments for the purpose of escape analysis. This is
800 not needed for non-simple va_list because virtual phis don't perform
5300bef0 801 any real data movement. Also, check PHI nodes for taking address of
802 the va_list vars. */
803 tree lhs, rhs;
804 use_operand_p uop;
805 ssa_op_iter soi;
35bf4ce8 806
5300bef0 807 for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
808 gsi_next (&i))
809 {
810 gphi *phi = i.phi ();
811 lhs = PHI_RESULT (phi);
35bf4ce8 812
5300bef0 813 if (virtual_operand_p (lhs))
814 continue;
35bf4ce8 815
5300bef0 816 if (va_list_simple_ptr)
817 {
35bf4ce8 818 FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
819 {
820 rhs = USE_FROM_PTR (uop);
821 if (va_list_ptr_read (&si, rhs, lhs))
822 continue;
823 else if (va_list_ptr_write (&si, lhs, rhs))
824 continue;
825 else
826 check_va_list_escapes (&si, lhs, rhs);
827
75a70cf9 828 if (si.va_list_escapes)
35bf4ce8 829 {
830 if (dump_file && (dump_flags & TDF_DETAILS))
831 {
832 fputs ("va_list escapes in ", dump_file);
75a70cf9 833 print_gimple_stmt (dump_file, phi, 0, dump_flags);
35bf4ce8 834 fputc ('\n', dump_file);
835 }
836 va_list_escapes = true;
837 }
838 }
839 }
5300bef0 840
841 for (unsigned j = 0; !va_list_escapes
842 && j < gimple_phi_num_args (phi); ++j)
843 if ((!va_list_simple_ptr
844 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
845 && walk_tree (gimple_phi_arg_def_ptr (phi, j),
846 find_va_list_reference, &wi, NULL))
847 {
848 if (dump_file && (dump_flags & TDF_DETAILS))
849 {
850 fputs ("va_list escapes in ", dump_file);
851 print_gimple_stmt (dump_file, phi, 0, dump_flags);
852 fputc ('\n', dump_file);
853 }
854 va_list_escapes = true;
855 }
35bf4ce8 856 }
857
1a91d914 858 for (gimple_stmt_iterator i = gsi_start_bb (bb);
75a70cf9 859 !gsi_end_p (i) && !va_list_escapes;
860 gsi_next (&i))
a6c787e5 861 {
42acab1c 862 gimple *stmt = gsi_stmt (i);
a6c787e5 863
864 /* Don't look at __builtin_va_{start,end}, they are ok. */
75a70cf9 865 if (is_gimple_call (stmt))
a6c787e5 866 {
75a70cf9 867 tree callee = gimple_call_fndecl (stmt);
a6c787e5 868
869 if (callee
a0e9bfbb 870 && (fndecl_built_in_p (callee, BUILT_IN_VA_START)
871 || fndecl_built_in_p (callee, BUILT_IN_VA_END)))
a6c787e5 872 continue;
873 }
874
75a70cf9 875 if (is_gimple_assign (stmt))
a6c787e5 876 {
5300bef0 877 lhs = gimple_assign_lhs (stmt);
878 rhs = gimple_assign_rhs1 (stmt);
a6c787e5 879
880 if (va_list_simple_ptr)
881 {
75a70cf9 882 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
883 == GIMPLE_SINGLE_RHS)
884 {
4b4d1c3b 885 /* Check for ap ={v} {}. */
886 if (TREE_CLOBBER_P (rhs))
887 continue;
888
75a70cf9 889 /* Check for tem = ap. */
4b4d1c3b 890 else if (va_list_ptr_read (&si, rhs, lhs))
75a70cf9 891 continue;
a6c787e5 892
75a70cf9 893 /* Check for the last insn in:
894 tem1 = ap;
895 tem2 = tem1 + CST;
896 ap = tem2;
897 sequence. */
898 else if (va_list_ptr_write (&si, lhs, rhs))
899 continue;
900 }
a6c787e5 901
75a70cf9 902 if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
903 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
d9659041 904 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
75a70cf9 905 || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
906 == GIMPLE_SINGLE_RHS))
a6c787e5 907 check_va_list_escapes (&si, lhs, rhs);
908 }
909 else
910 {
75a70cf9 911 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
912 == GIMPLE_SINGLE_RHS)
913 {
3c25489e 914 /* Check for ap ={v} {}. */
915 if (TREE_CLOBBER_P (rhs))
916 continue;
4b4d1c3b 917
75a70cf9 918 /* Check for ap[0].field = temp. */
3c25489e 919 else if (va_list_counter_struct_op (&si, lhs, rhs, true))
75a70cf9 920 continue;
a6c787e5 921
75a70cf9 922 /* Check for temp = ap[0].field. */
923 else if (va_list_counter_struct_op (&si, rhs, lhs,
924 false))
925 continue;
926 }
a6c787e5 927
928 /* Do any architecture specific checking. */
75a70cf9 929 if (targetm.stdarg_optimize_hook
930 && targetm.stdarg_optimize_hook (&si, stmt))
a6c787e5 931 continue;
932 }
933 }
9845d120 934 else if (is_gimple_debug (stmt))
935 continue;
a6c787e5 936
937 /* All other uses of va_list are either va_copy (that is not handled
938 in this optimization), taking address of va_list variable or
939 passing va_list to other functions (in that case va_list might
940 escape the function and therefore va_start needs to set it up
941 fully), or some unexpected use of va_list. None of these should
942 happen in a gimplified VA_ARG_EXPR. */
943 if (si.va_list_escapes
75a70cf9 944 || walk_gimple_op (stmt, find_va_list_reference, &wi))
a6c787e5 945 {
946 if (dump_file && (dump_flags & TDF_DETAILS))
947 {
948 fputs ("va_list escapes in ", dump_file);
75a70cf9 949 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
a6c787e5 950 fputc ('\n', dump_file);
951 }
952 va_list_escapes = true;
953 }
954 }
955
956 if (va_list_escapes)
957 break;
958 }
959
960 if (! va_list_escapes
961 && va_list_simple_ptr
962 && ! bitmap_empty_p (si.va_list_escape_vars)
963 && check_all_va_list_escapes (&si))
964 va_list_escapes = true;
965
966finish:
967 if (va_list_escapes)
968 {
65b0537f 969 fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
970 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
a6c787e5 971 }
972 BITMAP_FREE (si.va_list_vars);
973 BITMAP_FREE (si.va_list_escape_vars);
974 free (si.offsets);
975 if (dump_file)
976 {
977 fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
978 funcname, (int) va_list_escapes);
65b0537f 979 if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
a6c787e5 980 fputs ("all", dump_file);
981 else
982 fprintf (dump_file, "%d", cfun->va_list_gpr_size);
983 fputs (" GPR units and ", dump_file);
65b0537f 984 if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
a6c787e5 985 fputs ("all", dump_file);
986 else
987 fprintf (dump_file, "%d", cfun->va_list_fpr_size);
988 fputs (" FPR units.\n", dump_file);
989 }
0de0b987 990}
991
/* Expand IFN_VA_ARGs in FUN.  Each IFN_VA_ARG call is replaced by the
   target's gimplified va_arg sequence, spliced into new basic blocks
   after the call's block; SSA form is updated afterwards.  */

static void
expand_ifn_va_arg_1 (function *fun)
{
  bool modified = false;
  basic_block bb;
  gimple_stmt_iterator i;
  location_t saved_location;

  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple *stmt = gsi_stmt (i);
	tree ap, aptype, expr, lhs, type;
	gimple_seq pre = NULL, post = NULL;

	if (!gimple_call_internal_p (stmt, IFN_VA_ARG))
	  continue;

	modified = true;

	/* Arg 1 carries the requested type as a pointer to it;
	   arg 0 is the va_list address, arg 2 its type.  */
	type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
	ap = gimple_call_arg (stmt, 0);
	aptype = TREE_TYPE (gimple_call_arg (stmt, 2));
	gcc_assert (POINTER_TYPE_P (aptype));

	/* Balance out the &ap, usually added by build_va_arg.  */
	ap = build2 (MEM_REF, TREE_TYPE (aptype), ap,
		     build_int_cst (aptype, 0));

	push_gimplify_context (false);
	saved_location = input_location;
	input_location = gimple_location (stmt);

	/* Make it easier for the backends by protecting the valist argument
	   from multiple evaluations.  */
	gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue);

	expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post);

	lhs = gimple_call_lhs (stmt);
	if (lhs != NULL_TREE)
	  {
	    unsigned int nargs = gimple_call_num_args (stmt);
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));

	    if (nargs == 4)
	      {
		/* We've transported the size of the WITH_SIZE_EXPR here as
		   the last argument of the internal fn call.  Now reinstate
		   it.  */
		tree size = gimple_call_arg (stmt, nargs - 1);
		expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
	      }

	    /* We use gimplify_assign here, rather than gimple_build_assign,
	       because gimple_assign knows how to deal with variable-sized
	       types.  */
	    gimplify_assign (lhs, expr, &pre);
	  }
	else
	  /* Result unused; still emit the side effects.  */
	  gimplify_and_add (expr, &pre);

	input_location = saved_location;
	pop_gimplify_context (NULL);

	gimple_seq_add_seq (&pre, post);
	update_modified_stmts (pre);

	/* Add the sequence after IFN_VA_ARG.  This splits the bb right
	   after IFN_VA_ARG, and adds the sequence in one or more new bbs
	   inbetween.  */
	gimple_find_sub_bbs (pre, &i);

	/* Remove the IFN_VA_ARG gimple_call.  It's the last stmt in the
	   bb.  */
	unlink_stmt_vdef (stmt);
	release_ssa_name_fn (fun, gimple_vdef (stmt));
	gsi_remove (&i, true);
	gcc_assert (gsi_end_p (i));

	/* We're walking here into the bbs which contain the expansion of
	   IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
	   expanding.  We could try to skip walking these bbs, perhaps by
	   walking backwards over gimples and bbs.  */
	break;
      }

  if (!modified)
    return;

  /* The bb splits above invalidated dominators; rebuild SSA for the
     newly created statements.  */
  free_dominance_info (CDI_DOMINATORS);
  update_ssa (TODO_update_ssa);
}
1087
1088/* Expand IFN_VA_ARGs in FUN, if necessary. */
1089
1090static void
1091expand_ifn_va_arg (function *fun)
1092{
1093 if ((fun->curr_properties & PROP_gimple_lva) == 0)
1094 expand_ifn_va_arg_1 (fun);
1095
382ecba7 1096 if (flag_checking)
1097 {
1098 basic_block bb;
1099 gimple_stmt_iterator i;
1100 FOR_EACH_BB_FN (bb, fun)
1101 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7408cd7d 1102 gcc_assert (!gimple_call_internal_p (gsi_stmt (i), IFN_VA_ARG));
382ecba7 1103 }
82fc0e0a 1104}
1105
7620bc82 1106namespace {
1107
/* Pass descriptor for the stdarg pass.  Note it advertises
   PROP_gimple_lva: after this pass runs, no IFN_VA_ARG internal calls
   remain in the function (checked in expand_ifn_va_arg).  */
const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1120
/* The stdarg pass: expands any remaining IFN_VA_ARG internal calls and,
   for stdarg functions, runs the va_list GPR/FPR save-area size
   optimization (see pass_stdarg::execute below).  */
class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    /* Always run this pass, in order to expand va_arg internal_fns.  We
       also need to do that if fun->stdarg == 0, because a va_arg may also
       occur in a function without varargs, f.i. when passing a va_list to
       another function.  */
    return true;
  }

  virtual unsigned int execute (function *);

}; // class pass_stdarg
1141
1142unsigned int
1143pass_stdarg::execute (function *fun)
1144{
82fc0e0a 1145 /* TODO: Postpone expand_ifn_va_arg till after
1146 optimize_va_list_gpr_fpr_size. */
1147 expand_ifn_va_arg (fun);
1148
1149 if (flag_stdarg_opt
1150 /* This optimization is only for stdarg functions. */
1151 && fun->stdarg != 0)
1152 optimize_va_list_gpr_fpr_size (fun);
0de0b987 1153
2a1990e9 1154 return 0;
a6c787e5 1155}
1156
7620bc82 1157} // anon namespace
1158
cbe8bda8 1159gimple_opt_pass *
1160make_pass_stdarg (gcc::context *ctxt)
1161{
1162 return new pass_stdarg (ctxt);
1163}
82fc0e0a 1164
7620bc82 1165namespace {
1166
/* Pass descriptor for the early va_arg lowering pass.  Like the stdarg
   pass it provides PROP_gimple_lva, i.e. no IFN_VA_ARG calls remain
   afterwards; its gate makes the two passes mutually exclusive.  */
const pass_data pass_data_lower_vaarg =
{
  GIMPLE_PASS, /* type */
  "lower_vaarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1179
7620bc82 1180class pass_lower_vaarg : public gimple_opt_pass
82fc0e0a 1181{
1182public:
1183 pass_lower_vaarg (gcc::context *ctxt)
1184 : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
1185 {}
1186
1187 /* opt_pass methods: */
1188 virtual bool gate (function *)
1189 {
1190 return (cfun->curr_properties & PROP_gimple_lva) == 0;
1191 }
1192
1193 virtual unsigned int execute (function *);
1194
1195}; // class pass_lower_vaarg
1196
1197unsigned int
1198pass_lower_vaarg::execute (function *fun)
1199{
1200 expand_ifn_va_arg (fun);
1201 return 0;
1202}
1203
7620bc82 1204} // anon namespace
1205
82fc0e0a 1206gimple_opt_pass *
1207make_pass_lower_vaarg (gcc::context *ctxt)
1208{
1209 return new pass_lower_vaarg (ctxt);
1210}