]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-stdarg.c
tree-ssa.h: New.
[thirdparty/gcc.git] / gcc / tree-stdarg.c
CommitLineData
9d30f3c1 1/* Pass computing data for optimizing stdarg functions.
d1e082c2 2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
9d30f3c1
JJ
3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9dcd6f09 9the Free Software Foundation; either version 3, or (at your option)
9d30f3c1
JJ
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
9dcd6f09
NC
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
9d30f3c1
JJ
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
26#include "function.h"
27#include "langhooks.h"
cf835838 28#include "gimple-pretty-print.h"
9d30f3c1 29#include "target.h"
7a300452 30#include "tree-ssa.h"
9d30f3c1
JJ
31#include "tree-pass.h"
32#include "tree-stdarg.h"
33
34/* A simple pass that attempts to optimize stdarg functions on architectures
35 that need to save register arguments to stack on entry to stdarg functions.
36 If the function doesn't use any va_start macros, no registers need to
37 be saved. If va_start macros are used, the va_list variables don't escape
38 the function, it is only necessary to save registers that will be used
39 in va_arg macros. E.g. if va_arg is only used with integral types
40 in the function, floating point registers don't need to be saved, etc. */
41
42
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  /* Same block: trivially executed equally often.  */
  if (va_arg_bb == va_start_bb)
    return true;

  /* If VA_START_BB does not dominate VA_ARG_BB, some path reaches
     VA_ARG_BB without passing through VA_START_BB at all.  */
  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block);
  bitmap_clear (visited);
  ret = true;

  /* Depth-first walk backwards over predecessor edges from VA_ARG_BB.
     Every backward path must terminate at VA_START_BB without revisiting
     VA_ARG_BB (which would indicate a loop executing it more often) and
     without crossing an abnormal (EDGE_COMPLEX) edge.  */
  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      if (src == va_start_bb)
	continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      /* Dominance guarantees we cannot walk past VA_START_BB up to
	 the entry block.  */
      gcc_assert (src != ENTRY_BLOCK_PTR);

      if (! bitmap_bit_p (visited, src->index))
	{
	  bitmap_set_bit (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    stack.safe_push (e);
	}
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}
105
106
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the per-SSA-name offset cache; -1 means "unknown".  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  /* First pass: walk the SSA def chain of RHS backwards, accumulating in
     RET the constant added to COUNTER, until we either hit a read of
     COUNTER itself or a memoized offset from an earlier query.  */
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      /* Memoized: offset from the current counter value is known.  */
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      /* Plain copy or cast: follow the source SSA name.  */
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      /* lhs = name + CST: accumulate CST, follow name.  */
      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && host_integerp (gimple_assign_rhs2 (stmt), 1))
	{
	  ret += tree_low_cst (gimple_assign_rhs2 (stmt), 1);
	  lhs = rhs1;
	  continue;
	}

      /* lhs = &MEM[name + CST]: equivalent pointer bump form.  */
      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && host_integerp (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1))
	{
	  ret += tree_low_cst (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1);
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      /* End of the chain: the value must be a read of COUNTER itself.  */
      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  /* Both must reference the same field of the same va_list.  */
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  /* Second pass: walk the same chain again, memoizing in si->offsets the
     counter offset represented by each intermediate SSA name so later
     queries (and check_all_va_list_escapes) can use them directly.  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      /* Saturate cached offsets at MAX_SIZE.  */
      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && host_integerp (gimple_assign_rhs2 (stmt), 1))
	{
	  val -= tree_low_cst (gimple_assign_rhs2 (stmt), 1);
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && host_integerp (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1))
	{
	  val -= tree_low_cst (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1);
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
256
257
258/* Called by walk_tree to look for references to va_list variables. */
259
260static tree
261find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
262 void *data)
263{
726a989a 264 bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
9d30f3c1
JJ
265 tree var = *tp;
266
267 if (TREE_CODE (var) == SSA_NAME)
6b4a85ad
RG
268 {
269 if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
270 return var;
271 }
272 else if (TREE_CODE (var) == VAR_DECL)
273 {
274 if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
275 return var;
276 }
9d30f3c1
JJ
277
278 return NULL_TREE;
279}
280
281
/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  /* Lazily decide (once per basic block) whether this block executes at
     most once per va_start; only then can increments be summed safely.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* "+ 1 > 1" filters out both a zero increment and the
     HOST_WIDE_INT_M1U failure value of va_list_counter_bump.  */
  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  /* Otherwise be conservative: assume all registers of this kind are
     needed.  Reads with compute_sizes set are harmless and change
     nothing.  */
  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
333
334
/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
   is false, AP has been seen in VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   current function.  */

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
			   bool write_p)
{
  tree base;

  /* AP must be a field reference, e.g. ap[0].gp_offset.  */
  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  /* VAR must be an SSA temporary that is not itself a tracked va_list.  */
  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  /* The base decl must be one of the tracked va_list variables; decl
     UIDs are stored biased by num_ssa_names in the bitmap.  */
  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  /* Only the backend-declared GPR/FPR counter fields are interesting;
     other fields are still recognized (return true) but ignored.  */
  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}
368
369
/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  /* AP must be a tracked va_list decl (UIDs biased by num_ssa_names).  */
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  /* TEM must be an ordinary SSA temporary, not a va_list itself.  */
  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  /* Lazily decide whether this block runs at most once per va_start.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  /* The read must be recognizable as AP plus a known constant.  */
  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}
413
414
/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  /* AP must be a tracked va_list decl (UIDs biased by num_ssa_names).  */
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  /* TEM2 must be an ordinary SSA temporary, not a va_list itself.  */
  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  /* Without the at-most-once guarantee, increments can't be summed.  */
  if (si->compute_sizes <= 0)
    return false;

  /* "increment + 1 <= 1" catches both a zero bump and the
     HOST_WIDE_INT_M1U failure value of va_list_counter_bump.  */
  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  /* Accumulate, saturating at VA_LIST_MAX_GPR_SIZE.  */
  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}
448
449
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending whether LHS is a function local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  /* Only statements whose RHS involves a tracked escape temporary are
     interesting; anything else cannot carry a va_list value.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      /* &MEM[ptr + CST] form: look at the base pointer.  */
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  /* Storing a tracked value into anything but a local SSA temporary
     means the va_list value leaves our analysis — it escapes.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  /* Lazily decide whether this block runs at most once per va_start.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  /* LHS must still be expressible as va_start's AP plus a constant.  */
  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  /* LHS is another local temporary derived from the va_list; track it.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
515
516
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      /* A PHI whose result is untracked but that has a tracked argument
	 lets a va_list-derived value flow outside the analysis.  */
      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gimple phi = gsi_stmt (i);

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  /* Debug statements never constitute a real use.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  /* Inspect every use of a tracked temporary; any use that is
	     not one of the recognized safe forms below is an escape.  */
	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp;  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      /* A dereference of a temporary at a known offset
			 tells us how far into the save area the program
			 reads; widen va_list_gpr_size accordingly.  */
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_low_cst (TREE_OPERAND (rhs, 1), 0)
				 + tree_low_cst (access_size, 1);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      /* Copy into another tracked temporary is safe.  */
		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      /* Copy back into the va_list variable is safe.  */
		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      /* lhs = &MEM[ap_temp + CST]: safe if LHS is also
			 tracked.  */
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
651
652
653/* Return true if this optimization pass should be done.
654 It makes only sense for stdarg functions. */
655
656static bool
657gate_optimize_stdarg (void)
658{
1f9081d1
XDL
659 /* This optimization is only for stdarg functions. */
660 return cfun->stdarg != 0;
9d30f3c1
JJ
661}
662
663
/* Entry point to the stdarg optimization pass.  */

static unsigned int
execute_optimize_stdarg (void)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  /* Start optimistic: no registers need saving until proven otherwise.  */
  cfun->va_list_gpr_size = 0;
  cfun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* Distinguish the two va_list flavors: a simple void */char* pointer
     vs. an aggregate with separate GPR/FPR counter fields.  */
  cfun_va_list = targetm.fn_abi_va_list (cfun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  /* Phase 1: find all va_start calls and collect their va_list decls
     into si.va_list_vars, bailing out on any form we can't analyze.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree callee, ap;

	  if (!is_gimple_call (stmt))
	    continue;

	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    continue;

	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_VA_START:
	      break;
	      /* If old style builtins are used, don't optimize anything.  */
	    case BUILT_IN_SAVEREGS:
	    case BUILT_IN_NEXT_ARG:
	      va_list_escapes = true;
	      continue;
	    default:
	      continue;
	    }

	  si.va_start_count++;
	  ap = gimple_call_arg (stmt, 0);

	  /* The argument to va_start must be &ap (possibly &ap[0]).  */
	  if (TREE_CODE (ap) != ADDR_EXPR)
	    {
	      va_list_escapes = true;
	      break;
	    }
	  ap = TREE_OPERAND (ap, 0);
	  if (TREE_CODE (ap) == ARRAY_REF)
	    {
	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
		{
		  va_list_escapes = true;
		  break;
		}
	      ap = TREE_OPERAND (ap, 0);
	    }
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
	      != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (cfun->decl))
	      || TREE_CODE (ap) != VAR_DECL)
	    {
	      va_list_escapes = true;
	      break;
	    }

	  /* A global va_list is visible to other functions — escape.  */
	  if (is_global_var (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  /* Decl UIDs share the bitmap with SSA versions, biased past
	     num_ssa_names.  */
	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

	  /* VA_START_BB and VA_START_AP will be only used if there is just
	     one va_start in the function.  */
	  si.va_start_bb = bb;
	  si.va_start_ap = ap;
	}

      if (va_list_escapes)
	break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter fields
     are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  /* Phase 2: scan every statement for uses of the tracked va_lists,
     classifying each as a recognized va_arg pattern or an escape.  */
  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      /* -1 = not yet computed for this block (see va_list_counter_op).  */
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
	 them as assignments for the purpose of escape analysis.  This is
	 not needed for non-simple va_list because virtual phis don't perform
	 any real data movement.  */
      if (va_list_simple_ptr)
	{
	  tree lhs, rhs;
	  use_operand_p uop;
	  ssa_op_iter soi;

	  for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	    {
	      gimple phi = gsi_stmt (i);
	      lhs = PHI_RESULT (phi);

	      if (virtual_operand_p (lhs))
		continue;

	      FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
		{
		  rhs = USE_FROM_PTR (uop);
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;
		  else
		    check_va_list_escapes (&si, lhs, rhs);

		  if (si.va_list_escapes)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fputs ("va_list escapes in ", dump_file);
			  print_gimple_stmt (dump_file, phi, 0, dump_flags);
			  fputc ('\n', dump_file);
			}
		      va_list_escapes = true;
		    }
		}
	    }
	}

      for (i = gsi_start_bb (bb);
	   !gsi_end_p (i) && !va_list_escapes;
	   gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);

	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);

	      if (callee
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
		continue;
	    }

	  if (is_gimple_assign (stmt))
	    {
	      tree lhs = gimple_assign_lhs (stmt);
	      tree rhs = gimple_assign_rhs1 (stmt);

	      if (va_list_simple_ptr)
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for tem = ap.  */
		      else if (va_list_ptr_read (&si, rhs, lhs))
			continue;

		      /* Check for the last insn in:
			 tem1 = ap;
			 tem2 = tem1 + CST;
			 ap = tem2;
			 sequence.  */
		      else if (va_list_ptr_write (&si, lhs, rhs))
			continue;
		    }

		  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
		      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
			  == GIMPLE_SINGLE_RHS))
		    check_va_list_escapes (&si, lhs, rhs);
		}
	      else
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for ap[0].field = temp.  */
		      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
			continue;

		      /* Check for temp = ap[0].field.  */
		      else if (va_list_counter_struct_op (&si, rhs, lhs,
							  false))
			continue;
		    }

		  /* Do any architecture specific checking.  */
		  if (targetm.stdarg_optimize_hook
		      && targetm.stdarg_optimize_hook (&si, stmt))
		    continue;
		}
	    }
	  else if (is_gimple_debug (stmt))
	    continue;

	  /* All other uses of va_list are either va_copy (that is not handled
	     in this optimization), taking address of va_list variable or
	     passing va_list to other functions (in that case va_list might
	     escape the function and therefore va_start needs to set it up
	     fully), or some unexpected use of va_list.  None of these should
	     happen in a gimplified VA_ARG_EXPR.  */
	  if (si.va_list_escapes
	      || walk_gimple_op (stmt, find_va_list_reference, &wi))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      va_list_escapes = true;
	    }
	}

      if (va_list_escapes)
	break;
    }

  /* Phase 3: for simple-pointer va_lists, verify every tracked
     temporary is used only in recognized ways.  */
  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  /* On any escape, fall back to saving everything.  */
  if (va_list_escapes)
    {
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
	       funcname, (int) va_list_escapes);
      if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
  return 0;
}
986
987
27a4cd48
DM
namespace {

/* Pass descriptor: a gimple pass requiring CFG and SSA, with no timevar
   of its own and no TODOs.  */
const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass-manager wrapper delegating to the static gate/execute
   functions above.  */
class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg(gcc::context *ctxt)
    : gimple_opt_pass(pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_optimize_stdarg (); }
  unsigned int execute () { return execute_optimize_stdarg (); }

}; // class pass_stdarg

} // anon namespace

/* Factory function called by the pass manager to instantiate the pass.  */
gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}