/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "langhooks.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimplify.h"
#include "tree-into-ssa.h"
#include "tree-cfg.h"
#include "tree-stdarg.h"

/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will be
   used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
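
/* For example, in a function like

     int
     sum (int count, ...)
     {
       va_list ap;
       int i, total = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
         total += va_arg (ap, int);
       va_end (ap);
       return total;
     }

   (a hypothetical testcase) every va_arg uses an integral type and AP
   never escapes, so only the general purpose argument registers need
   to be saved on entry.  */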


/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
        {
          ret = false;
          break;
        }

      if (src == va_start_bb)
        continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
        {
          ret = false;
          break;
        }

      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
        {
          bitmap_set_bit (visited, src->index);
          FOR_EACH_EDGE (e, ei, src->preds)
            stack.safe_push (e);
        }
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}


/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return the constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is a GPR counter.  */
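
/* For example, on x86_64, where the GPR counter is the gp_offset field
   of the va_list record, a va_arg (ap, int) that bumps the counter by 8
   shows up as the GIMPLE sequence

     _1 = ap.gp_offset;
     _2 = _1 + 8;
     ap.gp_offset = _2;

   and a call with COUNTER == ap.gp_offset and RHS == _2 returns 8
   (an illustrative sketch; the SSA temporaries are hypothetical).  */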

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
                      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple *stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
        si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        {
          if (counter_val >= max_size)
            {
              ret = max_size;
              break;
            }

          ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
          break;
        }

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
        return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
        {
          ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
        {
          ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
        return HOST_WIDE_INT_M1U;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
        return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
        {
          if (get_base_address (counter) != get_base_address (rhs)
              || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
              || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
            return HOST_WIDE_INT_M1U;
        }
      else if (counter != rhs)
        return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        break;

      if (val >= max_size)
        si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
        si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
        {
          val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
        {
          val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      lhs = NULL;
    }

  return ret;
}


/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    {
      if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
        return var;
    }
  else if (TREE_CODE (var) == VAR_DECL)
    {
      if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
        return var;
    }

  return NULL_TREE;
}


/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter;
   if WRITE_P is true, it was seen in an AP = VAR statement, otherwise
   in a VAR = AP statement.  GPR_P is true if AP is a GPR counter,
   false if it is an FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
                    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
        {
          cfun->va_list_gpr_size += increment;
          return;
        }

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
        {
          cfun->va_list_fpr_size += increment;
          return;
        }
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
        cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}


/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in an AP = VAR assignment; if WRITE_P
   is false, AP has been seen in a VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   the current function.  */

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
                           bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}


/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}


/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */
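
/* (Illustrative note: for a simple char * or void * va_list, this is
   the shape va_arg (ap, type) typically takes after gimplification,
   with CST the constant AP is advanced by, so CST is what gets added
   to the single GPR counter.)  */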
428
429 static bool
430 va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
431 {
432 unsigned HOST_WIDE_INT increment;
433
434 if (TREE_CODE (ap) != VAR_DECL
435 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
436 return false;
437
438 if (TREE_CODE (tem2) != SSA_NAME
439 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
440 return false;
441
442 if (si->compute_sizes <= 0)
443 return false;
444
445 increment = va_list_counter_bump (si, ap, tem2, true);
446 if (increment + 1 <= 1)
447 return false;
448
449 if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
450 cfun->va_list_gpr_size += increment;
451 else
452 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
453
454 return true;
455 }
456
457
458 /* If RHS is X, (some type *) X or X + CST for X a temporary variable
459 containing value of some va_list variable plus optionally some constant,
460 either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
461 depending whether LHS is a function local temporary. */
462
463 static void
464 check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
465 {
466 if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
467 return;
468
469 if (TREE_CODE (rhs) == SSA_NAME)
470 {
471 if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
472 return;
473 }
474 else if (TREE_CODE (rhs) == ADDR_EXPR
475 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
476 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
477 {
478 tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
479 if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
480 return;
481 }
482 else
483 return;
484
485 if (TREE_CODE (lhs) != SSA_NAME)
486 {
487 si->va_list_escapes = true;
488 return;
489 }
490
491 if (si->compute_sizes < 0)
492 {
493 si->compute_sizes = 0;
494 if (si->va_start_count == 1
495 && reachable_at_most_once (si->bb, si->va_start_bb))
496 si->compute_sizes = 1;
497
498 if (dump_file && (dump_flags & TDF_DETAILS))
499 fprintf (dump_file,
500 "bb%d will %sbe executed at most once for each va_start "
501 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
502 si->va_start_bb->index);
503 }
504
505 /* For void * or char * va_list types, there is just one counter.
506 If va_arg is used in a loop, we don't know how many registers need
507 saving. */
508 if (! si->compute_sizes)
509 {
510 si->va_list_escapes = true;
511 return;
512 }
513
514 if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
515 == HOST_WIDE_INT_M1U)
516 {
517 si->va_list_escapes = true;
518 return;
519 }
520
521 bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
522 }
523
524
525 /* Check all uses of temporaries from si->va_list_escape_vars bitmap.
526 Return true if va_list might be escaping. */
527
528 static bool
529 check_all_va_list_escapes (struct stdarg_info *si)
530 {
531 basic_block bb;
532
533 FOR_EACH_BB_FN (bb, cfun)
534 {
535 for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
536 gsi_next (&i))
537 {
538 tree lhs;
539 use_operand_p uop;
540 ssa_op_iter soi;
541 gphi *phi = i.phi ();
542
543 lhs = PHI_RESULT (phi);
544 if (virtual_operand_p (lhs)
545 || bitmap_bit_p (si->va_list_escape_vars,
546 SSA_NAME_VERSION (lhs)))
547 continue;
548
549 FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
550 {
551 tree rhs = USE_FROM_PTR (uop);
552 if (TREE_CODE (rhs) == SSA_NAME
553 && bitmap_bit_p (si->va_list_escape_vars,
554 SSA_NAME_VERSION (rhs)))
555 {
556 if (dump_file && (dump_flags & TDF_DETAILS))
557 {
558 fputs ("va_list escapes in ", dump_file);
559 print_gimple_stmt (dump_file, phi, 0, dump_flags);
560 fputc ('\n', dump_file);
561 }
562 return true;
563 }
564 }
565 }
566
567 for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
568 gsi_next (&i))
569 {
570 gimple *stmt = gsi_stmt (i);
571 tree use;
572 ssa_op_iter iter;
573
574 if (is_gimple_debug (stmt))
575 continue;
576
577 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
578 {
579 if (! bitmap_bit_p (si->va_list_escape_vars,
580 SSA_NAME_VERSION (use)))
581 continue;
582
583 if (is_gimple_assign (stmt))
584 {
585 tree rhs = gimple_assign_rhs1 (stmt);
586 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
587
588 /* x = *ap_temp; */
589 if (rhs_code == MEM_REF
590 && TREE_OPERAND (rhs, 0) == use
591 && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
592 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
593 && si->offsets[SSA_NAME_VERSION (use)] != -1)
594 {
595 unsigned HOST_WIDE_INT gpr_size;
596 tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));
597
598 gpr_size = si->offsets[SSA_NAME_VERSION (use)]
599 + tree_to_shwi (TREE_OPERAND (rhs, 1))
600 + tree_to_uhwi (access_size);
601 if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
602 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
603 else if (gpr_size > cfun->va_list_gpr_size)
604 cfun->va_list_gpr_size = gpr_size;
605 continue;
606 }
607
608 /* va_arg sequences may contain
609 other_ap_temp = ap_temp;
610 other_ap_temp = ap_temp + constant;
611 other_ap_temp = (some_type *) ap_temp;
612 ap = ap_temp;
613 statements. */
614 if (rhs == use
615 && ((rhs_code == POINTER_PLUS_EXPR
616 && (TREE_CODE (gimple_assign_rhs2 (stmt))
617 == INTEGER_CST))
618 || gimple_assign_cast_p (stmt)
619 || (get_gimple_rhs_class (rhs_code)
620 == GIMPLE_SINGLE_RHS)))
621 {
622 tree lhs = gimple_assign_lhs (stmt);
623
624 if (TREE_CODE (lhs) == SSA_NAME
625 && bitmap_bit_p (si->va_list_escape_vars,
626 SSA_NAME_VERSION (lhs)))
627 continue;
628
629 if (TREE_CODE (lhs) == VAR_DECL
630 && bitmap_bit_p (si->va_list_vars,
631 DECL_UID (lhs) + num_ssa_names))
632 continue;
633 }
634 else if (rhs_code == ADDR_EXPR
635 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
636 && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
637 {
638 tree lhs = gimple_assign_lhs (stmt);
639
640 if (bitmap_bit_p (si->va_list_escape_vars,
641 SSA_NAME_VERSION (lhs)))
642 continue;
643 }
644 }
645
646 if (dump_file && (dump_flags & TDF_DETAILS))
647 {
648 fputs ("va_list escapes in ", dump_file);
649 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
650 fputc ('\n', dump_file);
651 }
652 return true;
653 }
654 }
655 }
656
657 return false;
658 }
659
660 /* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size. */

static void
optimize_va_list_gpr_fpr_size (function *fun)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  fun->va_list_gpr_size = 0;
  fun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  cfun_va_list = targetm.fn_abi_va_list (fun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple *stmt = gsi_stmt (i);
          tree callee, ap;

          if (!is_gimple_call (stmt))
            continue;

          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_VA_START:
              break;
              /* If old style builtins are used, don't optimize anything.  */
            case BUILT_IN_SAVEREGS:
            case BUILT_IN_NEXT_ARG:
              va_list_escapes = true;
              continue;
            default:
              continue;
            }

          si.va_start_count++;
          ap = gimple_call_arg (stmt, 0);

          if (TREE_CODE (ap) != ADDR_EXPR)
            {
              va_list_escapes = true;
              break;
            }
          ap = TREE_OPERAND (ap, 0);
          if (TREE_CODE (ap) == ARRAY_REF)
            {
              if (! integer_zerop (TREE_OPERAND (ap, 1)))
                {
                  va_list_escapes = true;
                  break;
                }
              ap = TREE_OPERAND (ap, 0);
            }
          if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
              != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
              || TREE_CODE (ap) != VAR_DECL)
            {
              va_list_escapes = true;
              break;
            }

          if (is_global_var (ap))
            {
              va_list_escapes = true;
              break;
            }

          bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

          /* VA_START_BB and VA_START_AP will only be used if there is just
             one va_start in the function.  */
          si.va_start_bb = bb;
          si.va_start_ap = ap;
        }

      if (va_list_escapes)
        break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB_FN (bb, fun)
    {
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
         them as assignments for the purpose of escape analysis.  This is
         not needed for non-simple va_list because virtual phis don't perform
         any real data movement.  Also, check PHI nodes for taking address of
         the va_list vars.  */
      tree lhs, rhs;
      use_operand_p uop;
      ssa_op_iter soi;

      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          gphi *phi = i.phi ();
          lhs = PHI_RESULT (phi);

          if (virtual_operand_p (lhs))
            continue;

          if (va_list_simple_ptr)
            {
              FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
                {
                  rhs = USE_FROM_PTR (uop);
                  if (va_list_ptr_read (&si, rhs, lhs))
                    continue;
                  else if (va_list_ptr_write (&si, lhs, rhs))
                    continue;
                  else
                    check_va_list_escapes (&si, lhs, rhs);

                  if (si.va_list_escapes)
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fputs ("va_list escapes in ", dump_file);
                          print_gimple_stmt (dump_file, phi, 0, dump_flags);
                          fputc ('\n', dump_file);
                        }
                      va_list_escapes = true;
                    }
                }
            }

          for (unsigned j = 0; !va_list_escapes
               && j < gimple_phi_num_args (phi); ++j)
            if ((!va_list_simple_ptr
                 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
                && walk_tree (gimple_phi_arg_def_ptr (phi, j),
                              find_va_list_reference, &wi, NULL))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  {
                    fputs ("va_list escapes in ", dump_file);
                    print_gimple_stmt (dump_file, phi, 0, dump_flags);
                    fputc ('\n', dump_file);
                  }
                va_list_escapes = true;
              }
        }

      for (gimple_stmt_iterator i = gsi_start_bb (bb);
           !gsi_end_p (i) && !va_list_escapes;
           gsi_next (&i))
        {
          gimple *stmt = gsi_stmt (i);

          /* Don't look at __builtin_va_{start,end}, they are ok.  */
          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);

              if (callee
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
                continue;
            }

          if (is_gimple_assign (stmt))
            {
              lhs = gimple_assign_lhs (stmt);
              rhs = gimple_assign_rhs1 (stmt);

              if (va_list_simple_ptr)
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for tem = ap.  */
                      else if (va_list_ptr_read (&si, rhs, lhs))
                        continue;

                      /* Check for the last insn in:
                         tem1 = ap;
                         tem2 = tem1 + CST;
                         ap = tem2;
                         sequence.  */
                      else if (va_list_ptr_write (&si, lhs, rhs))
                        continue;
                    }

                  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
                      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                          == GIMPLE_SINGLE_RHS))
                    check_va_list_escapes (&si, lhs, rhs);
                }
              else
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for ap[0].field = temp.  */
                      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
                        continue;

                      /* Check for temp = ap[0].field.  */
                      else if (va_list_counter_struct_op (&si, rhs, lhs,
                                                          false))
                        continue;
                    }

                  /* Do any architecture specific checking.  */
                  if (targetm.stdarg_optimize_hook
                      && targetm.stdarg_optimize_hook (&si, stmt))
                    continue;
                }
            }
          else if (is_gimple_debug (stmt))
            continue;

          /* All other uses of va_list are either va_copy (that is not
             handled in this optimization), taking the address of a va_list
             variable or passing a va_list to other functions (in that case
             va_list might escape the function and therefore va_start needs
             to set it up fully), or some unexpected use of va_list.  None
             of these should happen in a gimplified VA_ARG_EXPR.  */
          if (si.va_list_escapes
              || walk_gimple_op (stmt, find_va_list_reference, &wi))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              va_list_escapes = true;
            }
        }

      if (va_list_escapes)
        break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
               funcname, (int) va_list_escapes);
      if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", fun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", fun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
}

/* Return true if STMT is IFN_VA_ARG.  */

static bool
gimple_call_ifn_va_arg_p (gimple *stmt)
{
  return (is_gimple_call (stmt)
          && gimple_call_internal_p (stmt)
          && gimple_call_internal_fn (stmt) == IFN_VA_ARG);
}

/* Expand IFN_VA_ARGs in FUN.  */
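
/* As a rough sketch (hypothetical GIMPLE; the exact dump syntax may
   differ), a statement such as

     _1 = VA_ARG (&ap, 0B);

   where the fetched type is carried by the pointer type of the second
   argument, is replaced by the sequence the target's gimplify_va_arg_expr
   hook produces for va_arg (ap, type).  */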

static void
expand_ifn_va_arg_1 (function *fun)
{
  bool modified = false;
  basic_block bb;
  gimple_stmt_iterator i;
  location_t saved_location;

  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
        gimple *stmt = gsi_stmt (i);
        tree ap, expr, lhs, type;
        gimple_seq pre = NULL, post = NULL;

        if (!gimple_call_ifn_va_arg_p (stmt))
          continue;

        modified = true;

        type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
        ap = gimple_call_arg (stmt, 0);

        /* Balance out the &ap usually added by build_va_arg.  */
        ap = build_fold_indirect_ref (ap);

        push_gimplify_context (false);
        saved_location = input_location;
        input_location = gimple_location (stmt);

        /* Make it easier for the backends by protecting the valist argument
           from multiple evaluations.  */
        gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue);

        expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post);

        lhs = gimple_call_lhs (stmt);
        if (lhs != NULL_TREE)
          {
            unsigned int nargs = gimple_call_num_args (stmt);
            gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));

            if (nargs == 3)
              {
                /* We've transported the size of the WITH_SIZE_EXPR here as
                   the last argument of the internal fn call.  Now reinstate
                   it.  */
                tree size = gimple_call_arg (stmt, nargs - 1);
                expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
              }

            /* We use gimplify_assign here, rather than gimple_build_assign,
               because gimplify_assign knows how to deal with variable-sized
               types.  */
            gimplify_assign (lhs, expr, &pre);
          }
        else
          gimplify_expr (&expr, &pre, &post, is_gimple_lvalue, fb_lvalue);

        input_location = saved_location;
        pop_gimplify_context (NULL);

        gimple_seq_add_seq (&pre, post);
        update_modified_stmts (pre);

        /* Add the sequence after IFN_VA_ARG.  This splits the bb right
           after IFN_VA_ARG, and adds the sequence in one or more new bbs
           in between.  */
        gimple_find_sub_bbs (pre, &i);

        /* Remove the IFN_VA_ARG gimple_call.  It's the last stmt in the
           bb.  */
        unlink_stmt_vdef (stmt);
        release_ssa_name_fn (fun, gimple_vdef (stmt));
        gsi_remove (&i, true);
        gcc_assert (gsi_end_p (i));

        /* We're walking here into the bbs which contain the expansion of
           IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
           expanding.  We could try to skip walking these bbs, perhaps by
           walking backwards over gimples and bbs.  */
        break;
      }

  if (!modified)
    return;

  free_dominance_info (CDI_DOMINATORS);
  update_ssa (TODO_update_ssa);
}

/* Expand IFN_VA_ARGs in FUN, if necessary.  */

static void
expand_ifn_va_arg (function *fun)
{
  if ((fun->curr_properties & PROP_gimple_lva) == 0)
    expand_ifn_va_arg_1 (fun);

  if (flag_checking)
    {
      basic_block bb;
      gimple_stmt_iterator i;
      FOR_EACH_BB_FN (bb, fun)
        for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
          gcc_assert (!gimple_call_ifn_va_arg_p (gsi_stmt (i)));
    }
}

namespace {

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* Always run this pass, in order to expand va_arg internal_fns.  We
         also need to do that if fun->stdarg == 0, because a va_arg may also
         occur in a function without varargs, e.g. when a va_list is passed
         to another function.  */
      return true;
    }

  virtual unsigned int execute (function *);

}; // class pass_stdarg

unsigned int
pass_stdarg::execute (function *fun)
{
  /* TODO: Postpone expand_ifn_va_arg till after
     optimize_va_list_gpr_fpr_size.  */
  expand_ifn_va_arg (fun);

  if (flag_stdarg_opt
      /* This optimization is only for stdarg functions.  */
      && fun->stdarg != 0)
    optimize_va_list_gpr_fpr_size (fun);

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}

namespace {

const pass_data pass_data_lower_vaarg =
{
  GIMPLE_PASS, /* type */
  "lower_vaarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_vaarg : public gimple_opt_pass
{
public:
  pass_lower_vaarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (cfun->curr_properties & PROP_gimple_lva) == 0;
    }

  virtual unsigned int execute (function *);

}; // class pass_lower_vaarg

unsigned int
pass_lower_vaarg::execute (function *fun)
{
  expand_ifn_va_arg (fun);
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_vaarg (gcc::context *ctxt)
{
  return new pass_lower_vaarg (ctxt);
}