/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "function.h"
#include "langhooks.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "bitmap.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "gimplify.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "sbitmap.h"
#include "tree-cfg.h"
#include "tree-pass.h"
#include "tree-stdarg.h"

/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will
   be used in va_arg macros.  E.g. if va_arg is only used with integral
   types in the function, floating point registers don't need to be saved,
   etc.  */
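
/* As a concrete illustration (a hypothetical function, not taken from any
   testsuite), consider:

     int
     sum (int count, ...)
     {
       va_list ap;
       int i, total = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
         total += va_arg (ap, int);
       va_end (ap);
       return total;
     }

   Here va_arg is only ever used with an integral type, so on targets whose
   va_start expansion would otherwise spill both general purpose and floating
   point argument registers, the floating point registers need not be saved
   at all.  */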


/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
        {
          ret = false;
          break;
        }

      if (src == va_start_bb)
        continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
        {
          ret = false;
          break;
        }

      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
        {
          bitmap_set_bit (visited, src->index);
          FOR_EACH_EDGE (e, ei, src->preds)
            stack.safe_push (e);
        }
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}


/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return the constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is a GPR counter.  */
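
/* For example, on a target with a struct va_list containing a gp_offset
   counter field, a single va_arg (ap, int) might be gimplified as (a
   hypothetical sketch; actual SSA names and field names depend on the
   target):

     gp_offset.0_4 = ap.gp_offset;
     _5 = gp_offset.0_4 + 8;
     ap.gp_offset = _5;

   For the final store, COUNTER is ap.gp_offset and RHS is _5; walking the
   use-def chain from _5 back to the read of ap.gp_offset accumulates the
   constant 8, which is what this function returns.  */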

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
                      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
        si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        {
          if (counter_val >= max_size)
            {
              ret = max_size;
              break;
            }

          ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
          break;
        }

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
        return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
        {
          ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
        {
          ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
        return HOST_WIDE_INT_M1U;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
        return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
        {
          if (get_base_address (counter) != get_base_address (rhs)
              || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
              || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
            return HOST_WIDE_INT_M1U;
        }
      else if (counter != rhs)
        return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        break;

      if (val >= max_size)
        si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
        si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
        {
          val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
        {
          val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      lhs = NULL;
    }

  return ret;
}


/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    {
      if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
        return var;
    }
  else if (TREE_CODE (var) == VAR_DECL)
    {
      if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
        return var;
    }

  return NULL_TREE;
}


/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter;
   if WRITE_P is true it was seen in an AP = VAR statement,
   otherwise in a VAR = AP statement.  GPR_P is true if AP is a GPR
   counter, false if it is an FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
                    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
        {
          cfun->va_list_gpr_size += increment;
          return;
        }

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
        {
          cfun->va_list_fpr_size += increment;
          return;
        }
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
        cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}


/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in an AP = VAR assignment; otherwise
   AP has been seen in a VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   the current function.  */

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
                           bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}


/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track that it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}


/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}


/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing the value of some va_list variable plus optionally some
   constant, either set si->va_list_escapes or add LHS to
   si->va_list_escape_vars, depending on whether LHS is a function local
   temporary.  */
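
/* For instance (hypothetical statements, for illustration only):

     tem_1 = ap;       tem_1 is already in si->va_list_escape_vars
     p_2 = tem_1;      LHS is a local SSA temporary: track p_2 as well
     global = tem_1;   LHS is not an SSA name: the va_list escapes  */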

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
        return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
           && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
        return;
    }
  else
    return;

  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}


/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          tree lhs;
          use_operand_p uop;
          ssa_op_iter soi;
          gphi *phi = i.phi ();

          lhs = PHI_RESULT (phi);
          if (virtual_operand_p (lhs)
              || bitmap_bit_p (si->va_list_escape_vars,
                               SSA_NAME_VERSION (lhs)))
            continue;

          FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
            {
              tree rhs = USE_FROM_PTR (uop);
              if (TREE_CODE (rhs) == SSA_NAME
                  && bitmap_bit_p (si->va_list_escape_vars,
                                   SSA_NAME_VERSION (rhs)))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fputs ("va_list escapes in ", dump_file);
                      print_gimple_stmt (dump_file, phi, 0, dump_flags);
                      fputc ('\n', dump_file);
                    }
                  return true;
                }
            }
        }

      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          tree use;
          ssa_op_iter iter;

          if (is_gimple_debug (stmt))
            continue;

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
            {
              if (! bitmap_bit_p (si->va_list_escape_vars,
                                  SSA_NAME_VERSION (use)))
                continue;

              if (is_gimple_assign (stmt))
                {
                  tree rhs = gimple_assign_rhs1 (stmt);
                  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

                  /* x = *ap_temp;  */
                  if (rhs_code == MEM_REF
                      && TREE_OPERAND (rhs, 0) == use
                      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
                      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
                      && si->offsets[SSA_NAME_VERSION (use)] != -1)
                    {
                      unsigned HOST_WIDE_INT gpr_size;
                      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

                      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
                                 + tree_to_shwi (TREE_OPERAND (rhs, 1))
                                 + tree_to_uhwi (access_size);
                      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
                        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
                      else if (gpr_size > cfun->va_list_gpr_size)
                        cfun->va_list_gpr_size = gpr_size;
                      continue;
                    }

                  /* va_arg sequences may contain
                       other_ap_temp = ap_temp;
                       other_ap_temp = ap_temp + constant;
                       other_ap_temp = (some_type *) ap_temp;
                       ap = ap_temp;
                     statements.  */
                  if (rhs == use
                      && ((rhs_code == POINTER_PLUS_EXPR
                           && (TREE_CODE (gimple_assign_rhs2 (stmt))
                               == INTEGER_CST))
                          || gimple_assign_cast_p (stmt)
                          || (get_gimple_rhs_class (rhs_code)
                              == GIMPLE_SINGLE_RHS)))
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (TREE_CODE (lhs) == SSA_NAME
                          && bitmap_bit_p (si->va_list_escape_vars,
                                           SSA_NAME_VERSION (lhs)))
                        continue;

                      if (TREE_CODE (lhs) == VAR_DECL
                          && bitmap_bit_p (si->va_list_vars,
                                           DECL_UID (lhs) + num_ssa_names))
                        continue;
                    }
                  else if (rhs_code == ADDR_EXPR
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
                           && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (bitmap_bit_p (si->va_list_escape_vars,
                                        SSA_NAME_VERSION (lhs)))
                        continue;
                    }
                }

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              return true;
            }
        }
    }

  return false;
}

/* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size.  */
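
/* With the stdarg dump enabled (-fdump-tree-stdarg), this function prints a
   one-line summary per function via the fprintf calls at its end, e.g. for a
   hypothetical function foo:

     foo: va_list escapes 0, needs to save 16 GPR units and 0 FPR units.  */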

static void
optimize_va_list_gpr_fpr_size (function *fun)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  fun->va_list_gpr_size = 0;
  fun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  cfun_va_list = targetm.fn_abi_va_list (fun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          tree callee, ap;

          if (!is_gimple_call (stmt))
            continue;

          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_VA_START:
              break;
              /* If old style builtins are used, don't optimize anything.  */
            case BUILT_IN_SAVEREGS:
            case BUILT_IN_NEXT_ARG:
              va_list_escapes = true;
              continue;
            default:
              continue;
            }

          si.va_start_count++;
          ap = gimple_call_arg (stmt, 0);

          if (TREE_CODE (ap) != ADDR_EXPR)
            {
              va_list_escapes = true;
              break;
            }
          ap = TREE_OPERAND (ap, 0);
          if (TREE_CODE (ap) == ARRAY_REF)
            {
              if (! integer_zerop (TREE_OPERAND (ap, 1)))
                {
                  va_list_escapes = true;
                  break;
                }
              ap = TREE_OPERAND (ap, 0);
            }
          if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
              != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
              || TREE_CODE (ap) != VAR_DECL)
            {
              va_list_escapes = true;
              break;
            }

          if (is_global_var (ap))
            {
              va_list_escapes = true;
              break;
            }

          bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

          /* VA_START_BB and VA_START_AP will be only used if there is just
             one va_start in the function.  */
          si.va_start_bb = bb;
          si.va_start_ap = ap;
        }

      if (va_list_escapes)
        break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB_FN (bb, fun)
    {
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
         them as assignments for the purpose of escape analysis.  This is
         not needed for non-simple va_list because virtual phis don't perform
         any real data movement.  Also, check PHI nodes for taking address of
         the va_list vars.  */
      tree lhs, rhs;
      use_operand_p uop;
      ssa_op_iter soi;

      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          gphi *phi = i.phi ();
          lhs = PHI_RESULT (phi);

          if (virtual_operand_p (lhs))
            continue;

          if (va_list_simple_ptr)
            {
              FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
                {
                  rhs = USE_FROM_PTR (uop);
                  if (va_list_ptr_read (&si, rhs, lhs))
                    continue;
                  else if (va_list_ptr_write (&si, lhs, rhs))
                    continue;
                  else
                    check_va_list_escapes (&si, lhs, rhs);

                  if (si.va_list_escapes)
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fputs ("va_list escapes in ", dump_file);
                          print_gimple_stmt (dump_file, phi, 0, dump_flags);
                          fputc ('\n', dump_file);
                        }
                      va_list_escapes = true;
                    }
                }
            }

          for (unsigned j = 0; !va_list_escapes
               && j < gimple_phi_num_args (phi); ++j)
            if ((!va_list_simple_ptr
                 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
                && walk_tree (gimple_phi_arg_def_ptr (phi, j),
                              find_va_list_reference, &wi, NULL))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  {
                    fputs ("va_list escapes in ", dump_file);
                    print_gimple_stmt (dump_file, phi, 0, dump_flags);
                    fputc ('\n', dump_file);
                  }
                va_list_escapes = true;
              }
        }

      for (gimple_stmt_iterator i = gsi_start_bb (bb);
           !gsi_end_p (i) && !va_list_escapes;
           gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);

          /* Don't look at __builtin_va_{start,end}, they are ok.  */
          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);

              if (callee
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
                continue;
            }

          if (is_gimple_assign (stmt))
            {
              lhs = gimple_assign_lhs (stmt);
              rhs = gimple_assign_rhs1 (stmt);

              if (va_list_simple_ptr)
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for tem = ap.  */
                      else if (va_list_ptr_read (&si, rhs, lhs))
                        continue;

                      /* Check for the last insn in:
                           tem1 = ap;
                           tem2 = tem1 + CST;
                           ap = tem2;
                         sequence.  */
                      else if (va_list_ptr_write (&si, lhs, rhs))
                        continue;
                    }

                  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
                      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                          == GIMPLE_SINGLE_RHS))
                    check_va_list_escapes (&si, lhs, rhs);
                }
              else
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for ap[0].field = temp.  */
                      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
                        continue;

                      /* Check for temp = ap[0].field.  */
                      else if (va_list_counter_struct_op (&si, rhs, lhs,
                                                          false))
                        continue;
                    }

                  /* Do any architecture specific checking.  */
                  if (targetm.stdarg_optimize_hook
                      && targetm.stdarg_optimize_hook (&si, stmt))
                    continue;
                }
            }
          else if (is_gimple_debug (stmt))
            continue;

          /* All other uses of va_list are either va_copy (that is not handled
             in this optimization), taking address of va_list variable or
             passing va_list to other functions (in that case va_list might
             escape the function and therefore va_start needs to set it up
             fully), or some unexpected use of va_list.  None of these should
             happen in a gimplified VA_ARG_EXPR.  */
          if (si.va_list_escapes
              || walk_gimple_op (stmt, find_va_list_reference, &wi))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              va_list_escapes = true;
            }
        }

      if (va_list_escapes)
        break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

 finish:
  if (va_list_escapes)
    {
      fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
               funcname, (int) va_list_escapes);
      if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
}

/* Return true if STMT is IFN_VA_ARG.  */

static bool
gimple_call_ifn_va_arg_p (gimple stmt)
{
  return (is_gimple_call (stmt)
          && gimple_call_internal_p (stmt)
          && gimple_call_internal_fn (stmt) == IFN_VA_ARG);
}

/* Expand IFN_VA_ARGs in FUN.  */
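
/* A call such as

     lhs = VA_ARG (&ap, ptr, size);

   where PTR is a pointer whose pointee type is the requested argument type
   and SIZE is the optional third operand carrying a WITH_SIZE_EXPR size, is
   replaced below by the target's gimplified va_arg sequence for that type
   (a hypothetical shape, for illustration; the exact operands are whatever
   build_va_arg emitted).  */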

static void
expand_ifn_va_arg_1 (function *fun)
{
  bool modified = false;
  basic_block bb;
  gimple_stmt_iterator i;

  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
        gimple stmt = gsi_stmt (i);
        tree ap, expr, lhs, type;
        gimple_seq pre = NULL, post = NULL;

        if (!gimple_call_ifn_va_arg_p (stmt))
          continue;

        modified = true;

        type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
        ap = gimple_call_arg (stmt, 0);

        /* Balance out the &ap, usually added by build_va_arg.  */
        ap = build_fold_indirect_ref (ap);

        push_gimplify_context (false);

        /* Make it easier for the backends by protecting the valist argument
           from multiple evaluations.  */
        gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue);

        expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post);

        lhs = gimple_call_lhs (stmt);
        if (lhs != NULL_TREE)
          {
            unsigned int nargs = gimple_call_num_args (stmt);
            gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));

            if (nargs == 3)
              {
                /* We've transported the size of a WITH_SIZE_EXPR here as
                   the last argument of the internal fn call.  Now reinstate
                   it.  */
                tree size = gimple_call_arg (stmt, nargs - 1);
                expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
              }

            /* We use gimplify_assign here, rather than gimple_build_assign,
               because gimplify_assign knows how to deal with variable-sized
               types.  */
            gimplify_assign (lhs, expr, &pre);
          }
        else
          gimplify_expr (&expr, &pre, &post, is_gimple_lvalue, fb_lvalue);

        pop_gimplify_context (NULL);

        gimple_seq_add_seq (&pre, post);
        update_modified_stmts (pre);

        /* Add the sequence after IFN_VA_ARG.  This splits the bb right
           after IFN_VA_ARG, and adds the sequence in one or more new bbs
           in between.  */
        gimple_find_sub_bbs (pre, &i);

        /* Remove the IFN_VA_ARG gimple_call.  It's the last stmt in the
           bb.  */
        gsi_remove (&i, true);
        gcc_assert (gsi_end_p (i));

        /* We're walking here into the bbs which contain the expansion of
           IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
           expanding.  We could try to skip walking these bbs, perhaps by
           walking backwards over gimples and bbs.  */
        break;
      }

  if (!modified)
    return;

  free_dominance_info (CDI_DOMINATORS);
  update_ssa (TODO_update_ssa);
}

/* Expand IFN_VA_ARGs in FUN, if necessary.  */

static void
expand_ifn_va_arg (function *fun)
{
  if ((fun->curr_properties & PROP_gimple_lva) == 0)
    expand_ifn_va_arg_1 (fun);

#if ENABLE_CHECKING
  basic_block bb;
  gimple_stmt_iterator i;
  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      gcc_assert (!gimple_call_ifn_va_arg_p (gsi_stmt (i)));
#endif
}

namespace {

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    /* Always run this pass, in order to expand va_arg internal_fns.  We
       also need to do that if fun->stdarg == 0, because a va_arg may also
       occur in a function without varargs, e.g. when a va_list is passed
       to another function.  */
    return true;
  }

  virtual unsigned int execute (function *);

}; // class pass_stdarg

unsigned int
pass_stdarg::execute (function *fun)
{
  /* TODO: Postpone expand_ifn_va_arg till after
     optimize_va_list_gpr_fpr_size.  */
  expand_ifn_va_arg (fun);

  if (flag_stdarg_opt
      /* This optimization is only for stdarg functions.  */
      && fun->stdarg != 0)
    optimize_va_list_gpr_fpr_size (fun);

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}

namespace {

const pass_data pass_data_lower_vaarg =
{
  GIMPLE_PASS, /* type */
  "lower_vaarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_vaarg : public gimple_opt_pass
{
public:
  pass_lower_vaarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return (cfun->curr_properties & PROP_gimple_lva) == 0;
  }

  virtual unsigned int execute (function *);

}; // class pass_lower_vaarg

unsigned int
pass_lower_vaarg::execute (function *fun)
{
  expand_ifn_va_arg (fun);
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_vaarg (gcc::context *ctxt)
{
  return new pass_lower_vaarg (ctxt);
}