/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2016 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "langhooks.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimplify.h"
#include "tree-into-ssa.h"
#include "tree-cfg.h"
#include "tree-stdarg.h"
#include "tree-chkp.h"

/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will
   be used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
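
/* For example, in

     int
     sum (int count, ...)
     {
       va_list ap;
       int i, total = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
         total += va_arg (ap, int);
       va_end (ap);
       return total;
     }

   va_arg is only applied to an integral type, so on such targets none of
   the floating point argument registers have to be spilled to the register
   save area by the va_start expansion.  */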


/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */
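
/* E.g. if va_start appears in the function's entry block and va_arg inside
   a loop body, the backward search below reaches the va_arg block again
   through the loop back edge, so this returns false.  */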

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  auto_vec<edge, 10> stack;
  edge e;
  edge_iterator ei;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  auto_sbitmap visited (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
        {
          ret = false;
          break;
        }

      if (src == va_start_bb)
        continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
        {
          ret = false;
          break;
        }

      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
        {
          bitmap_set_bit (visited, src->index);
          FOR_EACH_EDGE (e, ei, src->preds)
            stack.safe_push (e);
        }
    }

  return ret;
}


/* For the statement COUNTER = RHS, if RHS is COUNTER + constant,
   return the constant; otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is a GPR counter.  */
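
/* As an illustration, for a target whose va_list tracks general registers
   through a gp_offset-style counter field (a hypothetical layout, bumped
   by 8 per register), the GIMPLE sequence

     tem1 = ap.gp_offset;
     tem2 = tem1 + 8;
     ap.gp_offset = tem2;

   makes this function return 8 for the RHS tem2 of the final statement.  */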

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
                      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple *stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
        si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        {
          if (counter_val >= max_size)
            {
              ret = max_size;
              break;
            }

          ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
          break;
        }

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
        return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
        {
          ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
        {
          ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
        return HOST_WIDE_INT_M1U;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
        return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
        {
          if (get_base_address (counter) != get_base_address (rhs)
              || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
              || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
            return HOST_WIDE_INT_M1U;
        }
      else if (counter != rhs)
        return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        break;

      if (val >= max_size)
        si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
        si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
        {
          val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
        {
          val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      lhs = NULL;
    }

  return ret;
}


/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    {
      if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
        return var;
    }
  else if (VAR_P (var))
    {
      if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
        return var;
    }

  return NULL_TREE;
}


/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter;
   if WRITE_P is true, it was seen in an AP = VAR statement, otherwise
   in a VAR = AP statement.  GPR_P is true if AP is a GPR counter,
   false if it is an FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
                    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
        {
          cfun->va_list_gpr_size += increment;
          return;
        }

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
        {
          cfun->va_list_fpr_size += increment;
          return;
        }
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
        cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}


/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in an AP = VAR assignment; if
   WRITE_P is false, AP has been seen in a VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   the current function.  */
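
/* E.g. with the hypothetical gp_offset counter layout used in the examples
   above, ap.gp_offset = tem2 is handled here with WRITE_P true, and
   tem1 = ap.gp_offset with WRITE_P false.  */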

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
                           bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  base = get_base_address (ap);
  if (!VAR_P (base)
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}


/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (!VAR_P (ap)
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track that it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}


/* Check for the sequence:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (!VAR_P (ap)
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}


/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing the value of some va_list variable plus optionally some
   constant, either set si->va_list_escapes or add LHS to
   si->va_list_escape_vars, depending on whether LHS is a function
   local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
        return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
           && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
        return;
    }
  else
    return;

  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}


/* Check all uses of temporaries from the si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          tree lhs;
          use_operand_p uop;
          ssa_op_iter soi;
          gphi *phi = i.phi ();

          lhs = PHI_RESULT (phi);
          if (virtual_operand_p (lhs)
              || bitmap_bit_p (si->va_list_escape_vars,
                               SSA_NAME_VERSION (lhs)))
            continue;

          FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
            {
              tree rhs = USE_FROM_PTR (uop);
              if (TREE_CODE (rhs) == SSA_NAME
                  && bitmap_bit_p (si->va_list_escape_vars,
                                   SSA_NAME_VERSION (rhs)))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fputs ("va_list escapes in ", dump_file);
                      print_gimple_stmt (dump_file, phi, 0, dump_flags);
                      fputc ('\n', dump_file);
                    }
                  return true;
                }
            }
        }

      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          gimple *stmt = gsi_stmt (i);
          tree use;
          ssa_op_iter iter;

          if (is_gimple_debug (stmt))
            continue;

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
            {
              if (! bitmap_bit_p (si->va_list_escape_vars,
                                  SSA_NAME_VERSION (use)))
                continue;

              if (is_gimple_assign (stmt))
                {
                  tree rhs = gimple_assign_rhs1 (stmt);
                  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

                  /* x = *ap_temp;  */
                  if (rhs_code == MEM_REF
                      && TREE_OPERAND (rhs, 0) == use
                      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
                      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
                      && si->offsets[SSA_NAME_VERSION (use)] != -1)
                    {
                      unsigned HOST_WIDE_INT gpr_size;
                      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

                      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
                                 + tree_to_shwi (TREE_OPERAND (rhs, 1))
                                 + tree_to_uhwi (access_size);
                      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
                        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
                      else if (gpr_size > cfun->va_list_gpr_size)
                        cfun->va_list_gpr_size = gpr_size;
                      continue;
                    }

                  /* va_arg sequences may contain
                       other_ap_temp = ap_temp;
                       other_ap_temp = ap_temp + constant;
                       other_ap_temp = (some_type *) ap_temp;
                       ap = ap_temp;
                     statements.  */
                  if (rhs == use
                      && ((rhs_code == POINTER_PLUS_EXPR
                           && (TREE_CODE (gimple_assign_rhs2 (stmt))
                               == INTEGER_CST))
                          || gimple_assign_cast_p (stmt)
                          || (get_gimple_rhs_class (rhs_code)
                              == GIMPLE_SINGLE_RHS)))
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (TREE_CODE (lhs) == SSA_NAME
                          && bitmap_bit_p (si->va_list_escape_vars,
                                           SSA_NAME_VERSION (lhs)))
                        continue;

                      if (VAR_P (lhs)
                          && bitmap_bit_p (si->va_list_vars,
                                           DECL_UID (lhs) + num_ssa_names))
                        continue;
                    }
                  else if (rhs_code == ADDR_EXPR
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
                           && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (bitmap_bit_p (si->va_list_escape_vars,
                                        SSA_NAME_VERSION (lhs)))
                        continue;
                    }
                }

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              return true;
            }
        }
    }

  return false;
}

/* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size.  */
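
/* The GPR/FPR sizes are in backend-defined units: depending on the target
   they count bytes of the register save area or a number of registers (see
   the va_list_gpr_size and va_list_fpr_size fields of struct function).  */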

static void
optimize_va_list_gpr_fpr_size (function *fun)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  fun->va_list_gpr_size = 0;
  fun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  cfun_va_list = targetm.fn_abi_va_list (fun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple *stmt = gsi_stmt (i);
          tree callee, ap;

          if (!is_gimple_call (stmt))
            continue;

          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_VA_START:
              break;
              /* If old-style builtins are used, don't optimize anything.  */
            case BUILT_IN_SAVEREGS:
            case BUILT_IN_NEXT_ARG:
              va_list_escapes = true;
              continue;
            default:
              continue;
            }

          si.va_start_count++;
          ap = gimple_call_arg (stmt, 0);

          if (TREE_CODE (ap) != ADDR_EXPR)
            {
              va_list_escapes = true;
              break;
            }
          ap = TREE_OPERAND (ap, 0);
          if (TREE_CODE (ap) == ARRAY_REF)
            {
              if (! integer_zerop (TREE_OPERAND (ap, 1)))
                {
                  va_list_escapes = true;
                  break;
                }
              ap = TREE_OPERAND (ap, 0);
            }
          if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
              != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
              || !VAR_P (ap))
            {
              va_list_escapes = true;
              break;
            }

          if (is_global_var (ap))
            {
              va_list_escapes = true;
              break;
            }

          bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

          /* VA_START_BB and VA_START_AP will only be used if there is just
             one va_start in the function.  */
          si.va_start_bb = bb;
          si.va_start_ap = ap;
        }

      if (va_list_escapes)
        break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB_FN (bb, fun)
    {
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
         them as assignments for the purpose of escape analysis.  This is
         not needed for non-simple va_list because virtual phis don't perform
         any real data movement.  Also, check PHI nodes for taking address of
         the va_list vars.  */
      tree lhs, rhs;
      use_operand_p uop;
      ssa_op_iter soi;

      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          gphi *phi = i.phi ();
          lhs = PHI_RESULT (phi);

          if (virtual_operand_p (lhs))
            continue;

          if (va_list_simple_ptr)
            {
              FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
                {
                  rhs = USE_FROM_PTR (uop);
                  if (va_list_ptr_read (&si, rhs, lhs))
                    continue;
                  else if (va_list_ptr_write (&si, lhs, rhs))
                    continue;
                  else
                    check_va_list_escapes (&si, lhs, rhs);

                  if (si.va_list_escapes)
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fputs ("va_list escapes in ", dump_file);
                          print_gimple_stmt (dump_file, phi, 0, dump_flags);
                          fputc ('\n', dump_file);
                        }
                      va_list_escapes = true;
                    }
                }
            }

          for (unsigned j = 0; !va_list_escapes
               && j < gimple_phi_num_args (phi); ++j)
            if ((!va_list_simple_ptr
                 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
                && walk_tree (gimple_phi_arg_def_ptr (phi, j),
                              find_va_list_reference, &wi, NULL))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  {
                    fputs ("va_list escapes in ", dump_file);
                    print_gimple_stmt (dump_file, phi, 0, dump_flags);
                    fputc ('\n', dump_file);
                  }
                va_list_escapes = true;
              }
        }

      for (gimple_stmt_iterator i = gsi_start_bb (bb);
           !gsi_end_p (i) && !va_list_escapes;
           gsi_next (&i))
        {
          gimple *stmt = gsi_stmt (i);

          /* Don't look at __builtin_va_{start,end}, they are ok.  */
          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);

              if (callee
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
                continue;
            }

          if (is_gimple_assign (stmt))
            {
              lhs = gimple_assign_lhs (stmt);
              rhs = gimple_assign_rhs1 (stmt);

              if (va_list_simple_ptr)
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for tem = ap.  */
                      else if (va_list_ptr_read (&si, rhs, lhs))
                        continue;

                      /* Check for the last insn in:
                           tem1 = ap;
                           tem2 = tem1 + CST;
                           ap = tem2;
                         sequence.  */
                      else if (va_list_ptr_write (&si, lhs, rhs))
                        continue;
                    }

                  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
                      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                          == GIMPLE_SINGLE_RHS))
                    check_va_list_escapes (&si, lhs, rhs);
                }
              else
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for ap[0].field = temp.  */
                      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
                        continue;

                      /* Check for temp = ap[0].field.  */
                      else if (va_list_counter_struct_op (&si, rhs, lhs,
                                                          false))
                        continue;
                    }

                  /* Do any architecture specific checking.  */
                  if (targetm.stdarg_optimize_hook
                      && targetm.stdarg_optimize_hook (&si, stmt))
                    continue;
                }
            }
          else if (is_gimple_debug (stmt))
            continue;

          /* All other uses of va_list are either va_copy (that is not handled
             in this optimization), taking address of va_list variable or
             passing va_list to other functions (in that case va_list might
             escape the function and therefore va_start needs to set it up
             fully), or some unexpected use of va_list.  None of these should
             happen in a gimplified VA_ARG_EXPR.  */
          if (si.va_list_escapes
              || walk_gimple_op (stmt, find_va_list_reference, &wi))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              va_list_escapes = true;
            }
        }

      if (va_list_escapes)
        break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
               funcname, (int) va_list_escapes);
      if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", fun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", fun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
}

/* Expand IFN_VA_ARGs in FUN.  */
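
/* Until this pass, a C statement like

     x = va_arg (ap, int);

   is represented as a call to the internal function, shown in GIMPLE dumps
   along the lines of

     x = VA_ARG (&ap, 0B, 0B);

   Here it is replaced by the target's gimplified va_arg expansion; for a
   target whose va_list is a plain pointer that is roughly (an illustrative
   sketch, sizes and alignment being target-dependent)

     ap.1 = ap;
     ap = ap.1 + 8;
     x = MEM[(int *) ap.1];  */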

static void
expand_ifn_va_arg_1 (function *fun)
{
  bool modified = false;
  basic_block bb;
  gimple_stmt_iterator i;
  location_t saved_location;

  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
        gimple *stmt = gsi_stmt (i);
        tree ap, aptype, expr, lhs, type;
        gimple_seq pre = NULL, post = NULL;

        if (!gimple_call_internal_p (stmt, IFN_VA_ARG))
          continue;

        modified = true;

        type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
        ap = gimple_call_arg (stmt, 0);
        aptype = TREE_TYPE (gimple_call_arg (stmt, 2));
        gcc_assert (POINTER_TYPE_P (aptype));

        /* Balance out the &ap, usually added by build_va_arg.  */
        ap = build2 (MEM_REF, TREE_TYPE (aptype), ap,
                     build_int_cst (aptype, 0));

        push_gimplify_context (false);
        saved_location = input_location;
        input_location = gimple_location (stmt);

        /* Make it easier for the backends by protecting the va_list argument
           from multiple evaluations.  */
        gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue);

        expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post);

        lhs = gimple_call_lhs (stmt);
        if (lhs != NULL_TREE)
          {
            unsigned int nargs = gimple_call_num_args (stmt);
            gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));

            /* We replace the call with a new expr.  This may require
               a corresponding bndret call fixup.  */
            if (chkp_function_instrumented_p (fun->decl))
              chkp_fixup_inlined_call (lhs, expr);

            if (nargs == 4)
              {
                /* We've transported the size from the WITH_SIZE_EXPR here as
                   the last argument of the internal fn call.  Now reinstate
                   it.  */
                tree size = gimple_call_arg (stmt, nargs - 1);
                expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
              }

            /* We use gimplify_assign here, rather than gimple_build_assign,
               because gimplify_assign knows how to deal with variable-sized
               types.  */
            gimplify_assign (lhs, expr, &pre);
          }
        else
          gimplify_expr (&expr, &pre, &post, is_gimple_lvalue, fb_lvalue);

        input_location = saved_location;
        pop_gimplify_context (NULL);

        gimple_seq_add_seq (&pre, post);
        update_modified_stmts (pre);

        /* Add the sequence after IFN_VA_ARG.  This splits the bb right
           after IFN_VA_ARG, and adds the sequence in one or more new bbs
           in between.  */
        gimple_find_sub_bbs (pre, &i);

        /* Remove the IFN_VA_ARG gimple_call.  It's the last stmt in the
           bb.  */
        unlink_stmt_vdef (stmt);
        release_ssa_name_fn (fun, gimple_vdef (stmt));
        gsi_remove (&i, true);
        gcc_assert (gsi_end_p (i));

        /* We're walking here into the bbs which contain the expansion of
           IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
           expanding.  We could try to skip walking these bbs, perhaps by
           walking backwards over gimples and bbs.  */
        break;
      }

  if (!modified)
    return;

  free_dominance_info (CDI_DOMINATORS);
  update_ssa (TODO_update_ssa);
}

/* Expand IFN_VA_ARGs in FUN, if necessary.  */

static void
expand_ifn_va_arg (function *fun)
{
  if ((fun->curr_properties & PROP_gimple_lva) == 0)
    expand_ifn_va_arg_1 (fun);

  if (flag_checking)
    {
      basic_block bb;
      gimple_stmt_iterator i;
      FOR_EACH_BB_FN (bb, fun)
        for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
          gcc_assert (!gimple_call_internal_p (gsi_stmt (i), IFN_VA_ARG));
    }
}

namespace {

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* Always run this pass, in order to expand va_arg internal_fns.  We
         also need to do that if fun->stdarg == 0, because a va_arg may also
         occur in a function without varargs, e.g. when passing a va_list to
         another function.  */
      return true;
    }

  virtual unsigned int execute (function *);

}; // class pass_stdarg

unsigned int
pass_stdarg::execute (function *fun)
{
  /* TODO: Postpone expand_ifn_va_arg till after
     optimize_va_list_gpr_fpr_size.  */
  expand_ifn_va_arg (fun);

  if (flag_stdarg_opt
      /* This optimization is only for stdarg functions.  */
      && fun->stdarg != 0)
    optimize_va_list_gpr_fpr_size (fun);

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}

namespace {

const pass_data pass_data_lower_vaarg =
{
  GIMPLE_PASS, /* type */
  "lower_vaarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_vaarg : public gimple_opt_pass
{
public:
  pass_lower_vaarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (cfun->curr_properties & PROP_gimple_lva) == 0;
    }

  virtual unsigned int execute (function *);

}; // class pass_lower_vaarg

unsigned int
pass_lower_vaarg::execute (function *fun)
{
  expand_ifn_va_arg (fun);
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_vaarg (gcc::context *ctxt)
{
  return new pass_lower_vaarg (ctxt);
}