]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
tree-core.h: Include symtab.h.
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
5624e564 2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
cbdd87d4 25#include "tree.h"
c7131fb2 26#include "gimple.h"
36566b39 27#include "rtl.h"
c7131fb2
AM
28#include "ssa.h"
29#include "alias.h"
30#include "fold-const.h"
36566b39 31#include "flags.h"
36566b39
PK
32#include "insn-config.h"
33#include "expmed.h"
34#include "dojump.h"
35#include "explow.h"
36#include "calls.h"
37#include "emit-rtl.h"
38#include "varasm.h"
39#include "stmt.h"
40#include "expr.h"
41#include "stor-layout.h"
7ee2468b 42#include "dumpfile.h"
60393bbc 43#include "dominance.h"
2fb9a547
AM
44#include "internal-fn.h"
45#include "gimple-fold.h"
45b0be94 46#include "gimplify.h"
5be5c238 47#include "gimple-iterator.h"
442b4905
AM
48#include "tree-into-ssa.h"
49#include "tree-dfa.h"
7a300452 50#include "tree-ssa.h"
cbdd87d4 51#include "tree-ssa-propagate.h"
cbdd87d4 52#include "target.h"
c582198b 53#include "cgraph.h"
450ad0cd
JH
54#include "ipa-utils.h"
55#include "gimple-pretty-print.h"
4484a35a 56#include "tree-ssa-address.h"
862d0b35 57#include "langhooks.h"
19e51b40 58#include "gimplify-me.h"
2b5f0895 59#include "dbgcnt.h"
9b2b7279 60#include "builtins.h"
fef5a0d9 61#include "output.h"
e0ee10ed
RB
62#include "tree-eh.h"
63#include "gimple-match.h"
cbdd87d4 64
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* An inlined-to body will not be output on its own.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
164
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Return NULL_TREE when CVAL cannot be made invariant (e.g. its base decl
   cannot be referenced from the current unit).  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Canonicalize PTR p+ CST into &MEM[ptr + CST] so the result is a
     plain ADDR_EXPR that is_gimple_min_invariant accepts.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal by its underlying decl.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      /* Taking the address of BASE; mark it addressable.  */
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      /* Preserve the type of the original expression.  */
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
cbdd87d4
RG
225
226/* If SYM is a constant variable with known value, return the value.
227 NULL_TREE is returned otherwise. */
228
229tree
230get_symbol_constant_value (tree sym)
231{
6a6dac52
JH
232 tree val = ctor_for_folding (sym);
233 if (val != error_mark_node)
cbdd87d4 234 {
cbdd87d4
RG
235 if (val)
236 {
9d60be38 237 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 238 if (val && is_gimple_min_invariant (val))
17f39a39 239 return val;
1389294c
JH
240 else
241 return NULL_TREE;
cbdd87d4
RG
242 }
243 /* Variables declared 'const' without an initializer
244 have zero as the initializer if they may not be
245 overridden at link or run time. */
246 if (!val
b8a8c472 247 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 248 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
249 }
250
251 return NULL_TREE;
252}
253
254
cbdd87d4
RG
255
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  Return the folded tree or
   NULL_TREE if no simplification could be made.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  /* Unary wrappers around a constant fold to a constant regardless
     of lvalue-ness.  */
  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  /* Only an rvalue reference may be replaced by a constant aggregate
     value; an lvalue must remain a memory reference.  */
  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
290
291
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers are not foldable.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		/* Devirtualize only when the target list is complete
		   (FINAL) and unambiguous; dbg_cnt allows bisection.  */
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }

	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p + 0] folds to just p (with a conversion).  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X.  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));

	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
530
531/* Attempt to fold a conditional statement. Return true if any changes were
532 made. We only attempt to fold the condition expression, and do not perform
533 any transformation that would require alteration of the cfg. It is
534 assumed that the operands have been previously folded. */
535
536static bool
538dd0b7 537fold_gimple_cond (gcond *stmt)
cbdd87d4
RG
538{
539 tree result = fold_binary_loc (gimple_location (stmt),
540 gimple_cond_code (stmt),
541 boolean_type_node,
542 gimple_cond_lhs (stmt),
543 gimple_cond_rhs (stmt));
544
545 if (result)
546 {
547 STRIP_USELESS_TYPE_CONVERSION (result);
548 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
549 {
550 gimple_cond_set_condition_from_tree (stmt, result);
551 return true;
552 }
553 }
554
555 return false;
556}
557
fef5a0d9
RB
558
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence reuses the VDEF of the
	     replaced statement; earlier stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
630
cbdd87d4
RG
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a temporary and assign that to the call's
	 LHS as the final statement of the sequence.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
cbdd87d4 686
fef5a0d9
RB
687
688/* Replace the call at *GSI with the gimple value VAL. */
689
690static void
691replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
692{
693 gimple stmt = gsi_stmt (*gsi);
694 tree lhs = gimple_call_lhs (stmt);
695 gimple repl;
696 if (lhs)
e256dfce 697 {
fef5a0d9
RB
698 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
699 val = fold_convert (TREE_TYPE (lhs), val);
700 repl = gimple_build_assign (lhs, val);
701 }
702 else
703 repl = gimple_build_nop ();
704 tree vdef = gimple_vdef (stmt);
705 if (vdef && TREE_CODE (vdef) == SSA_NAME)
706 {
707 unlink_stmt_vdef (stmt);
708 release_ssa_name (vdef);
709 }
710 gsi_replace (gsi, repl, true);
711}
712
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      /* Move the virtual operands over so the SSA web stays intact and
	 make REPL the defining statement of the VDEF name.  */
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, true);
  fold_stmt (gsi);
}
732
733/* Return true if VAR is a VAR_DECL or a component thereof. */
734
735static bool
736var_decl_component_p (tree var)
737{
738 tree inner = var;
739 while (handled_component_p (inner))
740 inner = TREE_OPERAND (inner, 0);
741 return SSA_VAR_P (inner);
742}
743
744/* Fold function call to builtin mem{{,p}cpy,move}. Return
86c5a5c3 745 false if no simplification can be made.
fef5a0d9
RB
746 If ENDP is 0, return DEST (like memcpy).
747 If ENDP is 1, return DEST+LEN (like mempcpy).
748 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
749 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
750 (memmove). */
751
752static bool
753gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
754 tree dest, tree src, int endp)
755{
756 gimple stmt = gsi_stmt (*gsi);
757 tree lhs = gimple_call_lhs (stmt);
758 tree len = gimple_call_arg (stmt, 2);
759 tree destvar, srcvar;
760 location_t loc = gimple_location (stmt);
761
762 /* If the LEN parameter is zero, return DEST. */
763 if (integer_zerop (len))
764 {
765 gimple repl;
766 if (gimple_call_lhs (stmt))
767 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
768 else
769 repl = gimple_build_nop ();
770 tree vdef = gimple_vdef (stmt);
771 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 772 {
fef5a0d9
RB
773 unlink_stmt_vdef (stmt);
774 release_ssa_name (vdef);
775 }
776 gsi_replace (gsi, repl, true);
777 return true;
778 }
779
780 /* If SRC and DEST are the same (and not volatile), return
781 DEST{,+LEN,+LEN-1}. */
782 if (operand_equal_p (src, dest, 0))
783 {
784 unlink_stmt_vdef (stmt);
785 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
786 release_ssa_name (gimple_vdef (stmt));
787 if (!lhs)
788 {
789 gsi_replace (gsi, gimple_build_nop (), true);
790 return true;
791 }
792 goto done;
793 }
794 else
795 {
796 tree srctype, desttype;
797 unsigned int src_align, dest_align;
798 tree off0;
799
800 /* Build accesses at offset zero with a ref-all character type. */
801 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
802 ptr_mode, true), 0);
803
804 /* If we can perform the copy efficiently with first doing all loads
805 and then all stores inline it that way. Currently efficiently
806 means that we can load all the memory into a single integer
807 register which is what MOVE_MAX gives us. */
808 src_align = get_pointer_alignment (src);
809 dest_align = get_pointer_alignment (dest);
810 if (tree_fits_uhwi_p (len)
811 && compare_tree_int (len, MOVE_MAX) <= 0
812 /* ??? Don't transform copies from strings with known length this
813 confuses the tree-ssa-strlen.c. This doesn't handle
814 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
815 reason. */
816 && !c_strlen (src, 2))
817 {
818 unsigned ilen = tree_to_uhwi (len);
819 if (exact_log2 (ilen) != -1)
820 {
821 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
822 if (type
823 && TYPE_MODE (type) != BLKmode
824 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
825 == ilen * 8)
826 /* If the destination pointer is not aligned we must be able
827 to emit an unaligned store. */
828 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
829 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
830 {
831 tree srctype = type;
832 tree desttype = type;
833 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
834 srctype = build_aligned_type (type, src_align);
835 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
836 tree tem = fold_const_aggregate_ref (srcmem);
837 if (tem)
838 srcmem = tem;
839 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
840 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
841 src_align))
842 srcmem = NULL_TREE;
843 if (srcmem)
844 {
845 gimple new_stmt;
846 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
847 {
848 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
849 if (gimple_in_ssa_p (cfun))
850 srcmem = make_ssa_name (TREE_TYPE (srcmem),
851 new_stmt);
852 else
b731b390 853 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
fef5a0d9
RB
854 gimple_assign_set_lhs (new_stmt, srcmem);
855 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
856 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
857 }
858 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
859 desttype = build_aligned_type (type, dest_align);
860 new_stmt
861 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
862 dest, off0),
863 srcmem);
864 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
865 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
866 if (gimple_vdef (new_stmt)
867 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
868 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
869 if (!lhs)
870 {
871 gsi_replace (gsi, new_stmt, true);
872 return true;
873 }
874 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
875 goto done;
876 }
877 }
878 }
879 }
880
881 if (endp == 3)
882 {
883 /* Both DEST and SRC must be pointer types.
884 ??? This is what old code did. Is the testing for pointer types
885 really mandatory?
886
887 If either SRC is readonly or length is 1, we can use memcpy. */
888 if (!dest_align || !src_align)
889 return false;
890 if (readonly_data_expr (src)
891 || (tree_fits_uhwi_p (len)
892 && (MIN (src_align, dest_align) / BITS_PER_UNIT
893 >= tree_to_uhwi (len))))
894 {
895 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
896 if (!fn)
897 return false;
898 gimple_call_set_fndecl (stmt, fn);
899 gimple_call_set_arg (stmt, 0, dest);
900 gimple_call_set_arg (stmt, 1, src);
901 fold_stmt (gsi);
902 return true;
903 }
904
905 /* If *src and *dest can't overlap, optimize into memcpy as well. */
906 if (TREE_CODE (src) == ADDR_EXPR
907 && TREE_CODE (dest) == ADDR_EXPR)
908 {
909 tree src_base, dest_base, fn;
910 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
911 HOST_WIDE_INT size = -1;
912 HOST_WIDE_INT maxsize = -1;
913
914 srcvar = TREE_OPERAND (src, 0);
915 src_base = get_ref_base_and_extent (srcvar, &src_offset,
916 &size, &maxsize);
917 destvar = TREE_OPERAND (dest, 0);
918 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
919 &size, &maxsize);
920 if (tree_fits_uhwi_p (len))
921 maxsize = tree_to_uhwi (len);
922 else
923 maxsize = -1;
924 src_offset /= BITS_PER_UNIT;
925 dest_offset /= BITS_PER_UNIT;
926 if (SSA_VAR_P (src_base)
927 && SSA_VAR_P (dest_base))
928 {
929 if (operand_equal_p (src_base, dest_base, 0)
930 && ranges_overlap_p (src_offset, maxsize,
931 dest_offset, maxsize))
932 return false;
933 }
934 else if (TREE_CODE (src_base) == MEM_REF
935 && TREE_CODE (dest_base) == MEM_REF)
936 {
937 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
938 TREE_OPERAND (dest_base, 0), 0))
939 return false;
940 offset_int off = mem_ref_offset (src_base) + src_offset;
941 if (!wi::fits_shwi_p (off))
942 return false;
943 src_offset = off.to_shwi ();
944
945 off = mem_ref_offset (dest_base) + dest_offset;
946 if (!wi::fits_shwi_p (off))
947 return false;
948 dest_offset = off.to_shwi ();
949 if (ranges_overlap_p (src_offset, maxsize,
950 dest_offset, maxsize))
951 return false;
952 }
953 else
954 return false;
955
956 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
957 if (!fn)
958 return false;
959 gimple_call_set_fndecl (stmt, fn);
960 gimple_call_set_arg (stmt, 0, dest);
961 gimple_call_set_arg (stmt, 1, src);
962 fold_stmt (gsi);
963 return true;
964 }
965
966 /* If the destination and source do not alias optimize into
967 memcpy as well. */
968 if ((is_gimple_min_invariant (dest)
969 || TREE_CODE (dest) == SSA_NAME)
970 && (is_gimple_min_invariant (src)
971 || TREE_CODE (src) == SSA_NAME))
972 {
973 ao_ref destr, srcr;
974 ao_ref_init_from_ptr_and_size (&destr, dest, len);
975 ao_ref_init_from_ptr_and_size (&srcr, src, len);
976 if (!refs_may_alias_p_1 (&destr, &srcr, false))
977 {
978 tree fn;
979 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
980 if (!fn)
981 return false;
982 gimple_call_set_fndecl (stmt, fn);
983 gimple_call_set_arg (stmt, 0, dest);
984 gimple_call_set_arg (stmt, 1, src);
985 fold_stmt (gsi);
986 return true;
987 }
988 }
989
990 return false;
991 }
992
993 if (!tree_fits_shwi_p (len))
994 return false;
995 /* FIXME:
996 This logic lose for arguments like (type *)malloc (sizeof (type)),
997 since we strip the casts of up to VOID return value from malloc.
998 Perhaps we ought to inherit type from non-VOID argument here? */
999 STRIP_NOPS (src);
1000 STRIP_NOPS (dest);
1001 if (!POINTER_TYPE_P (TREE_TYPE (src))
1002 || !POINTER_TYPE_P (TREE_TYPE (dest)))
1003 return false;
1004 /* In the following try to find a type that is most natural to be
1005 used for the memcpy source and destination and that allows
1006 the most optimization when memcpy is turned into a plain assignment
1007 using that type. In theory we could always use a char[len] type
1008 but that only gains us that the destination and source possibly
1009 no longer will have their address taken. */
1010 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
1011 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
1012 {
1013 tree tem = TREE_OPERAND (src, 0);
1014 STRIP_NOPS (tem);
1015 if (tem != TREE_OPERAND (src, 0))
1016 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
1017 }
1018 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
1019 {
1020 tree tem = TREE_OPERAND (dest, 0);
1021 STRIP_NOPS (tem);
1022 if (tem != TREE_OPERAND (dest, 0))
1023 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
1024 }
1025 srctype = TREE_TYPE (TREE_TYPE (src));
1026 if (TREE_CODE (srctype) == ARRAY_TYPE
1027 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1028 {
1029 srctype = TREE_TYPE (srctype);
1030 STRIP_NOPS (src);
1031 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
1032 }
1033 desttype = TREE_TYPE (TREE_TYPE (dest));
1034 if (TREE_CODE (desttype) == ARRAY_TYPE
1035 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1036 {
1037 desttype = TREE_TYPE (desttype);
1038 STRIP_NOPS (dest);
1039 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
1040 }
1041 if (TREE_ADDRESSABLE (srctype)
1042 || TREE_ADDRESSABLE (desttype))
1043 return false;
1044
1045 /* Make sure we are not copying using a floating-point mode or
1046 a type whose size possibly does not match its precision. */
1047 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1048 || TREE_CODE (desttype) == BOOLEAN_TYPE
1049 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1050 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1051 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1052 || TREE_CODE (srctype) == BOOLEAN_TYPE
1053 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1054 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1055 if (!srctype)
1056 srctype = desttype;
1057 if (!desttype)
1058 desttype = srctype;
1059 if (!srctype)
1060 return false;
1061
1062 src_align = get_pointer_alignment (src);
1063 dest_align = get_pointer_alignment (dest);
1064 if (dest_align < TYPE_ALIGN (desttype)
1065 || src_align < TYPE_ALIGN (srctype))
1066 return false;
1067
1068 destvar = dest;
1069 STRIP_NOPS (destvar);
1070 if (TREE_CODE (destvar) == ADDR_EXPR
1071 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1072 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1073 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1074 else
1075 destvar = NULL_TREE;
1076
1077 srcvar = src;
1078 STRIP_NOPS (srcvar);
1079 if (TREE_CODE (srcvar) == ADDR_EXPR
1080 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1081 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1082 {
1083 if (!destvar
1084 || src_align >= TYPE_ALIGN (desttype))
1085 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1086 srcvar, off0);
1087 else if (!STRICT_ALIGNMENT)
1088 {
1089 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1090 src_align);
1091 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1092 }
e256dfce 1093 else
fef5a0d9
RB
1094 srcvar = NULL_TREE;
1095 }
1096 else
1097 srcvar = NULL_TREE;
1098
1099 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1100 return false;
1101
1102 if (srcvar == NULL_TREE)
1103 {
1104 STRIP_NOPS (src);
1105 if (src_align >= TYPE_ALIGN (desttype))
1106 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1107 else
1108 {
1109 if (STRICT_ALIGNMENT)
1110 return false;
1111 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1112 src_align);
1113 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1114 }
1115 }
1116 else if (destvar == NULL_TREE)
1117 {
1118 STRIP_NOPS (dest);
1119 if (dest_align >= TYPE_ALIGN (srctype))
1120 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1121 else
1122 {
1123 if (STRICT_ALIGNMENT)
1124 return false;
1125 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1126 dest_align);
1127 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1128 }
1129 }
1130
1131 gimple new_stmt;
1132 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1133 {
1134 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1135 if (gimple_in_ssa_p (cfun))
1136 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1137 else
b731b390 1138 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
fef5a0d9
RB
1139 gimple_assign_set_lhs (new_stmt, srcvar);
1140 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1141 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1142 }
1143 new_stmt = gimple_build_assign (destvar, srcvar);
1144 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1145 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1146 if (gimple_vdef (new_stmt)
1147 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1148 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1149 if (!lhs)
1150 {
1151 gsi_replace (gsi, new_stmt, true);
1152 return true;
1153 }
1154 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1155 }
1156
1157done:
1158 if (endp == 0 || endp == 3)
1159 len = NULL_TREE;
1160 else if (endp == 2)
1161 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1162 ssize_int (1));
1163 if (endp == 2 || endp == 1)
1164 dest = fold_build_pointer_plus_loc (loc, dest, len);
1165
1166 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1167 GSI_SAME_STMT);
1168 gimple repl = gimple_build_assign (lhs, dest);
1169 gsi_replace (gsi, repl, true);
1170 return true;
1171}
1172
1173/* Fold function call to builtin memset or bzero at *GSI setting the
1174 memory of size LEN to VAL. Return whether a simplification was made. */
1175
1176static bool
1177gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1178{
1179 gimple stmt = gsi_stmt (*gsi);
1180 tree etype;
1181 unsigned HOST_WIDE_INT length, cval;
1182
1183 /* If the LEN parameter is zero, return DEST. */
1184 if (integer_zerop (len))
1185 {
1186 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1187 return true;
1188 }
1189
1190 if (! tree_fits_uhwi_p (len))
1191 return false;
1192
1193 if (TREE_CODE (c) != INTEGER_CST)
1194 return false;
1195
1196 tree dest = gimple_call_arg (stmt, 0);
1197 tree var = dest;
1198 if (TREE_CODE (var) != ADDR_EXPR)
1199 return false;
1200
1201 var = TREE_OPERAND (var, 0);
1202 if (TREE_THIS_VOLATILE (var))
1203 return false;
1204
1205 etype = TREE_TYPE (var);
1206 if (TREE_CODE (etype) == ARRAY_TYPE)
1207 etype = TREE_TYPE (etype);
1208
1209 if (!INTEGRAL_TYPE_P (etype)
1210 && !POINTER_TYPE_P (etype))
1211 return NULL_TREE;
1212
1213 if (! var_decl_component_p (var))
1214 return NULL_TREE;
1215
1216 length = tree_to_uhwi (len);
1217 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1218 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1219 return NULL_TREE;
1220
1221 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1222 return NULL_TREE;
1223
1224 if (integer_zerop (c))
1225 cval = 0;
1226 else
1227 {
1228 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1229 return NULL_TREE;
1230
1231 cval = TREE_INT_CST_LOW (c);
1232 cval &= 0xff;
1233 cval |= cval << 8;
1234 cval |= cval << 16;
1235 cval |= (cval << 31) << 1;
1236 }
1237
1238 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1239 gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1240 gimple_set_vuse (store, gimple_vuse (stmt));
1241 tree vdef = gimple_vdef (stmt);
1242 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1243 {
1244 gimple_set_vdef (store, gimple_vdef (stmt));
1245 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1246 }
1247 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1248 if (gimple_call_lhs (stmt))
1249 {
1250 gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1251 gsi_replace (gsi, asgn, true);
1252 }
1253 else
1254 {
1255 gimple_stmt_iterator gsi2 = *gsi;
1256 gsi_prev (gsi);
1257 gsi_remove (&gsi2, true);
1258 }
1259
1260 return true;
1261}
1262
1263
/* Return the string length, maximum string length or maximum value of
   ARG in *LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited SSA name versions,
   allocated lazily on first use.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* For TYPE == 2 the "value" is ARG itself; it must be a
	     non-negative integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  /* A candidate from another path already exists; merge.  */
	  if (type > 0)
	    {
	      /* For a maximum, keep the larger of the two; both must
		 be constants to compare them.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* For the exact length (TYPE == 0) any disagreement between
	     paths means we cannot determine a single length.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Both arms of the conditional must yield a usable result.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_maxval_strlen (op2, length, visited, type)
		 && get_maxval_strlen (op3, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
1389
dcb7fae2
RB
1390tree
1391get_maxval_strlen (tree arg, int type)
1392{
1393 bitmap visited = NULL;
1394 tree len = NULL_TREE;
1395 if (!get_maxval_strlen (arg, &len, &visited, type))
1396 len = NULL_TREE;
1397 if (visited)
1398 BITMAP_FREE (visited);
1399
1400 return len;
1401}
1402
fef5a0d9
RB
1403
1404/* Fold function call to builtin strcpy with arguments DEST and SRC.
1405 If LEN is not NULL, it represents the length of the string to be
1406 copied. Return NULL_TREE if no simplification can be made. */
1407
1408static bool
1409gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 1410 tree dest, tree src)
fef5a0d9 1411{
dcb7fae2 1412 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1413 tree fn;
1414
1415 /* If SRC and DEST are the same (and not volatile), return DEST. */
1416 if (operand_equal_p (src, dest, 0))
1417 {
1418 replace_call_with_value (gsi, dest);
1419 return true;
1420 }
1421
1422 if (optimize_function_for_size_p (cfun))
1423 return false;
1424
1425 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1426 if (!fn)
1427 return false;
1428
1579e1f8 1429 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1430 if (!len)
dcb7fae2 1431 return false;
fef5a0d9
RB
1432
1433 len = fold_convert_loc (loc, size_type_node, len);
1434 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1435 len = force_gimple_operand_gsi (gsi, len, true,
1436 NULL_TREE, true, GSI_SAME_STMT);
1437 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1438 replace_call_with_call_and_fold (gsi, repl);
1439 return true;
1440}
1441
1442/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1443 If SLEN is not NULL, it represents the length of the source string.
1444 Return NULL_TREE if no simplification can be made. */
1445
1446static bool
dcb7fae2
RB
1447gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1448 tree dest, tree src, tree len)
fef5a0d9 1449{
dcb7fae2 1450 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1451 tree fn;
1452
1453 /* If the LEN parameter is zero, return DEST. */
1454 if (integer_zerop (len))
1455 {
1456 replace_call_with_value (gsi, dest);
1457 return true;
1458 }
1459
1460 /* We can't compare slen with len as constants below if len is not a
1461 constant. */
dcb7fae2 1462 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
1463 return false;
1464
fef5a0d9 1465 /* Now, we must be passed a constant src ptr parameter. */
1579e1f8 1466 tree slen = get_maxval_strlen (src, 0);
dcb7fae2 1467 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
1468 return false;
1469
1470 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1471
1472 /* We do not support simplification of this case, though we do
1473 support it when expanding trees into RTL. */
1474 /* FIXME: generate a call to __builtin_memset. */
1475 if (tree_int_cst_lt (slen, len))
1476 return false;
1477
1478 /* OK transform into builtin memcpy. */
1479 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1480 if (!fn)
1481 return false;
1482
1483 len = fold_convert_loc (loc, size_type_node, len);
1484 len = force_gimple_operand_gsi (gsi, len, true,
1485 NULL_TREE, true, GSI_SAME_STMT);
1486 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1487 replace_call_with_call_and_fold (gsi, repl);
1488 return true;
1489}
1490
/* Simplify a call to the strcat builtin at *GSI.  DST and SRC are the
   arguments to the call.

   If SRC is the empty string the call degenerates to DST.  Otherwise,
   when optimizing for speed and the length of SRC is known, the call
   is split into

     tmp = strlen (dst);
     memcpy (dst + tmp, src, strlen (src) + 1);

   which exposes the copy to further folding (e.g. store-by-pieces).

   Return true iff a simplification was made.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (SRC) + 1 bytes to include the terminating nul.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; forward it to the call's lhs.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1585
07f1cf56
RB
1586/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1587 are the arguments to the call. */
1588
1589static bool
1590gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1591{
1592 gimple stmt = gsi_stmt (*gsi);
1593 tree dest = gimple_call_arg (stmt, 0);
1594 tree src = gimple_call_arg (stmt, 1);
1595 tree size = gimple_call_arg (stmt, 2);
1596 tree fn;
1597 const char *p;
1598
1599
1600 p = c_getstr (src);
1601 /* If the SRC parameter is "", return DEST. */
1602 if (p && *p == '\0')
1603 {
1604 replace_call_with_value (gsi, dest);
1605 return true;
1606 }
1607
1608 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1609 return false;
1610
1611 /* If __builtin_strcat_chk is used, assume strcat is available. */
1612 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1613 if (!fn)
1614 return false;
1615
1616 gimple repl = gimple_build_call (fn, 2, dest, src);
1617 replace_call_with_call_and_fold (gsi, repl);
1618 return true;
1619}
1620
ad03a744
RB
1621/* Simplify a call to the strncat builtin. */
1622
1623static bool
1624gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1625{
1626 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1627 tree dst = gimple_call_arg (stmt, 0);
1628 tree src = gimple_call_arg (stmt, 1);
1629 tree len = gimple_call_arg (stmt, 2);
1630
1631 const char *p = c_getstr (src);
1632
1633 /* If the requested length is zero, or the src parameter string
1634 length is zero, return the dst parameter. */
1635 if (integer_zerop (len) || (p && *p == '\0'))
1636 {
1637 replace_call_with_value (gsi, dst);
1638 return true;
1639 }
1640
1641 /* If the requested len is greater than or equal to the string
1642 length, call strcat. */
1643 if (TREE_CODE (len) == INTEGER_CST && p
1644 && compare_tree_int (len, strlen (p)) >= 0)
1645 {
1646 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1647
1648 /* If the replacement _DECL isn't initialized, don't do the
1649 transformation. */
1650 if (!fn)
1651 return false;
1652
1653 gcall *repl = gimple_build_call (fn, 2, dst, src);
1654 replace_call_with_call_and_fold (gsi, repl);
1655 return true;
1656 }
1657
1658 return false;
1659}
1660
745583f9
RB
1661/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1662 LEN, and SIZE. */
1663
1664static bool
1665gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1666{
1667 gimple stmt = gsi_stmt (*gsi);
1668 tree dest = gimple_call_arg (stmt, 0);
1669 tree src = gimple_call_arg (stmt, 1);
1670 tree len = gimple_call_arg (stmt, 2);
1671 tree size = gimple_call_arg (stmt, 3);
1672 tree fn;
1673 const char *p;
1674
1675 p = c_getstr (src);
1676 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1677 if ((p && *p == '\0')
1678 || integer_zerop (len))
1679 {
1680 replace_call_with_value (gsi, dest);
1681 return true;
1682 }
1683
1684 if (! tree_fits_uhwi_p (size))
1685 return false;
1686
1687 if (! integer_all_onesp (size))
1688 {
1689 tree src_len = c_strlen (src, 1);
1690 if (src_len
1691 && tree_fits_uhwi_p (src_len)
1692 && tree_fits_uhwi_p (len)
1693 && ! tree_int_cst_lt (len, src_len))
1694 {
1695 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1696 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1697 if (!fn)
1698 return false;
1699
1700 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1701 replace_call_with_call_and_fold (gsi, repl);
1702 return true;
1703 }
1704 return false;
1705 }
1706
1707 /* If __builtin_strncat_chk is used, assume strncat is available. */
1708 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1709 if (!fn)
1710 return false;
1711
1712 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1713 replace_call_with_call_and_fold (gsi, repl);
1714 return true;
1715}
1716
fef5a0d9
RB
/* Fold a call to the fputs builtin at *GSI.  ARG0 and ARG1 are the
   string and stream arguments to the call.  UNLOCKED is true if this
   is actually a call to fputs_unlocked.  Depending on the constant
   length of the string the call is deleted (length 0), turned into
   fputc (length 1) or turned into fwrite (longer strings).  Return
   true iff a simplification was made.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation
     (fputc/fwrite return values differ from fputs).  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					     (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH: string contents unknown, treat like the
	 longer-than-one case below.  */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
1793
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin at *GSI.
   DEST, SRC, LEN, and SIZE are the arguments to the call; FCODE is
   the BUILT_IN_* code of the builtin.  The call is replaced by the
   unchecked variant when the copied length is provably within the
   object size SIZE (or SIZE is unknown, i.e. all-ones).  Return true
   iff a simplification was made.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* IGNORE is true when the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound for LEN derived from its defining
     statements (type 2 = maximum value).  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* The object must be large enough for the (bounded) length.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1893
/* Fold a call to the __st[rp]cpy_chk builtin at *GSI.
   DEST, SRC, and SIZE are the arguments to the call; FCODE is the
   BUILT_IN_* code of the builtin.  The call is replaced by the
   unchecked st{r,p}cpy when the source length is provably smaller
   than the object size SIZE, with intermediate fallbacks to
   __strcpy_chk / __memcpy_chk where only partial information is
   available.  Return true iff a simplification was made.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* IGNORE is true when the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound on the string length of SRC (type 1 =
     maximum string length).  */
  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy LEN + 1 bytes to include the terminating nul.  */
	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
					      true, GSI_SAME_STMT);
	      gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* The object must have room for the string plus its nul, hence
	 the strict MAXLEN < SIZE requirement.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1984
1985/* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1986 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1987 length passed as third argument. IGNORE is true if return value can be
1988 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1989
1990static bool
1991gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1992 tree dest, tree src,
dcb7fae2 1993 tree len, tree size,
fef5a0d9
RB
1994 enum built_in_function fcode)
1995{
dcb7fae2
RB
1996 gimple stmt = gsi_stmt (*gsi);
1997 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
1998 tree fn;
1999
2000 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
cbdd87d4 2001 {
fef5a0d9
RB
2002 /* If return value of __stpncpy_chk is ignored,
2003 optimize into __strncpy_chk. */
2004 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2005 if (fn)
2006 {
2007 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
2008 replace_call_with_call_and_fold (gsi, repl);
2009 return true;
2010 }
cbdd87d4
RG
2011 }
2012
fef5a0d9
RB
2013 if (! tree_fits_uhwi_p (size))
2014 return false;
2015
dcb7fae2 2016 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9 2017 if (! integer_all_onesp (size))
cbdd87d4 2018 {
fef5a0d9 2019 if (! tree_fits_uhwi_p (len))
fe2ef088 2020 {
fef5a0d9
RB
2021 /* If LEN is not constant, try MAXLEN too.
2022 For MAXLEN only allow optimizing into non-_ocs function
2023 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2024 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2025 return false;
8a1561bc 2026 }
fef5a0d9
RB
2027 else
2028 maxlen = len;
2029
2030 if (tree_int_cst_lt (size, maxlen))
2031 return false;
cbdd87d4
RG
2032 }
2033
fef5a0d9
RB
2034 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2035 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2036 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2037 if (!fn)
2038 return false;
2039
2040 gimple repl = gimple_build_call (fn, 3, dest, src, len);
2041 replace_call_with_call_and_fold (gsi, repl);
2042 return true;
cbdd87d4
RG
2043}
2044
2625bb5d
RB
2045/* Fold function call to builtin stpcpy with arguments DEST and SRC.
2046 Return NULL_TREE if no simplification can be made. */
2047
2048static bool
2049gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2050{
2051 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2052 location_t loc = gimple_location (stmt);
2053 tree dest = gimple_call_arg (stmt, 0);
2054 tree src = gimple_call_arg (stmt, 1);
2055 tree fn, len, lenp1;
2056
2057 /* If the result is unused, replace stpcpy with strcpy. */
2058 if (gimple_call_lhs (stmt) == NULL_TREE)
2059 {
2060 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2061 if (!fn)
2062 return false;
2063 gimple_call_set_fndecl (stmt, fn);
2064 fold_stmt (gsi);
2065 return true;
2066 }
2067
2068 len = c_strlen (src, 1);
2069 if (!len
2070 || TREE_CODE (len) != INTEGER_CST)
2071 return false;
2072
2073 if (optimize_function_for_size_p (cfun)
2074 /* If length is zero it's small enough. */
2075 && !integer_zerop (len))
2076 return false;
2077
2078 /* If the source has a known length replace stpcpy with memcpy. */
2079 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2080 if (!fn)
2081 return false;
2082
2083 gimple_seq stmts = NULL;
2084 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2085 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2086 tem, build_int_cst (size_type_node, 1));
2087 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2088 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2089 gimple_set_vuse (repl, gimple_vuse (stmt));
2090 gimple_set_vdef (repl, gimple_vdef (stmt));
2091 if (gimple_vdef (repl)
2092 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2093 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2094 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2095 /* Replace the result with dest + len. */
2096 stmts = NULL;
2097 tem = gimple_convert (&stmts, loc, sizetype, len);
2098 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2099 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2100 POINTER_PLUS_EXPR, dest, tem);
2101 gsi_replace (gsi, ret, true);
2102 /* Finally fold the memcpy call. */
2103 gimple_stmt_iterator gsi2 = *gsi;
2104 gsi_prev (&gsi2);
2105 fold_stmt (&gsi2);
2106 return true;
2107}
2108
fef5a0d9
RB
2109/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2110 NULL_TREE if a normal call should be emitted rather than expanding
2111 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2112 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2113 passed as second argument. */
cbdd87d4
RG
2114
2115static bool
fef5a0d9 2116gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
dcb7fae2 2117 enum built_in_function fcode)
cbdd87d4 2118{
538dd0b7 2119 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2120 tree dest, size, len, fn, fmt, flag;
2121 const char *fmt_str;
cbdd87d4 2122
fef5a0d9
RB
2123 /* Verify the required arguments in the original call. */
2124 if (gimple_call_num_args (stmt) < 5)
2125 return false;
cbdd87d4 2126
fef5a0d9
RB
2127 dest = gimple_call_arg (stmt, 0);
2128 len = gimple_call_arg (stmt, 1);
2129 flag = gimple_call_arg (stmt, 2);
2130 size = gimple_call_arg (stmt, 3);
2131 fmt = gimple_call_arg (stmt, 4);
2132
2133 if (! tree_fits_uhwi_p (size))
2134 return false;
2135
2136 if (! integer_all_onesp (size))
2137 {
dcb7fae2 2138 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9 2139 if (! tree_fits_uhwi_p (len))
cbdd87d4 2140 {
fef5a0d9
RB
2141 /* If LEN is not constant, try MAXLEN too.
2142 For MAXLEN only allow optimizing into non-_ocs function
2143 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2144 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
cbdd87d4
RG
2145 return false;
2146 }
2147 else
fef5a0d9 2148 maxlen = len;
cbdd87d4 2149
fef5a0d9
RB
2150 if (tree_int_cst_lt (size, maxlen))
2151 return false;
2152 }
cbdd87d4 2153
fef5a0d9
RB
2154 if (!init_target_chars ())
2155 return false;
cbdd87d4 2156
fef5a0d9
RB
2157 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2158 or if format doesn't contain % chars or is "%s". */
2159 if (! integer_zerop (flag))
2160 {
2161 fmt_str = c_getstr (fmt);
2162 if (fmt_str == NULL)
2163 return false;
2164 if (strchr (fmt_str, target_percent) != NULL
2165 && strcmp (fmt_str, target_percent_s))
2166 return false;
cbdd87d4
RG
2167 }
2168
fef5a0d9
RB
2169 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2170 available. */
2171 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2172 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2173 if (!fn)
491e0b9b
RG
2174 return false;
2175
fef5a0d9
RB
2176 /* Replace the called function and the first 5 argument by 3 retaining
2177 trailing varargs. */
2178 gimple_call_set_fndecl (stmt, fn);
2179 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2180 gimple_call_set_arg (stmt, 0, dest);
2181 gimple_call_set_arg (stmt, 1, len);
2182 gimple_call_set_arg (stmt, 2, fmt);
2183 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2184 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2185 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2186 fold_stmt (gsi);
2187 return true;
2188}
cbdd87d4 2189
fef5a0d9
RB
2190/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2191 Return NULL_TREE if a normal call should be emitted rather than
2192 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2193 or BUILT_IN_VSPRINTF_CHK. */
cbdd87d4 2194
fef5a0d9
RB
2195static bool
2196gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2197 enum built_in_function fcode)
2198{
538dd0b7 2199 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2200 tree dest, size, len, fn, fmt, flag;
2201 const char *fmt_str;
2202 unsigned nargs = gimple_call_num_args (stmt);
cbdd87d4 2203
fef5a0d9
RB
2204 /* Verify the required arguments in the original call. */
2205 if (nargs < 4)
2206 return false;
2207 dest = gimple_call_arg (stmt, 0);
2208 flag = gimple_call_arg (stmt, 1);
2209 size = gimple_call_arg (stmt, 2);
2210 fmt = gimple_call_arg (stmt, 3);
2211
2212 if (! tree_fits_uhwi_p (size))
2213 return false;
2214
2215 len = NULL_TREE;
2216
2217 if (!init_target_chars ())
2218 return false;
2219
2220 /* Check whether the format is a literal string constant. */
2221 fmt_str = c_getstr (fmt);
2222 if (fmt_str != NULL)
2223 {
2224 /* If the format doesn't contain % args or %%, we know the size. */
2225 if (strchr (fmt_str, target_percent) == 0)
cbdd87d4 2226 {
fef5a0d9
RB
2227 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2228 len = build_int_cstu (size_type_node, strlen (fmt_str));
2229 }
2230 /* If the format is "%s" and first ... argument is a string literal,
2231 we know the size too. */
2232 else if (fcode == BUILT_IN_SPRINTF_CHK
2233 && strcmp (fmt_str, target_percent_s) == 0)
2234 {
2235 tree arg;
cbdd87d4 2236
fef5a0d9
RB
2237 if (nargs == 5)
2238 {
2239 arg = gimple_call_arg (stmt, 4);
2240 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2241 {
2242 len = c_strlen (arg, 1);
2243 if (! len || ! tree_fits_uhwi_p (len))
2244 len = NULL_TREE;
2245 }
2246 }
2247 }
2248 }
cbdd87d4 2249
fef5a0d9
RB
2250 if (! integer_all_onesp (size))
2251 {
2252 if (! len || ! tree_int_cst_lt (len, size))
2253 return false;
2254 }
cbdd87d4 2255
fef5a0d9
RB
2256 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2257 or if format doesn't contain % chars or is "%s". */
2258 if (! integer_zerop (flag))
2259 {
2260 if (fmt_str == NULL)
2261 return false;
2262 if (strchr (fmt_str, target_percent) != NULL
2263 && strcmp (fmt_str, target_percent_s))
2264 return false;
2265 }
cbdd87d4 2266
fef5a0d9
RB
2267 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2268 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2269 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2270 if (!fn)
2271 return false;
2272
2273 /* Replace the called function and the first 4 argument by 2 retaining
2274 trailing varargs. */
2275 gimple_call_set_fndecl (stmt, fn);
2276 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2277 gimple_call_set_arg (stmt, 0, dest);
2278 gimple_call_set_arg (stmt, 1, fmt);
2279 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2280 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2281 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2282 fold_stmt (gsi);
2283 return true;
2284}
2285
35770bb2
RB
2286/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2287 ORIG may be null if this is a 2-argument call. We don't attempt to
2288 simplify calls with more than 3 arguments.
2289
2290 Return NULL_TREE if no simplification was possible, otherwise return the
2291 simplified form of the call as a tree. If IGNORED is true, it means that
2292 the caller does not use the returned value of the function. */
2293
2294static bool
dcb7fae2 2295gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
35770bb2
RB
2296{
2297 gimple stmt = gsi_stmt (*gsi);
2298 tree dest = gimple_call_arg (stmt, 0);
2299 tree fmt = gimple_call_arg (stmt, 1);
2300 tree orig = NULL_TREE;
2301 const char *fmt_str = NULL;
2302
2303 /* Verify the required arguments in the original call. We deal with two
2304 types of sprintf() calls: 'sprintf (str, fmt)' and
2305 'sprintf (dest, "%s", orig)'. */
2306 if (gimple_call_num_args (stmt) > 3)
2307 return false;
2308
2309 if (gimple_call_num_args (stmt) == 3)
2310 orig = gimple_call_arg (stmt, 2);
2311
2312 /* Check whether the format is a literal string constant. */
2313 fmt_str = c_getstr (fmt);
2314 if (fmt_str == NULL)
2315 return false;
2316
2317 if (!init_target_chars ())
2318 return false;
2319
2320 /* If the format doesn't contain % args or %%, use strcpy. */
2321 if (strchr (fmt_str, target_percent) == NULL)
2322 {
2323 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2324
2325 if (!fn)
2326 return false;
2327
2328 /* Don't optimize sprintf (buf, "abc", ptr++). */
2329 if (orig)
2330 return false;
2331
2332 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2333 'format' is known to contain no % formats. */
2334 gimple_seq stmts = NULL;
2335 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2336 gimple_seq_add_stmt_without_update (&stmts, repl);
2337 if (gimple_call_lhs (stmt))
2338 {
2339 repl = gimple_build_assign (gimple_call_lhs (stmt),
2340 build_int_cst (integer_type_node,
2341 strlen (fmt_str)));
2342 gimple_seq_add_stmt_without_update (&stmts, repl);
2343 gsi_replace_with_seq_vops (gsi, stmts);
2344 /* gsi now points at the assignment to the lhs, get a
2345 stmt iterator to the memcpy call.
2346 ??? We can't use gsi_for_stmt as that doesn't work when the
2347 CFG isn't built yet. */
2348 gimple_stmt_iterator gsi2 = *gsi;
2349 gsi_prev (&gsi2);
2350 fold_stmt (&gsi2);
2351 }
2352 else
2353 {
2354 gsi_replace_with_seq_vops (gsi, stmts);
2355 fold_stmt (gsi);
2356 }
2357 return true;
2358 }
2359
2360 /* If the format is "%s", use strcpy if the result isn't used. */
2361 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2362 {
2363 tree fn;
2364 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2365
2366 if (!fn)
2367 return false;
2368
2369 /* Don't crash on sprintf (str1, "%s"). */
2370 if (!orig)
2371 return false;
2372
dcb7fae2
RB
2373 tree orig_len = NULL_TREE;
2374 if (gimple_call_lhs (stmt))
35770bb2 2375 {
dcb7fae2 2376 orig_len = get_maxval_strlen (orig, 0);
d7e78447 2377 if (!orig_len)
35770bb2
RB
2378 return false;
2379 }
2380
2381 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2382 gimple_seq stmts = NULL;
2383 gimple repl = gimple_build_call (fn, 2, dest, orig);
2384 gimple_seq_add_stmt_without_update (&stmts, repl);
2385 if (gimple_call_lhs (stmt))
2386 {
d7e78447
RB
2387 if (!useless_type_conversion_p (integer_type_node,
2388 TREE_TYPE (orig_len)))
2389 orig_len = fold_convert (integer_type_node, orig_len);
2390 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
35770bb2
RB
2391 gimple_seq_add_stmt_without_update (&stmts, repl);
2392 gsi_replace_with_seq_vops (gsi, stmts);
2393 /* gsi now points at the assignment to the lhs, get a
2394 stmt iterator to the memcpy call.
2395 ??? We can't use gsi_for_stmt as that doesn't work when the
2396 CFG isn't built yet. */
2397 gimple_stmt_iterator gsi2 = *gsi;
2398 gsi_prev (&gsi2);
2399 fold_stmt (&gsi2);
2400 }
2401 else
2402 {
2403 gsi_replace_with_seq_vops (gsi, stmts);
2404 fold_stmt (gsi);
2405 }
2406 return true;
2407 }
2408 return false;
2409}
2410
d7e78447
RB
2411/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2412 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2413 attempt to simplify calls with more than 4 arguments.
35770bb2 2414
d7e78447
RB
2415 Return NULL_TREE if no simplification was possible, otherwise return the
2416 simplified form of the call as a tree. If IGNORED is true, it means that
2417 the caller does not use the returned value of the function. */
2418
2419static bool
dcb7fae2 2420gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
d7e78447 2421{
538dd0b7 2422 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
d7e78447
RB
2423 tree dest = gimple_call_arg (stmt, 0);
2424 tree destsize = gimple_call_arg (stmt, 1);
2425 tree fmt = gimple_call_arg (stmt, 2);
2426 tree orig = NULL_TREE;
2427 const char *fmt_str = NULL;
2428
2429 if (gimple_call_num_args (stmt) > 4)
2430 return false;
2431
2432 if (gimple_call_num_args (stmt) == 4)
2433 orig = gimple_call_arg (stmt, 3);
2434
2435 if (!tree_fits_uhwi_p (destsize))
2436 return false;
2437 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2438
2439 /* Check whether the format is a literal string constant. */
2440 fmt_str = c_getstr (fmt);
2441 if (fmt_str == NULL)
2442 return false;
2443
2444 if (!init_target_chars ())
2445 return false;
2446
2447 /* If the format doesn't contain % args or %%, use strcpy. */
2448 if (strchr (fmt_str, target_percent) == NULL)
2449 {
2450 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2451 if (!fn)
2452 return false;
2453
2454 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2455 if (orig)
2456 return false;
2457
2458 /* We could expand this as
2459 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2460 or to
2461 memcpy (str, fmt_with_nul_at_cstm1, cst);
2462 but in the former case that might increase code size
2463 and in the latter case grow .rodata section too much.
2464 So punt for now. */
2465 size_t len = strlen (fmt_str);
2466 if (len >= destlen)
2467 return false;
2468
2469 gimple_seq stmts = NULL;
2470 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2471 gimple_seq_add_stmt_without_update (&stmts, repl);
2472 if (gimple_call_lhs (stmt))
2473 {
2474 repl = gimple_build_assign (gimple_call_lhs (stmt),
2475 build_int_cst (integer_type_node, len));
2476 gimple_seq_add_stmt_without_update (&stmts, repl);
2477 gsi_replace_with_seq_vops (gsi, stmts);
2478 /* gsi now points at the assignment to the lhs, get a
2479 stmt iterator to the memcpy call.
2480 ??? We can't use gsi_for_stmt as that doesn't work when the
2481 CFG isn't built yet. */
2482 gimple_stmt_iterator gsi2 = *gsi;
2483 gsi_prev (&gsi2);
2484 fold_stmt (&gsi2);
2485 }
2486 else
2487 {
2488 gsi_replace_with_seq_vops (gsi, stmts);
2489 fold_stmt (gsi);
2490 }
2491 return true;
2492 }
2493
2494 /* If the format is "%s", use strcpy if the result isn't used. */
2495 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2496 {
2497 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2498 if (!fn)
2499 return false;
2500
2501 /* Don't crash on snprintf (str1, cst, "%s"). */
2502 if (!orig)
2503 return false;
2504
dcb7fae2 2505 tree orig_len = get_maxval_strlen (orig, 0);
af9db3a7 2506 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
dcb7fae2 2507 return false;
d7e78447
RB
2508
2509 /* We could expand this as
2510 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2511 or to
2512 memcpy (str1, str2_with_nul_at_cstm1, cst);
2513 but in the former case that might increase code size
2514 and in the latter case grow .rodata section too much.
2515 So punt for now. */
2516 if (compare_tree_int (orig_len, destlen) >= 0)
2517 return false;
2518
2519 /* Convert snprintf (str1, cst, "%s", str2) into
2520 strcpy (str1, str2) if strlen (str2) < cst. */
2521 gimple_seq stmts = NULL;
2522 gimple repl = gimple_build_call (fn, 2, dest, orig);
2523 gimple_seq_add_stmt_without_update (&stmts, repl);
2524 if (gimple_call_lhs (stmt))
2525 {
2526 if (!useless_type_conversion_p (integer_type_node,
2527 TREE_TYPE (orig_len)))
2528 orig_len = fold_convert (integer_type_node, orig_len);
2529 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2530 gimple_seq_add_stmt_without_update (&stmts, repl);
2531 gsi_replace_with_seq_vops (gsi, stmts);
2532 /* gsi now points at the assignment to the lhs, get a
2533 stmt iterator to the memcpy call.
2534 ??? We can't use gsi_for_stmt as that doesn't work when the
2535 CFG isn't built yet. */
2536 gimple_stmt_iterator gsi2 = *gsi;
2537 gsi_prev (&gsi2);
2538 fold_stmt (&gsi2);
2539 }
2540 else
2541 {
2542 gsi_replace_with_seq_vops (gsi, stmts);
2543 fold_stmt (gsi);
2544 }
2545 return true;
2546 }
2547 return false;
2548}
35770bb2 2549
edd7ae68
RB
2550/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2551 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2552 more than 3 arguments, and ARG may be null in the 2-argument case.
2553
2554 Return NULL_TREE if no simplification was possible, otherwise return the
2555 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2556 code of the function to be simplified. */
2557
2558static bool
2559gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2560 tree fp, tree fmt, tree arg,
2561 enum built_in_function fcode)
2562{
2563 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2564 tree fn_fputc, fn_fputs;
2565 const char *fmt_str = NULL;
2566
2567 /* If the return value is used, don't do the transformation. */
2568 if (gimple_call_lhs (stmt) != NULL_TREE)
2569 return false;
2570
2571 /* Check whether the format is a literal string constant. */
2572 fmt_str = c_getstr (fmt);
2573 if (fmt_str == NULL)
2574 return false;
2575
2576 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2577 {
2578 /* If we're using an unlocked function, assume the other
2579 unlocked functions exist explicitly. */
2580 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2581 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2582 }
2583 else
2584 {
2585 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2586 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2587 }
2588
2589 if (!init_target_chars ())
2590 return false;
2591
2592 /* If the format doesn't contain % args or %%, use strcpy. */
2593 if (strchr (fmt_str, target_percent) == NULL)
2594 {
2595 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2596 && arg)
2597 return false;
2598
2599 /* If the format specifier was "", fprintf does nothing. */
2600 if (fmt_str[0] == '\0')
2601 {
2602 replace_call_with_value (gsi, NULL_TREE);
2603 return true;
2604 }
2605
2606 /* When "string" doesn't contain %, replace all cases of
2607 fprintf (fp, string) with fputs (string, fp). The fputs
2608 builtin will take care of special cases like length == 1. */
2609 if (fn_fputs)
2610 {
2611 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2612 replace_call_with_call_and_fold (gsi, repl);
2613 return true;
2614 }
2615 }
2616
2617 /* The other optimizations can be done only on the non-va_list variants. */
2618 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2619 return false;
2620
2621 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2622 else if (strcmp (fmt_str, target_percent_s) == 0)
2623 {
2624 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2625 return false;
2626 if (fn_fputs)
2627 {
2628 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2629 replace_call_with_call_and_fold (gsi, repl);
2630 return true;
2631 }
2632 }
2633
2634 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2635 else if (strcmp (fmt_str, target_percent_c) == 0)
2636 {
2637 if (!arg
2638 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2639 return false;
2640 if (fn_fputc)
2641 {
2642 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2643 replace_call_with_call_and_fold (gsi, repl);
2644 return true;
2645 }
2646 }
2647
2648 return false;
2649}
2650
ad03a744
RB
2651/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2652 FMT and ARG are the arguments to the call; we don't fold cases with
2653 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2654
2655 Return NULL_TREE if no simplification was possible, otherwise return the
2656 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2657 code of the function to be simplified. */
2658
2659static bool
2660gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2661 tree arg, enum built_in_function fcode)
2662{
2663 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2664 tree fn_putchar, fn_puts, newarg;
2665 const char *fmt_str = NULL;
2666
2667 /* If the return value is used, don't do the transformation. */
2668 if (gimple_call_lhs (stmt) != NULL_TREE)
2669 return false;
2670
2671 /* Check whether the format is a literal string constant. */
2672 fmt_str = c_getstr (fmt);
2673 if (fmt_str == NULL)
2674 return false;
2675
2676 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2677 {
2678 /* If we're using an unlocked function, assume the other
2679 unlocked functions exist explicitly. */
2680 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2681 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2682 }
2683 else
2684 {
2685 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2686 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2687 }
2688
2689 if (!init_target_chars ())
2690 return false;
2691
2692 if (strcmp (fmt_str, target_percent_s) == 0
2693 || strchr (fmt_str, target_percent) == NULL)
2694 {
2695 const char *str;
2696
2697 if (strcmp (fmt_str, target_percent_s) == 0)
2698 {
2699 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2700 return false;
2701
2702 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2703 return false;
2704
2705 str = c_getstr (arg);
2706 if (str == NULL)
2707 return false;
2708 }
2709 else
2710 {
2711 /* The format specifier doesn't contain any '%' characters. */
2712 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2713 && arg)
2714 return false;
2715 str = fmt_str;
2716 }
2717
2718 /* If the string was "", printf does nothing. */
2719 if (str[0] == '\0')
2720 {
2721 replace_call_with_value (gsi, NULL_TREE);
2722 return true;
2723 }
2724
2725 /* If the string has length of 1, call putchar. */
2726 if (str[1] == '\0')
2727 {
2728 /* Given printf("c"), (where c is any one character,)
2729 convert "c"[0] to an int and pass that to the replacement
2730 function. */
2731 newarg = build_int_cst (integer_type_node, str[0]);
2732 if (fn_putchar)
2733 {
2734 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2735 replace_call_with_call_and_fold (gsi, repl);
2736 return true;
2737 }
2738 }
2739 else
2740 {
2741 /* If the string was "string\n", call puts("string"). */
2742 size_t len = strlen (str);
2743 if ((unsigned char)str[len - 1] == target_newline
2744 && (size_t) (int) len == len
2745 && (int) len > 0)
2746 {
2747 char *newstr;
2748 tree offset_node, string_cst;
2749
2750 /* Create a NUL-terminated string that's one char shorter
2751 than the original, stripping off the trailing '\n'. */
2752 newarg = build_string_literal (len, str);
2753 string_cst = string_constant (newarg, &offset_node);
2754 gcc_checking_assert (string_cst
2755 && (TREE_STRING_LENGTH (string_cst)
2756 == (int) len)
2757 && integer_zerop (offset_node)
2758 && (unsigned char)
2759 TREE_STRING_POINTER (string_cst)[len - 1]
2760 == target_newline);
2761 /* build_string_literal creates a new STRING_CST,
2762 modify it in place to avoid double copying. */
2763 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2764 newstr[len - 1] = '\0';
2765 if (fn_puts)
2766 {
2767 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2768 replace_call_with_call_and_fold (gsi, repl);
2769 return true;
2770 }
2771 }
2772 else
2773 /* We'd like to arrange to call fputs(string,stdout) here,
2774 but we need stdout and don't have a way to get it yet. */
2775 return false;
2776 }
2777 }
2778
2779 /* The other optimizations can be done only on the non-va_list variants. */
2780 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2781 return false;
2782
2783 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2784 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2785 {
2786 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2787 return false;
2788 if (fn_puts)
2789 {
2790 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2791 replace_call_with_call_and_fold (gsi, repl);
2792 return true;
2793 }
2794 }
2795
2796 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2797 else if (strcmp (fmt_str, target_percent_c) == 0)
2798 {
2799 if (!arg || ! useless_type_conversion_p (integer_type_node,
2800 TREE_TYPE (arg)))
2801 return false;
2802 if (fn_putchar)
2803 {
2804 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2805 replace_call_with_call_and_fold (gsi, repl);
2806 return true;
2807 }
2808 }
2809
2810 return false;
2811}
2812
edd7ae68 2813
fef5a0d9
RB
2814
2815/* Fold a call to __builtin_strlen with known length LEN. */
2816
2817static bool
dcb7fae2 2818gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 2819{
dcb7fae2
RB
2820 gimple stmt = gsi_stmt (*gsi);
2821 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
fef5a0d9
RB
2822 if (!len)
2823 return false;
2813904b 2824 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
fef5a0d9
RB
2825 replace_call_with_value (gsi, len);
2826 return true;
cbdd87d4
RG
2827}
2828
2829
dcb7fae2
RB
2830/* Fold the non-target builtin at *GSI and return whether any simplification
2831 was made. */
cbdd87d4 2832
fef5a0d9 2833static bool
dcb7fae2 2834gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 2835{
538dd0b7 2836 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 2837 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 2838
dcb7fae2
RB
2839 /* Give up for always_inline inline builtins until they are
2840 inlined. */
2841 if (avoid_folding_inline_builtin (callee))
2842 return false;
cbdd87d4 2843
edd7ae68
RB
2844 unsigned n = gimple_call_num_args (stmt);
2845 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2846 switch (fcode)
cbdd87d4 2847 {
dcb7fae2
RB
2848 case BUILT_IN_BZERO:
2849 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2850 gimple_call_arg (stmt, 1));
2851 case BUILT_IN_MEMSET:
2852 return gimple_fold_builtin_memset (gsi,
2853 gimple_call_arg (stmt, 1),
2854 gimple_call_arg (stmt, 2));
2855 case BUILT_IN_BCOPY:
2856 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2857 gimple_call_arg (stmt, 0), 3);
2858 case BUILT_IN_MEMCPY:
2859 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2860 gimple_call_arg (stmt, 1), 0);
2861 case BUILT_IN_MEMPCPY:
2862 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2863 gimple_call_arg (stmt, 1), 1);
2864 case BUILT_IN_MEMMOVE:
2865 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2866 gimple_call_arg (stmt, 1), 3);
2867 case BUILT_IN_SPRINTF_CHK:
2868 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 2869 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
2870 case BUILT_IN_STRCAT_CHK:
2871 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
2872 case BUILT_IN_STRNCAT_CHK:
2873 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 2874 case BUILT_IN_STRLEN:
dcb7fae2 2875 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 2876 case BUILT_IN_STRCPY:
dcb7fae2 2877 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 2878 gimple_call_arg (stmt, 0),
dcb7fae2 2879 gimple_call_arg (stmt, 1));
cbdd87d4 2880 case BUILT_IN_STRNCPY:
dcb7fae2 2881 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
2882 gimple_call_arg (stmt, 0),
2883 gimple_call_arg (stmt, 1),
dcb7fae2 2884 gimple_call_arg (stmt, 2));
9a7eefec 2885 case BUILT_IN_STRCAT:
dcb7fae2
RB
2886 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2887 gimple_call_arg (stmt, 1));
ad03a744
RB
2888 case BUILT_IN_STRNCAT:
2889 return gimple_fold_builtin_strncat (gsi);
cbdd87d4 2890 case BUILT_IN_FPUTS:
dcb7fae2
RB
2891 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2892 gimple_call_arg (stmt, 1), false);
cbdd87d4 2893 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
2894 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2895 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
2896 case BUILT_IN_MEMCPY_CHK:
2897 case BUILT_IN_MEMPCPY_CHK:
2898 case BUILT_IN_MEMMOVE_CHK:
2899 case BUILT_IN_MEMSET_CHK:
dcb7fae2 2900 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
2901 gimple_call_arg (stmt, 0),
2902 gimple_call_arg (stmt, 1),
2903 gimple_call_arg (stmt, 2),
2904 gimple_call_arg (stmt, 3),
edd7ae68 2905 fcode);
2625bb5d
RB
2906 case BUILT_IN_STPCPY:
2907 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
2908 case BUILT_IN_STRCPY_CHK:
2909 case BUILT_IN_STPCPY_CHK:
dcb7fae2 2910 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
2911 gimple_call_arg (stmt, 0),
2912 gimple_call_arg (stmt, 1),
2913 gimple_call_arg (stmt, 2),
edd7ae68 2914 fcode);
cbdd87d4 2915 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 2916 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
2917 return gimple_fold_builtin_stxncpy_chk (gsi,
2918 gimple_call_arg (stmt, 0),
2919 gimple_call_arg (stmt, 1),
2920 gimple_call_arg (stmt, 2),
2921 gimple_call_arg (stmt, 3),
edd7ae68 2922 fcode);
cbdd87d4
RG
2923 case BUILT_IN_SNPRINTF_CHK:
2924 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 2925 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
d7e78447 2926 case BUILT_IN_SNPRINTF:
dcb7fae2 2927 return gimple_fold_builtin_snprintf (gsi);
d7e78447 2928 case BUILT_IN_SPRINTF:
dcb7fae2 2929 return gimple_fold_builtin_sprintf (gsi);
edd7ae68
RB
2930 case BUILT_IN_FPRINTF:
2931 case BUILT_IN_FPRINTF_UNLOCKED:
2932 case BUILT_IN_VFPRINTF:
2933 if (n == 2 || n == 3)
2934 return gimple_fold_builtin_fprintf (gsi,
2935 gimple_call_arg (stmt, 0),
2936 gimple_call_arg (stmt, 1),
2937 n == 3
2938 ? gimple_call_arg (stmt, 2)
2939 : NULL_TREE,
2940 fcode);
2941 break;
2942 case BUILT_IN_FPRINTF_CHK:
2943 case BUILT_IN_VFPRINTF_CHK:
2944 if (n == 3 || n == 4)
2945 return gimple_fold_builtin_fprintf (gsi,
2946 gimple_call_arg (stmt, 0),
2947 gimple_call_arg (stmt, 2),
2948 n == 4
2949 ? gimple_call_arg (stmt, 3)
2950 : NULL_TREE,
2951 fcode);
2952 break;
ad03a744
RB
2953 case BUILT_IN_PRINTF:
2954 case BUILT_IN_PRINTF_UNLOCKED:
2955 case BUILT_IN_VPRINTF:
2956 if (n == 1 || n == 2)
2957 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2958 n == 2
2959 ? gimple_call_arg (stmt, 1)
2960 : NULL_TREE, fcode);
2961 break;
2962 case BUILT_IN_PRINTF_CHK:
2963 case BUILT_IN_VPRINTF_CHK:
2964 if (n == 2 || n == 3)
2965 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2966 n == 3
2967 ? gimple_call_arg (stmt, 2)
2968 : NULL_TREE, fcode);
fef5a0d9
RB
2969 default:;
2970 }
2971
2972 /* Try the generic builtin folder. */
2973 bool ignore = (gimple_call_lhs (stmt) == NULL);
2974 tree result = fold_call_stmt (stmt, ignore);
2975 if (result)
2976 {
2977 if (ignore)
2978 STRIP_NOPS (result);
2979 else
2980 result = fold_convert (gimple_call_return_type (stmt), result);
2981 if (!update_call_from_tree (gsi, result))
2982 gimplify_and_update_call_from_tree (gsi, result);
2983 return true;
2984 }
2985
2986 return false;
2987}
2988
1304953e
JJ
2989/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2990 doesn't fit into TYPE. The test for overflow should be regardless of
2991 -fwrapv, and even for unsigned types. */
2992
2993bool
2994arith_overflowed_p (enum tree_code code, const_tree type,
2995 const_tree arg0, const_tree arg1)
2996{
2997 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2998 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2999 widest2_int_cst;
3000 widest2_int warg0 = widest2_int_cst (arg0);
3001 widest2_int warg1 = widest2_int_cst (arg1);
3002 widest2_int wres;
3003 switch (code)
3004 {
3005 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3006 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3007 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3008 default: gcc_unreachable ();
3009 }
3010 signop sign = TYPE_SIGN (type);
3011 if (sign == UNSIGNED && wi::neg_p (wres))
3012 return true;
3013 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3014}
3015
cbdd87d4
RG
3016/* Attempt to fold a call statement referenced by the statement iterator GSI.
3017 The statement may be replaced by another statement, e.g., if the call
3018 simplifies to a constant value. Return true if any changes were made.
3019 It is assumed that the operands have been previously folded. */
3020
e021c122 3021static bool
ceeffab0 3022gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 3023{
538dd0b7 3024 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 3025 tree callee;
e021c122
RG
3026 bool changed = false;
3027 unsigned i;
cbdd87d4 3028
e021c122
RG
3029 /* Fold *& in call arguments. */
3030 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3031 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3032 {
3033 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3034 if (tmp)
3035 {
3036 gimple_call_set_arg (stmt, i, tmp);
3037 changed = true;
3038 }
3039 }
3b45a007
RG
3040
3041 /* Check for virtual calls that became direct calls. */
3042 callee = gimple_call_fn (stmt);
25583c4f 3043 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 3044 {
49c471e3
MJ
3045 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3046 {
450ad0cd
JH
3047 if (dump_file && virtual_method_call_p (callee)
3048 && !possible_polymorphic_call_target_p
6f8091fc
JH
3049 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3050 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
3051 {
3052 fprintf (dump_file,
a70e9985 3053 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
3054 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3055 fprintf (dump_file, " to ");
3056 print_generic_expr (dump_file, callee, TDF_SLIM);
3057 fprintf (dump_file, "\n");
3058 }
3059
49c471e3 3060 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
3061 changed = true;
3062 }
a70e9985 3063 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 3064 {
61dd6a2e
JH
3065 bool final;
3066 vec <cgraph_node *>targets
058d0a90 3067 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 3068 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 3069 {
a70e9985 3070 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
3071 if (dump_enabled_p ())
3072 {
807b7d62 3073 location_t loc = gimple_location_safe (stmt);
2b5f0895
XDL
3074 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3075 "folding virtual function call to %s\n",
3076 targets.length () == 1
3077 ? targets[0]->name ()
3078 : "__builtin_unreachable");
3079 }
61dd6a2e 3080 if (targets.length () == 1)
cf3e5a89
JJ
3081 {
3082 gimple_call_set_fndecl (stmt, targets[0]->decl);
3083 changed = true;
a70e9985
JJ
3084 /* If the call becomes noreturn, remove the lhs. */
3085 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
3086 {
3087 if (TREE_CODE (lhs) == SSA_NAME)
3088 {
b731b390 3089 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985
JJ
3090 tree def = get_or_create_ssa_default_def (cfun, var);
3091 gimple new_stmt = gimple_build_assign (lhs, def);
3092 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3093 }
3094 gimple_call_set_lhs (stmt, NULL_TREE);
3095 }
0b986c6a 3096 maybe_remove_unused_call_args (cfun, stmt);
cf3e5a89 3097 }
a70e9985 3098 else
cf3e5a89
JJ
3099 {
3100 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3101 gimple new_stmt = gimple_build_call (fndecl, 0);
3102 gimple_set_location (new_stmt, gimple_location (stmt));
a70e9985
JJ
3103 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3104 {
b731b390 3105 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 3106 tree def = get_or_create_ssa_default_def (cfun, var);
eb14a79f
ML
3107
3108 /* To satisfy condition for
3109 cgraph_update_edges_for_call_stmt_node,
3110 we need to preserve GIMPLE_CALL statement
3111 at position of GSI iterator. */
a70e9985 3112 update_call_from_tree (gsi, def);
eb14a79f 3113 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
a70e9985
JJ
3114 }
3115 else
42e52a51
RB
3116 {
3117 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3118 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3119 gsi_replace (gsi, new_stmt, false);
3120 }
cf3e5a89
JJ
3121 return true;
3122 }
e021c122 3123 }
49c471e3 3124 }
e021c122 3125 }
49c471e3 3126
f2d3d07e
RH
3127 /* Check for indirect calls that became direct calls, and then
3128 no longer require a static chain. */
3129 if (gimple_call_chain (stmt))
3130 {
3131 tree fn = gimple_call_fndecl (stmt);
3132 if (fn && !DECL_STATIC_CHAIN (fn))
3133 {
3134 gimple_call_set_chain (stmt, NULL);
3135 changed = true;
3136 }
3137 else
3138 {
3139 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3140 if (tmp)
3141 {
3142 gimple_call_set_chain (stmt, tmp);
3143 changed = true;
3144 }
3145 }
3146 }
3147
e021c122
RG
3148 if (inplace)
3149 return changed;
3150
3151 /* Check for builtins that CCP can handle using information not
3152 available in the generic fold routines. */
fef5a0d9
RB
3153 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3154 {
3155 if (gimple_fold_builtin (gsi))
3156 changed = true;
3157 }
3158 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 3159 {
ea679d55 3160 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 3161 }
368b454d 3162 else if (gimple_call_internal_p (stmt))
ed9c79e1 3163 {
368b454d
JJ
3164 enum tree_code subcode = ERROR_MARK;
3165 tree result = NULL_TREE;
1304953e
JJ
3166 bool cplx_result = false;
3167 tree overflow = NULL_TREE;
368b454d
JJ
3168 switch (gimple_call_internal_fn (stmt))
3169 {
3170 case IFN_BUILTIN_EXPECT:
3171 result = fold_builtin_expect (gimple_location (stmt),
3172 gimple_call_arg (stmt, 0),
3173 gimple_call_arg (stmt, 1),
3174 gimple_call_arg (stmt, 2));
3175 break;
0e82f089
MP
3176 case IFN_UBSAN_OBJECT_SIZE:
3177 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3178 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3179 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3180 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3181 gimple_call_arg (stmt, 2))))
3182 {
3183 gsi_replace (gsi, gimple_build_nop (), true);
3184 unlink_stmt_vdef (stmt);
3185 release_defs (stmt);
3186 return true;
3187 }
3188 break;
368b454d
JJ
3189 case IFN_UBSAN_CHECK_ADD:
3190 subcode = PLUS_EXPR;
3191 break;
3192 case IFN_UBSAN_CHECK_SUB:
3193 subcode = MINUS_EXPR;
3194 break;
3195 case IFN_UBSAN_CHECK_MUL:
3196 subcode = MULT_EXPR;
3197 break;
1304953e
JJ
3198 case IFN_ADD_OVERFLOW:
3199 subcode = PLUS_EXPR;
3200 cplx_result = true;
3201 break;
3202 case IFN_SUB_OVERFLOW:
3203 subcode = MINUS_EXPR;
3204 cplx_result = true;
3205 break;
3206 case IFN_MUL_OVERFLOW:
3207 subcode = MULT_EXPR;
3208 cplx_result = true;
3209 break;
368b454d
JJ
3210 default:
3211 break;
3212 }
3213 if (subcode != ERROR_MARK)
3214 {
3215 tree arg0 = gimple_call_arg (stmt, 0);
3216 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
3217 tree type = TREE_TYPE (arg0);
3218 if (cplx_result)
3219 {
3220 tree lhs = gimple_call_lhs (stmt);
3221 if (lhs == NULL_TREE)
3222 type = NULL_TREE;
3223 else
3224 type = TREE_TYPE (TREE_TYPE (lhs));
3225 }
3226 if (type == NULL_TREE)
3227 ;
368b454d 3228 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
3229 else if (integer_zerop (arg1))
3230 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
3231 /* x = 0 + y; x = 0 * y; */
3232 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 3233 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
3234 /* x = y - y; */
3235 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 3236 result = integer_zero_node;
368b454d 3237 /* x = y * 1; x = 1 * y; */
1304953e
JJ
3238 else if (subcode == MULT_EXPR && integer_onep (arg1))
3239 result = arg0;
3240 else if (subcode == MULT_EXPR && integer_onep (arg0))
3241 result = arg1;
3242 else if (TREE_CODE (arg0) == INTEGER_CST
3243 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 3244 {
1304953e
JJ
3245 if (cplx_result)
3246 result = int_const_binop (subcode, fold_convert (type, arg0),
3247 fold_convert (type, arg1));
3248 else
3249 result = int_const_binop (subcode, arg0, arg1);
3250 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3251 {
3252 if (cplx_result)
3253 overflow = build_one_cst (type);
3254 else
3255 result = NULL_TREE;
3256 }
3257 }
3258 if (result)
3259 {
3260 if (result == integer_zero_node)
3261 result = build_zero_cst (type);
3262 else if (cplx_result && TREE_TYPE (result) != type)
3263 {
3264 if (TREE_CODE (result) == INTEGER_CST)
3265 {
3266 if (arith_overflowed_p (PLUS_EXPR, type, result,
3267 integer_zero_node))
3268 overflow = build_one_cst (type);
3269 }
3270 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3271 && TYPE_UNSIGNED (type))
3272 || (TYPE_PRECISION (type)
3273 < (TYPE_PRECISION (TREE_TYPE (result))
3274 + (TYPE_UNSIGNED (TREE_TYPE (result))
3275 && !TYPE_UNSIGNED (type)))))
3276 result = NULL_TREE;
3277 if (result)
3278 result = fold_convert (type, result);
3279 }
368b454d
JJ
3280 }
3281 }
1304953e 3282
ed9c79e1
JJ
3283 if (result)
3284 {
1304953e
JJ
3285 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3286 result = drop_tree_overflow (result);
3287 if (cplx_result)
3288 {
3289 if (overflow == NULL_TREE)
3290 overflow = build_zero_cst (TREE_TYPE (result));
3291 tree ctype = build_complex_type (TREE_TYPE (result));
3292 if (TREE_CODE (result) == INTEGER_CST
3293 && TREE_CODE (overflow) == INTEGER_CST)
3294 result = build_complex (ctype, result, overflow);
3295 else
3296 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3297 ctype, result, overflow);
3298 }
ed9c79e1
JJ
3299 if (!update_call_from_tree (gsi, result))
3300 gimplify_and_update_call_from_tree (gsi, result);
3301 changed = true;
3302 }
3303 }
3b45a007 3304
e021c122 3305 return changed;
cbdd87d4
RG
3306}
3307
e0ee10ed
RB
3308
3309/* Worker for fold_stmt_1 dispatch to pattern based folding with
3310 gimple_simplify.
3311
3312 Replaces *GSI with the simplification result in RCODE and OPS
3313 and the associated statements in *SEQ. Does the replacement
3314 according to INPLACE and returns true if the operation succeeded. */
3315
3316static bool
3317replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3318 code_helper rcode, tree *ops,
3319 gimple_seq *seq, bool inplace)
3320{
3321 gimple stmt = gsi_stmt (*gsi);
3322
3323 /* Play safe and do not allow abnormals to be mentioned in
3324 newly created statements. See also maybe_push_res_to_seq. */
3325 if ((TREE_CODE (ops[0]) == SSA_NAME
3326 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
3327 || (ops[1]
3328 && TREE_CODE (ops[1]) == SSA_NAME
3329 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
3330 || (ops[2]
3331 && TREE_CODE (ops[2]) == SSA_NAME
3332 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
3333 return false;
3334
538dd0b7 3335 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
e0ee10ed
RB
3336 {
3337 gcc_assert (rcode.is_tree_code ());
3338 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3339 /* GIMPLE_CONDs condition may not throw. */
3340 && (!flag_exceptions
3341 || !cfun->can_throw_non_call_exceptions
3342 || !operation_could_trap_p (rcode,
3343 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3344 false, NULL_TREE)))
538dd0b7 3345 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
e0ee10ed 3346 else if (rcode == SSA_NAME)
538dd0b7 3347 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
e0ee10ed
RB
3348 build_zero_cst (TREE_TYPE (ops[0])));
3349 else if (rcode == INTEGER_CST)
3350 {
3351 if (integer_zerop (ops[0]))
538dd0b7 3352 gimple_cond_make_false (cond_stmt);
e0ee10ed 3353 else
538dd0b7 3354 gimple_cond_make_true (cond_stmt);
e0ee10ed
RB
3355 }
3356 else if (!inplace)
3357 {
3358 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3359 ops, seq);
3360 if (!res)
3361 return false;
538dd0b7 3362 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
e0ee10ed
RB
3363 build_zero_cst (TREE_TYPE (res)));
3364 }
3365 else
3366 return false;
3367 if (dump_file && (dump_flags & TDF_DETAILS))
3368 {
3369 fprintf (dump_file, "gimple_simplified to ");
3370 if (!gimple_seq_empty_p (*seq))
3371 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3372 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3373 0, TDF_SLIM);
3374 }
3375 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3376 return true;
3377 }
3378 else if (is_gimple_assign (stmt)
3379 && rcode.is_tree_code ())
3380 {
3381 if (!inplace
f3582e54 3382 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
e0ee10ed
RB
3383 {
3384 maybe_build_generic_op (rcode,
3385 TREE_TYPE (gimple_assign_lhs (stmt)),
3386 &ops[0], ops[1], ops[2]);
00d66391 3387 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
e0ee10ed
RB
3388 if (dump_file && (dump_flags & TDF_DETAILS))
3389 {
3390 fprintf (dump_file, "gimple_simplified to ");
3391 if (!gimple_seq_empty_p (*seq))
3392 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3393 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3394 0, TDF_SLIM);
3395 }
3396 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3397 return true;
3398 }
3399 }
3400 else if (!inplace)
3401 {
3402 if (gimple_has_lhs (stmt))
3403 {
3404 tree lhs = gimple_get_lhs (stmt);
de665bbd
RB
3405 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3406 ops, seq, lhs))
3407 return false;
e0ee10ed
RB
3408 if (dump_file && (dump_flags & TDF_DETAILS))
3409 {
3410 fprintf (dump_file, "gimple_simplified to ");
3411 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3412 }
3413 gsi_replace_with_seq_vops (gsi, *seq);
3414 return true;
3415 }
3416 else
3417 gcc_unreachable ();
3418 }
3419
3420 return false;
3421}
3422
040292e7
RB
3423/* Canonicalize MEM_REFs invariant address operand after propagation. */
3424
3425static bool
3426maybe_canonicalize_mem_ref_addr (tree *t)
3427{
3428 bool res = false;
3429
3430 if (TREE_CODE (*t) == ADDR_EXPR)
3431 t = &TREE_OPERAND (*t, 0);
3432
3433 while (handled_component_p (*t))
3434 t = &TREE_OPERAND (*t, 0);
3435
3436 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3437 of invariant addresses into a SSA name MEM_REF address. */
3438 if (TREE_CODE (*t) == MEM_REF
3439 || TREE_CODE (*t) == TARGET_MEM_REF)
3440 {
3441 tree addr = TREE_OPERAND (*t, 0);
3442 if (TREE_CODE (addr) == ADDR_EXPR
3443 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3444 || handled_component_p (TREE_OPERAND (addr, 0))))
3445 {
3446 tree base;
3447 HOST_WIDE_INT coffset;
3448 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3449 &coffset);
3450 if (!base)
3451 gcc_unreachable ();
3452
3453 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3454 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3455 TREE_OPERAND (*t, 1),
3456 size_int (coffset));
3457 res = true;
3458 }
3459 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3460 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3461 }
3462
3463 /* Canonicalize back MEM_REFs to plain reference trees if the object
3464 accessed is a decl that has the same access semantics as the MEM_REF. */
3465 if (TREE_CODE (*t) == MEM_REF
3466 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
f3dccf50
RB
3467 && integer_zerop (TREE_OPERAND (*t, 1))
3468 && MR_DEPENDENCE_CLIQUE (*t) == 0)
040292e7
RB
3469 {
3470 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3471 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3472 if (/* Same volatile qualification. */
3473 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3474 /* Same TBAA behavior with -fstrict-aliasing. */
3475 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3476 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3477 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3478 /* Same alignment. */
3479 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3480 /* We have to look out here to not drop a required conversion
3481 from the rhs to the lhs if *t appears on the lhs or vice-versa
3482 if it appears on the rhs. Thus require strict type
3483 compatibility. */
3484 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3485 {
3486 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3487 res = true;
3488 }
3489 }
3490
3491 /* Canonicalize TARGET_MEM_REF in particular with respect to
3492 the indexes becoming constant. */
3493 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3494 {
3495 tree tem = maybe_fold_tmr (*t);
3496 if (tem)
3497 {
3498 *t = tem;
3499 res = true;
3500 }
3501 }
3502
3503 return res;
3504}
3505
cbdd87d4
RG
3506/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3507 distinguishes both cases. */
3508
3509static bool
e0ee10ed 3510fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
cbdd87d4
RG
3511{
3512 bool changed = false;
3513 gimple stmt = gsi_stmt (*gsi);
3514 unsigned i;
3515
040292e7
RB
3516 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3517 after propagation.
3518 ??? This shouldn't be done in generic folding but in the
3519 propagation helpers which also know whether an address was
3520 propagated. */
3521 switch (gimple_code (stmt))
3522 {
3523 case GIMPLE_ASSIGN:
3524 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3525 {
3526 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3527 if ((REFERENCE_CLASS_P (*rhs)
3528 || TREE_CODE (*rhs) == ADDR_EXPR)
3529 && maybe_canonicalize_mem_ref_addr (rhs))
3530 changed = true;
3531 tree *lhs = gimple_assign_lhs_ptr (stmt);
3532 if (REFERENCE_CLASS_P (*lhs)
3533 && maybe_canonicalize_mem_ref_addr (lhs))
3534 changed = true;
3535 }
3536 break;
3537 case GIMPLE_CALL:
3538 {
3539 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3540 {
3541 tree *arg = gimple_call_arg_ptr (stmt, i);
3542 if (REFERENCE_CLASS_P (*arg)
3543 && maybe_canonicalize_mem_ref_addr (arg))
3544 changed = true;
3545 }
3546 tree *lhs = gimple_call_lhs_ptr (stmt);
3547 if (*lhs
3548 && REFERENCE_CLASS_P (*lhs)
3549 && maybe_canonicalize_mem_ref_addr (lhs))
3550 changed = true;
3551 break;
3552 }
3553 case GIMPLE_ASM:
3554 {
538dd0b7
DM
3555 gasm *asm_stmt = as_a <gasm *> (stmt);
3556 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
040292e7 3557 {
538dd0b7 3558 tree link = gimple_asm_output_op (asm_stmt, i);
040292e7
RB
3559 tree op = TREE_VALUE (link);
3560 if (REFERENCE_CLASS_P (op)
3561 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3562 changed = true;
3563 }
538dd0b7 3564 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
040292e7 3565 {
538dd0b7 3566 tree link = gimple_asm_input_op (asm_stmt, i);
040292e7
RB
3567 tree op = TREE_VALUE (link);
3568 if ((REFERENCE_CLASS_P (op)
3569 || TREE_CODE (op) == ADDR_EXPR)
3570 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3571 changed = true;
3572 }
3573 }
3574 break;
3575 case GIMPLE_DEBUG:
3576 if (gimple_debug_bind_p (stmt))
3577 {
3578 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3579 if (*val
3580 && (REFERENCE_CLASS_P (*val)
3581 || TREE_CODE (*val) == ADDR_EXPR)
3582 && maybe_canonicalize_mem_ref_addr (val))
3583 changed = true;
3584 }
3585 break;
3586 default:;
3587 }
3588
e0ee10ed
RB
3589 /* Dispatch to pattern-based folding. */
3590 if (!inplace
3591 || is_gimple_assign (stmt)
3592 || gimple_code (stmt) == GIMPLE_COND)
3593 {
3594 gimple_seq seq = NULL;
3595 code_helper rcode;
3596 tree ops[3] = {};
0ff093d8
RB
3597 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3598 valueize, valueize))
e0ee10ed
RB
3599 {
3600 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3601 changed = true;
3602 else
3603 gimple_seq_discard (seq);
3604 }
3605 }
3606
3607 stmt = gsi_stmt (*gsi);
3608
cbdd87d4
RG
3609 /* Fold the main computation performed by the statement. */
3610 switch (gimple_code (stmt))
3611 {
3612 case GIMPLE_ASSIGN:
3613 {
3614 unsigned old_num_ops = gimple_num_ops (stmt);
5fbcc0ed 3615 enum tree_code subcode = gimple_assign_rhs_code (stmt);
cbdd87d4 3616 tree lhs = gimple_assign_lhs (stmt);
5fbcc0ed
RG
3617 tree new_rhs;
3618 /* First canonicalize operand order. This avoids building new
3619 trees if this is the only thing fold would later do. */
3620 if ((commutative_tree_code (subcode)
3621 || commutative_ternary_tree_code (subcode))
3622 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
3623 gimple_assign_rhs2 (stmt), false))
3624 {
3625 tree tem = gimple_assign_rhs1 (stmt);
3626 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
3627 gimple_assign_set_rhs2 (stmt, tem);
3628 changed = true;
3629 }
3630 new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
3631 if (new_rhs
3632 && !useless_type_conversion_p (TREE_TYPE (lhs),
3633 TREE_TYPE (new_rhs)))
3634 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3635 if (new_rhs
3636 && (!inplace
3637 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3638 {
3639 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3640 changed = true;
3641 }
3642 break;
3643 }
3644
3645 case GIMPLE_COND:
538dd0b7 3646 changed |= fold_gimple_cond (as_a <gcond *> (stmt));
cbdd87d4
RG
3647 break;
3648
3649 case GIMPLE_CALL:
ceeffab0 3650 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
3651 break;
3652
3653 case GIMPLE_ASM:
3654 /* Fold *& in asm operands. */
38384150 3655 {
538dd0b7 3656 gasm *asm_stmt = as_a <gasm *> (stmt);
38384150
JJ
3657 size_t noutputs;
3658 const char **oconstraints;
3659 const char *constraint;
3660 bool allows_mem, allows_reg;
3661
538dd0b7 3662 noutputs = gimple_asm_noutputs (asm_stmt);
38384150
JJ
3663 oconstraints = XALLOCAVEC (const char *, noutputs);
3664
538dd0b7 3665 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
38384150 3666 {
538dd0b7 3667 tree link = gimple_asm_output_op (asm_stmt, i);
38384150
JJ
3668 tree op = TREE_VALUE (link);
3669 oconstraints[i]
3670 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3671 if (REFERENCE_CLASS_P (op)
3672 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3673 {
3674 TREE_VALUE (link) = op;
3675 changed = true;
3676 }
3677 }
538dd0b7 3678 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
38384150 3679 {
538dd0b7 3680 tree link = gimple_asm_input_op (asm_stmt, i);
38384150
JJ
3681 tree op = TREE_VALUE (link);
3682 constraint
3683 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3684 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3685 oconstraints, &allows_mem, &allows_reg);
3686 if (REFERENCE_CLASS_P (op)
3687 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3688 != NULL_TREE)
3689 {
3690 TREE_VALUE (link) = op;
3691 changed = true;
3692 }
3693 }
3694 }
cbdd87d4
RG
3695 break;
3696
bd422c4a
RG
3697 case GIMPLE_DEBUG:
3698 if (gimple_debug_bind_p (stmt))
3699 {
3700 tree val = gimple_debug_bind_get_value (stmt);
3701 if (val
3702 && REFERENCE_CLASS_P (val))
3703 {
3704 tree tem = maybe_fold_reference (val, false);
3705 if (tem)
3706 {
3707 gimple_debug_bind_set_value (stmt, tem);
3708 changed = true;
3709 }
3710 }
3e888a5e
RG
3711 else if (val
3712 && TREE_CODE (val) == ADDR_EXPR)
3713 {
3714 tree ref = TREE_OPERAND (val, 0);
3715 tree tem = maybe_fold_reference (ref, false);
3716 if (tem)
3717 {
3718 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3719 gimple_debug_bind_set_value (stmt, tem);
3720 changed = true;
3721 }
3722 }
bd422c4a
RG
3723 }
3724 break;
3725
cbdd87d4
RG
3726 default:;
3727 }
3728
3729 stmt = gsi_stmt (*gsi);
3730
37376165
RB
3731 /* Fold *& on the lhs. */
3732 if (gimple_has_lhs (stmt))
cbdd87d4
RG
3733 {
3734 tree lhs = gimple_get_lhs (stmt);
3735 if (lhs && REFERENCE_CLASS_P (lhs))
3736 {
3737 tree new_lhs = maybe_fold_reference (lhs, true);
3738 if (new_lhs)
3739 {
3740 gimple_set_lhs (stmt, new_lhs);
3741 changed = true;
3742 }
3743 }
3744 }
3745
3746 return changed;
3747}
3748
e0ee10ed
RB
3749/* Valueziation callback that ends up not following SSA edges. */
3750
3751tree
3752no_follow_ssa_edges (tree)
3753{
3754 return NULL_TREE;
3755}
3756
45cc9f96
RB
3757/* Valueization callback that ends up following single-use SSA edges only. */
3758
3759tree
3760follow_single_use_edges (tree val)
3761{
3762 if (TREE_CODE (val) == SSA_NAME
3763 && !has_single_use (val))
3764 return NULL_TREE;
3765 return val;
3766}
3767
cbdd87d4
RG
3768/* Fold the statement pointed to by GSI. In some cases, this function may
3769 replace the whole statement with a new one. Returns true iff folding
3770 makes any changes.
3771 The statement pointed to by GSI should be in valid gimple form but may
3772 be in unfolded state as resulting from for example constant propagation
3773 which can produce *&x = 0. */
3774
3775bool
3776fold_stmt (gimple_stmt_iterator *gsi)
3777{
e0ee10ed
RB
3778 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3779}
3780
3781bool
3782fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3783{
3784 return fold_stmt_1 (gsi, false, valueize);
cbdd87d4
RG
3785}
3786
59401b92 3787/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
3788 *&x created by constant propagation are handled. The statement cannot
3789 be replaced with a new one. Return true if the statement was
3790 changed, false otherwise.
59401b92 3791 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
3792 be in unfolded state as resulting from for example constant propagation
3793 which can produce *&x = 0. */
3794
3795bool
59401b92 3796fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 3797{
59401b92 3798 gimple stmt = gsi_stmt (*gsi);
e0ee10ed 3799 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 3800 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
3801 return changed;
3802}
3803
e89065a1
SL
3804/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3805 if EXPR is null or we don't know how.
3806 If non-null, the result always has boolean type. */
3807
3808static tree
3809canonicalize_bool (tree expr, bool invert)
3810{
3811 if (!expr)
3812 return NULL_TREE;
3813 else if (invert)
3814 {
3815 if (integer_nonzerop (expr))
3816 return boolean_false_node;
3817 else if (integer_zerop (expr))
3818 return boolean_true_node;
3819 else if (TREE_CODE (expr) == SSA_NAME)
3820 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3821 build_int_cst (TREE_TYPE (expr), 0));
98209db3 3822 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
3823 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3824 boolean_type_node,
3825 TREE_OPERAND (expr, 0),
3826 TREE_OPERAND (expr, 1));
3827 else
3828 return NULL_TREE;
3829 }
3830 else
3831 {
3832 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3833 return expr;
3834 if (integer_nonzerop (expr))
3835 return boolean_true_node;
3836 else if (integer_zerop (expr))
3837 return boolean_false_node;
3838 else if (TREE_CODE (expr) == SSA_NAME)
3839 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3840 build_int_cst (TREE_TYPE (expr), 0));
98209db3 3841 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
3842 return fold_build2 (TREE_CODE (expr),
3843 boolean_type_node,
3844 TREE_OPERAND (expr, 0),
3845 TREE_OPERAND (expr, 1));
3846 else
3847 return NULL_TREE;
3848 }
3849}
3850
3851/* Check to see if a boolean expression EXPR is logically equivalent to the
3852 comparison (OP1 CODE OP2). Check for various identities involving
3853 SSA_NAMEs. */
3854
3855static bool
3856same_bool_comparison_p (const_tree expr, enum tree_code code,
3857 const_tree op1, const_tree op2)
3858{
3859 gimple s;
3860
3861 /* The obvious case. */
3862 if (TREE_CODE (expr) == code
3863 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3864 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3865 return true;
3866
3867 /* Check for comparing (name, name != 0) and the case where expr
3868 is an SSA_NAME with a definition matching the comparison. */
3869 if (TREE_CODE (expr) == SSA_NAME
3870 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3871 {
3872 if (operand_equal_p (expr, op1, 0))
3873 return ((code == NE_EXPR && integer_zerop (op2))
3874 || (code == EQ_EXPR && integer_nonzerop (op2)));
3875 s = SSA_NAME_DEF_STMT (expr);
3876 if (is_gimple_assign (s)
3877 && gimple_assign_rhs_code (s) == code
3878 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3879 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3880 return true;
3881 }
3882
3883 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3884 of name is a comparison, recurse. */
3885 if (TREE_CODE (op1) == SSA_NAME
3886 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3887 {
3888 s = SSA_NAME_DEF_STMT (op1);
3889 if (is_gimple_assign (s)
3890 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3891 {
3892 enum tree_code c = gimple_assign_rhs_code (s);
3893 if ((c == NE_EXPR && integer_zerop (op2))
3894 || (c == EQ_EXPR && integer_nonzerop (op2)))
3895 return same_bool_comparison_p (expr, c,
3896 gimple_assign_rhs1 (s),
3897 gimple_assign_rhs2 (s));
3898 if ((c == EQ_EXPR && integer_zerop (op2))
3899 || (c == NE_EXPR && integer_nonzerop (op2)))
3900 return same_bool_comparison_p (expr,
3901 invert_tree_comparison (c, false),
3902 gimple_assign_rhs1 (s),
3903 gimple_assign_rhs2 (s));
3904 }
3905 }
3906 return false;
3907}
3908
3909/* Check to see if two boolean expressions OP1 and OP2 are logically
3910 equivalent. */
3911
3912static bool
3913same_bool_result_p (const_tree op1, const_tree op2)
3914{
3915 /* Simple cases first. */
3916 if (operand_equal_p (op1, op2, 0))
3917 return true;
3918
3919 /* Check the cases where at least one of the operands is a comparison.
3920 These are a bit smarter than operand_equal_p in that they apply some
3921 identifies on SSA_NAMEs. */
98209db3 3922 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
3923 && same_bool_comparison_p (op1, TREE_CODE (op2),
3924 TREE_OPERAND (op2, 0),
3925 TREE_OPERAND (op2, 1)))
3926 return true;
98209db3 3927 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
3928 && same_bool_comparison_p (op2, TREE_CODE (op1),
3929 TREE_OPERAND (op1, 0),
3930 TREE_OPERAND (op1, 1)))
3931 return true;
3932
3933 /* Default case. */
3934 return false;
3935}
3936
/* Forward declarations for some mutually recursive functions.
   The and_*/or_* helpers below call into each other (DeMorgan rewrites
   route AND queries through the OR helpers and vice versa), so all six
   must be declared before any is defined.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
3957
3958/* Helper function for and_comparisons_1: try to simplify the AND of the
3959 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3960 If INVERT is true, invert the value of the VAR before doing the AND.
3961 Return NULL_EXPR if we can't simplify this to a single expression. */
3962
3963static tree
3964and_var_with_comparison (tree var, bool invert,
3965 enum tree_code code2, tree op2a, tree op2b)
3966{
3967 tree t;
3968 gimple stmt = SSA_NAME_DEF_STMT (var);
3969
3970 /* We can only deal with variables whose definitions are assignments. */
3971 if (!is_gimple_assign (stmt))
3972 return NULL_TREE;
3973
3974 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3975 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3976 Then we only have to consider the simpler non-inverted cases. */
3977 if (invert)
3978 t = or_var_with_comparison_1 (stmt,
3979 invert_tree_comparison (code2, false),
3980 op2a, op2b);
3981 else
3982 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
3983 return canonicalize_bool (t, invert);
3984}
3985
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.
     Record which of the two forms OP2A matches so the reassociation
     code below can reuse the classification.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* The second comparison is just "op2a is true".  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* The second comparison is just "op2a is false".  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  /* No simplification found.  */
  return NULL_TREE;
}
4148
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  CMP below is <0, 0 or >0 as op1b compares to
     op2b, and each case analysis keys off that ordering.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* If the != test subsumes the other comparison, the AND
	     reduces to that other comparison.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges: (x >= c AND x <= c) => x == c.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true for (NAME == 0) and (NAME != 1), i.e. the first
	 comparison tests that NAME is false.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);
		  
		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  /* This arg makes the AND false; all args must
			     agree on false for the fold to hold.  */
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4402
4403/* Try to simplify the AND of two comparisons, specified by
4404 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4405 If this can be simplified to a single expression (without requiring
4406 introducing more SSA variables to hold intermediate values),
4407 return the resulting tree. Otherwise return NULL_TREE.
4408 If the result expression is non-null, it has boolean type. */
4409
4410tree
4411maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4412 enum tree_code code2, tree op2a, tree op2b)
4413{
4414 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4415 if (t)
4416 return t;
4417 else
4418 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4419}
4420
4421/* Helper function for or_comparisons_1: try to simplify the OR of the
4422 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4423 If INVERT is true, invert the value of VAR before doing the OR.
4424 Return NULL_EXPR if we can't simplify this to a single expression. */
4425
4426static tree
4427or_var_with_comparison (tree var, bool invert,
4428 enum tree_code code2, tree op2a, tree op2b)
4429{
4430 tree t;
4431 gimple stmt = SSA_NAME_DEF_STMT (var);
4432
4433 /* We can only deal with variables whose definitions are assignments. */
4434 if (!is_gimple_assign (stmt))
4435 return NULL_TREE;
4436
4437 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4438 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4439 Then we only have to consider the simpler non-inverted cases. */
4440 if (invert)
4441 t = and_var_with_comparison_1 (stmt,
4442 invert_tree_comparison (code2, false),
4443 op2a, op2b);
4444 else
4445 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4446 return canonicalize_bool (t, invert);
4447}
4448
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.
   This is the dual of and_var_with_comparison_1.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .
     Record which of the two forms OP2A matches so the reassociation
     code below can reuse the classification.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* The second comparison is just "op2a is true".  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* The second comparison is just "op2a is false".  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else 
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  /* No simplification found.  */
  return NULL_TREE;
}
4612
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.
   This is the dual of and_comparisons_1.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  CMP below is <0, 0 or >0 as op1b compares to
     op2b, and each case analysis keys off that ordering.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise chose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges: (x < c OR x > c) => x != c.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true for (NAME == 0) and (NAME != 1), i.e. the first
	 comparison tests that NAME is false.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);
		  
		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  /* This arg makes the OR true; all args must
			     agree on true for the fold to hold.  */
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4866
4867/* Try to simplify the OR of two comparisons, specified by
4868 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4869 If this can be simplified to a single expression (without requiring
4870 introducing more SSA variables to hold intermediate values),
4871 return the resulting tree. Otherwise return NULL_TREE.
4872 If the result expression is non-null, it has boolean type. */
4873
4874tree
4875maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4876 enum tree_code code2, tree op2a, tree op2b)
4877{
4878 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4879 if (t)
4880 return t;
4881 else
4882 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4883}
cfef45c8
RG
4884
4885
4886/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4887
4888 Either NULL_TREE, a simplified but non-constant or a constant
4889 is returned.
4890
4891 ??? This should go into a gimple-fold-inline.h file to be eventually
4892 privatized with the single valueize function used in the various TUs
4893 to avoid the indirect function call overhead. */
4894
4895tree
d2a85801
RB
4896gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
4897 tree (*gvalueize) (tree))
cfef45c8 4898{
45cc9f96
RB
4899 code_helper rcode;
4900 tree ops[3] = {};
4901 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4902 edges if there are intermediate VARYING defs. For this reason
4903 do not follow SSA edges here even though SCCVN can technically
4904 just deal fine with that. */
0ff093d8 4905 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize)
45cc9f96
RB
4906 && rcode.is_tree_code ()
4907 && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
4908 || ((tree_code) rcode) == ADDR_EXPR)
4909 && is_gimple_val (ops[0]))
4910 {
4911 tree res = ops[0];
4912 if (dump_file && dump_flags & TDF_DETAILS)
4913 {
4914 fprintf (dump_file, "Match-and-simplified ");
4915 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4916 fprintf (dump_file, " to ");
4917 print_generic_expr (dump_file, res, 0);
4918 fprintf (dump_file, "\n");
4919 }
4920 return res;
4921 }
4922
cfef45c8
RG
4923 location_t loc = gimple_location (stmt);
4924 switch (gimple_code (stmt))
4925 {
4926 case GIMPLE_ASSIGN:
4927 {
4928 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4929
4930 switch (get_gimple_rhs_class (subcode))
4931 {
4932 case GIMPLE_SINGLE_RHS:
4933 {
4934 tree rhs = gimple_assign_rhs1 (stmt);
4935 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4936
4937 if (TREE_CODE (rhs) == SSA_NAME)
4938 {
4939 /* If the RHS is an SSA_NAME, return its known constant value,
4940 if any. */
4941 return (*valueize) (rhs);
4942 }
4943 /* Handle propagating invariant addresses into address
4944 operations. */
4945 else if (TREE_CODE (rhs) == ADDR_EXPR
4946 && !is_gimple_min_invariant (rhs))
4947 {
d25c4172 4948 HOST_WIDE_INT offset = 0;
cfef45c8
RG
4949 tree base;
4950 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
4951 &offset,
4952 valueize);
4953 if (base
4954 && (CONSTANT_CLASS_P (base)
4955 || decl_address_invariant_p (base)))
4956 return build_invariant_address (TREE_TYPE (rhs),
4957 base, offset);
4958 }
4959 else if (TREE_CODE (rhs) == CONSTRUCTOR
4960 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
4961 && (CONSTRUCTOR_NELTS (rhs)
4962 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
4963 {
4964 unsigned i;
d2a12ae7 4965 tree val, *vec;
cfef45c8 4966
d2a12ae7
RG
4967 vec = XALLOCAVEC (tree,
4968 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
cfef45c8
RG
4969 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
4970 {
4971 val = (*valueize) (val);
4972 if (TREE_CODE (val) == INTEGER_CST
4973 || TREE_CODE (val) == REAL_CST
4974 || TREE_CODE (val) == FIXED_CST)
d2a12ae7 4975 vec[i] = val;
cfef45c8
RG
4976 else
4977 return NULL_TREE;
4978 }
4979
d2a12ae7 4980 return build_vector (TREE_TYPE (rhs), vec);
cfef45c8 4981 }
bdf37f7a
JH
4982 if (subcode == OBJ_TYPE_REF)
4983 {
4984 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
4985 /* If callee is constant, we can fold away the wrapper. */
4986 if (is_gimple_min_invariant (val))
4987 return val;
4988 }
cfef45c8
RG
4989
4990 if (kind == tcc_reference)
4991 {
4992 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
4993 || TREE_CODE (rhs) == REALPART_EXPR
4994 || TREE_CODE (rhs) == IMAGPART_EXPR)
4995 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4996 {
4997 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4998 return fold_unary_loc (EXPR_LOCATION (rhs),
4999 TREE_CODE (rhs),
5000 TREE_TYPE (rhs), val);
5001 }
5002 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5003 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5004 {
5005 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5006 return fold_ternary_loc (EXPR_LOCATION (rhs),
5007 TREE_CODE (rhs),
5008 TREE_TYPE (rhs), val,
5009 TREE_OPERAND (rhs, 1),
5010 TREE_OPERAND (rhs, 2));
5011 }
5012 else if (TREE_CODE (rhs) == MEM_REF
5013 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5014 {
5015 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5016 if (TREE_CODE (val) == ADDR_EXPR
5017 && is_gimple_min_invariant (val))
5018 {
5019 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5020 unshare_expr (val),
5021 TREE_OPERAND (rhs, 1));
5022 if (tem)
5023 rhs = tem;
5024 }
5025 }
5026 return fold_const_aggregate_ref_1 (rhs, valueize);
5027 }
5028 else if (kind == tcc_declaration)
5029 return get_symbol_constant_value (rhs);
5030 return rhs;
5031 }
5032
5033 case GIMPLE_UNARY_RHS:
f3582e54 5034 return NULL_TREE;
cfef45c8
RG
5035
5036 case GIMPLE_BINARY_RHS:
5037 {
5038 /* Handle binary operators that can appear in GIMPLE form. */
5039 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5040 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5041
5042 /* Translate &x + CST into an invariant form suitable for
5043 further propagation. */
5044 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
5045 && TREE_CODE (op0) == ADDR_EXPR
5046 && TREE_CODE (op1) == INTEGER_CST)
5047 {
5048 tree off = fold_convert (ptr_type_node, op1);
4d59a001
RG
5049 return build_fold_addr_expr_loc
5050 (loc,
5051 fold_build2 (MEM_REF,
cfef45c8
RG
5052 TREE_TYPE (TREE_TYPE (op0)),
5053 unshare_expr (op0), off));
5054 }
5055
5056 return fold_binary_loc (loc, subcode,
5057 gimple_expr_type (stmt), op0, op1);
5058 }
5059
5060 case GIMPLE_TERNARY_RHS:
5061 {
5062 /* Handle ternary operators that can appear in GIMPLE form. */
5063 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5064 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5065 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5066
d3878abf
RG
5067 /* Fold embedded expressions in ternary codes. */
5068 if ((subcode == COND_EXPR
5069 || subcode == VEC_COND_EXPR)
5070 && COMPARISON_CLASS_P (op0))
5071 {
5072 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
5073 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
5074 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
5075 TREE_TYPE (op0), op00, op01);
5076 if (tem)
5077 op0 = tem;
5078 }
5079
cfef45c8
RG
5080 return fold_ternary_loc (loc, subcode,
5081 gimple_expr_type (stmt), op0, op1, op2);
5082 }
5083
5084 default:
5085 gcc_unreachable ();
5086 }
5087 }
5088
5089 case GIMPLE_CALL:
5090 {
25583c4f 5091 tree fn;
538dd0b7 5092 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
5093
5094 if (gimple_call_internal_p (stmt))
31e071ae
MP
5095 {
5096 enum tree_code subcode = ERROR_MARK;
5097 switch (gimple_call_internal_fn (stmt))
5098 {
5099 case IFN_UBSAN_CHECK_ADD:
5100 subcode = PLUS_EXPR;
5101 break;
5102 case IFN_UBSAN_CHECK_SUB:
5103 subcode = MINUS_EXPR;
5104 break;
5105 case IFN_UBSAN_CHECK_MUL:
5106 subcode = MULT_EXPR;
5107 break;
5108 default:
5109 return NULL_TREE;
5110 }
368b454d
JJ
5111 tree arg0 = gimple_call_arg (stmt, 0);
5112 tree arg1 = gimple_call_arg (stmt, 1);
5113 tree op0 = (*valueize) (arg0);
5114 tree op1 = (*valueize) (arg1);
31e071ae
MP
5115
5116 if (TREE_CODE (op0) != INTEGER_CST
5117 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
5118 {
5119 switch (subcode)
5120 {
5121 case MULT_EXPR:
5122 /* x * 0 = 0 * x = 0 without overflow. */
5123 if (integer_zerop (op0) || integer_zerop (op1))
5124 return build_zero_cst (TREE_TYPE (arg0));
5125 break;
5126 case MINUS_EXPR:
5127 /* y - y = 0 without overflow. */
5128 if (operand_equal_p (op0, op1, 0))
5129 return build_zero_cst (TREE_TYPE (arg0));
5130 break;
5131 default:
5132 break;
5133 }
5134 }
5135 tree res
5136 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
5137 if (res
5138 && TREE_CODE (res) == INTEGER_CST
5139 && !TREE_OVERFLOW (res))
5140 return res;
5141 return NULL_TREE;
5142 }
25583c4f
RS
5143
5144 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8
RG
5145 if (TREE_CODE (fn) == ADDR_EXPR
5146 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5c944c6c
RB
5147 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5148 && gimple_builtin_call_types_compatible_p (stmt,
5149 TREE_OPERAND (fn, 0)))
cfef45c8
RG
5150 {
5151 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 5152 tree retval;
cfef45c8
RG
5153 unsigned i;
5154 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5155 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 5156 retval = fold_builtin_call_array (loc,
538dd0b7 5157 gimple_call_return_type (call_stmt),
cfef45c8 5158 fn, gimple_call_num_args (stmt), args);
cfef45c8 5159 if (retval)
5c944c6c
RB
5160 {
5161 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5162 STRIP_NOPS (retval);
538dd0b7
DM
5163 retval = fold_convert (gimple_call_return_type (call_stmt),
5164 retval);
5c944c6c 5165 }
cfef45c8
RG
5166 return retval;
5167 }
5168 return NULL_TREE;
5169 }
5170
5171 default:
5172 return NULL_TREE;
5173 }
5174}
5175
5176/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5177 Returns NULL_TREE if folding to a constant is not possible, otherwise
5178 returns a constant according to is_gimple_min_invariant. */
5179
5180tree
5181gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
5182{
5183 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5184 if (res && is_gimple_min_invariant (res))
5185 return res;
5186 return NULL_TREE;
5187}
5188
5189
5190/* The following set of functions are supposed to fold references using
5191 their constant initializers. */
5192
cfef45c8
RG
5193/* See if we can find constructor defining value of BASE.
5194 When we know the consructor with constant offset (such as
5195 base is array[40] and we do know constructor of array), then
5196 BIT_OFFSET is adjusted accordingly.
5197
5198 As a special case, return error_mark_node when constructor
5199 is not explicitly available, but it is known to be zero
5200 such as 'static const int a;'. */
5201static tree
5202get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5203 tree (*valueize)(tree))
5204{
5205 HOST_WIDE_INT bit_offset2, size, max_size;
5206 if (TREE_CODE (base) == MEM_REF)
5207 {
5208 if (!integer_zerop (TREE_OPERAND (base, 1)))
5209 {
9541ffee 5210 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
cfef45c8 5211 return NULL_TREE;
807e902e 5212 *bit_offset += (mem_ref_offset (base).to_short_addr ()
cfef45c8
RG
5213 * BITS_PER_UNIT);
5214 }
5215
5216 if (valueize
5217 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5218 base = valueize (TREE_OPERAND (base, 0));
5219 if (!base || TREE_CODE (base) != ADDR_EXPR)
5220 return NULL_TREE;
5221 base = TREE_OPERAND (base, 0);
5222 }
5223
5224 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5225 DECL_INITIAL. If BASE is a nested reference into another
5226 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5227 the inner reference. */
5228 switch (TREE_CODE (base))
5229 {
5230 case VAR_DECL:
cfef45c8 5231 case CONST_DECL:
6a6dac52
JH
5232 {
5233 tree init = ctor_for_folding (base);
5234
688010ba 5235 /* Our semantic is exact opposite of ctor_for_folding;
6a6dac52
JH
5236 NULL means unknown, while error_mark_node is 0. */
5237 if (init == error_mark_node)
5238 return NULL_TREE;
5239 if (!init)
5240 return error_mark_node;
5241 return init;
5242 }
cfef45c8
RG
5243
5244 case ARRAY_REF:
5245 case COMPONENT_REF:
5246 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
5247 if (max_size == -1 || size != max_size)
5248 return NULL_TREE;
5249 *bit_offset += bit_offset2;
5250 return get_base_constructor (base, bit_offset, valueize);
5251
5252 case STRING_CST:
5253 case CONSTRUCTOR:
5254 return base;
5255
5256 default:
5257 return NULL_TREE;
5258 }
5259}
5260
cfef45c8
RG
5261/* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5262 SIZE to the memory at bit OFFSET. */
5263
5264static tree
5265fold_array_ctor_reference (tree type, tree ctor,
5266 unsigned HOST_WIDE_INT offset,
c44c2088
JH
5267 unsigned HOST_WIDE_INT size,
5268 tree from_decl)
cfef45c8
RG
5269{
5270 unsigned HOST_WIDE_INT cnt;
5271 tree cfield, cval;
807e902e
KZ
5272 offset_int low_bound;
5273 offset_int elt_size;
5274 offset_int index, max_index;
5275 offset_int access_index;
b48e22b2 5276 tree domain_type = NULL_TREE, index_type = NULL_TREE;
cfef45c8
RG
5277 HOST_WIDE_INT inner_offset;
5278
5279 /* Compute low bound and elt size. */
eb8f1123
RG
5280 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5281 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
5282 if (domain_type && TYPE_MIN_VALUE (domain_type))
5283 {
5284 /* Static constructors for variably sized objects makes no sense. */
5285 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
b48e22b2 5286 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
807e902e 5287 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
5288 }
5289 else
807e902e 5290 low_bound = 0;
cfef45c8 5291 /* Static constructors for variably sized objects makes no sense. */
c3284718 5292 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
cfef45c8 5293 == INTEGER_CST);
807e902e 5294 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8
RG
5295
5296 /* We can handle only constantly sized accesses that are known to not
5297 be larger than size of array element. */
5298 if (!TYPE_SIZE_UNIT (type)
5299 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
807e902e
KZ
5300 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
5301 || elt_size == 0)
cfef45c8
RG
5302 return NULL_TREE;
5303
5304 /* Compute the array index we look for. */
807e902e
KZ
5305 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5306 elt_size);
27bcd47c 5307 access_index += low_bound;
b48e22b2 5308 if (index_type)
807e902e
KZ
5309 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
5310 TYPE_SIGN (index_type));
cfef45c8
RG
5311
5312 /* And offset within the access. */
27bcd47c 5313 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8
RG
5314
5315 /* See if the array field is large enough to span whole access. We do not
5316 care to fold accesses spanning multiple array indexes. */
27bcd47c 5317 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
cfef45c8
RG
5318 return NULL_TREE;
5319
807e902e 5320 index = low_bound - 1;
b48e22b2 5321 if (index_type)
807e902e
KZ
5322 index = wi::ext (index, TYPE_PRECISION (index_type),
5323 TYPE_SIGN (index_type));
b48e22b2 5324
cfef45c8
RG
5325 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
5326 {
5327 /* Array constructor might explicitely set index, or specify range
5328 or leave index NULL meaning that it is next index after previous
5329 one. */
5330 if (cfield)
5331 {
5332 if (TREE_CODE (cfield) == INTEGER_CST)
807e902e 5333 max_index = index = wi::to_offset (cfield);
cfef45c8
RG
5334 else
5335 {
5336 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
807e902e
KZ
5337 index = wi::to_offset (TREE_OPERAND (cfield, 0));
5338 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
cfef45c8
RG
5339 }
5340 }
5341 else
b48e22b2 5342 {
807e902e 5343 index += 1;
b48e22b2 5344 if (index_type)
807e902e
KZ
5345 index = wi::ext (index, TYPE_PRECISION (index_type),
5346 TYPE_SIGN (index_type));
b48e22b2
EB
5347 max_index = index;
5348 }
cfef45c8
RG
5349
5350 /* Do we have match? */
807e902e
KZ
5351 if (wi::cmpu (access_index, index) >= 0
5352 && wi::cmpu (access_index, max_index) <= 0)
c44c2088
JH
5353 return fold_ctor_reference (type, cval, inner_offset, size,
5354 from_decl);
cfef45c8
RG
5355 }
5356 /* When memory is not explicitely mentioned in constructor,
5357 it is 0 (or out of range). */
5358 return build_zero_cst (type);
5359}
5360
5361/* CTOR is CONSTRUCTOR of an aggregate or vector.
5362 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5363
5364static tree
5365fold_nonarray_ctor_reference (tree type, tree ctor,
5366 unsigned HOST_WIDE_INT offset,
c44c2088
JH
5367 unsigned HOST_WIDE_INT size,
5368 tree from_decl)
cfef45c8
RG
5369{
5370 unsigned HOST_WIDE_INT cnt;
5371 tree cfield, cval;
5372
5373 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5374 cval)
5375 {
5376 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5377 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5378 tree field_size = DECL_SIZE (cfield);
807e902e
KZ
5379 offset_int bitoffset;
5380 offset_int bitoffset_end, access_end;
cfef45c8
RG
5381
5382 /* Variable sized objects in static constructors makes no sense,
5383 but field_size can be NULL for flexible array members. */
5384 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5385 && TREE_CODE (byte_offset) == INTEGER_CST
5386 && (field_size != NULL_TREE
5387 ? TREE_CODE (field_size) == INTEGER_CST
5388 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5389
5390 /* Compute bit offset of the field. */
807e902e
KZ
5391 bitoffset = (wi::to_offset (field_offset)
5392 + wi::lshift (wi::to_offset (byte_offset),
5393 LOG2_BITS_PER_UNIT));
cfef45c8
RG
5394 /* Compute bit offset where the field ends. */
5395 if (field_size != NULL_TREE)
807e902e 5396 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 5397 else
807e902e 5398 bitoffset_end = 0;
cfef45c8 5399
807e902e 5400 access_end = offset_int (offset) + size;
b8b2b009
JJ
5401
5402 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5403 [BITOFFSET, BITOFFSET_END)? */
807e902e 5404 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 5405 && (field_size == NULL_TREE
807e902e 5406 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 5407 {
807e902e 5408 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8
RG
5409 /* We do have overlap. Now see if field is large enough to
5410 cover the access. Give up for accesses spanning multiple
5411 fields. */
807e902e 5412 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 5413 return NULL_TREE;
807e902e 5414 if (wi::lts_p (offset, bitoffset))
b8b2b009 5415 return NULL_TREE;
cfef45c8 5416 return fold_ctor_reference (type, cval,
27bcd47c 5417 inner_offset.to_uhwi (), size,
c44c2088 5418 from_decl);
cfef45c8
RG
5419 }
5420 }
5421 /* When memory is not explicitely mentioned in constructor, it is 0. */
5422 return build_zero_cst (type);
5423}
5424
5425/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5426 to the memory at bit OFFSET. */
5427
8403c2cf 5428tree
cfef45c8 5429fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
c44c2088 5430 unsigned HOST_WIDE_INT size, tree from_decl)
cfef45c8
RG
5431{
5432 tree ret;
5433
5434 /* We found the field with exact match. */
5435 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5436 && !offset)
9d60be38 5437 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8
RG
5438
5439 /* We are at the end of walk, see if we can view convert the
5440 result. */
5441 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5442 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3d8208ce
TP
5443 && !compare_tree_int (TYPE_SIZE (type), size)
5444 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
cfef45c8 5445 {
9d60be38 5446 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8
RG
5447 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5448 if (ret)
86c5a5c3 5449 STRIP_USELESS_TYPE_CONVERSION (ret);
cfef45c8
RG
5450 return ret;
5451 }
b2505143
RB
5452 /* For constants and byte-aligned/sized reads try to go through
5453 native_encode/interpret. */
5454 if (CONSTANT_CLASS_P (ctor)
5455 && BITS_PER_UNIT == 8
5456 && offset % BITS_PER_UNIT == 0
5457 && size % BITS_PER_UNIT == 0
5458 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5459 {
5460 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5461 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5462 offset / BITS_PER_UNIT) > 0)
5463 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
5464 }
cfef45c8
RG
5465 if (TREE_CODE (ctor) == CONSTRUCTOR)
5466 {
5467
eb8f1123
RG
5468 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5469 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
c44c2088
JH
5470 return fold_array_ctor_reference (type, ctor, offset, size,
5471 from_decl);
cfef45c8 5472 else
c44c2088
JH
5473 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5474 from_decl);
cfef45c8
RG
5475 }
5476
5477 return NULL_TREE;
5478}
5479
5480/* Return the tree representing the element referenced by T if T is an
5481 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5482 names using VALUEIZE. Return NULL_TREE otherwise. */
5483
5484tree
5485fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5486{
5487 tree ctor, idx, base;
5488 HOST_WIDE_INT offset, size, max_size;
5489 tree tem;
5490
f8a7df45
RG
5491 if (TREE_THIS_VOLATILE (t))
5492 return NULL_TREE;
5493
3a65ee74 5494 if (DECL_P (t))
cfef45c8
RG
5495 return get_symbol_constant_value (t);
5496
5497 tem = fold_read_from_constant_string (t);
5498 if (tem)
5499 return tem;
5500
5501 switch (TREE_CODE (t))
5502 {
5503 case ARRAY_REF:
5504 case ARRAY_RANGE_REF:
5505 /* Constant indexes are handled well by get_base_constructor.
5506 Only special case variable offsets.
5507 FIXME: This code can't handle nested references with variable indexes
5508 (they will be handled only by iteration of ccp). Perhaps we can bring
5509 get_ref_base_and_extent here and make it use a valueize callback. */
5510 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5511 && valueize
5512 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
b48e22b2 5513 && TREE_CODE (idx) == INTEGER_CST)
cfef45c8
RG
5514 {
5515 tree low_bound, unit_size;
5516
5517 /* If the resulting bit-offset is constant, track it. */
5518 if ((low_bound = array_ref_low_bound (t),
b48e22b2 5519 TREE_CODE (low_bound) == INTEGER_CST)
cfef45c8 5520 && (unit_size = array_ref_element_size (t),
807e902e 5521 tree_fits_uhwi_p (unit_size)))
cfef45c8 5522 {
807e902e
KZ
5523 offset_int woffset
5524 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5525 TYPE_PRECISION (TREE_TYPE (idx)));
5526
5527 if (wi::fits_shwi_p (woffset))
5528 {
5529 offset = woffset.to_shwi ();
5530 /* TODO: This code seems wrong, multiply then check
5531 to see if it fits. */
5532 offset *= tree_to_uhwi (unit_size);
5533 offset *= BITS_PER_UNIT;
5534
5535 base = TREE_OPERAND (t, 0);
5536 ctor = get_base_constructor (base, &offset, valueize);
5537 /* Empty constructor. Always fold to 0. */
5538 if (ctor == error_mark_node)
5539 return build_zero_cst (TREE_TYPE (t));
5540 /* Out of bound array access. Value is undefined,
5541 but don't fold. */
5542 if (offset < 0)
5543 return NULL_TREE;
5544 /* We can not determine ctor. */
5545 if (!ctor)
5546 return NULL_TREE;
5547 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5548 tree_to_uhwi (unit_size)
5549 * BITS_PER_UNIT,
5550 base);
5551 }
cfef45c8
RG
5552 }
5553 }
5554 /* Fallthru. */
5555
5556 case COMPONENT_REF:
5557 case BIT_FIELD_REF:
5558 case TARGET_MEM_REF:
5559 case MEM_REF:
5560 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
5561 ctor = get_base_constructor (base, &offset, valueize);
5562
5563 /* Empty constructor. Always fold to 0. */
5564 if (ctor == error_mark_node)
5565 return build_zero_cst (TREE_TYPE (t));
5566 /* We do not know precise address. */
5567 if (max_size == -1 || max_size != size)
5568 return NULL_TREE;
5569 /* We can not determine ctor. */
5570 if (!ctor)
5571 return NULL_TREE;
5572
5573 /* Out of bound array access. Value is undefined, but don't fold. */
5574 if (offset < 0)
5575 return NULL_TREE;
5576
c44c2088
JH
5577 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5578 base);
cfef45c8
RG
5579
5580 case REALPART_EXPR:
5581 case IMAGPART_EXPR:
5582 {
5583 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5584 if (c && TREE_CODE (c) == COMPLEX_CST)
5585 return fold_build1_loc (EXPR_LOCATION (t),
5586 TREE_CODE (t), TREE_TYPE (t), c);
5587 break;
5588 }
5589
5590 default:
5591 break;
5592 }
5593
5594 return NULL_TREE;
5595}
5596
5597tree
5598fold_const_aggregate_ref (tree t)
5599{
5600 return fold_const_aggregate_ref_1 (t, NULL);
5601}
06bc3ec7 5602
85942f45 5603/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
5604 at OFFSET.
5605 Set CAN_REFER if non-NULL to false if method
5606 is not referable or if the virtual table is ill-formed (such as rewriten
5607 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
81fa35bd
MJ
5608
5609tree
85942f45
JH
5610gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5611 tree v,
ec77d61f
JH
5612 unsigned HOST_WIDE_INT offset,
5613 bool *can_refer)
81fa35bd 5614{
85942f45
JH
5615 tree vtable = v, init, fn;
5616 unsigned HOST_WIDE_INT size;
8c311b50
JH
5617 unsigned HOST_WIDE_INT elt_size, access_index;
5618 tree domain_type;
81fa35bd 5619
ec77d61f
JH
5620 if (can_refer)
5621 *can_refer = true;
5622
9de2f554 5623 /* First of all double check we have virtual table. */
81fa35bd 5624 if (TREE_CODE (v) != VAR_DECL
2aa3da06 5625 || !DECL_VIRTUAL_P (v))
ec77d61f 5626 {
ec77d61f
JH
5627 /* Pass down that we lost track of the target. */
5628 if (can_refer)
5629 *can_refer = false;
5630 return NULL_TREE;
5631 }
9de2f554 5632
2aa3da06
JH
5633 init = ctor_for_folding (v);
5634
9de2f554 5635 /* The virtual tables should always be born with constructors
2aa3da06
JH
5636 and we always should assume that they are avaialble for
5637 folding. At the moment we do not stream them in all cases,
5638 but it should never happen that ctor seem unreachable. */
5639 gcc_assert (init);
5640 if (init == error_mark_node)
5641 {
5642 gcc_assert (in_lto_p);
ec77d61f
JH
5643 /* Pass down that we lost track of the target. */
5644 if (can_refer)
5645 *can_refer = false;
2aa3da06
JH
5646 return NULL_TREE;
5647 }
81fa35bd 5648 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 5649 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 5650 offset *= BITS_PER_UNIT;
81fa35bd 5651 offset += token * size;
9de2f554 5652
8c311b50
JH
5653 /* Lookup the value in the constructor that is assumed to be array.
5654 This is equivalent to
5655 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5656 offset, size, NULL);
5657 but in a constant time. We expect that frontend produced a simple
5658 array without indexed initializers. */
5659
5660 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5661 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5662 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5663 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5664
5665 access_index = offset / BITS_PER_UNIT / elt_size;
5666 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5667
5668 /* This code makes an assumption that there are no
5669 indexed fileds produced by C++ FE, so we can directly index the array. */
5670 if (access_index < CONSTRUCTOR_NELTS (init))
5671 {
5672 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5673 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5674 STRIP_NOPS (fn);
5675 }
5676 else
5677 fn = NULL;
9de2f554
JH
5678
5679 /* For type inconsistent program we may end up looking up virtual method
5680 in virtual table that does not contain TOKEN entries. We may overrun
5681 the virtual table and pick up a constant or RTTI info pointer.
5682 In any case the call is undefined. */
5683 if (!fn
5684 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5685 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5686 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5687 else
5688 {
5689 fn = TREE_OPERAND (fn, 0);
5690
5691 /* When cgraph node is missing and function is not public, we cannot
5692 devirtualize. This can happen in WHOPR when the actual method
5693 ends up in other partition, because we found devirtualization
5694 possibility too late. */
5695 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
5696 {
5697 if (can_refer)
5698 {
5699 *can_refer = false;
5700 return fn;
5701 }
5702 return NULL_TREE;
5703 }
9de2f554 5704 }
81fa35bd 5705
7501ca28
RG
5706 /* Make sure we create a cgraph node for functions we'll reference.
5707 They can be non-existent if the reference comes from an entry
5708 of an external vtable for example. */
d52f5295 5709 cgraph_node::get_create (fn);
7501ca28 5710
81fa35bd
MJ
5711 return fn;
5712}
5713
85942f45
JH
5714/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5715 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5716 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
5717 OBJ_TYPE_REF_OBJECT(REF).
5718 Set CAN_REFER if non-NULL to false if method
5719 is not referable or if the virtual table is ill-formed (such as rewriten
5720 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
5721
5722tree
ec77d61f
JH
5723gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5724 bool *can_refer)
85942f45
JH
5725{
5726 unsigned HOST_WIDE_INT offset;
5727 tree v;
5728
5729 v = BINFO_VTABLE (known_binfo);
5730 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5731 if (!v)
5732 return NULL_TREE;
5733
5734 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
5735 {
5736 if (can_refer)
5737 *can_refer = false;
5738 return NULL_TREE;
5739 }
5740 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
5741}
5742
06bc3ec7
BS
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.

   A "false" result only means non-negativity could not be proven,
   not that VAL is known to be negative.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	  /* fall through -- a product of distinct operands proves nothing.  */
	default:
	  return false;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    /* Builtins whose mathematical range is inherently
	       non-negative.  */
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (val))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, SIGNED);
		      /* Round-trip check: only accept exponents that are
			 exactly representable as an integer.  */
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      return false;
	    }
	}
    }

  return false;
}
b184c8f1
AM
5873
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view. */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  /* Look through no-op conversions on the pointer expression.  */
  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  /* The array may not be zero-based, so index it with its
	     lower bound when that is a constant.  */
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the element index is within the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  /* The offset must be exactly one element, i.e. the imaginary
	     part right after the real part.  */
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recurse once to simplify the inner indirection; fall back to an
	 explicit INDIRECT_REF if it cannot be simplified.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
19e51b40
JJ
5993
5994/* Return true if CODE is an operation that when operating on signed
5995 integer types involves undefined behavior on overflow and the
5996 operation can be expressed with unsigned arithmetic. */
5997
5998bool
5999arith_code_with_undefined_signed_overflow (tree_code code)
6000{
6001 switch (code)
6002 {
6003 case PLUS_EXPR:
6004 case MINUS_EXPR:
6005 case MULT_EXPR:
6006 case NEGATE_EXPR:
6007 case POINTER_PLUS_EXPR:
6008 return true;
6009 default:
6010 return false;
6011 }
6012}
6013
6014/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6015 operation that can be transformed to unsigned arithmetic by converting
6016 its operand, carrying out the operation in the corresponding unsigned
6017 type and converting the result back to the original type.
6018
6019 Returns a sequence of statements that replace STMT and also contain
6020 a modified form of STMT itself. */
6021
6022gimple_seq
6023rewrite_to_defined_overflow (gimple stmt)
6024{
6025 if (dump_file && (dump_flags & TDF_DETAILS))
6026 {
6027 fprintf (dump_file, "rewriting stmt with undefined signed "
6028 "overflow ");
6029 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6030 }
6031
6032 tree lhs = gimple_assign_lhs (stmt);
6033 tree type = unsigned_type_for (TREE_TYPE (lhs));
6034 gimple_seq stmts = NULL;
6035 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6036 {
6037 gimple_seq stmts2 = NULL;
6038 gimple_set_op (stmt, i,
6039 force_gimple_operand (fold_convert (type,
6040 gimple_op (stmt, i)),
6041 &stmts2, true, NULL_TREE));
6042 gimple_seq_add_seq (&stmts, stmts2);
6043 }
6044 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
6045 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6046 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6047 gimple_seq_add_stmt (&stmts, stmt);
0d0e4a03 6048 gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
19e51b40
JJ
6049 gimple_seq_add_stmt (&stmts, cvt);
6050
6051 return stmts;
6052}
d4f5cd5e 6053
3d2cf79f 6054
c26de36d
RB
6055/* The valueization hook we use for the gimple_build API simplification.
6056 This makes us match fold_buildN behavior by only combining with
6057 statements in the sequence(s) we are currently building. */
6058
6059static tree
6060gimple_build_valueize (tree op)
6061{
6062 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6063 return op;
6064 return NULL_TREE;
6065}
6066
3d2cf79f 6067/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 6068 simplifying it first if possible. Returns the built
3d2cf79f
RB
6069 expression value and appends statements possibly defining it
6070 to SEQ. */
6071
6072tree
6073gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6074 enum tree_code code, tree type, tree op0)
3d2cf79f 6075{
c26de36d 6076 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
6077 if (!res)
6078 {
6079 if (gimple_in_ssa_p (cfun))
b731b390 6080 res = make_ssa_name (type);
3d2cf79f 6081 else
b731b390 6082 res = create_tmp_reg (type);
3d2cf79f
RB
6083 gimple stmt;
6084 if (code == REALPART_EXPR
6085 || code == IMAGPART_EXPR
6086 || code == VIEW_CONVERT_EXPR)
0d0e4a03 6087 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 6088 else
0d0e4a03 6089 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
6090 gimple_set_location (stmt, loc);
6091 gimple_seq_add_stmt_without_update (seq, stmt);
6092 }
6093 return res;
6094}
6095
6096/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 6097 simplifying it first if possible. Returns the built
3d2cf79f
RB
6098 expression value and appends statements possibly defining it
6099 to SEQ. */
6100
6101tree
6102gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6103 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 6104{
c26de36d 6105 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
6106 if (!res)
6107 {
6108 if (gimple_in_ssa_p (cfun))
b731b390 6109 res = make_ssa_name (type);
3d2cf79f 6110 else
b731b390 6111 res = create_tmp_reg (type);
0d0e4a03 6112 gimple stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
6113 gimple_set_location (stmt, loc);
6114 gimple_seq_add_stmt_without_update (seq, stmt);
6115 }
6116 return res;
6117}
6118
6119/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 6120 simplifying it first if possible. Returns the built
3d2cf79f
RB
6121 expression value and appends statements possibly defining it
6122 to SEQ. */
6123
6124tree
6125gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6126 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
6127{
6128 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 6129 seq, gimple_build_valueize);
3d2cf79f
RB
6130 if (!res)
6131 {
6132 if (gimple_in_ssa_p (cfun))
b731b390 6133 res = make_ssa_name (type);
3d2cf79f 6134 else
b731b390 6135 res = create_tmp_reg (type);
3d2cf79f
RB
6136 gimple stmt;
6137 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
6138 stmt = gimple_build_assign (res, code,
6139 build3 (code, type, op0, op1, op2));
3d2cf79f 6140 else
0d0e4a03 6141 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
6142 gimple_set_location (stmt, loc);
6143 gimple_seq_add_stmt_without_update (seq, stmt);
6144 }
6145 return res;
6146}
6147
6148/* Build the call FN (ARG0) with a result of type TYPE
6149 (or no result if TYPE is void) with location LOC,
c26de36d 6150 simplifying it first if possible. Returns the built
3d2cf79f
RB
6151 expression value (or NULL_TREE if TYPE is void) and appends
6152 statements possibly defining it to SEQ. */
6153
6154tree
6155gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6156 enum built_in_function fn, tree type, tree arg0)
3d2cf79f 6157{
c26de36d 6158 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
6159 if (!res)
6160 {
6161 tree decl = builtin_decl_implicit (fn);
6162 gimple stmt = gimple_build_call (decl, 1, arg0);
6163 if (!VOID_TYPE_P (type))
6164 {
6165 if (gimple_in_ssa_p (cfun))
b731b390 6166 res = make_ssa_name (type);
3d2cf79f 6167 else
b731b390 6168 res = create_tmp_reg (type);
3d2cf79f
RB
6169 gimple_call_set_lhs (stmt, res);
6170 }
6171 gimple_set_location (stmt, loc);
6172 gimple_seq_add_stmt_without_update (seq, stmt);
6173 }
6174 return res;
6175}
6176
6177/* Build the call FN (ARG0, ARG1) with a result of type TYPE
6178 (or no result if TYPE is void) with location LOC,
c26de36d 6179 simplifying it first if possible. Returns the built
3d2cf79f
RB
6180 expression value (or NULL_TREE if TYPE is void) and appends
6181 statements possibly defining it to SEQ. */
6182
6183tree
6184gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6185 enum built_in_function fn, tree type, tree arg0, tree arg1)
3d2cf79f 6186{
c26de36d 6187 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
6188 if (!res)
6189 {
6190 tree decl = builtin_decl_implicit (fn);
6191 gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
6192 if (!VOID_TYPE_P (type))
6193 {
6194 if (gimple_in_ssa_p (cfun))
b731b390 6195 res = make_ssa_name (type);
3d2cf79f 6196 else
b731b390 6197 res = create_tmp_reg (type);
3d2cf79f
RB
6198 gimple_call_set_lhs (stmt, res);
6199 }
6200 gimple_set_location (stmt, loc);
6201 gimple_seq_add_stmt_without_update (seq, stmt);
6202 }
6203 return res;
6204}
6205
6206/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6207 (or no result if TYPE is void) with location LOC,
c26de36d 6208 simplifying it first if possible. Returns the built
3d2cf79f
RB
6209 expression value (or NULL_TREE if TYPE is void) and appends
6210 statements possibly defining it to SEQ. */
6211
6212tree
6213gimple_build (gimple_seq *seq, location_t loc,
6214 enum built_in_function fn, tree type,
c26de36d 6215 tree arg0, tree arg1, tree arg2)
3d2cf79f 6216{
c26de36d
RB
6217 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6218 seq, gimple_build_valueize);
3d2cf79f
RB
6219 if (!res)
6220 {
6221 tree decl = builtin_decl_implicit (fn);
6222 gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6223 if (!VOID_TYPE_P (type))
6224 {
6225 if (gimple_in_ssa_p (cfun))
b731b390 6226 res = make_ssa_name (type);
3d2cf79f 6227 else
b731b390 6228 res = create_tmp_reg (type);
3d2cf79f
RB
6229 gimple_call_set_lhs (stmt, res);
6230 }
6231 gimple_set_location (stmt, loc);
6232 gimple_seq_add_stmt_without_update (seq, stmt);
6233 }
6234 return res;
6235}
6236
6237/* Build the conversion (TYPE) OP with a result of type TYPE
6238 with location LOC if such conversion is neccesary in GIMPLE,
6239 simplifying it first.
6240 Returns the built expression value and appends
6241 statements possibly defining it to SEQ. */
d4f5cd5e
RB
6242
6243tree
6244gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6245{
6246 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6247 return op;
3d2cf79f 6248 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 6249}