]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
[libstdc++-v3]Add _GLIBCXX_HAVE_LIMIT_FSIZE to guard 27_io/fpos/14775.cc.
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
5624e564 2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
40e23961
MC
25#include "hash-set.h"
26#include "machmode.h"
27#include "vec.h"
28#include "double-int.h"
29#include "input.h"
30#include "alias.h"
31#include "symtab.h"
32#include "wide-int.h"
33#include "inchash.h"
cbdd87d4 34#include "tree.h"
40e23961 35#include "fold-const.h"
d8a2d370 36#include "stringpool.h"
83685514 37#include "hashtab.h"
83685514 38#include "hard-reg-set.h"
cbdd87d4 39#include "function.h"
36566b39
PK
40#include "rtl.h"
41#include "flags.h"
42#include "statistics.h"
43#include "real.h"
44#include "fixed-value.h"
45#include "insn-config.h"
46#include "expmed.h"
47#include "dojump.h"
48#include "explow.h"
49#include "calls.h"
50#include "emit-rtl.h"
51#include "varasm.h"
52#include "stmt.h"
53#include "expr.h"
54#include "stor-layout.h"
7ee2468b 55#include "dumpfile.h"
442b4905 56#include "bitmap.h"
60393bbc
AM
57#include "predict.h"
58#include "dominance.h"
2fb9a547
AM
59#include "basic-block.h"
60#include "tree-ssa-alias.h"
61#include "internal-fn.h"
62#include "gimple-fold.h"
63#include "gimple-expr.h"
64#include "is-a.h"
18f429e2 65#include "gimple.h"
45b0be94 66#include "gimplify.h"
5be5c238 67#include "gimple-iterator.h"
442b4905
AM
68#include "gimple-ssa.h"
69#include "tree-ssanames.h"
70#include "tree-into-ssa.h"
71#include "tree-dfa.h"
7a300452 72#include "tree-ssa.h"
cbdd87d4 73#include "tree-ssa-propagate.h"
cbdd87d4 74#include "target.h"
c582198b
AM
75#include "hash-map.h"
76#include "plugin-api.h"
77#include "ipa-ref.h"
78#include "cgraph.h"
450ad0cd
JH
79#include "ipa-utils.h"
80#include "gimple-pretty-print.h"
4484a35a 81#include "tree-ssa-address.h"
862d0b35 82#include "langhooks.h"
19e51b40 83#include "gimplify-me.h"
2b5f0895 84#include "dbgcnt.h"
9b2b7279 85#include "builtins.h"
fef5a0d9 86#include "output.h"
e0ee10ed
RB
87#include "tree-eh.h"
88#include "gimple-match.h"
45cc9f96
RB
89#include "tree-phinodes.h"
90#include "ssa-iterators.h"
cbdd87d4 91
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (e.g. DWARF-only) decls never have a body to refer to.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they was not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
191
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Canonicalize PTR p+ CST to &MEM[ptr + CST] so the result is an
     ADDR_EXPR that the code below can handle.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up when the referenced decl cannot legally be referenced
	 from the current unit (see can_refer_decl_in_current_unit_p).  */
      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
cbdd87d4
RG
252
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  /* ctor_for_folding returns error_mark_node when the initializer
     must not be used for folding.  */
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
280
281
cbdd87d4
RG
282
283/* Subroutine of fold_stmt. We perform several simplifications of the
284 memory reference tree EXPR and make sure to re-gimplify them properly
285 after propagation of constant addresses. IS_LHS is true if the
286 reference is supposed to be an lvalue. */
287
288static tree
289maybe_fold_reference (tree expr, bool is_lhs)
290{
17f39a39 291 tree result;
cbdd87d4 292
f0eddb90
RG
293 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
294 || TREE_CODE (expr) == REALPART_EXPR
295 || TREE_CODE (expr) == IMAGPART_EXPR)
296 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
297 return fold_unary_loc (EXPR_LOCATION (expr),
298 TREE_CODE (expr),
299 TREE_TYPE (expr),
300 TREE_OPERAND (expr, 0));
301 else if (TREE_CODE (expr) == BIT_FIELD_REF
302 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
303 return fold_ternary_loc (EXPR_LOCATION (expr),
304 TREE_CODE (expr),
305 TREE_TYPE (expr),
306 TREE_OPERAND (expr, 0),
307 TREE_OPERAND (expr, 1),
308 TREE_OPERAND (expr, 2));
309
f0eddb90
RG
310 if (!is_lhs
311 && (result = fold_const_aggregate_ref (expr))
312 && is_gimple_min_invariant (result))
313 return result;
cbdd87d4 314
cbdd87d4
RG
315 return NULL_TREE;
316}
317
318
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers are not foldable.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		/* Try to resolve the virtual call to a single (or no)
		   target using the polymorphic-call analysis.  */
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }

	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      /* No folding attempted for unary RHSes here.  */
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));

	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
557
558/* Attempt to fold a conditional statement. Return true if any changes were
559 made. We only attempt to fold the condition expression, and do not perform
560 any transformation that would require alteration of the cfg. It is
561 assumed that the operands have been previously folded. */
562
563static bool
538dd0b7 564fold_gimple_cond (gcond *stmt)
cbdd87d4
RG
565{
566 tree result = fold_binary_loc (gimple_location (stmt),
567 gimple_cond_code (stmt),
568 boolean_type_node,
569 gimple_cond_lhs (stmt),
570 gimple_cond_rhs (stmt));
571
572 if (result)
573 {
574 STRIP_USELESS_TYPE_CONVERSION (result);
575 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
576 {
577 gimple_cond_set_condition_from_tree (stmt, result);
578 return true;
579 }
580 }
581
582 return false;
583}
584
fef5a0d9
RB
585
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store seen (walking backwards) reuses the VDEF of
	     the statement being replaced; earlier stores get fresh
	     SSA names for the virtual operand.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace (si_p, stmts, false);
}
657
cbdd87d4
RG
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a temporary and assign that to the call's
	 original LHS as the last statement of the sequence.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
cbdd87d4 713
fef5a0d9
RB
714
715/* Replace the call at *GSI with the gimple value VAL. */
716
717static void
718replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
719{
720 gimple stmt = gsi_stmt (*gsi);
721 tree lhs = gimple_call_lhs (stmt);
722 gimple repl;
723 if (lhs)
e256dfce 724 {
fef5a0d9
RB
725 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
726 val = fold_convert (TREE_TYPE (lhs), val);
727 repl = gimple_build_assign (lhs, val);
728 }
729 else
730 repl = gimple_build_nop ();
731 tree vdef = gimple_vdef (stmt);
732 if (vdef && TREE_CODE (vdef) == SSA_NAME)
733 {
734 unlink_stmt_vdef (stmt);
735 release_ssa_name (vdef);
736 }
737 gsi_replace (gsi, repl, true);
738}
739
740/* Replace the call at *GSI with the new call REPL and fold that
741 again. */
742
743static void
744replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
745{
746 gimple stmt = gsi_stmt (*gsi);
747 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
748 gimple_set_location (repl, gimple_location (stmt));
749 if (gimple_vdef (stmt)
750 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
751 {
752 gimple_set_vdef (repl, gimple_vdef (stmt));
753 gimple_set_vuse (repl, gimple_vuse (stmt));
754 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
755 }
756 gsi_replace (gsi, repl, true);
757 fold_stmt (gsi);
758}
759
760/* Return true if VAR is a VAR_DECL or a component thereof. */
761
762static bool
763var_decl_component_p (tree var)
764{
765 tree inner = var;
766 while (handled_component_p (inner))
767 inner = TREE_OPERAND (inner, 0);
768 return SSA_VAR_P (inner);
769}
770
771/* Fold function call to builtin mem{{,p}cpy,move}. Return
86c5a5c3 772 false if no simplification can be made.
fef5a0d9
RB
773 If ENDP is 0, return DEST (like memcpy).
774 If ENDP is 1, return DEST+LEN (like mempcpy).
775 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
776 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
777 (memmove). */
778
779static bool
780gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
781 tree dest, tree src, int endp)
782{
783 gimple stmt = gsi_stmt (*gsi);
784 tree lhs = gimple_call_lhs (stmt);
785 tree len = gimple_call_arg (stmt, 2);
786 tree destvar, srcvar;
787 location_t loc = gimple_location (stmt);
788
789 /* If the LEN parameter is zero, return DEST. */
790 if (integer_zerop (len))
791 {
792 gimple repl;
793 if (gimple_call_lhs (stmt))
794 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
795 else
796 repl = gimple_build_nop ();
797 tree vdef = gimple_vdef (stmt);
798 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 799 {
fef5a0d9
RB
800 unlink_stmt_vdef (stmt);
801 release_ssa_name (vdef);
802 }
803 gsi_replace (gsi, repl, true);
804 return true;
805 }
806
807 /* If SRC and DEST are the same (and not volatile), return
808 DEST{,+LEN,+LEN-1}. */
809 if (operand_equal_p (src, dest, 0))
810 {
811 unlink_stmt_vdef (stmt);
812 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
813 release_ssa_name (gimple_vdef (stmt));
814 if (!lhs)
815 {
816 gsi_replace (gsi, gimple_build_nop (), true);
817 return true;
818 }
819 goto done;
820 }
821 else
822 {
823 tree srctype, desttype;
824 unsigned int src_align, dest_align;
825 tree off0;
826
827 /* Build accesses at offset zero with a ref-all character type. */
828 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
829 ptr_mode, true), 0);
830
831 /* If we can perform the copy efficiently with first doing all loads
832 and then all stores inline it that way. Currently efficiently
833 means that we can load all the memory into a single integer
834 register which is what MOVE_MAX gives us. */
835 src_align = get_pointer_alignment (src);
836 dest_align = get_pointer_alignment (dest);
837 if (tree_fits_uhwi_p (len)
838 && compare_tree_int (len, MOVE_MAX) <= 0
839 /* ??? Don't transform copies from strings with known length this
840 confuses the tree-ssa-strlen.c. This doesn't handle
841 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
842 reason. */
843 && !c_strlen (src, 2))
844 {
845 unsigned ilen = tree_to_uhwi (len);
846 if (exact_log2 (ilen) != -1)
847 {
848 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
849 if (type
850 && TYPE_MODE (type) != BLKmode
851 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
852 == ilen * 8)
853 /* If the destination pointer is not aligned we must be able
854 to emit an unaligned store. */
855 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
856 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
857 {
858 tree srctype = type;
859 tree desttype = type;
860 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
861 srctype = build_aligned_type (type, src_align);
862 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
863 tree tem = fold_const_aggregate_ref (srcmem);
864 if (tem)
865 srcmem = tem;
866 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
867 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
868 src_align))
869 srcmem = NULL_TREE;
870 if (srcmem)
871 {
872 gimple new_stmt;
873 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
874 {
875 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
876 if (gimple_in_ssa_p (cfun))
877 srcmem = make_ssa_name (TREE_TYPE (srcmem),
878 new_stmt);
879 else
b731b390 880 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
fef5a0d9
RB
881 gimple_assign_set_lhs (new_stmt, srcmem);
882 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
883 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
884 }
885 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
886 desttype = build_aligned_type (type, dest_align);
887 new_stmt
888 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
889 dest, off0),
890 srcmem);
891 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
892 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
893 if (gimple_vdef (new_stmt)
894 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
895 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
896 if (!lhs)
897 {
898 gsi_replace (gsi, new_stmt, true);
899 return true;
900 }
901 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
902 goto done;
903 }
904 }
905 }
906 }
907
908 if (endp == 3)
909 {
910 /* Both DEST and SRC must be pointer types.
911 ??? This is what old code did. Is the testing for pointer types
912 really mandatory?
913
914 If either SRC is readonly or length is 1, we can use memcpy. */
915 if (!dest_align || !src_align)
916 return false;
917 if (readonly_data_expr (src)
918 || (tree_fits_uhwi_p (len)
919 && (MIN (src_align, dest_align) / BITS_PER_UNIT
920 >= tree_to_uhwi (len))))
921 {
922 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
923 if (!fn)
924 return false;
925 gimple_call_set_fndecl (stmt, fn);
926 gimple_call_set_arg (stmt, 0, dest);
927 gimple_call_set_arg (stmt, 1, src);
928 fold_stmt (gsi);
929 return true;
930 }
931
932 /* If *src and *dest can't overlap, optimize into memcpy as well. */
933 if (TREE_CODE (src) == ADDR_EXPR
934 && TREE_CODE (dest) == ADDR_EXPR)
935 {
936 tree src_base, dest_base, fn;
937 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
938 HOST_WIDE_INT size = -1;
939 HOST_WIDE_INT maxsize = -1;
940
941 srcvar = TREE_OPERAND (src, 0);
942 src_base = get_ref_base_and_extent (srcvar, &src_offset,
943 &size, &maxsize);
944 destvar = TREE_OPERAND (dest, 0);
945 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
946 &size, &maxsize);
947 if (tree_fits_uhwi_p (len))
948 maxsize = tree_to_uhwi (len);
949 else
950 maxsize = -1;
951 src_offset /= BITS_PER_UNIT;
952 dest_offset /= BITS_PER_UNIT;
953 if (SSA_VAR_P (src_base)
954 && SSA_VAR_P (dest_base))
955 {
956 if (operand_equal_p (src_base, dest_base, 0)
957 && ranges_overlap_p (src_offset, maxsize,
958 dest_offset, maxsize))
959 return false;
960 }
961 else if (TREE_CODE (src_base) == MEM_REF
962 && TREE_CODE (dest_base) == MEM_REF)
963 {
964 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
965 TREE_OPERAND (dest_base, 0), 0))
966 return false;
967 offset_int off = mem_ref_offset (src_base) + src_offset;
968 if (!wi::fits_shwi_p (off))
969 return false;
970 src_offset = off.to_shwi ();
971
972 off = mem_ref_offset (dest_base) + dest_offset;
973 if (!wi::fits_shwi_p (off))
974 return false;
975 dest_offset = off.to_shwi ();
976 if (ranges_overlap_p (src_offset, maxsize,
977 dest_offset, maxsize))
978 return false;
979 }
980 else
981 return false;
982
983 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
984 if (!fn)
985 return false;
986 gimple_call_set_fndecl (stmt, fn);
987 gimple_call_set_arg (stmt, 0, dest);
988 gimple_call_set_arg (stmt, 1, src);
989 fold_stmt (gsi);
990 return true;
991 }
992
993 /* If the destination and source do not alias optimize into
994 memcpy as well. */
995 if ((is_gimple_min_invariant (dest)
996 || TREE_CODE (dest) == SSA_NAME)
997 && (is_gimple_min_invariant (src)
998 || TREE_CODE (src) == SSA_NAME))
999 {
1000 ao_ref destr, srcr;
1001 ao_ref_init_from_ptr_and_size (&destr, dest, len);
1002 ao_ref_init_from_ptr_and_size (&srcr, src, len);
1003 if (!refs_may_alias_p_1 (&destr, &srcr, false))
1004 {
1005 tree fn;
1006 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1007 if (!fn)
1008 return false;
1009 gimple_call_set_fndecl (stmt, fn);
1010 gimple_call_set_arg (stmt, 0, dest);
1011 gimple_call_set_arg (stmt, 1, src);
1012 fold_stmt (gsi);
1013 return true;
1014 }
1015 }
1016
1017 return false;
1018 }
1019
1020 if (!tree_fits_shwi_p (len))
1021 return false;
1022 /* FIXME:
1023 This logic lose for arguments like (type *)malloc (sizeof (type)),
1024 since we strip the casts of up to VOID return value from malloc.
1025 Perhaps we ought to inherit type from non-VOID argument here? */
1026 STRIP_NOPS (src);
1027 STRIP_NOPS (dest);
1028 if (!POINTER_TYPE_P (TREE_TYPE (src))
1029 || !POINTER_TYPE_P (TREE_TYPE (dest)))
1030 return false;
1031 /* In the following try to find a type that is most natural to be
1032 used for the memcpy source and destination and that allows
1033 the most optimization when memcpy is turned into a plain assignment
1034 using that type. In theory we could always use a char[len] type
1035 but that only gains us that the destination and source possibly
1036 no longer will have their address taken. */
1037 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
1038 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
1039 {
1040 tree tem = TREE_OPERAND (src, 0);
1041 STRIP_NOPS (tem);
1042 if (tem != TREE_OPERAND (src, 0))
1043 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
1044 }
1045 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
1046 {
1047 tree tem = TREE_OPERAND (dest, 0);
1048 STRIP_NOPS (tem);
1049 if (tem != TREE_OPERAND (dest, 0))
1050 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
1051 }
1052 srctype = TREE_TYPE (TREE_TYPE (src));
1053 if (TREE_CODE (srctype) == ARRAY_TYPE
1054 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1055 {
1056 srctype = TREE_TYPE (srctype);
1057 STRIP_NOPS (src);
1058 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
1059 }
1060 desttype = TREE_TYPE (TREE_TYPE (dest));
1061 if (TREE_CODE (desttype) == ARRAY_TYPE
1062 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1063 {
1064 desttype = TREE_TYPE (desttype);
1065 STRIP_NOPS (dest);
1066 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
1067 }
1068 if (TREE_ADDRESSABLE (srctype)
1069 || TREE_ADDRESSABLE (desttype))
1070 return false;
1071
1072 /* Make sure we are not copying using a floating-point mode or
1073 a type whose size possibly does not match its precision. */
1074 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1075 || TREE_CODE (desttype) == BOOLEAN_TYPE
1076 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1077 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1078 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1079 || TREE_CODE (srctype) == BOOLEAN_TYPE
1080 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1081 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1082 if (!srctype)
1083 srctype = desttype;
1084 if (!desttype)
1085 desttype = srctype;
1086 if (!srctype)
1087 return false;
1088
1089 src_align = get_pointer_alignment (src);
1090 dest_align = get_pointer_alignment (dest);
1091 if (dest_align < TYPE_ALIGN (desttype)
1092 || src_align < TYPE_ALIGN (srctype))
1093 return false;
1094
1095 destvar = dest;
1096 STRIP_NOPS (destvar);
1097 if (TREE_CODE (destvar) == ADDR_EXPR
1098 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1099 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1100 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1101 else
1102 destvar = NULL_TREE;
1103
1104 srcvar = src;
1105 STRIP_NOPS (srcvar);
1106 if (TREE_CODE (srcvar) == ADDR_EXPR
1107 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1108 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1109 {
1110 if (!destvar
1111 || src_align >= TYPE_ALIGN (desttype))
1112 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1113 srcvar, off0);
1114 else if (!STRICT_ALIGNMENT)
1115 {
1116 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1117 src_align);
1118 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1119 }
e256dfce 1120 else
fef5a0d9
RB
1121 srcvar = NULL_TREE;
1122 }
1123 else
1124 srcvar = NULL_TREE;
1125
1126 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1127 return false;
1128
1129 if (srcvar == NULL_TREE)
1130 {
1131 STRIP_NOPS (src);
1132 if (src_align >= TYPE_ALIGN (desttype))
1133 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1134 else
1135 {
1136 if (STRICT_ALIGNMENT)
1137 return false;
1138 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1139 src_align);
1140 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1141 }
1142 }
1143 else if (destvar == NULL_TREE)
1144 {
1145 STRIP_NOPS (dest);
1146 if (dest_align >= TYPE_ALIGN (srctype))
1147 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1148 else
1149 {
1150 if (STRICT_ALIGNMENT)
1151 return false;
1152 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1153 dest_align);
1154 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1155 }
1156 }
1157
1158 gimple new_stmt;
1159 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1160 {
1161 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1162 if (gimple_in_ssa_p (cfun))
1163 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1164 else
b731b390 1165 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
fef5a0d9
RB
1166 gimple_assign_set_lhs (new_stmt, srcvar);
1167 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1168 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1169 }
1170 new_stmt = gimple_build_assign (destvar, srcvar);
1171 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1172 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1173 if (gimple_vdef (new_stmt)
1174 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1175 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1176 if (!lhs)
1177 {
1178 gsi_replace (gsi, new_stmt, true);
1179 return true;
1180 }
1181 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1182 }
1183
1184done:
1185 if (endp == 0 || endp == 3)
1186 len = NULL_TREE;
1187 else if (endp == 2)
1188 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1189 ssize_int (1));
1190 if (endp == 2 || endp == 1)
1191 dest = fold_build_pointer_plus_loc (loc, dest, len);
1192
1193 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1194 GSI_SAME_STMT);
1195 gimple repl = gimple_build_assign (lhs, dest);
1196 gsi_replace (gsi, repl, true);
1197 return true;
1198}
1199
1200/* Fold function call to builtin memset or bzero at *GSI setting the
1201 memory of size LEN to VAL. Return whether a simplification was made. */
1202
1203static bool
1204gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1205{
1206 gimple stmt = gsi_stmt (*gsi);
1207 tree etype;
1208 unsigned HOST_WIDE_INT length, cval;
1209
1210 /* If the LEN parameter is zero, return DEST. */
1211 if (integer_zerop (len))
1212 {
1213 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1214 return true;
1215 }
1216
1217 if (! tree_fits_uhwi_p (len))
1218 return false;
1219
1220 if (TREE_CODE (c) != INTEGER_CST)
1221 return false;
1222
1223 tree dest = gimple_call_arg (stmt, 0);
1224 tree var = dest;
1225 if (TREE_CODE (var) != ADDR_EXPR)
1226 return false;
1227
1228 var = TREE_OPERAND (var, 0);
1229 if (TREE_THIS_VOLATILE (var))
1230 return false;
1231
1232 etype = TREE_TYPE (var);
1233 if (TREE_CODE (etype) == ARRAY_TYPE)
1234 etype = TREE_TYPE (etype);
1235
1236 if (!INTEGRAL_TYPE_P (etype)
1237 && !POINTER_TYPE_P (etype))
1238 return NULL_TREE;
1239
1240 if (! var_decl_component_p (var))
1241 return NULL_TREE;
1242
1243 length = tree_to_uhwi (len);
1244 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1245 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1246 return NULL_TREE;
1247
1248 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1249 return NULL_TREE;
1250
1251 if (integer_zerop (c))
1252 cval = 0;
1253 else
1254 {
1255 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1256 return NULL_TREE;
1257
1258 cval = TREE_INT_CST_LOW (c);
1259 cval &= 0xff;
1260 cval |= cval << 8;
1261 cval |= cval << 16;
1262 cval |= (cval << 31) << 1;
1263 }
1264
1265 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1266 gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1267 gimple_set_vuse (store, gimple_vuse (stmt));
1268 tree vdef = gimple_vdef (stmt);
1269 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1270 {
1271 gimple_set_vdef (store, gimple_vdef (stmt));
1272 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1273 }
1274 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1275 if (gimple_call_lhs (stmt))
1276 {
1277 gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1278 gsi_replace (gsi, asgn, true);
1279 }
1280 else
1281 {
1282 gimple_stmt_iterator gsi2 = *gsi;
1283 gsi_prev (gsi);
1284 gsi_remove (&gsi2, true);
1285 }
1286
1287 return true;
1288}
1289
1290
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables (allocated
   lazily on first use).
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* For TYPE == 2 ARG itself is the value; it must be a
	     non-negative integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* For "maximum" queries keep the larger of the two
		 constants seen so far.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* For exact string-length queries all reaching values must
	     agree.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Both arms of the conditional must yield a usable value.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_maxval_strlen (op2, length, visited, type)
		 && get_maxval_strlen (op3, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
1416
dcb7fae2
RB
1417tree
1418get_maxval_strlen (tree arg, int type)
1419{
1420 bitmap visited = NULL;
1421 tree len = NULL_TREE;
1422 if (!get_maxval_strlen (arg, &len, &visited, type))
1423 len = NULL_TREE;
1424 if (visited)
1425 BITMAP_FREE (visited);
1426
1427 return len;
1428}
1429
fef5a0d9
RB
1430
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   When the length of SRC is a known constant the call is turned into
   memcpy (DEST, SRC, strlen (SRC) + 1).  Return false if no
   simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The memcpy expansion may be larger; keep strcpy when optimizing
     for size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* We need a constant source length for the transformation.  */
  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  /* Copy strlen (SRC) + 1 bytes so the terminating NUL comes along.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1468
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   When LEN is a constant no smaller than strlen (SRC) + 1 the call is
   turned into memcpy (DEST, SRC, LEN).  Return false if no
   simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* Account for the terminating NUL strncpy would copy.  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1517
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return true if a simplification was made, false otherwise.  If SRC is
   known to be the empty string the call is replaced by DST.  Otherwise,
   when the length of SRC is a known constant and we are optimizing for
   speed, strcat (dst, src) is expanded into a strlen (dst) call followed
   by a memcpy of SRC (including its terminating NUL) to
   dst + strlen (dst).

   (The reference to NULL_TREE/COMPOUND_EXPR chains in older revisions of
   this comment described the tree-returning folder this was split from
   and no longer applies.)  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The expanded sequence is larger than the call; keep strcat when
     optimizing the block for size.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (SRC) + 1 bytes so the terminating NUL comes along.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; materialize that for the call's LHS.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1612
07f1cf56
RB
1613/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1614 are the arguments to the call. */
1615
1616static bool
1617gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1618{
1619 gimple stmt = gsi_stmt (*gsi);
1620 tree dest = gimple_call_arg (stmt, 0);
1621 tree src = gimple_call_arg (stmt, 1);
1622 tree size = gimple_call_arg (stmt, 2);
1623 tree fn;
1624 const char *p;
1625
1626
1627 p = c_getstr (src);
1628 /* If the SRC parameter is "", return DEST. */
1629 if (p && *p == '\0')
1630 {
1631 replace_call_with_value (gsi, dest);
1632 return true;
1633 }
1634
1635 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1636 return false;
1637
1638 /* If __builtin_strcat_chk is used, assume strcat is available. */
1639 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1640 if (!fn)
1641 return false;
1642
1643 gimple repl = gimple_build_call (fn, 2, dest, src);
1644 replace_call_with_call_and_fold (gsi, repl);
1645 return true;
1646}
1647
ad03a744
RB
1648/* Simplify a call to the strncat builtin. */
1649
1650static bool
1651gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1652{
1653 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1654 tree dst = gimple_call_arg (stmt, 0);
1655 tree src = gimple_call_arg (stmt, 1);
1656 tree len = gimple_call_arg (stmt, 2);
1657
1658 const char *p = c_getstr (src);
1659
1660 /* If the requested length is zero, or the src parameter string
1661 length is zero, return the dst parameter. */
1662 if (integer_zerop (len) || (p && *p == '\0'))
1663 {
1664 replace_call_with_value (gsi, dst);
1665 return true;
1666 }
1667
1668 /* If the requested len is greater than or equal to the string
1669 length, call strcat. */
1670 if (TREE_CODE (len) == INTEGER_CST && p
1671 && compare_tree_int (len, strlen (p)) >= 0)
1672 {
1673 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1674
1675 /* If the replacement _DECL isn't initialized, don't do the
1676 transformation. */
1677 if (!fn)
1678 return false;
1679
1680 gcall *repl = gimple_build_call (fn, 2, dst, src);
1681 replace_call_with_call_and_fold (gsi, repl);
1682 return true;
1683 }
1684
1685 return false;
1686}
1687
745583f9
RB
1688/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1689 LEN, and SIZE. */
1690
1691static bool
1692gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1693{
1694 gimple stmt = gsi_stmt (*gsi);
1695 tree dest = gimple_call_arg (stmt, 0);
1696 tree src = gimple_call_arg (stmt, 1);
1697 tree len = gimple_call_arg (stmt, 2);
1698 tree size = gimple_call_arg (stmt, 3);
1699 tree fn;
1700 const char *p;
1701
1702 p = c_getstr (src);
1703 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1704 if ((p && *p == '\0')
1705 || integer_zerop (len))
1706 {
1707 replace_call_with_value (gsi, dest);
1708 return true;
1709 }
1710
1711 if (! tree_fits_uhwi_p (size))
1712 return false;
1713
1714 if (! integer_all_onesp (size))
1715 {
1716 tree src_len = c_strlen (src, 1);
1717 if (src_len
1718 && tree_fits_uhwi_p (src_len)
1719 && tree_fits_uhwi_p (len)
1720 && ! tree_int_cst_lt (len, src_len))
1721 {
1722 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1723 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1724 if (!fn)
1725 return false;
1726
1727 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1728 replace_call_with_call_and_fold (gsi, repl);
1729 return true;
1730 }
1731 return false;
1732 }
1733
1734 /* If __builtin_strncat_chk is used, assume strncat is available. */
1735 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1736 if (!fn)
1737 return false;
1738
1739 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1740 replace_call_with_call_and_fold (gsi, repl);
1741 return true;
1742}
1743
fef5a0d9
RB
/* Fold a call to the fputs builtin at *GSI.  ARG0 and ARG1 are the
   string and stream arguments to the call.  UNLOCKED is true if this
   is actually a call to fputs_unlocked.  Depending on the known length
   of ARG0 the call is deleted, turned into fputc, or turned into
   fwrite.  Return false if no simplification was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					     (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
1820
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  The object-size check
   is dropped (and the call lowered to the unchecked variant) when the
   copied length is known to fit in SIZE.  Return false if no
   simplification was made.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True if the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is the maximum value LEN can have, if determinable.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* The copy may overflow the destination object: keep the check.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1920
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  The object-size check
   is dropped when the source string length is known to fit in SIZE.
   Return false if no simplification was made.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True if the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is the maximum string length SRC can have, if
     determinable.  */
  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy strlen (SRC) + 1 bytes including the NUL.  */
	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
					      true, GSI_SAME_STMT);
	      gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* MAXLEN must be strictly smaller than SIZE to leave room for
	 the terminating NUL; otherwise keep the check.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2011
2012/* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2013 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2014 length passed as third argument. IGNORE is true if return value can be
2015 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2016
2017static bool
2018gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2019 tree dest, tree src,
dcb7fae2 2020 tree len, tree size,
fef5a0d9
RB
2021 enum built_in_function fcode)
2022{
dcb7fae2
RB
2023 gimple stmt = gsi_stmt (*gsi);
2024 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2025 tree fn;
2026
2027 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
cbdd87d4 2028 {
fef5a0d9
RB
2029 /* If return value of __stpncpy_chk is ignored,
2030 optimize into __strncpy_chk. */
2031 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2032 if (fn)
2033 {
2034 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
2035 replace_call_with_call_and_fold (gsi, repl);
2036 return true;
2037 }
cbdd87d4
RG
2038 }
2039
fef5a0d9
RB
2040 if (! tree_fits_uhwi_p (size))
2041 return false;
2042
dcb7fae2 2043 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9 2044 if (! integer_all_onesp (size))
cbdd87d4 2045 {
fef5a0d9 2046 if (! tree_fits_uhwi_p (len))
fe2ef088 2047 {
fef5a0d9
RB
2048 /* If LEN is not constant, try MAXLEN too.
2049 For MAXLEN only allow optimizing into non-_ocs function
2050 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2051 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2052 return false;
8a1561bc 2053 }
fef5a0d9
RB
2054 else
2055 maxlen = len;
2056
2057 if (tree_int_cst_lt (size, maxlen))
2058 return false;
cbdd87d4
RG
2059 }
2060
fef5a0d9
RB
2061 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2062 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2063 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2064 if (!fn)
2065 return false;
2066
2067 gimple repl = gimple_build_call (fn, 3, dest, src, len);
2068 replace_call_with_call_and_fold (gsi, repl);
2069 return true;
cbdd87d4
RG
2070}
2071
2625bb5d
RB
2072/* Fold function call to builtin stpcpy with arguments DEST and SRC.
2073 Return NULL_TREE if no simplification can be made. */
2074
2075static bool
2076gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2077{
2078 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2079 location_t loc = gimple_location (stmt);
2080 tree dest = gimple_call_arg (stmt, 0);
2081 tree src = gimple_call_arg (stmt, 1);
2082 tree fn, len, lenp1;
2083
2084 /* If the result is unused, replace stpcpy with strcpy. */
2085 if (gimple_call_lhs (stmt) == NULL_TREE)
2086 {
2087 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2088 if (!fn)
2089 return false;
2090 gimple_call_set_fndecl (stmt, fn);
2091 fold_stmt (gsi);
2092 return true;
2093 }
2094
2095 len = c_strlen (src, 1);
2096 if (!len
2097 || TREE_CODE (len) != INTEGER_CST)
2098 return false;
2099
2100 if (optimize_function_for_size_p (cfun)
2101 /* If length is zero it's small enough. */
2102 && !integer_zerop (len))
2103 return false;
2104
2105 /* If the source has a known length replace stpcpy with memcpy. */
2106 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2107 if (!fn)
2108 return false;
2109
2110 gimple_seq stmts = NULL;
2111 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2112 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2113 tem, build_int_cst (size_type_node, 1));
2114 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2115 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2116 gimple_set_vuse (repl, gimple_vuse (stmt));
2117 gimple_set_vdef (repl, gimple_vdef (stmt));
2118 if (gimple_vdef (repl)
2119 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2120 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2121 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2122 /* Replace the result with dest + len. */
2123 stmts = NULL;
2124 tem = gimple_convert (&stmts, loc, sizetype, len);
2125 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2126 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2127 POINTER_PLUS_EXPR, dest, tem);
2128 gsi_replace (gsi, ret, true);
2129 /* Finally fold the memcpy call. */
2130 gimple_stmt_iterator gsi2 = *gsi;
2131 gsi_prev (&gsi2);
2132 fold_stmt (&gsi2);
2133 return true;
2134}
2135
fef5a0d9
RB
2136/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2137 NULL_TREE if a normal call should be emitted rather than expanding
2138 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2139 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2140 passed as second argument. */
cbdd87d4
RG
2141
2142static bool
fef5a0d9 2143gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
dcb7fae2 2144 enum built_in_function fcode)
cbdd87d4 2145{
538dd0b7 2146 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2147 tree dest, size, len, fn, fmt, flag;
2148 const char *fmt_str;
cbdd87d4 2149
fef5a0d9
RB
2150 /* Verify the required arguments in the original call. */
2151 if (gimple_call_num_args (stmt) < 5)
2152 return false;
cbdd87d4 2153
fef5a0d9
RB
2154 dest = gimple_call_arg (stmt, 0);
2155 len = gimple_call_arg (stmt, 1);
2156 flag = gimple_call_arg (stmt, 2);
2157 size = gimple_call_arg (stmt, 3);
2158 fmt = gimple_call_arg (stmt, 4);
2159
2160 if (! tree_fits_uhwi_p (size))
2161 return false;
2162
2163 if (! integer_all_onesp (size))
2164 {
dcb7fae2 2165 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9 2166 if (! tree_fits_uhwi_p (len))
cbdd87d4 2167 {
fef5a0d9
RB
2168 /* If LEN is not constant, try MAXLEN too.
2169 For MAXLEN only allow optimizing into non-_ocs function
2170 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2171 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
cbdd87d4
RG
2172 return false;
2173 }
2174 else
fef5a0d9 2175 maxlen = len;
cbdd87d4 2176
fef5a0d9
RB
2177 if (tree_int_cst_lt (size, maxlen))
2178 return false;
2179 }
cbdd87d4 2180
fef5a0d9
RB
2181 if (!init_target_chars ())
2182 return false;
cbdd87d4 2183
fef5a0d9
RB
2184 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2185 or if format doesn't contain % chars or is "%s". */
2186 if (! integer_zerop (flag))
2187 {
2188 fmt_str = c_getstr (fmt);
2189 if (fmt_str == NULL)
2190 return false;
2191 if (strchr (fmt_str, target_percent) != NULL
2192 && strcmp (fmt_str, target_percent_s))
2193 return false;
cbdd87d4
RG
2194 }
2195
fef5a0d9
RB
2196 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2197 available. */
2198 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2199 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2200 if (!fn)
491e0b9b
RG
2201 return false;
2202
fef5a0d9
RB
2203 /* Replace the called function and the first 5 argument by 3 retaining
2204 trailing varargs. */
2205 gimple_call_set_fndecl (stmt, fn);
2206 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2207 gimple_call_set_arg (stmt, 0, dest);
2208 gimple_call_set_arg (stmt, 1, len);
2209 gimple_call_set_arg (stmt, 2, fmt);
2210 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2211 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2212 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2213 fold_stmt (gsi);
2214 return true;
2215}
cbdd87d4 2216
fef5a0d9
RB
2217/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2218 Return NULL_TREE if a normal call should be emitted rather than
2219 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2220 or BUILT_IN_VSPRINTF_CHK. */
cbdd87d4 2221
fef5a0d9
RB
2222static bool
2223gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2224 enum built_in_function fcode)
2225{
538dd0b7 2226 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2227 tree dest, size, len, fn, fmt, flag;
2228 const char *fmt_str;
2229 unsigned nargs = gimple_call_num_args (stmt);
cbdd87d4 2230
fef5a0d9
RB
2231 /* Verify the required arguments in the original call. */
2232 if (nargs < 4)
2233 return false;
2234 dest = gimple_call_arg (stmt, 0);
2235 flag = gimple_call_arg (stmt, 1);
2236 size = gimple_call_arg (stmt, 2);
2237 fmt = gimple_call_arg (stmt, 3);
2238
2239 if (! tree_fits_uhwi_p (size))
2240 return false;
2241
2242 len = NULL_TREE;
2243
2244 if (!init_target_chars ())
2245 return false;
2246
2247 /* Check whether the format is a literal string constant. */
2248 fmt_str = c_getstr (fmt);
2249 if (fmt_str != NULL)
2250 {
2251 /* If the format doesn't contain % args or %%, we know the size. */
2252 if (strchr (fmt_str, target_percent) == 0)
cbdd87d4 2253 {
fef5a0d9
RB
2254 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2255 len = build_int_cstu (size_type_node, strlen (fmt_str));
2256 }
2257 /* If the format is "%s" and first ... argument is a string literal,
2258 we know the size too. */
2259 else if (fcode == BUILT_IN_SPRINTF_CHK
2260 && strcmp (fmt_str, target_percent_s) == 0)
2261 {
2262 tree arg;
cbdd87d4 2263
fef5a0d9
RB
2264 if (nargs == 5)
2265 {
2266 arg = gimple_call_arg (stmt, 4);
2267 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2268 {
2269 len = c_strlen (arg, 1);
2270 if (! len || ! tree_fits_uhwi_p (len))
2271 len = NULL_TREE;
2272 }
2273 }
2274 }
2275 }
cbdd87d4 2276
fef5a0d9
RB
2277 if (! integer_all_onesp (size))
2278 {
2279 if (! len || ! tree_int_cst_lt (len, size))
2280 return false;
2281 }
cbdd87d4 2282
fef5a0d9
RB
2283 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2284 or if format doesn't contain % chars or is "%s". */
2285 if (! integer_zerop (flag))
2286 {
2287 if (fmt_str == NULL)
2288 return false;
2289 if (strchr (fmt_str, target_percent) != NULL
2290 && strcmp (fmt_str, target_percent_s))
2291 return false;
2292 }
cbdd87d4 2293
fef5a0d9
RB
2294 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2295 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2296 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2297 if (!fn)
2298 return false;
2299
2300 /* Replace the called function and the first 4 argument by 2 retaining
2301 trailing varargs. */
2302 gimple_call_set_fndecl (stmt, fn);
2303 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2304 gimple_call_set_arg (stmt, 0, dest);
2305 gimple_call_set_arg (stmt, 1, fmt);
2306 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2307 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2308 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2309 fold_stmt (gsi);
2310 return true;
2311}
2312
35770bb2
RB
2313/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2314 ORIG may be null if this is a 2-argument call. We don't attempt to
2315 simplify calls with more than 3 arguments.
2316
2317 Return NULL_TREE if no simplification was possible, otherwise return the
2318 simplified form of the call as a tree. If IGNORED is true, it means that
2319 the caller does not use the returned value of the function. */
2320
2321static bool
dcb7fae2 2322gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
35770bb2
RB
2323{
2324 gimple stmt = gsi_stmt (*gsi);
2325 tree dest = gimple_call_arg (stmt, 0);
2326 tree fmt = gimple_call_arg (stmt, 1);
2327 tree orig = NULL_TREE;
2328 const char *fmt_str = NULL;
2329
2330 /* Verify the required arguments in the original call. We deal with two
2331 types of sprintf() calls: 'sprintf (str, fmt)' and
2332 'sprintf (dest, "%s", orig)'. */
2333 if (gimple_call_num_args (stmt) > 3)
2334 return false;
2335
2336 if (gimple_call_num_args (stmt) == 3)
2337 orig = gimple_call_arg (stmt, 2);
2338
2339 /* Check whether the format is a literal string constant. */
2340 fmt_str = c_getstr (fmt);
2341 if (fmt_str == NULL)
2342 return false;
2343
2344 if (!init_target_chars ())
2345 return false;
2346
2347 /* If the format doesn't contain % args or %%, use strcpy. */
2348 if (strchr (fmt_str, target_percent) == NULL)
2349 {
2350 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2351
2352 if (!fn)
2353 return false;
2354
2355 /* Don't optimize sprintf (buf, "abc", ptr++). */
2356 if (orig)
2357 return false;
2358
2359 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2360 'format' is known to contain no % formats. */
2361 gimple_seq stmts = NULL;
2362 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2363 gimple_seq_add_stmt_without_update (&stmts, repl);
2364 if (gimple_call_lhs (stmt))
2365 {
2366 repl = gimple_build_assign (gimple_call_lhs (stmt),
2367 build_int_cst (integer_type_node,
2368 strlen (fmt_str)));
2369 gimple_seq_add_stmt_without_update (&stmts, repl);
2370 gsi_replace_with_seq_vops (gsi, stmts);
2371 /* gsi now points at the assignment to the lhs, get a
2372 stmt iterator to the memcpy call.
2373 ??? We can't use gsi_for_stmt as that doesn't work when the
2374 CFG isn't built yet. */
2375 gimple_stmt_iterator gsi2 = *gsi;
2376 gsi_prev (&gsi2);
2377 fold_stmt (&gsi2);
2378 }
2379 else
2380 {
2381 gsi_replace_with_seq_vops (gsi, stmts);
2382 fold_stmt (gsi);
2383 }
2384 return true;
2385 }
2386
2387 /* If the format is "%s", use strcpy if the result isn't used. */
2388 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2389 {
2390 tree fn;
2391 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2392
2393 if (!fn)
2394 return false;
2395
2396 /* Don't crash on sprintf (str1, "%s"). */
2397 if (!orig)
2398 return false;
2399
dcb7fae2
RB
2400 tree orig_len = NULL_TREE;
2401 if (gimple_call_lhs (stmt))
35770bb2 2402 {
dcb7fae2 2403 orig_len = get_maxval_strlen (orig, 0);
d7e78447 2404 if (!orig_len)
35770bb2
RB
2405 return false;
2406 }
2407
2408 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2409 gimple_seq stmts = NULL;
2410 gimple repl = gimple_build_call (fn, 2, dest, orig);
2411 gimple_seq_add_stmt_without_update (&stmts, repl);
2412 if (gimple_call_lhs (stmt))
2413 {
d7e78447
RB
2414 if (!useless_type_conversion_p (integer_type_node,
2415 TREE_TYPE (orig_len)))
2416 orig_len = fold_convert (integer_type_node, orig_len);
2417 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
35770bb2
RB
2418 gimple_seq_add_stmt_without_update (&stmts, repl);
2419 gsi_replace_with_seq_vops (gsi, stmts);
2420 /* gsi now points at the assignment to the lhs, get a
2421 stmt iterator to the memcpy call.
2422 ??? We can't use gsi_for_stmt as that doesn't work when the
2423 CFG isn't built yet. */
2424 gimple_stmt_iterator gsi2 = *gsi;
2425 gsi_prev (&gsi2);
2426 fold_stmt (&gsi2);
2427 }
2428 else
2429 {
2430 gsi_replace_with_seq_vops (gsi, stmts);
2431 fold_stmt (gsi);
2432 }
2433 return true;
2434 }
2435 return false;
2436}
2437
d7e78447
RB
2438/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2439 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2440 attempt to simplify calls with more than 4 arguments.
35770bb2 2441
d7e78447
RB
2442 Return NULL_TREE if no simplification was possible, otherwise return the
2443 simplified form of the call as a tree. If IGNORED is true, it means that
2444 the caller does not use the returned value of the function. */
2445
2446static bool
dcb7fae2 2447gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
d7e78447 2448{
538dd0b7 2449 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
d7e78447
RB
2450 tree dest = gimple_call_arg (stmt, 0);
2451 tree destsize = gimple_call_arg (stmt, 1);
2452 tree fmt = gimple_call_arg (stmt, 2);
2453 tree orig = NULL_TREE;
2454 const char *fmt_str = NULL;
2455
2456 if (gimple_call_num_args (stmt) > 4)
2457 return false;
2458
2459 if (gimple_call_num_args (stmt) == 4)
2460 orig = gimple_call_arg (stmt, 3);
2461
2462 if (!tree_fits_uhwi_p (destsize))
2463 return false;
2464 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2465
2466 /* Check whether the format is a literal string constant. */
2467 fmt_str = c_getstr (fmt);
2468 if (fmt_str == NULL)
2469 return false;
2470
2471 if (!init_target_chars ())
2472 return false;
2473
2474 /* If the format doesn't contain % args or %%, use strcpy. */
2475 if (strchr (fmt_str, target_percent) == NULL)
2476 {
2477 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2478 if (!fn)
2479 return false;
2480
2481 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2482 if (orig)
2483 return false;
2484
2485 /* We could expand this as
2486 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2487 or to
2488 memcpy (str, fmt_with_nul_at_cstm1, cst);
2489 but in the former case that might increase code size
2490 and in the latter case grow .rodata section too much.
2491 So punt for now. */
2492 size_t len = strlen (fmt_str);
2493 if (len >= destlen)
2494 return false;
2495
2496 gimple_seq stmts = NULL;
2497 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2498 gimple_seq_add_stmt_without_update (&stmts, repl);
2499 if (gimple_call_lhs (stmt))
2500 {
2501 repl = gimple_build_assign (gimple_call_lhs (stmt),
2502 build_int_cst (integer_type_node, len));
2503 gimple_seq_add_stmt_without_update (&stmts, repl);
2504 gsi_replace_with_seq_vops (gsi, stmts);
2505 /* gsi now points at the assignment to the lhs, get a
2506 stmt iterator to the memcpy call.
2507 ??? We can't use gsi_for_stmt as that doesn't work when the
2508 CFG isn't built yet. */
2509 gimple_stmt_iterator gsi2 = *gsi;
2510 gsi_prev (&gsi2);
2511 fold_stmt (&gsi2);
2512 }
2513 else
2514 {
2515 gsi_replace_with_seq_vops (gsi, stmts);
2516 fold_stmt (gsi);
2517 }
2518 return true;
2519 }
2520
2521 /* If the format is "%s", use strcpy if the result isn't used. */
2522 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2523 {
2524 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2525 if (!fn)
2526 return false;
2527
2528 /* Don't crash on snprintf (str1, cst, "%s"). */
2529 if (!orig)
2530 return false;
2531
dcb7fae2 2532 tree orig_len = get_maxval_strlen (orig, 0);
d7e78447 2533 if (!orig_len)
dcb7fae2 2534 return false;
d7e78447
RB
2535
2536 /* We could expand this as
2537 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2538 or to
2539 memcpy (str1, str2_with_nul_at_cstm1, cst);
2540 but in the former case that might increase code size
2541 and in the latter case grow .rodata section too much.
2542 So punt for now. */
2543 if (compare_tree_int (orig_len, destlen) >= 0)
2544 return false;
2545
2546 /* Convert snprintf (str1, cst, "%s", str2) into
2547 strcpy (str1, str2) if strlen (str2) < cst. */
2548 gimple_seq stmts = NULL;
2549 gimple repl = gimple_build_call (fn, 2, dest, orig);
2550 gimple_seq_add_stmt_without_update (&stmts, repl);
2551 if (gimple_call_lhs (stmt))
2552 {
2553 if (!useless_type_conversion_p (integer_type_node,
2554 TREE_TYPE (orig_len)))
2555 orig_len = fold_convert (integer_type_node, orig_len);
2556 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2557 gimple_seq_add_stmt_without_update (&stmts, repl);
2558 gsi_replace_with_seq_vops (gsi, stmts);
2559 /* gsi now points at the assignment to the lhs, get a
2560 stmt iterator to the memcpy call.
2561 ??? We can't use gsi_for_stmt as that doesn't work when the
2562 CFG isn't built yet. */
2563 gimple_stmt_iterator gsi2 = *gsi;
2564 gsi_prev (&gsi2);
2565 fold_stmt (&gsi2);
2566 }
2567 else
2568 {
2569 gsi_replace_with_seq_vops (gsi, stmts);
2570 fold_stmt (gsi);
2571 }
2572 return true;
2573 }
2574 return false;
2575}
35770bb2 2576
edd7ae68
RB
2577/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2578 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2579 more than 3 arguments, and ARG may be null in the 2-argument case.
2580
2581 Return NULL_TREE if no simplification was possible, otherwise return the
2582 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2583 code of the function to be simplified. */
2584
2585static bool
2586gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2587 tree fp, tree fmt, tree arg,
2588 enum built_in_function fcode)
2589{
2590 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2591 tree fn_fputc, fn_fputs;
2592 const char *fmt_str = NULL;
2593
2594 /* If the return value is used, don't do the transformation. */
2595 if (gimple_call_lhs (stmt) != NULL_TREE)
2596 return false;
2597
2598 /* Check whether the format is a literal string constant. */
2599 fmt_str = c_getstr (fmt);
2600 if (fmt_str == NULL)
2601 return false;
2602
2603 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2604 {
2605 /* If we're using an unlocked function, assume the other
2606 unlocked functions exist explicitly. */
2607 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2608 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2609 }
2610 else
2611 {
2612 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2613 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2614 }
2615
2616 if (!init_target_chars ())
2617 return false;
2618
2619 /* If the format doesn't contain % args or %%, use strcpy. */
2620 if (strchr (fmt_str, target_percent) == NULL)
2621 {
2622 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2623 && arg)
2624 return false;
2625
2626 /* If the format specifier was "", fprintf does nothing. */
2627 if (fmt_str[0] == '\0')
2628 {
2629 replace_call_with_value (gsi, NULL_TREE);
2630 return true;
2631 }
2632
2633 /* When "string" doesn't contain %, replace all cases of
2634 fprintf (fp, string) with fputs (string, fp). The fputs
2635 builtin will take care of special cases like length == 1. */
2636 if (fn_fputs)
2637 {
2638 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2639 replace_call_with_call_and_fold (gsi, repl);
2640 return true;
2641 }
2642 }
2643
2644 /* The other optimizations can be done only on the non-va_list variants. */
2645 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2646 return false;
2647
2648 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2649 else if (strcmp (fmt_str, target_percent_s) == 0)
2650 {
2651 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2652 return false;
2653 if (fn_fputs)
2654 {
2655 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2656 replace_call_with_call_and_fold (gsi, repl);
2657 return true;
2658 }
2659 }
2660
2661 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2662 else if (strcmp (fmt_str, target_percent_c) == 0)
2663 {
2664 if (!arg
2665 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2666 return false;
2667 if (fn_fputc)
2668 {
2669 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2670 replace_call_with_call_and_fold (gsi, repl);
2671 return true;
2672 }
2673 }
2674
2675 return false;
2676}
2677
ad03a744
RB
2678/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2679 FMT and ARG are the arguments to the call; we don't fold cases with
2680 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2681
2682 Return NULL_TREE if no simplification was possible, otherwise return the
2683 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2684 code of the function to be simplified. */
2685
2686static bool
2687gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2688 tree arg, enum built_in_function fcode)
2689{
2690 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2691 tree fn_putchar, fn_puts, newarg;
2692 const char *fmt_str = NULL;
2693
2694 /* If the return value is used, don't do the transformation. */
2695 if (gimple_call_lhs (stmt) != NULL_TREE)
2696 return false;
2697
2698 /* Check whether the format is a literal string constant. */
2699 fmt_str = c_getstr (fmt);
2700 if (fmt_str == NULL)
2701 return false;
2702
2703 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2704 {
2705 /* If we're using an unlocked function, assume the other
2706 unlocked functions exist explicitly. */
2707 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2708 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2709 }
2710 else
2711 {
2712 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2713 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2714 }
2715
2716 if (!init_target_chars ())
2717 return false;
2718
2719 if (strcmp (fmt_str, target_percent_s) == 0
2720 || strchr (fmt_str, target_percent) == NULL)
2721 {
2722 const char *str;
2723
2724 if (strcmp (fmt_str, target_percent_s) == 0)
2725 {
2726 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2727 return false;
2728
2729 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2730 return false;
2731
2732 str = c_getstr (arg);
2733 if (str == NULL)
2734 return false;
2735 }
2736 else
2737 {
2738 /* The format specifier doesn't contain any '%' characters. */
2739 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2740 && arg)
2741 return false;
2742 str = fmt_str;
2743 }
2744
2745 /* If the string was "", printf does nothing. */
2746 if (str[0] == '\0')
2747 {
2748 replace_call_with_value (gsi, NULL_TREE);
2749 return true;
2750 }
2751
2752 /* If the string has length of 1, call putchar. */
2753 if (str[1] == '\0')
2754 {
2755 /* Given printf("c"), (where c is any one character,)
2756 convert "c"[0] to an int and pass that to the replacement
2757 function. */
2758 newarg = build_int_cst (integer_type_node, str[0]);
2759 if (fn_putchar)
2760 {
2761 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2762 replace_call_with_call_and_fold (gsi, repl);
2763 return true;
2764 }
2765 }
2766 else
2767 {
2768 /* If the string was "string\n", call puts("string"). */
2769 size_t len = strlen (str);
2770 if ((unsigned char)str[len - 1] == target_newline
2771 && (size_t) (int) len == len
2772 && (int) len > 0)
2773 {
2774 char *newstr;
2775 tree offset_node, string_cst;
2776
2777 /* Create a NUL-terminated string that's one char shorter
2778 than the original, stripping off the trailing '\n'. */
2779 newarg = build_string_literal (len, str);
2780 string_cst = string_constant (newarg, &offset_node);
2781 gcc_checking_assert (string_cst
2782 && (TREE_STRING_LENGTH (string_cst)
2783 == (int) len)
2784 && integer_zerop (offset_node)
2785 && (unsigned char)
2786 TREE_STRING_POINTER (string_cst)[len - 1]
2787 == target_newline);
2788 /* build_string_literal creates a new STRING_CST,
2789 modify it in place to avoid double copying. */
2790 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2791 newstr[len - 1] = '\0';
2792 if (fn_puts)
2793 {
2794 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2795 replace_call_with_call_and_fold (gsi, repl);
2796 return true;
2797 }
2798 }
2799 else
2800 /* We'd like to arrange to call fputs(string,stdout) here,
2801 but we need stdout and don't have a way to get it yet. */
2802 return false;
2803 }
2804 }
2805
2806 /* The other optimizations can be done only on the non-va_list variants. */
2807 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2808 return false;
2809
2810 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2811 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2812 {
2813 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2814 return false;
2815 if (fn_puts)
2816 {
2817 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2818 replace_call_with_call_and_fold (gsi, repl);
2819 return true;
2820 }
2821 }
2822
2823 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2824 else if (strcmp (fmt_str, target_percent_c) == 0)
2825 {
2826 if (!arg || ! useless_type_conversion_p (integer_type_node,
2827 TREE_TYPE (arg)))
2828 return false;
2829 if (fn_putchar)
2830 {
2831 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2832 replace_call_with_call_and_fold (gsi, repl);
2833 return true;
2834 }
2835 }
2836
2837 return false;
2838}
2839
edd7ae68 2840
fef5a0d9
RB
2841
2842/* Fold a call to __builtin_strlen with known length LEN. */
2843
2844static bool
dcb7fae2 2845gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 2846{
dcb7fae2
RB
2847 gimple stmt = gsi_stmt (*gsi);
2848 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
fef5a0d9
RB
2849 if (!len)
2850 return false;
2813904b 2851 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
fef5a0d9
RB
2852 replace_call_with_value (gsi, len);
2853 return true;
cbdd87d4
RG
2854}
2855
2856
dcb7fae2
RB
2857/* Fold the non-target builtin at *GSI and return whether any simplification
2858 was made. */
cbdd87d4 2859
fef5a0d9 2860static bool
dcb7fae2 2861gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 2862{
538dd0b7 2863 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 2864 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 2865
dcb7fae2
RB
2866 /* Give up for always_inline inline builtins until they are
2867 inlined. */
2868 if (avoid_folding_inline_builtin (callee))
2869 return false;
cbdd87d4 2870
edd7ae68
RB
2871 unsigned n = gimple_call_num_args (stmt);
2872 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2873 switch (fcode)
cbdd87d4 2874 {
dcb7fae2
RB
2875 case BUILT_IN_BZERO:
2876 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2877 gimple_call_arg (stmt, 1));
2878 case BUILT_IN_MEMSET:
2879 return gimple_fold_builtin_memset (gsi,
2880 gimple_call_arg (stmt, 1),
2881 gimple_call_arg (stmt, 2));
2882 case BUILT_IN_BCOPY:
2883 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2884 gimple_call_arg (stmt, 0), 3);
2885 case BUILT_IN_MEMCPY:
2886 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2887 gimple_call_arg (stmt, 1), 0);
2888 case BUILT_IN_MEMPCPY:
2889 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2890 gimple_call_arg (stmt, 1), 1);
2891 case BUILT_IN_MEMMOVE:
2892 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2893 gimple_call_arg (stmt, 1), 3);
2894 case BUILT_IN_SPRINTF_CHK:
2895 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 2896 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
2897 case BUILT_IN_STRCAT_CHK:
2898 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
2899 case BUILT_IN_STRNCAT_CHK:
2900 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 2901 case BUILT_IN_STRLEN:
dcb7fae2 2902 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 2903 case BUILT_IN_STRCPY:
dcb7fae2 2904 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 2905 gimple_call_arg (stmt, 0),
dcb7fae2 2906 gimple_call_arg (stmt, 1));
cbdd87d4 2907 case BUILT_IN_STRNCPY:
dcb7fae2 2908 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
2909 gimple_call_arg (stmt, 0),
2910 gimple_call_arg (stmt, 1),
dcb7fae2 2911 gimple_call_arg (stmt, 2));
9a7eefec 2912 case BUILT_IN_STRCAT:
dcb7fae2
RB
2913 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2914 gimple_call_arg (stmt, 1));
ad03a744
RB
2915 case BUILT_IN_STRNCAT:
2916 return gimple_fold_builtin_strncat (gsi);
cbdd87d4 2917 case BUILT_IN_FPUTS:
dcb7fae2
RB
2918 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2919 gimple_call_arg (stmt, 1), false);
cbdd87d4 2920 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
2921 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2922 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
2923 case BUILT_IN_MEMCPY_CHK:
2924 case BUILT_IN_MEMPCPY_CHK:
2925 case BUILT_IN_MEMMOVE_CHK:
2926 case BUILT_IN_MEMSET_CHK:
dcb7fae2 2927 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
2928 gimple_call_arg (stmt, 0),
2929 gimple_call_arg (stmt, 1),
2930 gimple_call_arg (stmt, 2),
2931 gimple_call_arg (stmt, 3),
edd7ae68 2932 fcode);
2625bb5d
RB
2933 case BUILT_IN_STPCPY:
2934 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
2935 case BUILT_IN_STRCPY_CHK:
2936 case BUILT_IN_STPCPY_CHK:
dcb7fae2 2937 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
2938 gimple_call_arg (stmt, 0),
2939 gimple_call_arg (stmt, 1),
2940 gimple_call_arg (stmt, 2),
edd7ae68 2941 fcode);
cbdd87d4 2942 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 2943 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
2944 return gimple_fold_builtin_stxncpy_chk (gsi,
2945 gimple_call_arg (stmt, 0),
2946 gimple_call_arg (stmt, 1),
2947 gimple_call_arg (stmt, 2),
2948 gimple_call_arg (stmt, 3),
edd7ae68 2949 fcode);
cbdd87d4
RG
2950 case BUILT_IN_SNPRINTF_CHK:
2951 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 2952 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
d7e78447 2953 case BUILT_IN_SNPRINTF:
dcb7fae2 2954 return gimple_fold_builtin_snprintf (gsi);
d7e78447 2955 case BUILT_IN_SPRINTF:
dcb7fae2 2956 return gimple_fold_builtin_sprintf (gsi);
edd7ae68
RB
2957 case BUILT_IN_FPRINTF:
2958 case BUILT_IN_FPRINTF_UNLOCKED:
2959 case BUILT_IN_VFPRINTF:
2960 if (n == 2 || n == 3)
2961 return gimple_fold_builtin_fprintf (gsi,
2962 gimple_call_arg (stmt, 0),
2963 gimple_call_arg (stmt, 1),
2964 n == 3
2965 ? gimple_call_arg (stmt, 2)
2966 : NULL_TREE,
2967 fcode);
2968 break;
2969 case BUILT_IN_FPRINTF_CHK:
2970 case BUILT_IN_VFPRINTF_CHK:
2971 if (n == 3 || n == 4)
2972 return gimple_fold_builtin_fprintf (gsi,
2973 gimple_call_arg (stmt, 0),
2974 gimple_call_arg (stmt, 2),
2975 n == 4
2976 ? gimple_call_arg (stmt, 3)
2977 : NULL_TREE,
2978 fcode);
2979 break;
ad03a744
RB
2980 case BUILT_IN_PRINTF:
2981 case BUILT_IN_PRINTF_UNLOCKED:
2982 case BUILT_IN_VPRINTF:
2983 if (n == 1 || n == 2)
2984 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2985 n == 2
2986 ? gimple_call_arg (stmt, 1)
2987 : NULL_TREE, fcode);
2988 break;
2989 case BUILT_IN_PRINTF_CHK:
2990 case BUILT_IN_VPRINTF_CHK:
2991 if (n == 2 || n == 3)
2992 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2993 n == 3
2994 ? gimple_call_arg (stmt, 2)
2995 : NULL_TREE, fcode);
fef5a0d9
RB
2996 default:;
2997 }
2998
2999 /* Try the generic builtin folder. */
3000 bool ignore = (gimple_call_lhs (stmt) == NULL);
3001 tree result = fold_call_stmt (stmt, ignore);
3002 if (result)
3003 {
3004 if (ignore)
3005 STRIP_NOPS (result);
3006 else
3007 result = fold_convert (gimple_call_return_type (stmt), result);
3008 if (!update_call_from_tree (gsi, result))
3009 gimplify_and_update_call_from_tree (gsi, result);
3010 return true;
3011 }
3012
3013 return false;
3014}
3015
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  /* Compute in double-the-maximum precision so that no PLUS/MINUS/MULT
     on values of TYPE can itself overflow the working representation.  */
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  /* A negative infinite-precision result can never be represented in an
     unsigned type, whatever its precision.  */
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  /* Otherwise overflow happened iff the result needs more bits than
     TYPE provides.  */
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
3042
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   If INPLACE is true, only transformations that keep the statement in
   place (no insertions or replacements) are performed.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already resolved to a concrete function;
	     strip the wrapper.  Warn in the dump file if the resolved
	     target contradicts the type-inheritance analysis.  */
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  /* Exactly one possible target: devirtualize.  */
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Give the SSA lhs a dummy default definition so
			     its uses stay valid.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible targets: the call cannot be executed,
		     replace it with __builtin_unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    {
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      gsi_replace (gsi, new_stmt, false);
		    }
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise at least fold *& in the static chain operand.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      /* Machine-dependent builtins are folded by the target hook.  */
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* Fold internal functions: __builtin_expect, UBSan checks and
	 the *_OVERFLOW internal functions.  For the arithmetic ones,
	 SUBCODE is the tree code of the underlying operation and
	 CPLX_RESULT says whether the result is a {value, overflow-flag}
	 complex pair.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  /* Drop the check when the bound is unknown (all-ones) or
	     provably satisfied.  */
	  if (integer_all_onesp (gimple_call_arg (stmt, 2))
	      || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
		  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
		  && tree_int_cst_le (gimple_call_arg (stmt, 1),
				      gimple_call_arg (stmt, 2))))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      return true;
	    }
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      /* For *_OVERFLOW the computation type is the element type
		 of the complex lhs; no lhs means nothing to fold.  */
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    /* UBSan checks must stay when they would trigger.  */
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    /* The conversion could change the value; give up.  */
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Package {result, overflow} into the COMPLEX lhs.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
3334
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* For a GIMPLE_COND the simplification is either a comparison
	 (stored directly as the condition), an SSA name or constant
	 (compared against zero), or a sequence producing a value.  */
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      /* Inplace replacement is only allowed when the new rhs does not
	 need more operand slots than the statement currently has.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  &ops[0], ops[1], ops[2]);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (!inplace)
    {
      /* Other statements (e.g. calls) with a lhs: materialize the
	 simplification as a sequence feeding the lhs and replace the
	 whole statement.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
3449
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the reference tree to canonicalize in place; return
   true if *T was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through an outer ADDR_EXPR and any component references to
     the base memory reference.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand and take the address of the plain base.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
3532
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE is used by gimple_simplify to look
   through SSA edges; it may refuse by returning NULL_TREE.
   Returns true if the statement was changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* Re-fetch the statement; the simplification above may have
     replaced it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* Inplace folding must not grow the number of operands.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (as_a <gcond *> (stmt));
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference the address is taken of and
		 re-take its address.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
3775
e0ee10ed
RB
3776/* Valueziation callback that ends up not following SSA edges. */
3777
3778tree
3779no_follow_ssa_edges (tree)
3780{
3781 return NULL_TREE;
3782}
3783
45cc9f96
RB
3784/* Valueization callback that ends up following single-use SSA edges only. */
3785
3786tree
3787follow_single_use_edges (tree val)
3788{
3789 if (TREE_CODE (val) == SSA_NAME
3790 && !has_single_use (val))
3791 return NULL_TREE;
3792 return val;
3793}
3794
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Default variant: do not look through any SSA edges.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

/* Overload taking a VALUEIZE callback that controls which SSA edges
   the pattern-based simplifier may follow.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
3813
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  /* Inplace folding must keep the very same statement object.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
3830
e89065a1
SL
3831/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3832 if EXPR is null or we don't know how.
3833 If non-null, the result always has boolean type. */
3834
3835static tree
3836canonicalize_bool (tree expr, bool invert)
3837{
3838 if (!expr)
3839 return NULL_TREE;
3840 else if (invert)
3841 {
3842 if (integer_nonzerop (expr))
3843 return boolean_false_node;
3844 else if (integer_zerop (expr))
3845 return boolean_true_node;
3846 else if (TREE_CODE (expr) == SSA_NAME)
3847 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3848 build_int_cst (TREE_TYPE (expr), 0));
98209db3 3849 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
3850 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3851 boolean_type_node,
3852 TREE_OPERAND (expr, 0),
3853 TREE_OPERAND (expr, 1));
3854 else
3855 return NULL_TREE;
3856 }
3857 else
3858 {
3859 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3860 return expr;
3861 if (integer_nonzerop (expr))
3862 return boolean_true_node;
3863 else if (integer_zerop (expr))
3864 return boolean_false_node;
3865 else if (TREE_CODE (expr) == SSA_NAME)
3866 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3867 build_int_cst (TREE_TYPE (expr), 0));
98209db3 3868 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
3869 return fold_build2 (TREE_CODE (expr),
3870 boolean_type_node,
3871 TREE_OPERAND (expr, 0),
3872 TREE_OPERAND (expr, 1));
3873 else
3874 return NULL_TREE;
3875 }
3876}
3877
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == 1): same sense as the definition.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) or (name != 1): inverted sense.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
3935
3936/* Check to see if two boolean expressions OP1 and OP2 are logically
3937 equivalent. */
3938
3939static bool
3940same_bool_result_p (const_tree op1, const_tree op2)
3941{
3942 /* Simple cases first. */
3943 if (operand_equal_p (op1, op2, 0))
3944 return true;
3945
3946 /* Check the cases where at least one of the operands is a comparison.
3947 These are a bit smarter than operand_equal_p in that they apply some
3948 identifies on SSA_NAMEs. */
98209db3 3949 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
3950 && same_bool_comparison_p (op1, TREE_CODE (op2),
3951 TREE_OPERAND (op2, 0),
3952 TREE_OPERAND (op2, 1)))
3953 return true;
98209db3 3954 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
3955 && same_bool_comparison_p (op2, TREE_CODE (op1),
3956 TREE_OPERAND (op1, 0),
3957 TREE_OPERAND (op1, 1)))
3958 return true;
3959
3960 /* Default case. */
3961 return false;
3962}
3963
3964/* Forward declarations for some mutually recursive functions. */
3965
3966static tree
3967and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3968 enum tree_code code2, tree op2a, tree op2b);
3969static tree
3970and_var_with_comparison (tree var, bool invert,
3971 enum tree_code code2, tree op2a, tree op2b);
3972static tree
3973and_var_with_comparison_1 (gimple stmt,
3974 enum tree_code code2, tree op2a, tree op2b);
3975static tree
3976or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3977 enum tree_code code2, tree op2a, tree op2b);
3978static tree
3979or_var_with_comparison (tree var, bool invert,
3980 enum tree_code code2, tree op2a, tree op2b);
3981static tree
3982or_var_with_comparison_1 (gimple stmt,
3983 enum tree_code code2, tree op2a, tree op2b);
3984
3985/* Helper function for and_comparisons_1: try to simplify the AND of the
3986 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3987 If INVERT is true, invert the value of the VAR before doing the AND.
3988 Return NULL_EXPR if we can't simplify this to a single expression. */
3989
3990static tree
3991and_var_with_comparison (tree var, bool invert,
3992 enum tree_code code2, tree op2a, tree op2b)
3993{
3994 tree t;
3995 gimple stmt = SSA_NAME_DEF_STMT (var);
3996
3997 /* We can only deal with variables whose definitions are assignments. */
3998 if (!is_gimple_assign (stmt))
3999 return NULL_TREE;
4000
4001 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4002 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4003 Then we only have to consider the simpler non-inverted cases. */
4004 if (invert)
4005 t = or_var_with_comparison_1 (stmt,
4006 invert_tree_comparison (code2, false),
4007 op2a, op2b);
4008 else
4009 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4010 return canonicalize_bool (t, invert);
4011}
4012
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* TRUE_TEST_VAR / FALSE_TEST_VAR record whether (op2a code2 op2b) is
     a test for the truth resp. falsehood of an SSA name.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  /* var AND (var != 0) => var.  */
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  /* var AND (var == 0) => false.  */
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      /* PARTIAL caches the simplification of the first inner operand
	 so it can be combined with the second one's result below.  */
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b))) */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t) */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial) */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4175
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP is negative, zero, or positive as OP1B is less than, equal
	 to, or greater than OP2B.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* If the second comparison implies op1a != op1b, it subsumes
	     the first; return it alone.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	/* (x <= C AND x >= C) collapses to (x == C); op1b == op2b here
	   since cmp == 0.  */
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* (NAME == 0) and (NAME != 1) test the inverse of NAME.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  /* This arg makes the AND false; all other args
			     must agree on a false result.  */
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4429
4430/* Try to simplify the AND of two comparisons, specified by
4431 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4432 If this can be simplified to a single expression (without requiring
4433 introducing more SSA variables to hold intermediate values),
4434 return the resulting tree. Otherwise return NULL_TREE.
4435 If the result expression is non-null, it has boolean type. */
4436
4437tree
4438maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4439 enum tree_code code2, tree op2a, tree op2b)
4440{
4441 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4442 if (t)
4443 return t;
4444 else
4445 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4446}
4447
4448/* Helper function for or_comparisons_1: try to simplify the OR of the
4449 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4450 If INVERT is true, invert the value of VAR before doing the OR.
4451 Return NULL_EXPR if we can't simplify this to a single expression. */
4452
4453static tree
4454or_var_with_comparison (tree var, bool invert,
4455 enum tree_code code2, tree op2a, tree op2b)
4456{
4457 tree t;
4458 gimple stmt = SSA_NAME_DEF_STMT (var);
4459
4460 /* We can only deal with variables whose definitions are assignments. */
4461 if (!is_gimple_assign (stmt))
4462 return NULL_TREE;
4463
4464 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4465 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4466 Then we only have to consider the simpler non-inverted cases. */
4467 if (invert)
4468 t = and_var_with_comparison_1 (stmt,
4469 invert_tree_comparison (code2, false),
4470 op2a, op2b);
4471 else
4472 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4473 return canonicalize_bool (t, invert);
4474}
4475
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* TRUE_TEST_VAR / FALSE_TEST_VAR record whether (op2a code2 op2b) is
     a test for the truth resp. falsehood of an SSA name.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  /* var OR (var != 0) => var.  */
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  /* var OR (var == 0) => true.  */
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      /* PARTIAL caches the simplification of the first inner operand
	 so it can be combined with the second one's result below.  */
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b))) */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial) */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial) */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4639
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP is negative, zero, or positive as OP1B is less than, equal
	 to, or greater than OP2B.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  /* If the second comparison already covers op1a == op1b, the
	     equality contributes nothing; return the second alone.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise chose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	/* (x < C OR x > C) is (x != C); op1b == op2b here since
	   cmp == 0.  */
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* (NAME == 0) and (NAME != 1) test the inverse of NAME.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  /* This arg makes the OR true; all other args
			     must agree on a true result.  */
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4893
4894/* Try to simplify the OR of two comparisons, specified by
4895 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4896 If this can be simplified to a single expression (without requiring
4897 introducing more SSA variables to hold intermediate values),
4898 return the resulting tree. Otherwise return NULL_TREE.
4899 If the result expression is non-null, it has boolean type. */
4900
4901tree
4902maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4903 enum tree_code code2, tree op2a, tree op2b)
4904{
4905 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4906 if (t)
4907 return t;
4908 else
4909 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4910}
cfef45c8
RG
4911
4912
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  /* First try the generic match-and-simplify machinery.  */
  code_helper rcode;
  tree ops[3] = {};
  /* ??? The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize)
      && rcode.is_tree_code ()
      && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
	  || ((tree_code) rcode) == ADDR_EXPR)
      && is_gimple_val (ops[0]))
    {
      /* Simplified all the way down to a bare value.  */
      tree res = ops[0];
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Match-and-simplified ");
	  print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, res, 0);
	  fprintf (dump_file, "\n");
	}
      return res;
    }

  /* Otherwise dispatch on the statement kind and fold by hand.  */
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  /* A fully-populated vector CONSTRUCTOR folds to a
		     VECTOR_CST when every element valueizes to a
		     constant.  */
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  /* Simple wrappers around an SSA name can be folded by
		     valueizing the wrapped name.  */
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      /* Replace a MEM_REF base by its constant address, if
			 known, before attempting constant folding of the
			 whole reference.  */
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr_loc
			   (loc,
			    fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      /* Fold embedded expressions in ternary codes.  */
	      if ((subcode == COND_EXPR
		   || subcode == VEC_COND_EXPR)
		  && COMPARISON_CLASS_P (op0))
		{
		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
					      TREE_TYPE (op0), op00, op01);
		  if (tem)
		    op0 = tem;
		}

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	/* UBSAN overflow-check internal calls fold like the plain
	   arithmetic operation when the result provably does not
	   overflow.  */
	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	/* Fold calls to known builtins with all-constant arguments.  */
	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
5202
5203/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5204 Returns NULL_TREE if folding to a constant is not possible, otherwise
5205 returns a constant according to is_gimple_min_invariant. */
5206
5207tree
5208gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
5209{
5210 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5211 if (res && is_gimple_min_invariant (res))
5212 return res;
5213 return NULL_TREE;
5214}
5215
5216
5217/* The following set of functions are supposed to fold references using
5218 their constant initializers. */
5219
cfef45c8
RG
5220/* See if we can find constructor defining value of BASE.
5221 When we know the consructor with constant offset (such as
5222 base is array[40] and we do know constructor of array), then
5223 BIT_OFFSET is adjusted accordingly.
5224
5225 As a special case, return error_mark_node when constructor
5226 is not explicitly available, but it is known to be zero
5227 such as 'static const int a;'. */
5228static tree
5229get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5230 tree (*valueize)(tree))
5231{
5232 HOST_WIDE_INT bit_offset2, size, max_size;
5233 if (TREE_CODE (base) == MEM_REF)
5234 {
5235 if (!integer_zerop (TREE_OPERAND (base, 1)))
5236 {
9541ffee 5237 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
cfef45c8 5238 return NULL_TREE;
807e902e 5239 *bit_offset += (mem_ref_offset (base).to_short_addr ()
cfef45c8
RG
5240 * BITS_PER_UNIT);
5241 }
5242
5243 if (valueize
5244 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5245 base = valueize (TREE_OPERAND (base, 0));
5246 if (!base || TREE_CODE (base) != ADDR_EXPR)
5247 return NULL_TREE;
5248 base = TREE_OPERAND (base, 0);
5249 }
5250
5251 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5252 DECL_INITIAL. If BASE is a nested reference into another
5253 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5254 the inner reference. */
5255 switch (TREE_CODE (base))
5256 {
5257 case VAR_DECL:
cfef45c8 5258 case CONST_DECL:
6a6dac52
JH
5259 {
5260 tree init = ctor_for_folding (base);
5261
688010ba 5262 /* Our semantic is exact opposite of ctor_for_folding;
6a6dac52
JH
5263 NULL means unknown, while error_mark_node is 0. */
5264 if (init == error_mark_node)
5265 return NULL_TREE;
5266 if (!init)
5267 return error_mark_node;
5268 return init;
5269 }
cfef45c8
RG
5270
5271 case ARRAY_REF:
5272 case COMPONENT_REF:
5273 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
5274 if (max_size == -1 || size != max_size)
5275 return NULL_TREE;
5276 *bit_offset += bit_offset2;
5277 return get_base_constructor (base, bit_offset, valueize);
5278
5279 case STRING_CST:
5280 case CONSTRUCTOR:
5281 return base;
5282
5283 default:
5284 return NULL_TREE;
5285 }
5286}
5287
cfef45c8
RG
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  FROM_DECL, if non-NULL, is the decl the
   constructor came from and is passed through for reference canonicalization.
   Returns the folded value, or NULL_TREE when the access cannot be folded.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  /* Wrap the index into the declared precision/signedness of the
     array's index type so it compares correctly against the
     constructor indices below.  */
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  /* Start one before the first element; each unindexed constructor
     entry below advances this by one.  */
  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
5387
5388/* CTOR is CONSTRUCTOR of an aggregate or vector.
5389 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5390
5391static tree
5392fold_nonarray_ctor_reference (tree type, tree ctor,
5393 unsigned HOST_WIDE_INT offset,
c44c2088
JH
5394 unsigned HOST_WIDE_INT size,
5395 tree from_decl)
cfef45c8
RG
5396{
5397 unsigned HOST_WIDE_INT cnt;
5398 tree cfield, cval;
5399
5400 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5401 cval)
5402 {
5403 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5404 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5405 tree field_size = DECL_SIZE (cfield);
807e902e
KZ
5406 offset_int bitoffset;
5407 offset_int bitoffset_end, access_end;
cfef45c8
RG
5408
5409 /* Variable sized objects in static constructors makes no sense,
5410 but field_size can be NULL for flexible array members. */
5411 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5412 && TREE_CODE (byte_offset) == INTEGER_CST
5413 && (field_size != NULL_TREE
5414 ? TREE_CODE (field_size) == INTEGER_CST
5415 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5416
5417 /* Compute bit offset of the field. */
807e902e
KZ
5418 bitoffset = (wi::to_offset (field_offset)
5419 + wi::lshift (wi::to_offset (byte_offset),
5420 LOG2_BITS_PER_UNIT));
cfef45c8
RG
5421 /* Compute bit offset where the field ends. */
5422 if (field_size != NULL_TREE)
807e902e 5423 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 5424 else
807e902e 5425 bitoffset_end = 0;
cfef45c8 5426
807e902e 5427 access_end = offset_int (offset) + size;
b8b2b009
JJ
5428
5429 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5430 [BITOFFSET, BITOFFSET_END)? */
807e902e 5431 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 5432 && (field_size == NULL_TREE
807e902e 5433 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 5434 {
807e902e 5435 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8
RG
5436 /* We do have overlap. Now see if field is large enough to
5437 cover the access. Give up for accesses spanning multiple
5438 fields. */
807e902e 5439 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 5440 return NULL_TREE;
807e902e 5441 if (wi::lts_p (offset, bitoffset))
b8b2b009 5442 return NULL_TREE;
cfef45c8 5443 return fold_ctor_reference (type, cval,
27bcd47c 5444 inner_offset.to_uhwi (), size,
c44c2088 5445 from_decl);
cfef45c8
RG
5446 }
5447 }
5448 /* When memory is not explicitely mentioned in constructor, it is 0. */
5449 return build_zero_cst (type);
5450}
5451
5452/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5453 to the memory at bit OFFSET. */
5454
8403c2cf 5455tree
cfef45c8 5456fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
c44c2088 5457 unsigned HOST_WIDE_INT size, tree from_decl)
cfef45c8
RG
5458{
5459 tree ret;
5460
5461 /* We found the field with exact match. */
5462 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5463 && !offset)
9d60be38 5464 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8
RG
5465
5466 /* We are at the end of walk, see if we can view convert the
5467 result. */
5468 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5469 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3d8208ce
TP
5470 && !compare_tree_int (TYPE_SIZE (type), size)
5471 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
cfef45c8 5472 {
9d60be38 5473 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8
RG
5474 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5475 if (ret)
86c5a5c3 5476 STRIP_USELESS_TYPE_CONVERSION (ret);
cfef45c8
RG
5477 return ret;
5478 }
b2505143
RB
5479 /* For constants and byte-aligned/sized reads try to go through
5480 native_encode/interpret. */
5481 if (CONSTANT_CLASS_P (ctor)
5482 && BITS_PER_UNIT == 8
5483 && offset % BITS_PER_UNIT == 0
5484 && size % BITS_PER_UNIT == 0
5485 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5486 {
5487 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5488 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5489 offset / BITS_PER_UNIT) > 0)
5490 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
5491 }
cfef45c8
RG
5492 if (TREE_CODE (ctor) == CONSTRUCTOR)
5493 {
5494
eb8f1123
RG
5495 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5496 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
c44c2088
JH
5497 return fold_array_ctor_reference (type, ctor, offset, size,
5498 from_decl);
cfef45c8 5499 else
c44c2088
JH
5500 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5501 from_decl);
cfef45c8
RG
5502 }
5503
5504 return NULL_TREE;
5505}
5506
5507/* Return the tree representing the element referenced by T if T is an
5508 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5509 names using VALUEIZE. Return NULL_TREE otherwise. */
5510
5511tree
5512fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5513{
5514 tree ctor, idx, base;
5515 HOST_WIDE_INT offset, size, max_size;
5516 tree tem;
5517
f8a7df45
RG
5518 if (TREE_THIS_VOLATILE (t))
5519 return NULL_TREE;
5520
3a65ee74 5521 if (DECL_P (t))
cfef45c8
RG
5522 return get_symbol_constant_value (t);
5523
5524 tem = fold_read_from_constant_string (t);
5525 if (tem)
5526 return tem;
5527
5528 switch (TREE_CODE (t))
5529 {
5530 case ARRAY_REF:
5531 case ARRAY_RANGE_REF:
5532 /* Constant indexes are handled well by get_base_constructor.
5533 Only special case variable offsets.
5534 FIXME: This code can't handle nested references with variable indexes
5535 (they will be handled only by iteration of ccp). Perhaps we can bring
5536 get_ref_base_and_extent here and make it use a valueize callback. */
5537 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5538 && valueize
5539 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
b48e22b2 5540 && TREE_CODE (idx) == INTEGER_CST)
cfef45c8
RG
5541 {
5542 tree low_bound, unit_size;
5543
5544 /* If the resulting bit-offset is constant, track it. */
5545 if ((low_bound = array_ref_low_bound (t),
b48e22b2 5546 TREE_CODE (low_bound) == INTEGER_CST)
cfef45c8 5547 && (unit_size = array_ref_element_size (t),
807e902e 5548 tree_fits_uhwi_p (unit_size)))
cfef45c8 5549 {
807e902e
KZ
5550 offset_int woffset
5551 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5552 TYPE_PRECISION (TREE_TYPE (idx)));
5553
5554 if (wi::fits_shwi_p (woffset))
5555 {
5556 offset = woffset.to_shwi ();
5557 /* TODO: This code seems wrong, multiply then check
5558 to see if it fits. */
5559 offset *= tree_to_uhwi (unit_size);
5560 offset *= BITS_PER_UNIT;
5561
5562 base = TREE_OPERAND (t, 0);
5563 ctor = get_base_constructor (base, &offset, valueize);
5564 /* Empty constructor. Always fold to 0. */
5565 if (ctor == error_mark_node)
5566 return build_zero_cst (TREE_TYPE (t));
5567 /* Out of bound array access. Value is undefined,
5568 but don't fold. */
5569 if (offset < 0)
5570 return NULL_TREE;
5571 /* We can not determine ctor. */
5572 if (!ctor)
5573 return NULL_TREE;
5574 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5575 tree_to_uhwi (unit_size)
5576 * BITS_PER_UNIT,
5577 base);
5578 }
cfef45c8
RG
5579 }
5580 }
5581 /* Fallthru. */
5582
5583 case COMPONENT_REF:
5584 case BIT_FIELD_REF:
5585 case TARGET_MEM_REF:
5586 case MEM_REF:
5587 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
5588 ctor = get_base_constructor (base, &offset, valueize);
5589
5590 /* Empty constructor. Always fold to 0. */
5591 if (ctor == error_mark_node)
5592 return build_zero_cst (TREE_TYPE (t));
5593 /* We do not know precise address. */
5594 if (max_size == -1 || max_size != size)
5595 return NULL_TREE;
5596 /* We can not determine ctor. */
5597 if (!ctor)
5598 return NULL_TREE;
5599
5600 /* Out of bound array access. Value is undefined, but don't fold. */
5601 if (offset < 0)
5602 return NULL_TREE;
5603
c44c2088
JH
5604 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5605 base);
cfef45c8
RG
5606
5607 case REALPART_EXPR:
5608 case IMAGPART_EXPR:
5609 {
5610 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5611 if (c && TREE_CODE (c) == COMPLEX_CST)
5612 return fold_build1_loc (EXPR_LOCATION (t),
5613 TREE_CODE (t), TREE_TYPE (t), c);
5614 break;
5615 }
5616
5617 default:
5618 break;
5619 }
5620
5621 return NULL_TREE;
5622}
5623
5624tree
5625fold_const_aggregate_ref (tree t)
5626{
5627 return fold_const_aggregate_ref_1 (t, NULL);
5628}
06bc3ec7 5629
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  /* Convert OFFSET and TOKEN into a bit offset into the vtable array.  */
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
5740
85942f45
JH
5741/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5742 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5743 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
5744 OBJ_TYPE_REF_OBJECT(REF).
5745 Set CAN_REFER if non-NULL to false if method
5746 is not referable or if the virtual table is ill-formed (such as rewriten
5747 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
5748
5749tree
ec77d61f
JH
5750gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5751 bool *can_refer)
85942f45
JH
5752{
5753 unsigned HOST_WIDE_INT offset;
5754 tree v;
5755
5756 v = BINFO_VTABLE (known_binfo);
5757 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5758 if (!v)
5759 return NULL_TREE;
5760
5761 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
5762 {
5763 if (can_refer)
5764 *can_refer = false;
5765 return NULL_TREE;
5766 }
5767 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
5768}
5769
06bc3ec7
BS
5770/* Return true iff VAL is a gimple expression that is known to be
5771 non-negative. Restricted to floating-point inputs. */
5772
5773bool
5774gimple_val_nonnegative_real_p (tree val)
5775{
5776 gimple def_stmt;
5777
5778 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
5779
5780 /* Use existing logic for non-gimple trees. */
5781 if (tree_expr_nonnegative_p (val))
5782 return true;
5783
5784 if (TREE_CODE (val) != SSA_NAME)
5785 return false;
5786
5787 /* Currently we look only at the immediately defining statement
5788 to make this determination, since recursion on defining
5789 statements of operands can lead to quadratic behavior in the
5790 worst case. This is expected to catch almost all occurrences
5791 in practice. It would be possible to implement limited-depth
5792 recursion if important cases are lost. Alternatively, passes
5793 that need this information (such as the pow/powi lowering code
5794 in the cse_sincos pass) could be revised to provide it through
5795 dataflow propagation. */
5796
5797 def_stmt = SSA_NAME_DEF_STMT (val);
5798
5799 if (is_gimple_assign (def_stmt))
5800 {
5801 tree op0, op1;
5802
5803 /* See fold-const.c:tree_expr_nonnegative_p for additional
5804 cases that could be handled with recursion. */
5805
5806 switch (gimple_assign_rhs_code (def_stmt))
5807 {
5808 case ABS_EXPR:
5809 /* Always true for floating-point operands. */
5810 return true;
5811
5812 case MULT_EXPR:
5813 /* True if the two operands are identical (since we are
5814 restricted to floating-point inputs). */
5815 op0 = gimple_assign_rhs1 (def_stmt);
5816 op1 = gimple_assign_rhs2 (def_stmt);
5817
5818 if (op0 == op1
5819 || operand_equal_p (op0, op1, 0))
5820 return true;
5821
5822 default:
5823 return false;
5824 }
5825 }
5826 else if (is_gimple_call (def_stmt))
5827 {
5828 tree fndecl = gimple_call_fndecl (def_stmt);
5829 if (fndecl
5830 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5831 {
5832 tree arg1;
5833
5834 switch (DECL_FUNCTION_CODE (fndecl))
5835 {
5836 CASE_FLT_FN (BUILT_IN_ACOS):
5837 CASE_FLT_FN (BUILT_IN_ACOSH):
5838 CASE_FLT_FN (BUILT_IN_CABS):
5839 CASE_FLT_FN (BUILT_IN_COSH):
5840 CASE_FLT_FN (BUILT_IN_ERFC):
5841 CASE_FLT_FN (BUILT_IN_EXP):
5842 CASE_FLT_FN (BUILT_IN_EXP10):
5843 CASE_FLT_FN (BUILT_IN_EXP2):
5844 CASE_FLT_FN (BUILT_IN_FABS):
5845 CASE_FLT_FN (BUILT_IN_FDIM):
5846 CASE_FLT_FN (BUILT_IN_HYPOT):
5847 CASE_FLT_FN (BUILT_IN_POW10):
5848 return true;
5849
5850 CASE_FLT_FN (BUILT_IN_SQRT):
5851 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
5852 nonnegative inputs. */
3d3dbadd 5853 if (!HONOR_SIGNED_ZEROS (val))
06bc3ec7
BS
5854 return true;
5855
5856 break;
5857
5858 CASE_FLT_FN (BUILT_IN_POWI):
5859 /* True if the second argument is an even integer. */
5860 arg1 = gimple_call_arg (def_stmt, 1);
5861
5862 if (TREE_CODE (arg1) == INTEGER_CST
5863 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
5864 return true;
5865
5866 break;
5867
5868 CASE_FLT_FN (BUILT_IN_POW):
5869 /* True if the second argument is an even integer-valued
5870 real. */
5871 arg1 = gimple_call_arg (def_stmt, 1);
5872
5873 if (TREE_CODE (arg1) == REAL_CST)
5874 {
5875 REAL_VALUE_TYPE c;
5876 HOST_WIDE_INT n;
5877
5878 c = TREE_REAL_CST (arg1);
5879 n = real_to_integer (&c);
5880
5881 if ((n & 1) == 0)
5882 {
5883 REAL_VALUE_TYPE cint;
807e902e 5884 real_from_integer (&cint, VOIDmode, n, SIGNED);
06bc3ec7
BS
5885 if (real_identical (&c, &cint))
5886 return true;
5887 }
5888 }
5889
5890 break;
5891
5892 default:
5893 return false;
5894 }
5895 }
5896 }
5897
5898 return false;
5899}
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  /* Index with the array's lower bound when it is a known
	     constant; otherwise fall back to zero.  */
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ... */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the access stays inside the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  /* The imaginary part lives exactly one element past the real
	     part, so the offset must equal the element size.  */
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively fold the inner indirection first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
19e51b40
JJ
6020
6021/* Return true if CODE is an operation that when operating on signed
6022 integer types involves undefined behavior on overflow and the
6023 operation can be expressed with unsigned arithmetic. */
6024
6025bool
6026arith_code_with_undefined_signed_overflow (tree_code code)
6027{
6028 switch (code)
6029 {
6030 case PLUS_EXPR:
6031 case MINUS_EXPR:
6032 case MULT_EXPR:
6033 case NEGATE_EXPR:
6034 case POINTER_PLUS_EXPR:
6035 return true;
6036 default:
6037 return false;
6038 }
6039}
6040
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each rhs operand to the unsigned type, collecting any
     statements the conversion requires.  Operand 0 is the lhs, so
     start at 1.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  /* Give STMT a fresh unsigned lhs; the original lhs is re-established
     by the conversion statement appended below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR is invalid on non-pointer types; its unsigned
     counterpart is plain addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
d4f5cd5e 6080
3d2cf79f 6081
c26de36d
RB
6082/* The valueization hook we use for the gimple_build API simplification.
6083 This makes us match fold_buildN behavior by only combining with
6084 statements in the sequence(s) we are currently building. */
6085
6086static tree
6087gimple_build_valueize (tree op)
6088{
6089 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6090 return op;
6091 return NULL_TREE;
6092}
6093
3d2cf79f 6094/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 6095 simplifying it first if possible. Returns the built
3d2cf79f
RB
6096 expression value and appends statements possibly defining it
6097 to SEQ. */
6098
6099tree
6100gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6101 enum tree_code code, tree type, tree op0)
3d2cf79f 6102{
c26de36d 6103 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
6104 if (!res)
6105 {
6106 if (gimple_in_ssa_p (cfun))
b731b390 6107 res = make_ssa_name (type);
3d2cf79f 6108 else
b731b390 6109 res = create_tmp_reg (type);
3d2cf79f
RB
6110 gimple stmt;
6111 if (code == REALPART_EXPR
6112 || code == IMAGPART_EXPR
6113 || code == VIEW_CONVERT_EXPR)
0d0e4a03 6114 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 6115 else
0d0e4a03 6116 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
6117 gimple_set_location (stmt, loc);
6118 gimple_seq_add_stmt_without_update (seq, stmt);
6119 }
6120 return res;
6121}
6122
6123/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 6124 simplifying it first if possible. Returns the built
3d2cf79f
RB
6125 expression value and appends statements possibly defining it
6126 to SEQ. */
6127
6128tree
6129gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6130 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 6131{
c26de36d 6132 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
6133 if (!res)
6134 {
6135 if (gimple_in_ssa_p (cfun))
b731b390 6136 res = make_ssa_name (type);
3d2cf79f 6137 else
b731b390 6138 res = create_tmp_reg (type);
0d0e4a03 6139 gimple stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
6140 gimple_set_location (stmt, loc);
6141 gimple_seq_add_stmt_without_update (seq, stmt);
6142 }
6143 return res;
6144}
6145
6146/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 6147 simplifying it first if possible. Returns the built
3d2cf79f
RB
6148 expression value and appends statements possibly defining it
6149 to SEQ. */
6150
6151tree
6152gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6153 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
6154{
6155 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 6156 seq, gimple_build_valueize);
3d2cf79f
RB
6157 if (!res)
6158 {
6159 if (gimple_in_ssa_p (cfun))
b731b390 6160 res = make_ssa_name (type);
3d2cf79f 6161 else
b731b390 6162 res = create_tmp_reg (type);
3d2cf79f
RB
6163 gimple stmt;
6164 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
6165 stmt = gimple_build_assign (res, code,
6166 build3 (code, type, op0, op1, op2));
3d2cf79f 6167 else
0d0e4a03 6168 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
6169 gimple_set_location (stmt, loc);
6170 gimple_seq_add_stmt_without_update (seq, stmt);
6171 }
6172 return res;
6173}
6174
6175/* Build the call FN (ARG0) with a result of type TYPE
6176 (or no result if TYPE is void) with location LOC,
c26de36d 6177 simplifying it first if possible. Returns the built
3d2cf79f
RB
6178 expression value (or NULL_TREE if TYPE is void) and appends
6179 statements possibly defining it to SEQ. */
6180
6181tree
6182gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6183 enum built_in_function fn, tree type, tree arg0)
3d2cf79f 6184{
c26de36d 6185 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
6186 if (!res)
6187 {
6188 tree decl = builtin_decl_implicit (fn);
6189 gimple stmt = gimple_build_call (decl, 1, arg0);
6190 if (!VOID_TYPE_P (type))
6191 {
6192 if (gimple_in_ssa_p (cfun))
b731b390 6193 res = make_ssa_name (type);
3d2cf79f 6194 else
b731b390 6195 res = create_tmp_reg (type);
3d2cf79f
RB
6196 gimple_call_set_lhs (stmt, res);
6197 }
6198 gimple_set_location (stmt, loc);
6199 gimple_seq_add_stmt_without_update (seq, stmt);
6200 }
6201 return res;
6202}
6203
6204/* Build the call FN (ARG0, ARG1) with a result of type TYPE
6205 (or no result if TYPE is void) with location LOC,
c26de36d 6206 simplifying it first if possible. Returns the built
3d2cf79f
RB
6207 expression value (or NULL_TREE if TYPE is void) and appends
6208 statements possibly defining it to SEQ. */
6209
6210tree
6211gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6212 enum built_in_function fn, tree type, tree arg0, tree arg1)
3d2cf79f 6213{
c26de36d 6214 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
6215 if (!res)
6216 {
6217 tree decl = builtin_decl_implicit (fn);
6218 gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
6219 if (!VOID_TYPE_P (type))
6220 {
6221 if (gimple_in_ssa_p (cfun))
b731b390 6222 res = make_ssa_name (type);
3d2cf79f 6223 else
b731b390 6224 res = create_tmp_reg (type);
3d2cf79f
RB
6225 gimple_call_set_lhs (stmt, res);
6226 }
6227 gimple_set_location (stmt, loc);
6228 gimple_seq_add_stmt_without_update (seq, stmt);
6229 }
6230 return res;
6231}
6232
6233/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6234 (or no result if TYPE is void) with location LOC,
c26de36d 6235 simplifying it first if possible. Returns the built
3d2cf79f
RB
6236 expression value (or NULL_TREE if TYPE is void) and appends
6237 statements possibly defining it to SEQ. */
6238
6239tree
6240gimple_build (gimple_seq *seq, location_t loc,
6241 enum built_in_function fn, tree type,
c26de36d 6242 tree arg0, tree arg1, tree arg2)
3d2cf79f 6243{
c26de36d
RB
6244 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6245 seq, gimple_build_valueize);
3d2cf79f
RB
6246 if (!res)
6247 {
6248 tree decl = builtin_decl_implicit (fn);
6249 gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6250 if (!VOID_TYPE_P (type))
6251 {
6252 if (gimple_in_ssa_p (cfun))
b731b390 6253 res = make_ssa_name (type);
3d2cf79f 6254 else
b731b390 6255 res = create_tmp_reg (type);
3d2cf79f
RB
6256 gimple_call_set_lhs (stmt, res);
6257 }
6258 gimple_set_location (stmt, loc);
6259 gimple_seq_add_stmt_without_update (seq, stmt);
6260 }
6261 return res;
6262}
6263
6264/* Build the conversion (TYPE) OP with a result of type TYPE
6265 with location LOC if such conversion is neccesary in GIMPLE,
6266 simplifying it first.
6267 Returns the built expression value and appends
6268 statements possibly defining it to SEQ. */
d4f5cd5e
RB
6269
6270tree
6271gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6272{
6273 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6274 return op;
3d2cf79f 6275 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 6276}