cbdd87d4 1/* Statement simplification on GIMPLE.
85ec4feb 2 Copyright (C) 2010-2018 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5
AM
25#include "target.h"
26#include "rtl.h"
cbdd87d4 27#include "tree.h"
c7131fb2 28#include "gimple.h"
957060b5 29#include "predict.h"
c7131fb2 30#include "ssa.h"
957060b5
AM
31#include "cgraph.h"
32#include "gimple-pretty-print.h"
cc8bea0a 33#include "gimple-ssa-warn-restrict.h"
c7131fb2 34#include "fold-const.h"
36566b39
PK
35#include "stmt.h"
36#include "expr.h"
37#include "stor-layout.h"
7ee2468b 38#include "dumpfile.h"
2fb9a547 39#include "gimple-fold.h"
45b0be94 40#include "gimplify.h"
5be5c238 41#include "gimple-iterator.h"
442b4905
AM
42#include "tree-into-ssa.h"
43#include "tree-dfa.h"
025d57f0 44#include "tree-object-size.h"
7a300452 45#include "tree-ssa.h"
cbdd87d4 46#include "tree-ssa-propagate.h"
450ad0cd 47#include "ipa-utils.h"
4484a35a 48#include "tree-ssa-address.h"
862d0b35 49#include "langhooks.h"
19e51b40 50#include "gimplify-me.h"
2b5f0895 51#include "dbgcnt.h"
9b2b7279 52#include "builtins.h"
e0ee10ed
RB
53#include "tree-eh.h"
54#include "gimple-match.h"
48126138 55#include "gomp-constants.h"
f869c12f 56#include "optabs-query.h"
629b3d75 57#include "omp-general.h"
abd3a68c 58#include "tree-cfg.h"
a918bfbf 59#include "fold-const-call.h"
314e6352
ML
60#include "stringpool.h"
61#include "attribs.h"
45b2222a 62#include "asan.h"
025d57f0
MS
63#include "diagnostic-core.h"
64#include "intl.h"
6a33d0ff 65#include "calls.h"
5ebaa477 66#include "tree-vector-builder.h"
5d0d5d68 67#include "tree-ssa-strlen.h"
cbdd87d4 68
598f7235
MS
69enum strlen_range_kind {
70 /* Compute the exact constant string length. */
71 SRK_STRLEN,
72 /* Compute the maximum constant string length. */
73 SRK_STRLENMAX,
74 /* Compute a range of string lengths bounded by object sizes. When
75 the length of a string cannot be determined, consider as the upper
76 bound the size of the enclosing object the string may be a member
77 or element of. Also determine the size of the largest character
78 array the string may refer to. */
79 SRK_LENRANGE,
80 /* Temporary until the rest of Martin's strlen range work is integrated. */
81 SRK_LENRANGE_2,
82 /* Determine the integer value of the argument (not string length). */
83 SRK_INT_VALUE
84};
85
86static bool get_range_strlen (tree, tree[2], bitmap *, strlen_range_kind,
87 bool *, unsigned, tree *);
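/* As an illustration of the kinds above: for an argument such as
   x ? arr : "xyz" with char arr[8], SRK_STRLEN fails because there is
   no single constant length, while the SRK_LENRANGE kinds yield the
   range [0, 7], using the bound of ARR as a conservative maximum (see
   the public get_range_strlen wrapper below).  SRK_INT_VALUE treats
   the argument as an integer value, for instance a size or bound
   passed to a string function, rather than as a string.  */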
fb471a13 88
b3b9f3d0 89/* Return true when DECL can be referenced from current unit.
c44c2088
JH
 90 FROM_DECL (if non-null) specifies the variable whose constructor DECL was taken from.
 91 We can get declarations that cannot be referenced for various
 92 reasons:
1389294c 93
1389294c
JH
94 1) When analyzing C++ virtual tables.
95 C++ virtual tables do have known constructors even
96 when they are keyed to other compilation unit.
97 Those tables can contain pointers to methods and vars
98 in other units. Those methods have both STATIC and EXTERNAL
99 set.
 100 2) In WHOPR mode devirtualization might lead to a reference
 101 to a method that was partitioned elsewhere.
102 In this case we have static VAR_DECL or FUNCTION_DECL
103 that has no corresponding callgraph/varpool node
b3b9f3d0
JH
104 declaring the body.
 105 3) COMDAT functions referred to by external vtables that
 106 we devirtualize only during the final compilation stage.
b3b9f3d0
JH
107 At this time we already decided that we will not output
108 the function body and thus we can't reference the symbol
109 directly. */
110
1389294c 111static bool
c44c2088 112can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
1389294c 113{
2c8326a5 114 varpool_node *vnode;
1389294c 115 struct cgraph_node *node;
5e20cdc9 116 symtab_node *snode;
c44c2088 117
00de328a 118 if (DECL_ABSTRACT_P (decl))
1632a686
JH
119 return false;
120
121 /* We are concerned only about static/external vars and functions. */
122 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
8813a647 123 || !VAR_OR_FUNCTION_DECL_P (decl))
1632a686
JH
124 return true;
125
 126 /* Static objects can be referred to only if they have not been optimized out yet. */
127 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
128 {
3aaf0529
JH
129 /* Before we start optimizing unreachable code we can be sure all
130 static objects are defined. */
3dafb85c 131 if (symtab->function_flags_ready)
3aaf0529 132 return true;
d52f5295 133 snode = symtab_node::get (decl);
3aaf0529 134 if (!snode || !snode->definition)
1632a686 135 return false;
7de90a6c 136 node = dyn_cast <cgraph_node *> (snode);
1632a686
JH
137 return !node || !node->global.inlined_to;
138 }
139
6da8be89 140 /* We will later output the initializer, so we can refer to it.
 141 So we are concerned only when DECL comes from the initializer of an
 142 external var or a var that has been optimized out. */
c44c2088 143 if (!from_decl
8813a647 144 || !VAR_P (from_decl)
3aaf0529 145 || (!DECL_EXTERNAL (from_decl)
9041d2e6 146 && (vnode = varpool_node::get (from_decl)) != NULL
3aaf0529 147 && vnode->definition)
6da8be89 148 || (flag_ltrans
9041d2e6 149 && (vnode = varpool_node::get (from_decl)) != NULL
6adda80b 150 && vnode->in_other_partition))
c44c2088 151 return true;
c44c2088
JH
 152 /* We are folding a reference from an external vtable. The vtable may refer
 153 to a symbol keyed to another compilation unit. The other compilation
 154 unit may be in a separate DSO and the symbol may be hidden. */
155 if (DECL_VISIBILITY_SPECIFIED (decl)
156 && DECL_EXTERNAL (decl)
a33a931b 157 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
d52f5295 158 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
c44c2088 159 return false;
b3b9f3d0
JH
 160 /* When the function is public, we can always introduce a new reference.
 161 The exception is COMDAT functions, where introducing a direct
 162 reference implies the need to include the function body in the current unit. */
163 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
164 return true;
3aaf0529
JH
 165 /* We have a COMDAT. We are going to check whether we still have a definition
 166 or whether the definition is going to be output in another partition.
 167 Bypass this when gimplifying; all needed functions will be produced.
 168
 169 As observed in PR20991 for already optimized out comdat virtual functions,
 170 it may be tempting not to give up, because the copy will be
 171 output elsewhere when the corresponding vtable is output.
 172 This is however not possible - the ABI specifies that COMDATs are output in
 173 units where they are used, and when the other unit was compiled with LTO
 174 it is possible that the vtable was kept public while the function itself
 175 was privatized. */
3dafb85c 176 if (!symtab->function_flags_ready)
b3b9f3d0 177 return true;
c44c2088 178
d52f5295 179 snode = symtab_node::get (decl);
3aaf0529
JH
180 if (!snode
181 || ((!snode->definition || DECL_EXTERNAL (decl))
182 && (!snode->in_other_partition
183 || (!snode->forced_by_abi && !snode->force_output))))
184 return false;
185 node = dyn_cast <cgraph_node *> (snode);
186 return !node || !node->global.inlined_to;
1389294c
JH
187}
188
a15ebbcd
ML
189/* Create a temporary for TYPE for a statement STMT. If the current function
 190 is in SSA form, an SSA name is created. Otherwise a temporary register
191 is made. */
192
edc19e03
WS
193tree
194create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
a15ebbcd
ML
195{
196 if (gimple_in_ssa_p (cfun))
197 return make_ssa_name (type, stmt);
198 else
199 return create_tmp_reg (type);
200}
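/* A typical use, taken from the memory-op folders below, is to create a
   temporary that holds a loaded value before it is stored:

     new_stmt = gimple_build_assign (NULL_TREE, srcmem);
     srcmem = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem), new_stmt);
     gimple_assign_set_lhs (new_stmt, srcmem);

   In SSA form this produces a fresh SSA name whose defining statement is
   NEW_STMT; otherwise a plain temporary register decl is returned.  */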
201
202/* CVAL is a value taken from DECL_INITIAL of a variable. Try to transform it into
 203 an acceptable form for is_gimple_min_invariant.
 204 FROM_DECL (if non-NULL) specifies the variable whose constructor contains CVAL. */
17f39a39
JH
205
206tree
c44c2088 207canonicalize_constructor_val (tree cval, tree from_decl)
17f39a39 208{
50619002
EB
209 tree orig_cval = cval;
210 STRIP_NOPS (cval);
315f5f1b
RG
211 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
212 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
17f39a39 213 {
315f5f1b
RG
214 tree ptr = TREE_OPERAND (cval, 0);
215 if (is_gimple_min_invariant (ptr))
216 cval = build1_loc (EXPR_LOCATION (cval),
217 ADDR_EXPR, TREE_TYPE (ptr),
218 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
219 ptr,
220 fold_convert (ptr_type_node,
221 TREE_OPERAND (cval, 1))));
17f39a39
JH
222 }
223 if (TREE_CODE (cval) == ADDR_EXPR)
224 {
5a27a197
RG
225 tree base = NULL_TREE;
226 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
ca5f4331
MM
227 {
228 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
229 if (base)
230 TREE_OPERAND (cval, 0) = base;
231 }
5a27a197
RG
232 else
233 base = get_base_address (TREE_OPERAND (cval, 0));
7501ca28
RG
234 if (!base)
235 return NULL_TREE;
b3b9f3d0 236
8813a647 237 if (VAR_OR_FUNCTION_DECL_P (base)
c44c2088 238 && !can_refer_decl_in_current_unit_p (base, from_decl))
1389294c 239 return NULL_TREE;
13f92e8d
JJ
240 if (TREE_TYPE (base) == error_mark_node)
241 return NULL_TREE;
8813a647 242 if (VAR_P (base))
46eb666a 243 TREE_ADDRESSABLE (base) = 1;
7501ca28
RG
244 else if (TREE_CODE (base) == FUNCTION_DECL)
245 {
246 /* Make sure we create a cgraph node for functions we'll reference.
247 They can be non-existent if the reference comes from an entry
248 of an external vtable for example. */
d52f5295 249 cgraph_node::get_create (base);
7501ca28 250 }
0038d4e0 251 /* Fixup types in global initializers. */
73aef89e
RG
252 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
253 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
50619002
EB
254
255 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
256 cval = fold_convert (TREE_TYPE (orig_cval), cval);
257 return cval;
17f39a39 258 }
846abd0d
RB
259 if (TREE_OVERFLOW_P (cval))
260 return drop_tree_overflow (cval);
50619002 261 return orig_cval;
17f39a39 262}
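/* For example, an initializer of the form &arr + 4 (a POINTER_PLUS_EXPR
   with a constant offset) is rewritten above into roughly &MEM[(char *)&arr + 4],
   an ADDR_EXPR that is_gimple_min_invariant accepts, and any referenced
   FUNCTION_DECL gets a cgraph node created so that later passes may take
   its address.  */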
cbdd87d4
RG
263
264/* If SYM is a constant variable with known value, return the value.
265 NULL_TREE is returned otherwise. */
266
267tree
268get_symbol_constant_value (tree sym)
269{
6a6dac52
JH
270 tree val = ctor_for_folding (sym);
271 if (val != error_mark_node)
cbdd87d4 272 {
cbdd87d4
RG
273 if (val)
274 {
9d60be38 275 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 276 if (val && is_gimple_min_invariant (val))
17f39a39 277 return val;
1389294c
JH
278 else
279 return NULL_TREE;
cbdd87d4
RG
280 }
281 /* Variables declared 'const' without an initializer
282 have zero as the initializer if they may not be
283 overridden at link or run time. */
284 if (!val
b8a8c472 285 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 286 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
287 }
288
289 return NULL_TREE;
290}
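/* For example, given

     static const int answer = 42;

   this returns the INTEGER_CST 42, and for a 'const' variable of
   register type with no initializer that may not be overridden at link
   or run time it returns a zero constant of the variable's type.
   Initializers that cannot be canonicalized yield NULL_TREE.  */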
291
292
cbdd87d4
RG
293
294/* Subroutine of fold_stmt. We perform several simplifications of the
295 memory reference tree EXPR and make sure to re-gimplify them properly
296 after propagation of constant addresses. IS_LHS is true if the
297 reference is supposed to be an lvalue. */
298
299static tree
300maybe_fold_reference (tree expr, bool is_lhs)
301{
17f39a39 302 tree result;
cbdd87d4 303
f0eddb90
RG
304 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
305 || TREE_CODE (expr) == REALPART_EXPR
306 || TREE_CODE (expr) == IMAGPART_EXPR)
307 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
308 return fold_unary_loc (EXPR_LOCATION (expr),
309 TREE_CODE (expr),
310 TREE_TYPE (expr),
311 TREE_OPERAND (expr, 0));
312 else if (TREE_CODE (expr) == BIT_FIELD_REF
313 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
314 return fold_ternary_loc (EXPR_LOCATION (expr),
315 TREE_CODE (expr),
316 TREE_TYPE (expr),
317 TREE_OPERAND (expr, 0),
318 TREE_OPERAND (expr, 1),
319 TREE_OPERAND (expr, 2));
320
f0eddb90
RG
321 if (!is_lhs
322 && (result = fold_const_aggregate_ref (expr))
323 && is_gimple_min_invariant (result))
324 return result;
cbdd87d4 325
cbdd87d4
RG
326 return NULL_TREE;
327}
328
329
330/* Attempt to fold an assignment statement pointed-to by SI. Returns a
331 replacement rhs for the statement or NULL_TREE if no simplification
332 could be made. It is assumed that the operands have been previously
333 folded. */
334
335static tree
336fold_gimple_assign (gimple_stmt_iterator *si)
337{
355fe088 338 gimple *stmt = gsi_stmt (*si);
cbdd87d4
RG
339 enum tree_code subcode = gimple_assign_rhs_code (stmt);
340 location_t loc = gimple_location (stmt);
341
342 tree result = NULL_TREE;
343
344 switch (get_gimple_rhs_class (subcode))
345 {
346 case GIMPLE_SINGLE_RHS:
347 {
348 tree rhs = gimple_assign_rhs1 (stmt);
349
8c00ba08
JW
350 if (TREE_CLOBBER_P (rhs))
351 return NULL_TREE;
352
4e71066d 353 if (REFERENCE_CLASS_P (rhs))
cbdd87d4
RG
354 return maybe_fold_reference (rhs, false);
355
bdf37f7a
JH
356 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
357 {
358 tree val = OBJ_TYPE_REF_EXPR (rhs);
359 if (is_gimple_min_invariant (val))
360 return val;
f8a39967 361 else if (flag_devirtualize && virtual_method_call_p (rhs))
bdf37f7a
JH
362 {
363 bool final;
364 vec <cgraph_node *>targets
f8a39967 365 = possible_polymorphic_call_targets (rhs, stmt, &final);
2b5f0895 366 if (final && targets.length () <= 1 && dbg_cnt (devirt))
bdf37f7a 367 {
2b5f0895
XDL
368 if (dump_enabled_p ())
369 {
4f5b9c80 370 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
2b5f0895
XDL
371 "resolving virtual function address "
372 "reference to function %s\n",
373 targets.length () == 1
374 ? targets[0]->name ()
3ef276e4 375 : "NULL");
2b5f0895 376 }
3ef276e4
RB
377 if (targets.length () == 1)
378 {
379 val = fold_convert (TREE_TYPE (val),
380 build_fold_addr_expr_loc
381 (loc, targets[0]->decl));
382 STRIP_USELESS_TYPE_CONVERSION (val);
383 }
384 else
 385 /* We cannot use __builtin_unreachable here because it
 386 cannot have its address taken. */
387 val = build_int_cst (TREE_TYPE (val), 0);
bdf37f7a
JH
388 return val;
389 }
390 }
bdf37f7a 391 }
7524f419 392
cbdd87d4
RG
393 else if (TREE_CODE (rhs) == ADDR_EXPR)
394 {
70f34814
RG
395 tree ref = TREE_OPERAND (rhs, 0);
396 tree tem = maybe_fold_reference (ref, true);
397 if (tem
398 && TREE_CODE (tem) == MEM_REF
399 && integer_zerop (TREE_OPERAND (tem, 1)))
400 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
401 else if (tem)
cbdd87d4
RG
402 result = fold_convert (TREE_TYPE (rhs),
403 build_fold_addr_expr_loc (loc, tem));
70f34814
RG
404 else if (TREE_CODE (ref) == MEM_REF
405 && integer_zerop (TREE_OPERAND (ref, 1)))
406 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
7524f419
RB
407
408 if (result)
409 {
410 /* Strip away useless type conversions. Both the
411 NON_LVALUE_EXPR that may have been added by fold, and
412 "useless" type conversions that might now be apparent
413 due to propagation. */
414 STRIP_USELESS_TYPE_CONVERSION (result);
415
416 if (result != rhs && valid_gimple_rhs_p (result))
417 return result;
418 }
cbdd87d4
RG
419 }
420
421 else if (TREE_CODE (rhs) == CONSTRUCTOR
7524f419 422 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
cbdd87d4
RG
423 {
424 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
425 unsigned i;
426 tree val;
427
428 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7524f419 429 if (! CONSTANT_CLASS_P (val))
cbdd87d4
RG
430 return NULL_TREE;
431
432 return build_vector_from_ctor (TREE_TYPE (rhs),
433 CONSTRUCTOR_ELTS (rhs));
434 }
435
436 else if (DECL_P (rhs))
9d60be38 437 return get_symbol_constant_value (rhs);
cbdd87d4
RG
438 }
439 break;
440
441 case GIMPLE_UNARY_RHS:
cbdd87d4
RG
442 break;
443
444 case GIMPLE_BINARY_RHS:
cbdd87d4
RG
445 break;
446
0354c0c7 447 case GIMPLE_TERNARY_RHS:
5c099d40
RB
448 result = fold_ternary_loc (loc, subcode,
449 TREE_TYPE (gimple_assign_lhs (stmt)),
450 gimple_assign_rhs1 (stmt),
451 gimple_assign_rhs2 (stmt),
452 gimple_assign_rhs3 (stmt));
0354c0c7
BS
453
454 if (result)
455 {
456 STRIP_USELESS_TYPE_CONVERSION (result);
457 if (valid_gimple_rhs_p (result))
458 return result;
0354c0c7
BS
459 }
460 break;
461
cbdd87d4
RG
462 case GIMPLE_INVALID_RHS:
463 gcc_unreachable ();
464 }
465
466 return NULL_TREE;
467}
468
fef5a0d9
RB
469
470/* Replace a statement at *SI_P with a sequence of statements in STMTS,
 471 adjusting the replacement stmts' location and virtual operands.
 472 If the statement has an lhs, the last stmt in the sequence is expected
473 to assign to that lhs. */
474
475static void
476gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
477{
355fe088 478 gimple *stmt = gsi_stmt (*si_p);
fef5a0d9
RB
479
480 if (gimple_has_location (stmt))
481 annotate_all_with_location (stmts, gimple_location (stmt));
482
483 /* First iterate over the replacement statements backward, assigning
484 virtual operands to their defining statements. */
355fe088 485 gimple *laststore = NULL;
fef5a0d9
RB
486 for (gimple_stmt_iterator i = gsi_last (stmts);
487 !gsi_end_p (i); gsi_prev (&i))
488 {
355fe088 489 gimple *new_stmt = gsi_stmt (i);
fef5a0d9
RB
490 if ((gimple_assign_single_p (new_stmt)
491 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
492 || (is_gimple_call (new_stmt)
493 && (gimple_call_flags (new_stmt)
494 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
495 {
496 tree vdef;
497 if (!laststore)
498 vdef = gimple_vdef (stmt);
499 else
500 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
501 gimple_set_vdef (new_stmt, vdef);
502 if (vdef && TREE_CODE (vdef) == SSA_NAME)
503 SSA_NAME_DEF_STMT (vdef) = new_stmt;
504 laststore = new_stmt;
505 }
506 }
507
508 /* Second iterate over the statements forward, assigning virtual
509 operands to their uses. */
510 tree reaching_vuse = gimple_vuse (stmt);
511 for (gimple_stmt_iterator i = gsi_start (stmts);
512 !gsi_end_p (i); gsi_next (&i))
513 {
355fe088 514 gimple *new_stmt = gsi_stmt (i);
fef5a0d9
RB
515 /* If the new statement possibly has a VUSE, update it with exact SSA
516 name we know will reach this one. */
517 if (gimple_has_mem_ops (new_stmt))
518 gimple_set_vuse (new_stmt, reaching_vuse);
519 gimple_set_modified (new_stmt, true);
520 if (gimple_vdef (new_stmt))
521 reaching_vuse = gimple_vdef (new_stmt);
522 }
523
524 /* If the new sequence does not do a store release the virtual
525 definition of the original statement. */
526 if (reaching_vuse
527 && reaching_vuse == gimple_vuse (stmt))
528 {
529 tree vdef = gimple_vdef (stmt);
530 if (vdef
531 && TREE_CODE (vdef) == SSA_NAME)
532 {
533 unlink_stmt_vdef (stmt);
534 release_ssa_name (vdef);
535 }
536 }
537
538 /* Finally replace the original statement with the sequence. */
539 gsi_replace_with_seq (si_p, stmts, false);
540}
541
cbdd87d4
RG
542/* Convert EXPR into a GIMPLE value suitable for substitution on the
543 RHS of an assignment. Insert the necessary statements before
 544 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
 545 is replaced. If the call is expected to produce a result, then it
546 is replaced by an assignment of the new RHS to the result variable.
547 If the result is to be ignored, then the call is replaced by a
fe2ef088
MM
548 GIMPLE_NOP. A proper VDEF chain is retained by making the first
549 VUSE and the last VDEF of the whole sequence be the same as the replaced
550 statement and using new SSA names for stores in between. */
cbdd87d4
RG
551
552void
553gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
554{
555 tree lhs;
355fe088 556 gimple *stmt, *new_stmt;
cbdd87d4 557 gimple_stmt_iterator i;
355a7673 558 gimple_seq stmts = NULL;
cbdd87d4
RG
559
560 stmt = gsi_stmt (*si_p);
561
562 gcc_assert (is_gimple_call (stmt));
563
45852dcc 564 push_gimplify_context (gimple_in_ssa_p (cfun));
cbdd87d4 565
e256dfce 566 lhs = gimple_call_lhs (stmt);
cbdd87d4 567 if (lhs == NULL_TREE)
6e572326
RG
568 {
569 gimplify_and_add (expr, &stmts);
 570 /* We can end up folding a memcpy of an empty class assignment
571 which gets optimized away by C++ gimplification. */
572 if (gimple_seq_empty_p (stmts))
573 {
9fdc58de 574 pop_gimplify_context (NULL);
6e572326
RG
575 if (gimple_in_ssa_p (cfun))
576 {
577 unlink_stmt_vdef (stmt);
578 release_defs (stmt);
579 }
f6b4dc28 580 gsi_replace (si_p, gimple_build_nop (), false);
6e572326
RG
581 return;
582 }
583 }
cbdd87d4 584 else
e256dfce 585 {
381cdae4 586 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
e256dfce
RG
587 new_stmt = gimple_build_assign (lhs, tmp);
588 i = gsi_last (stmts);
589 gsi_insert_after_without_update (&i, new_stmt,
590 GSI_CONTINUE_LINKING);
591 }
cbdd87d4
RG
592
593 pop_gimplify_context (NULL);
594
fef5a0d9
RB
595 gsi_replace_with_seq_vops (si_p, stmts);
596}
cbdd87d4 597
fef5a0d9
RB
598
599/* Replace the call at *GSI with the gimple value VAL. */
600
e3174bdf 601void
fef5a0d9
RB
602replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
603{
355fe088 604 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 605 tree lhs = gimple_call_lhs (stmt);
355fe088 606 gimple *repl;
fef5a0d9 607 if (lhs)
e256dfce 608 {
fef5a0d9
RB
609 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
610 val = fold_convert (TREE_TYPE (lhs), val);
611 repl = gimple_build_assign (lhs, val);
612 }
613 else
614 repl = gimple_build_nop ();
615 tree vdef = gimple_vdef (stmt);
616 if (vdef && TREE_CODE (vdef) == SSA_NAME)
617 {
618 unlink_stmt_vdef (stmt);
619 release_ssa_name (vdef);
620 }
f6b4dc28 621 gsi_replace (gsi, repl, false);
fef5a0d9
RB
622}
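/* Typical use in the builtin folders below: once a call's value is known,
   e.g. a memset whose length is zero, the call is replaced by a plain
   assignment of that value (or by a GIMPLE_NOP when it has no lhs):

     replace_call_with_value (gsi, gimple_call_arg (stmt, 0));

   The original call's virtual definition is unlinked and released here,
   so callers need not clean up the VDEF chain themselves.  */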
623
624/* Replace the call at *GSI with the new call REPL and fold that
625 again. */
626
627static void
355fe088 628replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
fef5a0d9 629{
355fe088 630 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
631 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
632 gimple_set_location (repl, gimple_location (stmt));
633 if (gimple_vdef (stmt)
634 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
635 {
636 gimple_set_vdef (repl, gimple_vdef (stmt));
fef5a0d9
RB
637 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
638 }
00296d7f
JJ
639 if (gimple_vuse (stmt))
640 gimple_set_vuse (repl, gimple_vuse (stmt));
f6b4dc28 641 gsi_replace (gsi, repl, false);
fef5a0d9
RB
642 fold_stmt (gsi);
643}
644
645/* Return true if VAR is a VAR_DECL or a component thereof. */
646
647static bool
648var_decl_component_p (tree var)
649{
650 tree inner = var;
651 while (handled_component_p (inner))
652 inner = TREE_OPERAND (inner, 0);
47cac108
RB
653 return (DECL_P (inner)
654 || (TREE_CODE (inner) == MEM_REF
655 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
fef5a0d9
RB
656}
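/* For instance, both a.b.c (a chain of handled components over the
   VAR_DECL A) and MEM[(struct S *)&a].f (a MEM_REF whose address operand
   is an ADDR_EXPR) satisfy this predicate, while an access through an
   arbitrary SSA pointer, MEM[p_1], does not.  */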
657
c89af696
AH
658/* Return TRUE if the SIZE argument, representing the size of an
659 object, is in a range of values of which exactly zero is valid. */
6512c0f1
MS
660
661static bool
662size_must_be_zero_p (tree size)
663{
664 if (integer_zerop (size))
665 return true;
666
3f27391f 667 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
6512c0f1
MS
668 return false;
669
6512c0f1
MS
670 tree type = TREE_TYPE (size);
671 int prec = TYPE_PRECISION (type);
672
6512c0f1
MS
673 /* Compute the value of SSIZE_MAX, the largest positive value that
674 can be stored in ssize_t, the signed counterpart of size_t. */
675 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
c89af696
AH
676 value_range valid_range (VR_RANGE,
677 build_int_cst (type, 0),
678 wide_int_to_tree (type, ssize_max));
679 value_range vr;
680 get_range_info (size, vr);
681 vr.intersect (&valid_range);
682 return vr.zero_p ();
6512c0f1
MS
683}
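/* Worked example: if range information shows SIZE to be either zero or
   larger than SSIZE_MAX (an anti-range excluding [1, SSIZE_MAX]), the
   intersection with the valid range [0, SSIZE_MAX] computed above leaves
   only zero and the function returns true; any SIZE that may legitimately
   be nonzero keeps a wider intersection and yields false.  */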
684
cc8bea0a
MS
685/* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
686 diagnose (otherwise undefined) overlapping copies without preventing
687 folding. When folded, GCC guarantees that overlapping memcpy has
 688 the same semantics as memmove. A call to the library memcpy need not
689 provide the same guarantee. Return false if no simplification can
690 be made. */
fef5a0d9
RB
691
692static bool
693gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
694 tree dest, tree src, int endp)
695{
355fe088 696 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
697 tree lhs = gimple_call_lhs (stmt);
698 tree len = gimple_call_arg (stmt, 2);
699 tree destvar, srcvar;
700 location_t loc = gimple_location (stmt);
701
cc8bea0a 702 bool nowarn = gimple_no_warning_p (stmt);
cc8bea0a 703
6512c0f1
MS
704 /* If the LEN parameter is a constant zero or in range where
705 the only valid value is zero, return DEST. */
706 if (size_must_be_zero_p (len))
fef5a0d9 707 {
355fe088 708 gimple *repl;
fef5a0d9
RB
709 if (gimple_call_lhs (stmt))
710 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
711 else
712 repl = gimple_build_nop ();
713 tree vdef = gimple_vdef (stmt);
714 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 715 {
fef5a0d9
RB
716 unlink_stmt_vdef (stmt);
717 release_ssa_name (vdef);
718 }
f6b4dc28 719 gsi_replace (gsi, repl, false);
fef5a0d9
RB
720 return true;
721 }
722
723 /* If SRC and DEST are the same (and not volatile), return
724 DEST{,+LEN,+LEN-1}. */
725 if (operand_equal_p (src, dest, 0))
726 {
cc8bea0a
MS
727 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
728 It's safe and may even be emitted by GCC itself (see bug
e9b9fa4c 729 32667). */
fef5a0d9
RB
730 unlink_stmt_vdef (stmt);
731 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
732 release_ssa_name (gimple_vdef (stmt));
733 if (!lhs)
734 {
f6b4dc28 735 gsi_replace (gsi, gimple_build_nop (), false);
fef5a0d9
RB
736 return true;
737 }
738 goto done;
739 }
740 else
741 {
742 tree srctype, desttype;
743 unsigned int src_align, dest_align;
744 tree off0;
d01b568a
BE
745 const char *tmp_str;
746 unsigned HOST_WIDE_INT tmp_len;
fef5a0d9
RB
747
748 /* Build accesses at offset zero with a ref-all character type. */
749 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
750 ptr_mode, true), 0);
751
 752 /* If we can perform the copy efficiently by first doing all loads
 753 and then all stores, inline it that way. Currently, efficiently
 754 means that we can load all the memory into a single integer
 755 register, which is what MOVE_MAX gives us.
756 src_align = get_pointer_alignment (src);
757 dest_align = get_pointer_alignment (dest);
758 if (tree_fits_uhwi_p (len)
759 && compare_tree_int (len, MOVE_MAX) <= 0
 760 /* ??? Don't transform copies from strings with known length, as this
 761 confuses tree-ssa-strlen.c. This doesn't handle
 762 the case in gcc.dg/strlenopt-8.c, which is XFAILed for that
763 reason. */
d01b568a
BE
764 && !c_strlen (src, 2)
765 && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
766 && memchr (tmp_str, 0, tmp_len) == NULL))
fef5a0d9
RB
767 {
768 unsigned ilen = tree_to_uhwi (len);
146ec50f 769 if (pow2p_hwi (ilen))
fef5a0d9 770 {
cc8bea0a
MS
771 /* Detect invalid bounds and overlapping copies and issue
772 either -Warray-bounds or -Wrestrict. */
773 if (!nowarn
774 && check_bounds_or_overlap (as_a <gcall *>(stmt),
775 dest, src, len, len))
776 gimple_set_no_warning (stmt, true);
777
64ab8765 778 scalar_int_mode mode;
fef5a0d9
RB
779 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
780 if (type
64ab8765
RS
781 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
782 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
fef5a0d9
RB
783 /* If the destination pointer is not aligned we must be able
784 to emit an unaligned store. */
64ab8765 785 && (dest_align >= GET_MODE_ALIGNMENT (mode)
e0bd6c9f 786 || !targetm.slow_unaligned_access (mode, dest_align)
64ab8765 787 || (optab_handler (movmisalign_optab, mode)
f869c12f 788 != CODE_FOR_nothing)))
fef5a0d9
RB
789 {
790 tree srctype = type;
791 tree desttype = type;
64ab8765 792 if (src_align < GET_MODE_ALIGNMENT (mode))
fef5a0d9
RB
793 srctype = build_aligned_type (type, src_align);
794 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
795 tree tem = fold_const_aggregate_ref (srcmem);
796 if (tem)
797 srcmem = tem;
64ab8765 798 else if (src_align < GET_MODE_ALIGNMENT (mode)
e0bd6c9f 799 && targetm.slow_unaligned_access (mode, src_align)
64ab8765 800 && (optab_handler (movmisalign_optab, mode)
f869c12f 801 == CODE_FOR_nothing))
fef5a0d9
RB
802 srcmem = NULL_TREE;
803 if (srcmem)
804 {
355fe088 805 gimple *new_stmt;
fef5a0d9
RB
806 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
807 {
808 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
a15ebbcd
ML
809 srcmem
810 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
811 new_stmt);
fef5a0d9
RB
812 gimple_assign_set_lhs (new_stmt, srcmem);
813 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
814 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
815 }
64ab8765 816 if (dest_align < GET_MODE_ALIGNMENT (mode))
fef5a0d9
RB
817 desttype = build_aligned_type (type, dest_align);
818 new_stmt
819 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
820 dest, off0),
821 srcmem);
822 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
823 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
824 if (gimple_vdef (new_stmt)
825 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
826 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
827 if (!lhs)
828 {
f6b4dc28 829 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
830 return true;
831 }
832 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
833 goto done;
834 }
835 }
836 }
837 }
838
839 if (endp == 3)
840 {
841 /* Both DEST and SRC must be pointer types.
842 ??? This is what old code did. Is the testing for pointer types
843 really mandatory?
844
845 If either SRC is readonly or length is 1, we can use memcpy. */
846 if (!dest_align || !src_align)
847 return false;
848 if (readonly_data_expr (src)
849 || (tree_fits_uhwi_p (len)
850 && (MIN (src_align, dest_align) / BITS_PER_UNIT
851 >= tree_to_uhwi (len))))
852 {
853 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
854 if (!fn)
855 return false;
856 gimple_call_set_fndecl (stmt, fn);
857 gimple_call_set_arg (stmt, 0, dest);
858 gimple_call_set_arg (stmt, 1, src);
859 fold_stmt (gsi);
860 return true;
861 }
862
863 /* If *src and *dest can't overlap, optimize into memcpy as well. */
864 if (TREE_CODE (src) == ADDR_EXPR
865 && TREE_CODE (dest) == ADDR_EXPR)
866 {
867 tree src_base, dest_base, fn;
a90c8804
RS
868 poly_int64 src_offset = 0, dest_offset = 0;
869 poly_uint64 maxsize;
fef5a0d9
RB
870
871 srcvar = TREE_OPERAND (src, 0);
4fda19ef
JJ
872 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
873 if (src_base == NULL)
874 src_base = srcvar;
fef5a0d9 875 destvar = TREE_OPERAND (dest, 0);
4fda19ef
JJ
876 dest_base = get_addr_base_and_unit_offset (destvar,
877 &dest_offset);
878 if (dest_base == NULL)
879 dest_base = destvar;
a90c8804 880 if (!poly_int_tree_p (len, &maxsize))
fef5a0d9 881 maxsize = -1;
fef5a0d9
RB
882 if (SSA_VAR_P (src_base)
883 && SSA_VAR_P (dest_base))
884 {
885 if (operand_equal_p (src_base, dest_base, 0)
a90c8804
RS
886 && ranges_maybe_overlap_p (src_offset, maxsize,
887 dest_offset, maxsize))
fef5a0d9
RB
888 return false;
889 }
890 else if (TREE_CODE (src_base) == MEM_REF
891 && TREE_CODE (dest_base) == MEM_REF)
892 {
893 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
894 TREE_OPERAND (dest_base, 0), 0))
895 return false;
a90c8804
RS
896 poly_offset_int full_src_offset
897 = mem_ref_offset (src_base) + src_offset;
898 poly_offset_int full_dest_offset
899 = mem_ref_offset (dest_base) + dest_offset;
900 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
901 full_dest_offset, maxsize))
fef5a0d9
RB
902 return false;
903 }
904 else
905 return false;
906
907 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
908 if (!fn)
909 return false;
910 gimple_call_set_fndecl (stmt, fn);
911 gimple_call_set_arg (stmt, 0, dest);
912 gimple_call_set_arg (stmt, 1, src);
913 fold_stmt (gsi);
914 return true;
915 }
916
917 /* If the destination and source do not alias optimize into
918 memcpy as well. */
919 if ((is_gimple_min_invariant (dest)
920 || TREE_CODE (dest) == SSA_NAME)
921 && (is_gimple_min_invariant (src)
922 || TREE_CODE (src) == SSA_NAME))
923 {
924 ao_ref destr, srcr;
925 ao_ref_init_from_ptr_and_size (&destr, dest, len);
926 ao_ref_init_from_ptr_and_size (&srcr, src, len);
927 if (!refs_may_alias_p_1 (&destr, &srcr, false))
928 {
929 tree fn;
930 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
931 if (!fn)
932 return false;
933 gimple_call_set_fndecl (stmt, fn);
934 gimple_call_set_arg (stmt, 0, dest);
935 gimple_call_set_arg (stmt, 1, src);
936 fold_stmt (gsi);
937 return true;
938 }
939 }
940
941 return false;
942 }
943
944 if (!tree_fits_shwi_p (len))
945 return false;
fef5a0d9
RB
946 if (!POINTER_TYPE_P (TREE_TYPE (src))
947 || !POINTER_TYPE_P (TREE_TYPE (dest)))
948 return false;
949 /* In the following try to find a type that is most natural to be
950 used for the memcpy source and destination and that allows
951 the most optimization when memcpy is turned into a plain assignment
952 using that type. In theory we could always use a char[len] type
953 but that only gains us that the destination and source possibly
954 no longer will have their address taken. */
fef5a0d9
RB
955 srctype = TREE_TYPE (TREE_TYPE (src));
956 if (TREE_CODE (srctype) == ARRAY_TYPE
957 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
42f74245 958 srctype = TREE_TYPE (srctype);
fef5a0d9
RB
959 desttype = TREE_TYPE (TREE_TYPE (dest));
960 if (TREE_CODE (desttype) == ARRAY_TYPE
961 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
42f74245 962 desttype = TREE_TYPE (desttype);
fef5a0d9
RB
963 if (TREE_ADDRESSABLE (srctype)
964 || TREE_ADDRESSABLE (desttype))
965 return false;
966
967 /* Make sure we are not copying using a floating-point mode or
968 a type whose size possibly does not match its precision. */
969 if (FLOAT_MODE_P (TYPE_MODE (desttype))
970 || TREE_CODE (desttype) == BOOLEAN_TYPE
971 || TREE_CODE (desttype) == ENUMERAL_TYPE)
972 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
973 if (FLOAT_MODE_P (TYPE_MODE (srctype))
974 || TREE_CODE (srctype) == BOOLEAN_TYPE
975 || TREE_CODE (srctype) == ENUMERAL_TYPE)
976 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
977 if (!srctype)
978 srctype = desttype;
979 if (!desttype)
980 desttype = srctype;
981 if (!srctype)
982 return false;
983
984 src_align = get_pointer_alignment (src);
985 dest_align = get_pointer_alignment (dest);
986 if (dest_align < TYPE_ALIGN (desttype)
987 || src_align < TYPE_ALIGN (srctype))
988 return false;
989
42f74245
RB
990 destvar = NULL_TREE;
991 if (TREE_CODE (dest) == ADDR_EXPR
992 && var_decl_component_p (TREE_OPERAND (dest, 0))
fef5a0d9 993 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
42f74245 994 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
fef5a0d9 995
42f74245
RB
996 srcvar = NULL_TREE;
997 if (TREE_CODE (src) == ADDR_EXPR
998 && var_decl_component_p (TREE_OPERAND (src, 0))
fef5a0d9
RB
999 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1000 {
1001 if (!destvar
1002 || src_align >= TYPE_ALIGN (desttype))
1003 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
42f74245 1004 src, off0);
fef5a0d9
RB
1005 else if (!STRICT_ALIGNMENT)
1006 {
1007 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1008 src_align);
42f74245 1009 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
fef5a0d9 1010 }
fef5a0d9 1011 }
fef5a0d9
RB
1012
1013 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1014 return false;
1015
1016 if (srcvar == NULL_TREE)
1017 {
fef5a0d9
RB
1018 if (src_align >= TYPE_ALIGN (desttype))
1019 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1020 else
1021 {
1022 if (STRICT_ALIGNMENT)
1023 return false;
1024 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1025 src_align);
1026 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1027 }
1028 }
1029 else if (destvar == NULL_TREE)
1030 {
fef5a0d9
RB
1031 if (dest_align >= TYPE_ALIGN (srctype))
1032 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1033 else
1034 {
1035 if (STRICT_ALIGNMENT)
1036 return false;
1037 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1038 dest_align);
1039 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1040 }
1041 }
1042
cc8bea0a
MS
1043 /* Detect invalid bounds and overlapping copies and issue either
1044 -Warray-bounds or -Wrestrict. */
1045 if (!nowarn)
1046 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1047
355fe088 1048 gimple *new_stmt;
fef5a0d9
RB
1049 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1050 {
921b13d0
RB
1051 tree tem = fold_const_aggregate_ref (srcvar);
1052 if (tem)
1053 srcvar = tem;
1054 if (! is_gimple_min_invariant (srcvar))
1055 {
1056 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
a15ebbcd
ML
1057 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1058 new_stmt);
921b13d0
RB
1059 gimple_assign_set_lhs (new_stmt, srcvar);
1060 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1061 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1062 }
d7257171
RB
1063 new_stmt = gimple_build_assign (destvar, srcvar);
1064 goto set_vop_and_replace;
fef5a0d9 1065 }
d7257171
RB
1066
1067 /* We get an aggregate copy. Use an unsigned char[] type to
1068 perform the copying to preserve padding and to avoid any issues
1069 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1070 desttype = build_array_type_nelts (unsigned_char_type_node,
1071 tree_to_uhwi (len));
1072 srctype = desttype;
1073 if (src_align > TYPE_ALIGN (srctype))
1074 srctype = build_aligned_type (srctype, src_align);
1075 if (dest_align > TYPE_ALIGN (desttype))
1076 desttype = build_aligned_type (desttype, dest_align);
1077 new_stmt
1078 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1079 fold_build2 (MEM_REF, srctype, src, off0));
1080set_vop_and_replace:
fef5a0d9
RB
1081 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1082 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1083 if (gimple_vdef (new_stmt)
1084 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1085 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1086 if (!lhs)
1087 {
f6b4dc28 1088 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
1089 return true;
1090 }
1091 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1092 }
1093
1094done:
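 /* Finish by materializing the call's return value: with ENDP == 0 or 3
    the result is simply DEST, with ENDP == 1 it is DEST + LEN, and with
    ENDP == 2 it is DEST + LEN - 1, matching the DEST{,+LEN,+LEN-1}
    convention mentioned above.  */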
74e3c262 1095 gimple_seq stmts = NULL;
fef5a0d9
RB
1096 if (endp == 0 || endp == 3)
1097 len = NULL_TREE;
1098 else if (endp == 2)
74e3c262
RB
1099 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1100 ssize_int (1));
fef5a0d9 1101 if (endp == 2 || endp == 1)
74e3c262
RB
1102 {
1103 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1104 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1105 TREE_TYPE (dest), dest, len);
1106 }
fef5a0d9 1107
74e3c262 1108 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 1109 gimple *repl = gimple_build_assign (lhs, dest);
f6b4dc28 1110 gsi_replace (gsi, repl, false);
fef5a0d9
RB
1111 return true;
1112}
1113
b3d8d88e
MS
1114/* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1115 to built-in memcmp (a, b, len). */
1116
1117static bool
1118gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1119{
1120 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1121
1122 if (!fn)
1123 return false;
1124
1125 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1126
1127 gimple *stmt = gsi_stmt (*gsi);
1128 tree a = gimple_call_arg (stmt, 0);
1129 tree b = gimple_call_arg (stmt, 1);
1130 tree len = gimple_call_arg (stmt, 2);
1131
1132 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1133 replace_call_with_call_and_fold (gsi, repl);
1134
1135 return true;
1136}
1137
1138/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1139 to built-in memmove (dest, src, len). */
1140
1141static bool
1142gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1143{
1144 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1145
1146 if (!fn)
1147 return false;
1148
1149 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
 1150 it's equivalent to memmove (not memcpy). Transform bcopy (src, dest,
1151 len) into memmove (dest, src, len). */
1152
1153 gimple *stmt = gsi_stmt (*gsi);
1154 tree src = gimple_call_arg (stmt, 0);
1155 tree dest = gimple_call_arg (stmt, 1);
1156 tree len = gimple_call_arg (stmt, 2);
1157
1158 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1159 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1160 replace_call_with_call_and_fold (gsi, repl);
1161
1162 return true;
1163}
1164
1165/* Transform a call to built-in bzero (dest, len) at *GSI into one
1166 to built-in memset (dest, 0, len). */
1167
1168static bool
1169gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1170{
1171 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1172
1173 if (!fn)
1174 return false;
1175
1176 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1177
1178 gimple *stmt = gsi_stmt (*gsi);
1179 tree dest = gimple_call_arg (stmt, 0);
1180 tree len = gimple_call_arg (stmt, 1);
1181
1182 gimple_seq seq = NULL;
1183 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1184 gimple_seq_add_stmt_without_update (&seq, repl);
1185 gsi_replace_with_seq_vops (gsi, seq);
1186 fold_stmt (gsi);
1187
1188 return true;
1189}
1190
fef5a0d9
RB
1191/* Fold function call to builtin memset or bzero at *GSI setting the
1192 memory of size LEN to VAL. Return whether a simplification was made. */
1193
1194static bool
1195gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1196{
355fe088 1197 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1198 tree etype;
1199 unsigned HOST_WIDE_INT length, cval;
1200
1201 /* If the LEN parameter is zero, return DEST. */
1202 if (integer_zerop (len))
1203 {
1204 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1205 return true;
1206 }
1207
1208 if (! tree_fits_uhwi_p (len))
1209 return false;
1210
1211 if (TREE_CODE (c) != INTEGER_CST)
1212 return false;
1213
1214 tree dest = gimple_call_arg (stmt, 0);
1215 tree var = dest;
1216 if (TREE_CODE (var) != ADDR_EXPR)
1217 return false;
1218
1219 var = TREE_OPERAND (var, 0);
1220 if (TREE_THIS_VOLATILE (var))
1221 return false;
1222
1223 etype = TREE_TYPE (var);
1224 if (TREE_CODE (etype) == ARRAY_TYPE)
1225 etype = TREE_TYPE (etype);
1226
1227 if (!INTEGRAL_TYPE_P (etype)
1228 && !POINTER_TYPE_P (etype))
 1229 return false;
 1230
 1231 if (! var_decl_component_p (var))
 1232 return false;
1233
1234 length = tree_to_uhwi (len);
7a504f33 1235 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
fef5a0d9
RB
1236 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
 1237 return false;
 1238
 1239 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
 1240 return false;
1241
1242 if (integer_zerop (c))
1243 cval = 0;
1244 else
1245 {
1246 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
 1247 return false;
1248
1249 cval = TREE_INT_CST_LOW (c);
1250 cval &= 0xff;
1251 cval |= cval << 8;
1252 cval |= cval << 16;
1253 cval |= (cval << 31) << 1;
1254 }
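 /* For example, with C == 0x2A and an 8-byte store, the replication above
    yields CVAL == 0x2a2a2a2a2a2a2a2a, so the memset is rewritten into a
    single store of that constant with type ETYPE.  */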
1255
1256 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1257 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
fef5a0d9
RB
1258 gimple_set_vuse (store, gimple_vuse (stmt));
1259 tree vdef = gimple_vdef (stmt);
1260 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1261 {
1262 gimple_set_vdef (store, gimple_vdef (stmt));
1263 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1264 }
1265 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1266 if (gimple_call_lhs (stmt))
1267 {
355fe088 1268 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1269 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1270 }
1271 else
1272 {
1273 gimple_stmt_iterator gsi2 = *gsi;
1274 gsi_prev (gsi);
1275 gsi_remove (&gsi2, true);
1276 }
1277
1278 return true;
1279}
1280
fb471a13 1281/* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
fef5a0d9
RB
1282
1283static bool
598f7235
MS
1284get_range_strlen_tree (tree arg, tree length[2], bitmap *visited,
1285 strlen_range_kind rkind,
1286 bool *flexp, unsigned eltsize, tree *nonstr)
fef5a0d9 1287{
fb471a13
MS
1288 gcc_assert (TREE_CODE (arg) != SSA_NAME);
1289
c8602fe6
JJ
1290 /* The minimum and maximum length. */
1291 tree *const minlen = length;
88d0c3f0
MS
1292 tree *const maxlen = length + 1;
1293
fb471a13
MS
1294 /* The length computed by this invocation of the function. */
1295 tree val = NULL_TREE;
1296
1297 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1298 if (TREE_CODE (arg) == ADDR_EXPR
1299 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
fef5a0d9 1300 {
fb471a13
MS
1301 tree op = TREE_OPERAND (arg, 0);
1302 if (integer_zerop (TREE_OPERAND (op, 1)))
fef5a0d9 1303 {
fb471a13
MS
1304 tree aop0 = TREE_OPERAND (op, 0);
1305 if (TREE_CODE (aop0) == INDIRECT_REF
1306 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1307 return get_range_strlen (TREE_OPERAND (aop0, 0), length,
598f7235 1308 visited, rkind, flexp,
fb471a13 1309 eltsize, nonstr);
fef5a0d9 1310 }
598f7235
MS
1311 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
1312 && (rkind == SRK_LENRANGE || rkind == SRK_LENRANGE_2))
fef5a0d9 1313 {
fb471a13
MS
1314 /* Fail if an array is the last member of a struct object
1315 since it could be treated as a (fake) flexible array
1316 member. */
1317 tree idx = TREE_OPERAND (op, 1);
1318
1319 arg = TREE_OPERAND (op, 0);
1320 tree optype = TREE_TYPE (arg);
1321 if (tree dom = TYPE_DOMAIN (optype))
1322 if (tree bound = TYPE_MAX_VALUE (dom))
1323 if (TREE_CODE (bound) == INTEGER_CST
1324 && TREE_CODE (idx) == INTEGER_CST
1325 && tree_int_cst_lt (bound, idx))
1326 return false;
fef5a0d9 1327 }
fb471a13 1328 }
7d583f42 1329
598f7235 1330 if (rkind == SRK_INT_VALUE)
fb471a13
MS
1331 {
1332 /* We are computing the maximum value (not string length). */
1333 val = arg;
1334 if (TREE_CODE (val) != INTEGER_CST
1335 || tree_int_cst_sgn (val) < 0)
1336 return false;
1337 }
1338 else
1339 {
1340 c_strlen_data lendata = { };
1341 val = c_strlen (arg, 1, &lendata, eltsize);
1342
1343 /* If we potentially had a non-terminated string, then
1344 bubble that information up to the caller. */
1345 if (!val && lendata.decl)
1346 {
1347 *nonstr = lendata.decl;
1348 *minlen = lendata.minlen;
1349 *maxlen = lendata.minlen;
598f7235 1350 return rkind == SRK_STRLEN ? false : true;
7d583f42 1351 }
fb471a13
MS
1352 }
1353
598f7235 1354 if (!val && (rkind == SRK_LENRANGE || rkind == SRK_LENRANGE_2))
fb471a13
MS
1355 {
1356 if (TREE_CODE (arg) == ADDR_EXPR)
1357 return get_range_strlen (TREE_OPERAND (arg, 0), length,
598f7235 1358 visited, rkind, flexp,
fb471a13 1359 eltsize, nonstr);
88d0c3f0 1360
fb471a13 1361 if (TREE_CODE (arg) == ARRAY_REF)
88d0c3f0 1362 {
fb471a13 1363 tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));
88d0c3f0 1364
fb471a13
MS
1365 /* Determine the "innermost" array type. */
1366 while (TREE_CODE (optype) == ARRAY_TYPE
1367 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1368 optype = TREE_TYPE (optype);
c42d0aa0 1369
fb471a13
MS
1370 /* Avoid arrays of pointers. */
1371 tree eltype = TREE_TYPE (optype);
1372 if (TREE_CODE (optype) != ARRAY_TYPE
1373 || !INTEGRAL_TYPE_P (eltype))
1374 return false;
c42d0aa0 1375
fb471a13
MS
1376 /* Fail when the array bound is unknown or zero. */
1377 val = TYPE_SIZE_UNIT (optype);
1378 if (!val || integer_zerop (val))
1379 return false;
1bfd6a00 1380
fb471a13
MS
1381 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1382 integer_one_node);
c42d0aa0 1383
fb471a13
MS
1384 /* Set the minimum size to zero since the string in
1385 the array could have zero length. */
1386 *minlen = ssize_int (0);
204a7ecb 1387
fb471a13
MS
1388 if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
1389 && optype == TREE_TYPE (TREE_OPERAND (arg, 0))
1390 && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
1391 *flexp = true;
1392 }
1393 else if (TREE_CODE (arg) == COMPONENT_REF
1394 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1395 == ARRAY_TYPE))
1396 {
1397 /* Use the type of the member array to determine the upper
1398 bound on the length of the array. This may be overly
1399 optimistic if the array itself isn't NUL-terminated and
1400 the caller relies on the subsequent member to contain
1401 the NUL but that would only be considered valid if
1402 the array were the last member of a struct.
1403 Set *FLEXP to true if the array whose bound is being
1404 used is at the end of a struct. */
1405 if (array_at_struct_end_p (arg))
1406 *flexp = true;
1407
1408 tree fld = TREE_OPERAND (arg, 1);
1409
1410 tree optype = TREE_TYPE (fld);
1411
1412 /* Determine the "innermost" array type. */
1413 while (TREE_CODE (optype) == ARRAY_TYPE
1414 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1415 optype = TREE_TYPE (optype);
1416
1417 /* Fail when the array bound is unknown or zero. */
1418 val = TYPE_SIZE_UNIT (optype);
1419 if (!val || integer_zerop (val))
1420 return false;
1421 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1422 integer_one_node);
1423
1424 /* Set the minimum size to zero since the string in
1425 the array could have zero length. */
1426 *minlen = ssize_int (0);
1427 }
1428
1429 if (VAR_P (arg))
1430 {
1431 tree type = TREE_TYPE (arg);
1432 if (POINTER_TYPE_P (type))
1433 type = TREE_TYPE (type);
1434
1435 if (TREE_CODE (type) == ARRAY_TYPE)
88d0c3f0 1436 {
fb471a13
MS
1437 val = TYPE_SIZE_UNIT (type);
1438 if (!val
1439 || TREE_CODE (val) != INTEGER_CST
1440 || integer_zerop (val))
88d0c3f0 1441 return false;
fb471a13
MS
1442 val = wide_int_to_tree (TREE_TYPE (val),
1443 wi::sub (wi::to_wide (val), 1));
1444
e495e31a
MS
1445 /* Set the minimum size to zero since the string in
1446 the array could have zero length. */
1447 *minlen = ssize_int (0);
88d0c3f0
MS
1448 }
1449 }
fb471a13 1450 }
88d0c3f0 1451
fb471a13
MS
1452 if (!val)
1453 return false;
fef5a0d9 1454
fb471a13
MS
1455 /* Adjust the lower bound on the string length as necessary. */
1456 if (!*minlen
598f7235 1457 || (rkind != SRK_STRLEN
fb471a13
MS
1458 && TREE_CODE (*minlen) == INTEGER_CST
1459 && TREE_CODE (val) == INTEGER_CST
1460 && tree_int_cst_lt (val, *minlen)))
1461 *minlen = val;
88d0c3f0 1462
fb471a13
MS
1463 if (*maxlen)
1464 {
1465 /* Adjust the more conservative bound if possible/necessary
1466 and fail otherwise. */
598f7235 1467 if (rkind != SRK_STRLEN)
fef5a0d9 1468 {
fb471a13
MS
1469 if (TREE_CODE (*maxlen) != INTEGER_CST
1470 || TREE_CODE (val) != INTEGER_CST)
fef5a0d9 1471 return false;
fef5a0d9 1472
fb471a13
MS
1473 if (tree_int_cst_lt (*maxlen, val))
1474 *maxlen = val;
1475 return true;
1476 }
1477 else if (simple_cst_equal (val, *maxlen) != 1)
1478 {
1479 /* Fail if the length of this ARG is different from that
1480 previously determined from another ARG. */
1481 return false;
1482 }
fef5a0d9
RB
1483 }
1484
fb471a13
MS
1485 *maxlen = val;
1486 return true;
1487}
1488
1489/* Obtain the minimum and maximum string length or minimum and maximum
1490 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1491 If ARG is an SSA name variable, follow its use-def chains. When
1492 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1493 if we are unable to determine the length or value, return false.
1494 VISITED is a bitmap of visited variables.
598f7235
MS
1495 RKIND determines the kind of value or range to obtain (see
1496 strlen_range_kind).
 1497 Set *NONSTR if ARG refers to an unterminated constant array.
 1498 On input, set ELTSIZE to 1 for normal single byte character strings,
 1499 and either 2 or 4 for wide character strings (the size of wchar_t).
 1500 Return true if the length or range was successfully determined and false otherwise. */
fb471a13
MS
1501
1502static bool
598f7235
MS
1503get_range_strlen (tree arg, tree length[2], bitmap *visited,
1504 strlen_range_kind rkind,
1505 bool *flexp, unsigned eltsize, tree *nonstr)
fb471a13
MS
1506{
1507
1508 if (TREE_CODE (arg) != SSA_NAME)
598f7235 1509 return get_range_strlen_tree (arg, length, visited, rkind, flexp,
fb471a13
MS
1510 eltsize, nonstr);
1511
fef5a0d9
RB
1512 /* If ARG is registered for SSA update we cannot look at its defining
1513 statement. */
1514 if (name_registered_for_update_p (arg))
1515 return false;
1516
1517 /* If we were already here, break the infinite cycle. */
dcb7fae2
RB
1518 if (!*visited)
1519 *visited = BITMAP_ALLOC (NULL);
1520 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
fef5a0d9
RB
1521 return true;
1522
fb471a13
MS
1523 tree var = arg;
1524 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1525
1526 /* The minimum and maximum length. */
1527 tree *const maxlen = length + 1;
fef5a0d9
RB
1528
1529 switch (gimple_code (def_stmt))
1530 {
1531 case GIMPLE_ASSIGN:
598f7235
MS
1532 /* The RHS of the statement defining VAR must either have a
1533 constant length or come from another SSA_NAME with a constant
1534 length. */
fef5a0d9
RB
1535 if (gimple_assign_single_p (def_stmt)
1536 || gimple_assign_unary_nop_p (def_stmt))
1537 {
598f7235
MS
1538 tree rhs = gimple_assign_rhs1 (def_stmt);
1539 return get_range_strlen (rhs, length, visited, rkind, flexp,
e08341bb 1540 eltsize, nonstr);
fef5a0d9
RB
1541 }
1542 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1543 {
c8602fe6
JJ
1544 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1545 gimple_assign_rhs3 (def_stmt) };
1546
1547 for (unsigned int i = 0; i < 2; i++)
598f7235 1548 if (!get_range_strlen (ops[i], length, visited, rkind,
e08341bb 1549 flexp, eltsize, nonstr))
c8602fe6 1550 {
598f7235 1551 if (rkind == SRK_LENRANGE_2)
c8602fe6
JJ
1552 *maxlen = build_all_ones_cst (size_type_node);
1553 else
1554 return false;
1555 }
1556 return true;
cc8bea0a 1557 }
fef5a0d9
RB
1558 return false;
1559
1560 case GIMPLE_PHI:
598f7235
MS
1561 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1562 must have a constant length. */
c8602fe6 1563 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
fef5a0d9
RB
1564 {
1565 tree arg = gimple_phi_arg (def_stmt, i)->def;
1566
1567 /* If this PHI has itself as an argument, we cannot
1568 determine the string length of this argument. However,
1569 if we can find a constant string length for the other
1570 PHI args then we can still be sure that this is a
1571 constant string length. So be optimistic and just
1572 continue with the next argument. */
1573 if (arg == gimple_phi_result (def_stmt))
1574 continue;
1575
598f7235 1576 if (!get_range_strlen (arg, length, visited, rkind, flexp,
e08341bb 1577 eltsize, nonstr))
88d0c3f0 1578 {
598f7235 1579 if (rkind == SRK_LENRANGE_2)
88d0c3f0
MS
1580 *maxlen = build_all_ones_cst (size_type_node);
1581 else
1582 return false;
1583 }
fef5a0d9 1584 }
fef5a0d9
RB
1585 return true;
1586
1587 default:
1588 return false;
1589 }
1590}
88d0c3f0
MS
1591/* Determine the minimum and maximum value or string length that ARG
1592 refers to and store each in the first two elements of MINMAXLEN.
1593 For expressions that point to strings of unknown lengths that are
1594 character arrays, use the upper bound of the array as the maximum
1595 length. For example, given an expression like 'x ? array : "xyz"'
1596 and array declared as 'char array[8]', MINMAXLEN[0] will be set
c8602fe6 1597 to 0 and MINMAXLEN[1] to 7, the longest string that could be
88d0c3f0 1598 stored in array.
3f343040
MS
1599 Return true if the range of the string lengths has been obtained
1600 from the upper bound of an array at the end of a struct. Such
1601 an array may hold a string that's longer than its upper bound
c8602fe6
JJ
1602 due to it being used as a poor-man's flexible array member.
1603
1604 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1605 and false if PHIs and COND_EXPRs are to be handled optimistically,
1606 if we can determine string length minimum and maximum; it will use
1607 the minimum from the ones where it can be determined.
4148b00d 1608 STRICT false should be only used for warning code.
e08341bb
MS
 1609 When non-null, clear *NONSTR if ARG refers to a constant array
 1610 that is known to be nul-terminated. Otherwise set it to
 1611 the declaration of the constant non-terminated array.
4148b00d
BE
1612
1613 ELTSIZE is 1 for normal single byte character strings, and 2 or
1614 4 for wide character strings. ELTSIZE is 1 by default. */
88d0c3f0 1615
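/* Illustrative use (editorial sketch, not part of the original source;
   the variable names below are hypothetical):

     tree range[2];
     tree nonstr = NULL_TREE;
     bool flexarray = get_range_strlen (arg, range, 1, true, &nonstr);

   For the example above, 'x ? array : "xyz"' with 'char array[8]',
   range[0] would be 0 and range[1] would be 7, and FLEXARRAY would be
   true only if the upper bound came from a trailing array member.  */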
3f343040 1616bool
e08341bb
MS
1617get_range_strlen (tree arg, tree minmaxlen[2], unsigned eltsize,
1618 bool strict, tree *nonstr /* = NULL */)
88d0c3f0
MS
1619{
1620 bitmap visited = NULL;
1621
1622 minmaxlen[0] = NULL_TREE;
1623 minmaxlen[1] = NULL_TREE;
1624
e08341bb
MS
1625 tree nonstrbuf;
1626 if (!nonstr)
1627 nonstr = &nonstrbuf;
1628 *nonstr = NULL_TREE;
1629
3f343040 1630 bool flexarray = false;
598f7235
MS
1631 if (!get_range_strlen (arg, minmaxlen, &visited,
1632 strict ? SRK_LENRANGE : SRK_LENRANGE_2,
e08341bb 1633 &flexarray, eltsize, nonstr))
c8602fe6
JJ
1634 {
1635 minmaxlen[0] = NULL_TREE;
1636 minmaxlen[1] = NULL_TREE;
1637 }
88d0c3f0
MS
1638
1639 if (visited)
1640 BITMAP_FREE (visited);
3f343040
MS
1641
1642 return flexarray;
88d0c3f0
MS
1643}
1644
e08341bb
MS
1645/* Return the maximum string length for ARG. RKIND selects the kind
1646 of range to compute (see strlen_range_kind). NONSTR indicates
1647 if the caller is prepared to handle unterminated strings.
1648
1649 If an unterminated string is discovered and our caller handles
1650 unterminated strings, then bubble up the offending DECL and
1651 return the maximum size. Otherwise return NULL. */
1652
598f7235
MS
1653static tree
1654get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
dcb7fae2 1655{
598f7235
MS
1656 /* A non-null NONSTR is meaningless when determining the maximum
1657 value of an integer ARG. */
1658 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1659 /* ARG must have an integral type when RKIND says so. */
1660 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1661
dcb7fae2 1662 bitmap visited = NULL;
88d0c3f0 1663 tree len[2] = { NULL_TREE, NULL_TREE };
3f343040
MS
1664
1665 bool dummy;
e08341bb
MS
1666 /* Set to non-null if ARG refers to an unterminated array. */
1667 tree mynonstr = NULL_TREE;
598f7235 1668 if (!get_range_strlen (arg, len, &visited, rkind, &dummy, 1, &mynonstr))
88d0c3f0 1669 len[1] = NULL_TREE;
dcb7fae2
RB
1670 if (visited)
1671 BITMAP_FREE (visited);
1672
e08341bb
MS
1673 if (nonstr)
1674 {
1675 /* For callers prepared to handle unterminated arrays set
1676 *NONSTR to point to the declaration of the array and return
1677 the maximum length/size. */
1678 *nonstr = mynonstr;
1679 return len[1];
1680 }
1681
1682 /* Fail if the constant array isn't nul-terminated. */
1683 return mynonstr ? NULL_TREE : len[1];
dcb7fae2
RB
1684}
1685
fef5a0d9
RB
1686
1687/* Fold a call to the builtin strcpy with arguments DEST and SRC.
1688 Replace the call at GSI when a simplification is possible and
1689 return true; return false if no simplification can be made. */
1690
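/* Illustrative sketch (editorial addition, not part of the original
   source): when the length of SRC is known and we are not optimizing
   for size, a call such as

     strcpy (buf, "abc");

   is rewritten below as

     __builtin_memcpy (buf, "abc", 4);

   i.e. strlen ("abc") + 1 bytes, so the terminating nul is copied as
   part of the fixed length.  */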
1691static bool
1692gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 1693 tree dest, tree src)
fef5a0d9 1694{
cc8bea0a
MS
1695 gimple *stmt = gsi_stmt (*gsi);
1696 location_t loc = gimple_location (stmt);
fef5a0d9
RB
1697 tree fn;
1698
1699 /* If SRC and DEST are the same (and not volatile), return DEST. */
1700 if (operand_equal_p (src, dest, 0))
1701 {
8cd95cec
MS
1702 /* Issue -Wrestrict unless the pointers are null (those do
1703 not point to objects and so do not indicate an overlap;
1704 such calls could be the result of sanitization and jump
1705 threading). */
1706 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
e9b9fa4c
MS
1707 {
1708 tree func = gimple_call_fndecl (stmt);
cc8bea0a 1709
e9b9fa4c
MS
1710 warning_at (loc, OPT_Wrestrict,
1711 "%qD source argument is the same as destination",
1712 func);
1713 }
cc8bea0a 1714
fef5a0d9
RB
1715 replace_call_with_value (gsi, dest);
1716 return true;
1717 }
1718
1719 if (optimize_function_for_size_p (cfun))
1720 return false;
1721
1722 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1723 if (!fn)
1724 return false;
1725
e08341bb
MS
1726 /* Set to non-null if ARG refers to an unterminated array. */
1727 tree nonstr = NULL;
598f7235 1728 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
e08341bb
MS
1729
1730 if (nonstr)
1731 {
1732 /* Avoid folding calls with unterminated arrays. */
1733 if (!gimple_no_warning_p (stmt))
1734 warn_string_no_nul (loc, "strcpy", src, nonstr);
1735 gimple_set_no_warning (stmt, true);
1736 return false;
1737 }
1738
fef5a0d9 1739 if (!len)
dcb7fae2 1740 return false;
fef5a0d9
RB
1741
1742 len = fold_convert_loc (loc, size_type_node, len);
1743 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1744 len = force_gimple_operand_gsi (gsi, len, true,
1745 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1746 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1747 replace_call_with_call_and_fold (gsi, repl);
1748 return true;
1749}
1750
1751/* Fold a call to the builtin strncpy with arguments DEST, SRC, and LEN.
1752 Replace the call at GSI when a simplification is possible and return
1753 true; return false if no simplification can be made. */
1754
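/* Illustrative sketch (editorial addition): with a constant source and
   a constant bound no larger than the source length plus one,

     strncpy (buf, "abc", 4);

   is rewritten below as

     __builtin_memcpy (buf, "abc", 4);

   A larger bound, which would require nul padding, is left alone, and
   truncating bounds may be diagnosed via maybe_diag_stxncpy_trunc.  */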
1755static bool
dcb7fae2
RB
1756gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1757 tree dest, tree src, tree len)
fef5a0d9 1758{
025d57f0
MS
1759 gimple *stmt = gsi_stmt (*gsi);
1760 location_t loc = gimple_location (stmt);
6a33d0ff 1761 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
fef5a0d9
RB
1762
1763 /* If the LEN parameter is zero, return DEST. */
1764 if (integer_zerop (len))
1765 {
6a33d0ff
MS
1766 /* Avoid warning if the destination refers to an array/pointer
1767 decorated with attribute nonstring. */
1768 if (!nonstring)
1769 {
1770 tree fndecl = gimple_call_fndecl (stmt);
6a33d0ff
MS
1771
1772 /* Warn about the lack of nul termination: the result is not
1773 a (nul-terminated) string. */
598f7235 1774 tree slen = get_maxval_strlen (src, SRK_STRLEN);
6a33d0ff
MS
1775 if (slen && !integer_zerop (slen))
1776 warning_at (loc, OPT_Wstringop_truncation,
1777 "%G%qD destination unchanged after copying no bytes "
1778 "from a string of length %E",
8a45b051 1779 stmt, fndecl, slen);
6a33d0ff
MS
1780 else
1781 warning_at (loc, OPT_Wstringop_truncation,
1782 "%G%qD destination unchanged after copying no bytes",
8a45b051 1783 stmt, fndecl);
6a33d0ff 1784 }
025d57f0 1785
fef5a0d9
RB
1786 replace_call_with_value (gsi, dest);
1787 return true;
1788 }
1789
1790 /* We can't compare slen with len as constants below if len is not a
1791 constant. */
dcb7fae2 1792 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
1793 return false;
1794
fef5a0d9 1795 /* Now, we must be passed a constant src ptr parameter. */
598f7235 1796 tree slen = get_maxval_strlen (src, SRK_STRLEN);
dcb7fae2 1797 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
1798 return false;
1799
025d57f0
MS
1800 /* The size of the source string including the terminating nul. */
1801 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
fef5a0d9
RB
1802
1803 /* We do not support simplification of this case, though we do
1804 support it when expanding trees into RTL. */
1805 /* FIXME: generate a call to __builtin_memset. */
025d57f0 1806 if (tree_int_cst_lt (ssize, len))
fef5a0d9
RB
1807 return false;
1808
5d0d5d68
MS
1809 /* Diagnose truncation that leaves the copy unterminated. */
1810 maybe_diag_stxncpy_trunc (*gsi, src, len);
025d57f0 1811
fef5a0d9 1812 /* OK transform into builtin memcpy. */
025d57f0 1813 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
fef5a0d9
RB
1814 if (!fn)
1815 return false;
1816
1817 len = fold_convert_loc (loc, size_type_node, len);
1818 len = force_gimple_operand_gsi (gsi, len, true,
1819 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1820 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9 1821 replace_call_with_call_and_fold (gsi, repl);
025d57f0 1822
fef5a0d9
RB
1823 return true;
1824}
1825
71dea1dd
WD
1826/* Fold function call to builtin strchr or strrchr.
1827 If both arguments are constant, evaluate and fold the result,
1828 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
912d9ec3
WD
1829 In general strlen is significantly faster than strchr
1830 due to being a simpler operation. */
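/* Illustrative sketch (editorial addition): with a non-constant string
   and a nul character argument,

     p = strchr (s, 0);

   is rewritten below as

     tmp = strlen (s);
     p = s + tmp;

   while strchr ("abc", 'b'), with both arguments constant, folds to the
   constant offset "abc" + 1.  */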
1831static bool
71dea1dd 1832gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
912d9ec3
WD
1833{
1834 gimple *stmt = gsi_stmt (*gsi);
1835 tree str = gimple_call_arg (stmt, 0);
1836 tree c = gimple_call_arg (stmt, 1);
1837 location_t loc = gimple_location (stmt);
71dea1dd
WD
1838 const char *p;
1839 char ch;
912d9ec3 1840
71dea1dd 1841 if (!gimple_call_lhs (stmt))
912d9ec3
WD
1842 return false;
1843
71dea1dd
WD
1844 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1845 {
1846 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1847
1848 if (p1 == NULL)
1849 {
1850 replace_call_with_value (gsi, integer_zero_node);
1851 return true;
1852 }
1853
1854 tree len = build_int_cst (size_type_node, p1 - p);
1855 gimple_seq stmts = NULL;
1856 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1857 POINTER_PLUS_EXPR, str, len);
1858 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1859 gsi_replace_with_seq_vops (gsi, stmts);
1860 return true;
1861 }
1862
1863 if (!integer_zerop (c))
912d9ec3
WD
1864 return false;
1865
71dea1dd 1866 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
c8952930 1867 if (is_strrchr && optimize_function_for_size_p (cfun))
71dea1dd
WD
1868 {
1869 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1870
c8952930 1871 if (strchr_fn)
71dea1dd
WD
1872 {
1873 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1874 replace_call_with_call_and_fold (gsi, repl);
1875 return true;
1876 }
1877
1878 return false;
1879 }
1880
912d9ec3
WD
1881 tree len;
1882 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1883
1884 if (!strlen_fn)
1885 return false;
1886
1887 /* Create newstr = strlen (str). */
1888 gimple_seq stmts = NULL;
1889 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1890 gimple_set_location (new_stmt, loc);
a15ebbcd 1891 len = create_tmp_reg_or_ssa_name (size_type_node);
912d9ec3
WD
1892 gimple_call_set_lhs (new_stmt, len);
1893 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1894
1895 /* Create (str p+ strlen (str)). */
1896 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1897 POINTER_PLUS_EXPR, str, len);
1898 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1899 gsi_replace_with_seq_vops (gsi, stmts);
1900 /* gsi now points at the assignment to the lhs, get a
1901 stmt iterator to the strlen.
1902 ??? We can't use gsi_for_stmt as that doesn't work when the
1903 CFG isn't built yet. */
1904 gimple_stmt_iterator gsi2 = *gsi;
1905 gsi_prev (&gsi2);
1906 fold_stmt (&gsi2);
1907 return true;
1908}
1909
c8952930
JJ
1910/* Fold function call to builtin strstr.
1911 If both arguments are constant, evaluate and fold the result,
1912 additionally fold strstr (x, "") into x and strstr (x, "c")
1913 into strchr (x, 'c'). */
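/* Illustrative sketch (editorial addition) of the cases handled below:

     strstr (x, "")         ->  x
     strstr (x, "c")        ->  strchr (x, 'c')
     strstr ("abcd", "cd")  ->  "abcd" + 2     (both arguments constant)  */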
1914static bool
1915gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1916{
1917 gimple *stmt = gsi_stmt (*gsi);
1918 tree haystack = gimple_call_arg (stmt, 0);
1919 tree needle = gimple_call_arg (stmt, 1);
1920 const char *p, *q;
1921
1922 if (!gimple_call_lhs (stmt))
1923 return false;
1924
1925 q = c_getstr (needle);
1926 if (q == NULL)
1927 return false;
1928
1929 if ((p = c_getstr (haystack)))
1930 {
1931 const char *r = strstr (p, q);
1932
1933 if (r == NULL)
1934 {
1935 replace_call_with_value (gsi, integer_zero_node);
1936 return true;
1937 }
1938
1939 tree len = build_int_cst (size_type_node, r - p);
1940 gimple_seq stmts = NULL;
1941 gimple *new_stmt
1942 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1943 haystack, len);
1944 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1945 gsi_replace_with_seq_vops (gsi, stmts);
1946 return true;
1947 }
1948
1949 /* For strstr (x, "") return x. */
1950 if (q[0] == '\0')
1951 {
1952 replace_call_with_value (gsi, haystack);
1953 return true;
1954 }
1955
1956 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1957 if (q[1] == '\0')
1958 {
1959 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1960 if (strchr_fn)
1961 {
1962 tree c = build_int_cst (integer_type_node, q[0]);
1963 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1964 replace_call_with_call_and_fold (gsi, repl);
1965 return true;
1966 }
1967 }
1968
1969 return false;
1970}
1971
fef5a0d9
RB
1972/* Simplify a call to the strcat builtin. DST and SRC are the arguments
1973 to the call.
1974
1975 Return false if no simplification was possible. Otherwise replace
1976 the call at GSI with the simplified form and return true.
1977
1978 The simplified form may be a constant or other expression which
1979 computes the same value, but in a more efficient manner (including
1980 calls to other builtin functions).
1981
1982 When SRC is the empty string, the call is simply replaced by DST.
1983 Otherwise, when the length of SRC is a known constant and the
1984 statement's basic block is optimized for speed, the call is
1985 rewritten as a call to strlen (DST) followed by a memcpy of SRC,
1986 including its terminating nul, into DST + strlen (DST).
1987
1988 In all other cases the call is left alone and false is returned. */
1989
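/* Illustrative sketch (editorial addition): with a source of known
   constant length,

     strcat (dst, "abc");

   is rewritten below as the equivalent of

     tmp = strlen (dst);
     __builtin_memcpy (dst + tmp, "abc", 4);

   copying the terminating nul along with the three characters.  */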
1990static bool
dcb7fae2 1991gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
fef5a0d9 1992{
355fe088 1993 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 1994 location_t loc = gimple_location (stmt);
fef5a0d9
RB
1995
1996 const char *p = c_getstr (src);
1997
1998 /* If the string length is zero, return the dst parameter. */
1999 if (p && *p == '\0')
2000 {
2001 replace_call_with_value (gsi, dst);
2002 return true;
2003 }
2004
2005 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2006 return false;
2007
2008 /* See if we can store by pieces into (dst + strlen(dst)). */
2009 tree newdst;
2010 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2011 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2012
2013 if (!strlen_fn || !memcpy_fn)
2014 return false;
2015
2016 /* If the length of the source string isn't computable don't
2017 split strcat into strlen and memcpy. */
598f7235 2018 tree len = get_maxval_strlen (src, SRK_STRLEN);
fef5a0d9 2019 if (! len)
fef5a0d9
RB
2020 return false;
2021
2022 /* Create strlen (dst). */
2023 gimple_seq stmts = NULL, stmts2;
355fe088 2024 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
fef5a0d9 2025 gimple_set_location (repl, loc);
a15ebbcd 2026 newdst = create_tmp_reg_or_ssa_name (size_type_node);
fef5a0d9
RB
2027 gimple_call_set_lhs (repl, newdst);
2028 gimple_seq_add_stmt_without_update (&stmts, repl);
2029
2030 /* Create (dst p+ strlen (dst)). */
2031 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2032 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2033 gimple_seq_add_seq_without_update (&stmts, stmts2);
2034
2035 len = fold_convert_loc (loc, size_type_node, len);
2036 len = size_binop_loc (loc, PLUS_EXPR, len,
2037 build_int_cst (size_type_node, 1));
2038 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2039 gimple_seq_add_seq_without_update (&stmts, stmts2);
2040
2041 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2042 gimple_seq_add_stmt_without_update (&stmts, repl);
2043 if (gimple_call_lhs (stmt))
2044 {
2045 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2046 gimple_seq_add_stmt_without_update (&stmts, repl);
2047 gsi_replace_with_seq_vops (gsi, stmts);
2048 /* gsi now points at the assignment to the lhs, get a
2049 stmt iterator to the memcpy call.
2050 ??? We can't use gsi_for_stmt as that doesn't work when the
2051 CFG isn't built yet. */
2052 gimple_stmt_iterator gsi2 = *gsi;
2053 gsi_prev (&gsi2);
2054 fold_stmt (&gsi2);
2055 }
2056 else
2057 {
2058 gsi_replace_with_seq_vops (gsi, stmts);
2059 fold_stmt (gsi);
2060 }
2061 return true;
2062}
2063
07f1cf56
RB
2064/* Fold a call to the __strcat_chk builtin. DEST, SRC, and SIZE
2065 are the arguments to the call. */
2066
2067static bool
2068gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2069{
355fe088 2070 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
2071 tree dest = gimple_call_arg (stmt, 0);
2072 tree src = gimple_call_arg (stmt, 1);
2073 tree size = gimple_call_arg (stmt, 2);
2074 tree fn;
2075 const char *p;
2076
2077
2078 p = c_getstr (src);
2079 /* If the SRC parameter is "", return DEST. */
2080 if (p && *p == '\0')
2081 {
2082 replace_call_with_value (gsi, dest);
2083 return true;
2084 }
2085
2086 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2087 return false;
2088
2089 /* If __builtin_strcat_chk is used, assume strcat is available. */
2090 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2091 if (!fn)
2092 return false;
2093
355fe088 2094 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
2095 replace_call_with_call_and_fold (gsi, repl);
2096 return true;
2097}
2098
ad03a744
RB
2099/* Simplify a call to the strncat builtin. */
2100
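/* Illustrative sketch (editorial addition): when the bound is a constant
   that is not smaller than the constant source length,

     strncat (dst, "ab", 5);

   is simplified below to

     strcat (dst, "ab");

   possibly after diagnosing suspicious bounds (a bound equal to the
   source length or one that reaches the destination size).  */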
2101static bool
2102gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2103{
8a45b051 2104 gimple *stmt = gsi_stmt (*gsi);
ad03a744
RB
2105 tree dst = gimple_call_arg (stmt, 0);
2106 tree src = gimple_call_arg (stmt, 1);
2107 tree len = gimple_call_arg (stmt, 2);
2108
2109 const char *p = c_getstr (src);
2110
2111 /* If the requested length is zero, or the src parameter string
2112 length is zero, return the dst parameter. */
2113 if (integer_zerop (len) || (p && *p == '\0'))
2114 {
2115 replace_call_with_value (gsi, dst);
2116 return true;
2117 }
2118
025d57f0
MS
2119 if (TREE_CODE (len) != INTEGER_CST || !p)
2120 return false;
2121
2122 unsigned srclen = strlen (p);
2123
2124 int cmpsrc = compare_tree_int (len, srclen);
2125
2126 /* Return early if the requested len is less than the string length.
2127 Warnings will be issued elsewhere later. */
2128 if (cmpsrc < 0)
2129 return false;
2130
2131 unsigned HOST_WIDE_INT dstsize;
2132
2133 bool nowarn = gimple_no_warning_p (stmt);
2134
2135 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
ad03a744 2136 {
025d57f0 2137 int cmpdst = compare_tree_int (len, dstsize);
ad03a744 2138
025d57f0
MS
2139 if (cmpdst >= 0)
2140 {
2141 tree fndecl = gimple_call_fndecl (stmt);
2142
2143 /* Strncat copies (at most) LEN bytes and always appends
2144 the terminating NUL so the specified bound should never
2145 be equal to (or greater than) the size of the destination.
2146 If it is, the copy could overflow. */
2147 location_t loc = gimple_location (stmt);
2148 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2149 cmpdst == 0
2150 ? G_("%G%qD specified bound %E equals "
2151 "destination size")
2152 : G_("%G%qD specified bound %E exceeds "
2153 "destination size %wu"),
2154 stmt, fndecl, len, dstsize);
2155 if (nowarn)
2156 gimple_set_no_warning (stmt, true);
2157 }
2158 }
ad03a744 2159
025d57f0
MS
2160 if (!nowarn && cmpsrc == 0)
2161 {
2162 tree fndecl = gimple_call_fndecl (stmt);
025d57f0 2163 location_t loc = gimple_location (stmt);
eec5f615
MS
2164
2165 /* To avoid possible overflow the specified bound should also
2166 not be equal to the length of the source, even when the size
2167 of the destination is unknown (it's not an uncommon mistake
2168 to specify the length of the source as the bound). */
025d57f0
MS
2169 if (warning_at (loc, OPT_Wstringop_overflow_,
2170 "%G%qD specified bound %E equals source length",
2171 stmt, fndecl, len))
2172 gimple_set_no_warning (stmt, true);
ad03a744
RB
2173 }
2174
025d57f0
MS
2175 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2176
2177 /* If the replacement _DECL isn't initialized, don't do the
2178 transformation. */
2179 if (!fn)
2180 return false;
2181
2182 /* Otherwise, emit a call to strcat. */
2183 gcall *repl = gimple_build_call (fn, 2, dst, src);
2184 replace_call_with_call_and_fold (gsi, repl);
2185 return true;
ad03a744
RB
2186}
2187
745583f9
RB
2188/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2189 LEN, and SIZE. */
2190
2191static bool
2192gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2193{
355fe088 2194 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
2195 tree dest = gimple_call_arg (stmt, 0);
2196 tree src = gimple_call_arg (stmt, 1);
2197 tree len = gimple_call_arg (stmt, 2);
2198 tree size = gimple_call_arg (stmt, 3);
2199 tree fn;
2200 const char *p;
2201
2202 p = c_getstr (src);
2203 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2204 if ((p && *p == '\0')
2205 || integer_zerop (len))
2206 {
2207 replace_call_with_value (gsi, dest);
2208 return true;
2209 }
2210
2211 if (! tree_fits_uhwi_p (size))
2212 return false;
2213
2214 if (! integer_all_onesp (size))
2215 {
2216 tree src_len = c_strlen (src, 1);
2217 if (src_len
2218 && tree_fits_uhwi_p (src_len)
2219 && tree_fits_uhwi_p (len)
2220 && ! tree_int_cst_lt (len, src_len))
2221 {
2222 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2223 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2224 if (!fn)
2225 return false;
2226
355fe088 2227 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
2228 replace_call_with_call_and_fold (gsi, repl);
2229 return true;
2230 }
2231 return false;
2232 }
2233
2234 /* If __builtin_strncat_chk is used, assume strncat is available. */
2235 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2236 if (!fn)
2237 return false;
2238
355fe088 2239 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
2240 replace_call_with_call_and_fold (gsi, repl);
2241 return true;
2242}
2243
a918bfbf
ML
2244/* Build and append gimple statements to STMTS that load the first
2245 character of the memory location identified by STR. LOC is the
2246 location of the statement. */
2247
2248static tree
2249gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2250{
2251 tree var;
2252
2253 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2254 tree cst_uchar_ptr_node
2255 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2256 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2257
2258 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2259 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2260 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2261
2262 gimple_assign_set_lhs (stmt, var);
2263 gimple_seq_add_stmt_without_update (stmts, stmt);
2264
2265 return var;
2266}
2267
2268/* Fold the call to a str{n}{case}cmp builtin pointed to by the GSI
2269 iterator. The specific builtin is determined from the call's fndecl. */
2270
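/* Illustrative sketch (editorial addition) of the main simplifications
   performed below:

     strcmp (s, s)        ->  0
     strcmp ("ab", "ab")  ->  0                    (constant arguments)
     strncmp (s, t, 0)    ->  0
     strcmp (s, "")       ->  *(const unsigned char *) s
     strncmp (s, t, 1)    ->  *(const unsigned char *) s
                              - *(const unsigned char *) t  */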
2271static bool
2272gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2273{
2274 gimple *stmt = gsi_stmt (*gsi);
2275 tree callee = gimple_call_fndecl (stmt);
2276 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2277
2278 tree type = integer_type_node;
2279 tree str1 = gimple_call_arg (stmt, 0);
2280 tree str2 = gimple_call_arg (stmt, 1);
2281 tree lhs = gimple_call_lhs (stmt);
2282 HOST_WIDE_INT length = -1;
2283
2284 /* Handle strncmp and strncasecmp functions. */
2285 if (gimple_call_num_args (stmt) == 3)
2286 {
2287 tree len = gimple_call_arg (stmt, 2);
2288 if (tree_fits_uhwi_p (len))
2289 length = tree_to_uhwi (len);
2290 }
2291
2292 /* If the LEN parameter is zero, return zero. */
2293 if (length == 0)
2294 {
2295 replace_call_with_value (gsi, integer_zero_node);
2296 return true;
2297 }
2298
2299 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2300 if (operand_equal_p (str1, str2, 0))
2301 {
2302 replace_call_with_value (gsi, integer_zero_node);
2303 return true;
2304 }
2305
2306 const char *p1 = c_getstr (str1);
2307 const char *p2 = c_getstr (str2);
2308
2309 /* For known strings, return an immediate value. */
2310 if (p1 && p2)
2311 {
2312 int r = 0;
2313 bool known_result = false;
2314
2315 switch (fcode)
2316 {
2317 case BUILT_IN_STRCMP:
8b0b334a 2318 case BUILT_IN_STRCMP_EQ:
a918bfbf
ML
2319 {
2320 r = strcmp (p1, p2);
2321 known_result = true;
2322 break;
2323 }
2324 case BUILT_IN_STRNCMP:
8b0b334a 2325 case BUILT_IN_STRNCMP_EQ:
a918bfbf
ML
2326 {
2327 if (length == -1)
2328 break;
2329 r = strncmp (p1, p2, length);
2330 known_result = true;
2331 break;
2332 }
2333 /* The only handleable situation is where the strings are equal
2334 (result 0), which is already handled by the operand_equal_p case. */
2335 case BUILT_IN_STRCASECMP:
2336 break;
2337 case BUILT_IN_STRNCASECMP:
2338 {
2339 if (length == -1)
2340 break;
2341 r = strncmp (p1, p2, length);
2342 if (r == 0)
2343 known_result = true;
5de73c05 2344 break;
a918bfbf
ML
2345 }
2346 default:
2347 gcc_unreachable ();
2348 }
2349
2350 if (known_result)
2351 {
2352 replace_call_with_value (gsi, build_cmp_result (type, r));
2353 return true;
2354 }
2355 }
2356
2357 bool nonzero_length = length >= 1
2358 || fcode == BUILT_IN_STRCMP
8b0b334a 2359 || fcode == BUILT_IN_STRCMP_EQ
a918bfbf
ML
2360 || fcode == BUILT_IN_STRCASECMP;
2361
2362 location_t loc = gimple_location (stmt);
2363
2364 /* If the second arg is "", return *(const unsigned char*)arg1. */
2365 if (p2 && *p2 == '\0' && nonzero_length)
2366 {
2367 gimple_seq stmts = NULL;
2368 tree var = gimple_load_first_char (loc, str1, &stmts);
2369 if (lhs)
2370 {
2371 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2372 gimple_seq_add_stmt_without_update (&stmts, stmt);
2373 }
2374
2375 gsi_replace_with_seq_vops (gsi, stmts);
2376 return true;
2377 }
2378
2379 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2380 if (p1 && *p1 == '\0' && nonzero_length)
2381 {
2382 gimple_seq stmts = NULL;
2383 tree var = gimple_load_first_char (loc, str2, &stmts);
2384
2385 if (lhs)
2386 {
2387 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2388 stmt = gimple_build_assign (c, NOP_EXPR, var);
2389 gimple_seq_add_stmt_without_update (&stmts, stmt);
2390
2391 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2392 gimple_seq_add_stmt_without_update (&stmts, stmt);
2393 }
2394
2395 gsi_replace_with_seq_vops (gsi, stmts);
2396 return true;
2397 }
2398
2399 /* If len parameter is one, return an expression corresponding to
2400 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2401 if (fcode == BUILT_IN_STRNCMP && length == 1)
2402 {
2403 gimple_seq stmts = NULL;
2404 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2405 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2406
2407 if (lhs)
2408 {
2409 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2410 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2411 gimple_seq_add_stmt_without_update (&stmts, convert1);
2412
2413 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2414 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2415 gimple_seq_add_stmt_without_update (&stmts, convert2);
2416
2417 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2418 gimple_seq_add_stmt_without_update (&stmts, stmt);
2419 }
2420
2421 gsi_replace_with_seq_vops (gsi, stmts);
2422 return true;
2423 }
2424
caed5c92
QZ
2425 /* If length is larger than the length of one constant string,
2426 replace strncmp with the corresponding strcmp. */
2427 if (fcode == BUILT_IN_STRNCMP
2428 && length > 0
2429 && ((p2 && (size_t) length > strlen (p2))
2430 || (p1 && (size_t) length > strlen (p1))))
2431 {
2432 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2433 if (!fn)
2434 return false;
2435 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2436 replace_call_with_call_and_fold (gsi, repl);
2437 return true;
2438 }
2439
a918bfbf
ML
2440 return false;
2441}
2442
488c6247
ML
2443/* Fold the call to memchr pointed to by the GSI iterator. */
2444
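/* Illustrative sketch (editorial addition): with a constant first
   argument and a constant length,

     memchr ("abcd", 'c', 4)  ->  "abcd" + 2
     memchr ("abcd", 'x', 4)  ->  (void *) 0
     memchr (p, c, 0)         ->  (void *) 0    (for any pointer P)  */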
2445static bool
2446gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2447{
2448 gimple *stmt = gsi_stmt (*gsi);
2449 tree lhs = gimple_call_lhs (stmt);
2450 tree arg1 = gimple_call_arg (stmt, 0);
2451 tree arg2 = gimple_call_arg (stmt, 1);
2452 tree len = gimple_call_arg (stmt, 2);
2453
2454 /* If the LEN parameter is zero, return zero. */
2455 if (integer_zerop (len))
2456 {
2457 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2458 return true;
2459 }
2460
2461 char c;
2462 if (TREE_CODE (arg2) != INTEGER_CST
2463 || !tree_fits_uhwi_p (len)
2464 || !target_char_cst_p (arg2, &c))
2465 return false;
2466
2467 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2468 unsigned HOST_WIDE_INT string_length;
2469 const char *p1 = c_getstr (arg1, &string_length);
2470
2471 if (p1)
2472 {
2473 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2474 if (r == NULL)
2475 {
2476 if (length <= string_length)
2477 {
2478 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2479 return true;
2480 }
2481 }
2482 else
2483 {
2484 unsigned HOST_WIDE_INT offset = r - p1;
2485 gimple_seq stmts = NULL;
2486 if (lhs != NULL_TREE)
2487 {
2488 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2489 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2490 arg1, offset_cst);
2491 gimple_seq_add_stmt_without_update (&stmts, stmt);
2492 }
2493 else
2494 gimple_seq_add_stmt_without_update (&stmts,
2495 gimple_build_nop ());
2496
2497 gsi_replace_with_seq_vops (gsi, stmts);
2498 return true;
2499 }
2500 }
2501
2502 return false;
2503}
a918bfbf 2504
fef5a0d9
RB
2505/* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2506 to the call. UNLOCKED is true if this is actually a call to
2507 fputs_unlocked; in that case the other unlocked functions are
2508 assumed to exist as well. The transformation is only done when
2509 the value returned by the call is unused. Return false if no
2510 simplification was possible, otherwise return true. */
2511
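/* Illustrative sketch (editorial addition), assuming the result of the
   call is unused and the length of ARG0 is a known constant:

     fputs ("", f)     ->  call removed
     fputs ("a", f)    ->  fputc ('a', f)
     fputs ("abc", f)  ->  fwrite ("abc", 1, 3, f)   (not when optimizing
                                                       for size)  */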
2512static bool
2513gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
fef5a0d9 2514 tree arg0, tree arg1,
dcb7fae2 2515 bool unlocked)
fef5a0d9 2516{
355fe088 2517 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2518
fef5a0d9
RB
2519 /* If we're using an unlocked function, assume the other unlocked
2520 functions exist explicitly. */
2521 tree const fn_fputc = (unlocked
2522 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2523 : builtin_decl_implicit (BUILT_IN_FPUTC));
2524 tree const fn_fwrite = (unlocked
2525 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2526 : builtin_decl_implicit (BUILT_IN_FWRITE));
2527
2528 /* If the return value is used, don't do the transformation. */
dcb7fae2 2529 if (gimple_call_lhs (stmt))
fef5a0d9
RB
2530 return false;
2531
fef5a0d9
RB
2532 /* Get the length of the string passed to fputs. If the length
2533 can't be determined, punt. */
598f7235 2534 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
fef5a0d9
RB
2535 if (!len
2536 || TREE_CODE (len) != INTEGER_CST)
2537 return false;
2538
2539 switch (compare_tree_int (len, 1))
2540 {
2541 case -1: /* length is 0, delete the call entirely. */
2542 replace_call_with_value (gsi, integer_zero_node);
2543 return true;
2544
2545 case 0: /* length is 1, call fputc. */
2546 {
2547 const char *p = c_getstr (arg0);
2548 if (p != NULL)
2549 {
2550 if (!fn_fputc)
2551 return false;
2552
355fe088 2553 gimple *repl = gimple_build_call (fn_fputc, 2,
fef5a0d9
RB
2554 build_int_cst
2555 (integer_type_node, p[0]), arg1);
2556 replace_call_with_call_and_fold (gsi, repl);
2557 return true;
2558 }
2559 }
2560 /* FALLTHROUGH */
2561 case 1: /* length is greater than 1, call fwrite. */
2562 {
2563 /* If optimizing for size keep fputs. */
2564 if (optimize_function_for_size_p (cfun))
2565 return false;
2566 /* New argument list transforming fputs(string, stream) to
2567 fwrite(string, 1, len, stream). */
2568 if (!fn_fwrite)
2569 return false;
2570
355fe088 2571 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
fef5a0d9
RB
2572 size_one_node, len, arg1);
2573 replace_call_with_call_and_fold (gsi, repl);
2574 return true;
2575 }
2576 default:
2577 gcc_unreachable ();
2578 }
2579 return false;
2580}
2581
2582/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2583 DEST, SRC, LEN, and SIZE are the arguments to the call.
2584 IGNORE is true if the return value can be ignored. FCODE is the
2585 BUILT_IN_* code of the builtin. If MAXLEN is not NULL, it is the
2586 maximum length passed as the third argument. */
2587
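/* Illustrative sketch (editorial addition): when SIZE is (size_t) -1,
   meaning the object size is unknown, or provably covers the length,

     __memcpy_chk (d, s, n, sz)  ->  memcpy (d, s, n)

   and likewise for mempcpy, memmove and memset; a mempcpy_chk whose
   result is unused may first be turned into a memcpy_chk call.  */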
2588static bool
2589gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
fef5a0d9 2590 tree dest, tree src, tree len, tree size,
fef5a0d9
RB
2591 enum built_in_function fcode)
2592{
355fe088 2593 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
2594 location_t loc = gimple_location (stmt);
2595 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2596 tree fn;
2597
2598 /* If SRC and DEST are the same (and not volatile), return DEST
2599 (resp. DEST+LEN for __mempcpy_chk). */
2600 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2601 {
2602 if (fcode != BUILT_IN_MEMPCPY_CHK)
2603 {
2604 replace_call_with_value (gsi, dest);
2605 return true;
2606 }
2607 else
2608 {
74e3c262
RB
2609 gimple_seq stmts = NULL;
2610 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
22518428
JJ
2611 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2612 TREE_TYPE (dest), dest, len);
74e3c262 2613 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
fef5a0d9
RB
2614 replace_call_with_value (gsi, temp);
2615 return true;
2616 }
2617 }
2618
2619 if (! tree_fits_uhwi_p (size))
2620 return false;
2621
598f7235 2622 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9
RB
2623 if (! integer_all_onesp (size))
2624 {
2625 if (! tree_fits_uhwi_p (len))
2626 {
2627 /* If LEN is not constant, try MAXLEN too.
2628 For MAXLEN only allow optimizing into non-_ocs function
2629 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2630 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2631 {
2632 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2633 {
2634 /* (void) __mempcpy_chk () can be optimized into
2635 (void) __memcpy_chk (). */
2636 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2637 if (!fn)
2638 return false;
2639
355fe088 2640 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2641 replace_call_with_call_and_fold (gsi, repl);
2642 return true;
2643 }
2644 return false;
2645 }
2646 }
2647 else
2648 maxlen = len;
2649
2650 if (tree_int_cst_lt (size, maxlen))
2651 return false;
2652 }
2653
2654 fn = NULL_TREE;
2655 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2656 mem{cpy,pcpy,move,set} is available. */
2657 switch (fcode)
2658 {
2659 case BUILT_IN_MEMCPY_CHK:
2660 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2661 break;
2662 case BUILT_IN_MEMPCPY_CHK:
2663 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2664 break;
2665 case BUILT_IN_MEMMOVE_CHK:
2666 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2667 break;
2668 case BUILT_IN_MEMSET_CHK:
2669 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2670 break;
2671 default:
2672 break;
2673 }
2674
2675 if (!fn)
2676 return false;
2677
355fe088 2678 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
2679 replace_call_with_call_and_fold (gsi, repl);
2680 return true;
2681}
2682
2683/* Fold a call to the __st[rp]cpy_chk builtin.
2684 DEST, SRC, and SIZE are the arguments to the call.
2685 IGNORE is true if the return value can be ignored. FCODE is the
2686 BUILT_IN_* code of the builtin. If MAXLEN is not NULL, it is the
2687 maximum length of the string passed as the second argument. */
2688
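/* Illustrative sketch (editorial addition):

     __strcpy_chk (d, s, sz)  ->  strcpy (d, s)
       when SZ is (size_t) -1 or is known to exceed strlen (s);

     __strcpy_chk (d, s, sz)  ->  __memcpy_chk (d, s, len + 1, sz)
       when the length LEN is computable but not a constant.  */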
2689static bool
2690gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
dcb7fae2 2691 tree dest,
fef5a0d9 2692 tree src, tree size,
fef5a0d9
RB
2693 enum built_in_function fcode)
2694{
355fe088 2695 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
2696 location_t loc = gimple_location (stmt);
2697 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2698 tree len, fn;
2699
2700 /* If SRC and DEST are the same (and not volatile), return DEST. */
2701 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2702 {
8cd95cec
MS
2703 /* Issue -Wrestrict unless the pointers are null (those do
2704 not point to objects and so do not indicate an overlap;
2705 such calls could be the result of sanitization and jump
2706 threading). */
2707 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
e9b9fa4c
MS
2708 {
2709 tree func = gimple_call_fndecl (stmt);
cc8bea0a 2710
e9b9fa4c
MS
2711 warning_at (loc, OPT_Wrestrict,
2712 "%qD source argument is the same as destination",
2713 func);
2714 }
cc8bea0a 2715
fef5a0d9
RB
2716 replace_call_with_value (gsi, dest);
2717 return true;
2718 }
2719
2720 if (! tree_fits_uhwi_p (size))
2721 return false;
2722
598f7235 2723 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
fef5a0d9
RB
2724 if (! integer_all_onesp (size))
2725 {
2726 len = c_strlen (src, 1);
2727 if (! len || ! tree_fits_uhwi_p (len))
2728 {
2729 /* If LEN is not constant, try MAXLEN too.
2730 For MAXLEN only allow optimizing into non-_ocs function
2731 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2732 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2733 {
2734 if (fcode == BUILT_IN_STPCPY_CHK)
2735 {
2736 if (! ignore)
2737 return false;
2738
2739 /* If return value of __stpcpy_chk is ignored,
2740 optimize into __strcpy_chk. */
2741 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2742 if (!fn)
2743 return false;
2744
355fe088 2745 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
fef5a0d9
RB
2746 replace_call_with_call_and_fold (gsi, repl);
2747 return true;
2748 }
2749
2750 if (! len || TREE_SIDE_EFFECTS (len))
2751 return false;
2752
2753 /* If c_strlen returned something, but not a constant,
2754 transform __strcpy_chk into __memcpy_chk. */
2755 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2756 if (!fn)
2757 return false;
2758
74e3c262 2759 gimple_seq stmts = NULL;
770fe3a3 2760 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
74e3c262
RB
2761 len = gimple_convert (&stmts, loc, size_type_node, len);
2762 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2763 build_int_cst (size_type_node, 1));
2764 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 2765 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2766 replace_call_with_call_and_fold (gsi, repl);
2767 return true;
2768 }
e256dfce 2769 }
fef5a0d9
RB
2770 else
2771 maxlen = len;
2772
2773 if (! tree_int_cst_lt (maxlen, size))
2774 return false;
e256dfce
RG
2775 }
2776
fef5a0d9
RB
2777 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2778 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2779 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2780 if (!fn)
2781 return false;
2782
355fe088 2783 gimple *repl = gimple_build_call (fn, 2, dest, src);
fef5a0d9
RB
2784 replace_call_with_call_and_fold (gsi, repl);
2785 return true;
2786}
2787
2788/* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2789 are the arguments to the call. If MAXLEN is not NULL, it is the
2790 maximum length passed as the third argument. IGNORE is true if the
2791 return value can be ignored. FCODE is the BUILT_IN_* code of the builtin. */
2792
2793static bool
2794gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2795 tree dest, tree src,
dcb7fae2 2796 tree len, tree size,
fef5a0d9
RB
2797 enum built_in_function fcode)
2798{
355fe088 2799 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2800 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2801 tree fn;
2802
2803 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
cbdd87d4 2804 {
fef5a0d9
RB
2805 /* If return value of __stpncpy_chk is ignored,
2806 optimize into __strncpy_chk. */
2807 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2808 if (fn)
2809 {
355fe088 2810 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2811 replace_call_with_call_and_fold (gsi, repl);
2812 return true;
2813 }
cbdd87d4
RG
2814 }
2815
fef5a0d9
RB
2816 if (! tree_fits_uhwi_p (size))
2817 return false;
2818
598f7235 2819 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9 2820 if (! integer_all_onesp (size))
cbdd87d4 2821 {
fef5a0d9 2822 if (! tree_fits_uhwi_p (len))
fe2ef088 2823 {
fef5a0d9
RB
2824 /* If LEN is not constant, try MAXLEN too.
2825 For MAXLEN only allow optimizing into non-_ocs function
2826 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2827 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2828 return false;
8a1561bc 2829 }
fef5a0d9
RB
2830 else
2831 maxlen = len;
2832
2833 if (tree_int_cst_lt (size, maxlen))
2834 return false;
cbdd87d4
RG
2835 }
2836
fef5a0d9
RB
2837 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2838 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2839 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2840 if (!fn)
2841 return false;
2842
355fe088 2843 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
2844 replace_call_with_call_and_fold (gsi, repl);
2845 return true;
cbdd87d4
RG
2846}
2847
2625bb5d
RB
2848/* Fold a call to the builtin stpcpy with arguments DEST and SRC.
2849 Return false if no simplification can be made. */
2850
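/* Illustrative sketch (editorial addition): with a source of known
   constant length,

     p = stpcpy (d, "abc");

   is rewritten below as

     __builtin_memcpy (d, "abc", 4);
     p = d + 3;

   and when the result is unused the call simply becomes strcpy.  */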
2851static bool
2852gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2853{
2854 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2855 location_t loc = gimple_location (stmt);
2856 tree dest = gimple_call_arg (stmt, 0);
2857 tree src = gimple_call_arg (stmt, 1);
01b0acb7 2858 tree fn, lenp1;
2625bb5d
RB
2859
2860 /* If the result is unused, replace stpcpy with strcpy. */
2861 if (gimple_call_lhs (stmt) == NULL_TREE)
2862 {
2863 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2864 if (!fn)
2865 return false;
2866 gimple_call_set_fndecl (stmt, fn);
2867 fold_stmt (gsi);
2868 return true;
2869 }
2870
01b0acb7 2871 /* Set to non-null if ARG refers to an unterminated array. */
3f46ef1f 2872 c_strlen_data data = { };
7d583f42 2873 tree len = c_strlen (src, 1, &data, 1);
2625bb5d
RB
2874 if (!len
2875 || TREE_CODE (len) != INTEGER_CST)
01b0acb7 2876 {
7d583f42
JL
2877 data.decl = unterminated_array (src);
2878 if (!data.decl)
01b0acb7
MS
2879 return false;
2880 }
2881
7d583f42 2882 if (data.decl)
01b0acb7
MS
2883 {
2884 /* Avoid folding calls with unterminated arrays. */
2885 if (!gimple_no_warning_p (stmt))
7d583f42 2886 warn_string_no_nul (loc, "stpcpy", src, data.decl);
01b0acb7
MS
2887 gimple_set_no_warning (stmt, true);
2888 return false;
2889 }
2625bb5d
RB
2890
2891 if (optimize_function_for_size_p (cfun)
2892 /* If length is zero it's small enough. */
2893 && !integer_zerop (len))
2894 return false;
2895
2896 /* If the source has a known length replace stpcpy with memcpy. */
2897 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2898 if (!fn)
2899 return false;
2900
2901 gimple_seq stmts = NULL;
2902 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2903 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2904 tem, build_int_cst (size_type_node, 1));
2905 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2906 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2907 gimple_set_vuse (repl, gimple_vuse (stmt));
2908 gimple_set_vdef (repl, gimple_vdef (stmt));
2909 if (gimple_vdef (repl)
2910 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2911 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2912 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2913 /* Replace the result with dest + len. */
2914 stmts = NULL;
2915 tem = gimple_convert (&stmts, loc, sizetype, len);
2916 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2917 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2918 POINTER_PLUS_EXPR, dest, tem);
f6b4dc28 2919 gsi_replace (gsi, ret, false);
2625bb5d
RB
2920 /* Finally fold the memcpy call. */
2921 gimple_stmt_iterator gsi2 = *gsi;
2922 gsi_prev (&gsi2);
2923 fold_stmt (&gsi2);
2924 return true;
2925}
2926
fef5a0d9
RB
2927/* Fold a call to __{,v}snprintf_chk at the statement pointed to by GSI.
2928 Return false if a normal call should be emitted rather than folding
2929 the call. FCODE is either BUILT_IN_SNPRINTF_CHK or
2930 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum
2931 length passed as the second argument. */
cbdd87d4
RG
2932
2933static bool
fef5a0d9 2934gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
dcb7fae2 2935 enum built_in_function fcode)
cbdd87d4 2936{
538dd0b7 2937 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2938 tree dest, size, len, fn, fmt, flag;
2939 const char *fmt_str;
cbdd87d4 2940
fef5a0d9
RB
2941 /* Verify the required arguments in the original call. */
2942 if (gimple_call_num_args (stmt) < 5)
2943 return false;
cbdd87d4 2944
fef5a0d9
RB
2945 dest = gimple_call_arg (stmt, 0);
2946 len = gimple_call_arg (stmt, 1);
2947 flag = gimple_call_arg (stmt, 2);
2948 size = gimple_call_arg (stmt, 3);
2949 fmt = gimple_call_arg (stmt, 4);
2950
2951 if (! tree_fits_uhwi_p (size))
2952 return false;
2953
2954 if (! integer_all_onesp (size))
2955 {
598f7235 2956 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9 2957 if (! tree_fits_uhwi_p (len))
cbdd87d4 2958 {
fef5a0d9
RB
2959 /* If LEN is not constant, try MAXLEN too.
2960 For MAXLEN only allow optimizing into non-_ocs function
2961 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2962 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
cbdd87d4
RG
2963 return false;
2964 }
2965 else
fef5a0d9 2966 maxlen = len;
cbdd87d4 2967
fef5a0d9
RB
2968 if (tree_int_cst_lt (size, maxlen))
2969 return false;
2970 }
cbdd87d4 2971
fef5a0d9
RB
2972 if (!init_target_chars ())
2973 return false;
cbdd87d4 2974
fef5a0d9
RB
2975 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2976 or if format doesn't contain % chars or is "%s". */
2977 if (! integer_zerop (flag))
2978 {
2979 fmt_str = c_getstr (fmt);
2980 if (fmt_str == NULL)
2981 return false;
2982 if (strchr (fmt_str, target_percent) != NULL
2983 && strcmp (fmt_str, target_percent_s))
2984 return false;
cbdd87d4
RG
2985 }
2986
fef5a0d9
RB
2987 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2988 available. */
2989 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2990 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2991 if (!fn)
491e0b9b
RG
2992 return false;
2993
fef5a0d9
RB
2994 /* Replace the called function and the first 5 arguments by 3,
2995 retaining the trailing varargs. */
2996 gimple_call_set_fndecl (stmt, fn);
2997 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2998 gimple_call_set_arg (stmt, 0, dest);
2999 gimple_call_set_arg (stmt, 1, len);
3000 gimple_call_set_arg (stmt, 2, fmt);
3001 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3002 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3003 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3004 fold_stmt (gsi);
3005 return true;
3006}
cbdd87d4 3007
fef5a0d9
RB
3008/* Fold a call to __{,v}sprintf_chk at the statement pointed to by GSI.
3009 Return false if a normal call should be emitted rather than
3010 folding the call. FCODE is either BUILT_IN_SPRINTF_CHK
3011 or BUILT_IN_VSPRINTF_CHK. */
cbdd87d4 3012
fef5a0d9
RB
3013static bool
3014gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3015 enum built_in_function fcode)
3016{
538dd0b7 3017 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
3018 tree dest, size, len, fn, fmt, flag;
3019 const char *fmt_str;
3020 unsigned nargs = gimple_call_num_args (stmt);
cbdd87d4 3021
fef5a0d9
RB
3022 /* Verify the required arguments in the original call. */
3023 if (nargs < 4)
3024 return false;
3025 dest = gimple_call_arg (stmt, 0);
3026 flag = gimple_call_arg (stmt, 1);
3027 size = gimple_call_arg (stmt, 2);
3028 fmt = gimple_call_arg (stmt, 3);
3029
3030 if (! tree_fits_uhwi_p (size))
3031 return false;
3032
3033 len = NULL_TREE;
3034
3035 if (!init_target_chars ())
3036 return false;
3037
3038 /* Check whether the format is a literal string constant. */
3039 fmt_str = c_getstr (fmt);
3040 if (fmt_str != NULL)
3041 {
3042 /* If the format doesn't contain % args or %%, we know the size. */
3043 if (strchr (fmt_str, target_percent) == 0)
cbdd87d4 3044 {
fef5a0d9
RB
3045 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3046 len = build_int_cstu (size_type_node, strlen (fmt_str));
3047 }
3048 /* If the format is "%s" and first ... argument is a string literal,
3049 we know the size too. */
3050 else if (fcode == BUILT_IN_SPRINTF_CHK
3051 && strcmp (fmt_str, target_percent_s) == 0)
3052 {
3053 tree arg;
cbdd87d4 3054
fef5a0d9
RB
3055 if (nargs == 5)
3056 {
3057 arg = gimple_call_arg (stmt, 4);
3058 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3059 {
3060 len = c_strlen (arg, 1);
3061 if (! len || ! tree_fits_uhwi_p (len))
3062 len = NULL_TREE;
3063 }
3064 }
3065 }
3066 }
cbdd87d4 3067
fef5a0d9
RB
3068 if (! integer_all_onesp (size))
3069 {
3070 if (! len || ! tree_int_cst_lt (len, size))
3071 return false;
3072 }
cbdd87d4 3073
fef5a0d9
RB
3074 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3075 or if format doesn't contain % chars or is "%s". */
3076 if (! integer_zerop (flag))
3077 {
3078 if (fmt_str == NULL)
3079 return false;
3080 if (strchr (fmt_str, target_percent) != NULL
3081 && strcmp (fmt_str, target_percent_s))
3082 return false;
3083 }
cbdd87d4 3084
fef5a0d9
RB
3085 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3086 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3087 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3088 if (!fn)
3089 return false;
3090
3091 /* Replace the called function and the first 4 arguments by 2,
3092 retaining the trailing varargs. */
3093 gimple_call_set_fndecl (stmt, fn);
3094 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3095 gimple_call_set_arg (stmt, 0, dest);
3096 gimple_call_set_arg (stmt, 1, fmt);
3097 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3098 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3099 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3100 fold_stmt (gsi);
3101 return true;
3102}
3103
35770bb2
RB
3104/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3105 ORIG may be null if this is a 2-argument call. We don't attempt to
3106 simplify calls with more than 3 arguments.
3107
a104bd88 3108 Return true if simplification was possible, otherwise false. */
35770bb2 3109
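/* Illustrative sketch (editorial addition) of the two forms handled
   below, assuming any needed string lengths are known:

     sprintf (d, "abc")    ->  strcpy (d, "abc")   (result, if used, is 3)
     sprintf (d, "%s", s)  ->  strcpy (d, s)       (result, if used, is
                                                     strlen (s))

   Formats containing any other % directive are left alone.  */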
a104bd88 3110bool
dcb7fae2 3111gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
35770bb2 3112{
355fe088 3113 gimple *stmt = gsi_stmt (*gsi);
35770bb2
RB
3114 tree dest = gimple_call_arg (stmt, 0);
3115 tree fmt = gimple_call_arg (stmt, 1);
3116 tree orig = NULL_TREE;
3117 const char *fmt_str = NULL;
3118
3119 /* Verify the required arguments in the original call. We deal with two
3120 types of sprintf() calls: 'sprintf (str, fmt)' and
3121 'sprintf (dest, "%s", orig)'. */
3122 if (gimple_call_num_args (stmt) > 3)
3123 return false;
3124
3125 if (gimple_call_num_args (stmt) == 3)
3126 orig = gimple_call_arg (stmt, 2);
3127
3128 /* Check whether the format is a literal string constant. */
3129 fmt_str = c_getstr (fmt);
3130 if (fmt_str == NULL)
3131 return false;
3132
3133 if (!init_target_chars ())
3134 return false;
3135
3136 /* If the format doesn't contain % args or %%, use strcpy. */
3137 if (strchr (fmt_str, target_percent) == NULL)
3138 {
3139 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3140
3141 if (!fn)
3142 return false;
3143
3144 /* Don't optimize sprintf (buf, "abc", ptr++). */
3145 if (orig)
3146 return false;
3147
3148 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3149 'format' is known to contain no % formats. */
3150 gimple_seq stmts = NULL;
355fe088 3151 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
01b0acb7
MS
3152
3153 /* Propagate the NO_WARNING bit to avoid issuing the same
3154 warning more than once. */
3155 if (gimple_no_warning_p (stmt))
3156 gimple_set_no_warning (repl, true);
3157
35770bb2
RB
3158 gimple_seq_add_stmt_without_update (&stmts, repl);
3159 if (gimple_call_lhs (stmt))
3160 {
3161 repl = gimple_build_assign (gimple_call_lhs (stmt),
3162 build_int_cst (integer_type_node,
3163 strlen (fmt_str)));
3164 gimple_seq_add_stmt_without_update (&stmts, repl);
3165 gsi_replace_with_seq_vops (gsi, stmts);
3166 /* gsi now points at the assignment to the lhs, get a
3167 stmt iterator to the memcpy call.
3168 ??? We can't use gsi_for_stmt as that doesn't work when the
3169 CFG isn't built yet. */
3170 gimple_stmt_iterator gsi2 = *gsi;
3171 gsi_prev (&gsi2);
3172 fold_stmt (&gsi2);
3173 }
3174 else
3175 {
3176 gsi_replace_with_seq_vops (gsi, stmts);
3177 fold_stmt (gsi);
3178 }
3179 return true;
3180 }
3181
3182 /* If the format is "%s", use strcpy if the result isn't used. */
3183 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3184 {
3185 tree fn;
3186 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3187
3188 if (!fn)
3189 return false;
3190
3191 /* Don't crash on sprintf (str1, "%s"). */
3192 if (!orig)
3193 return false;
3194
dcb7fae2
RB
3195 tree orig_len = NULL_TREE;
3196 if (gimple_call_lhs (stmt))
35770bb2 3197 {
598f7235 3198 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
d7e78447 3199 if (!orig_len)
35770bb2
RB
3200 return false;
3201 }
3202
3203 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3204 gimple_seq stmts = NULL;
355fe088 3205 gimple *repl = gimple_build_call (fn, 2, dest, orig);
01b0acb7
MS
3206
3207 /* Propagate the NO_WARNING bit to avoid issuing the same
3208 warning more than once. */
3209 if (gimple_no_warning_p (stmt))
3210 gimple_set_no_warning (repl, true);
3211
35770bb2
RB
3212 gimple_seq_add_stmt_without_update (&stmts, repl);
3213 if (gimple_call_lhs (stmt))
3214 {
d7e78447
RB
3215 if (!useless_type_conversion_p (integer_type_node,
3216 TREE_TYPE (orig_len)))
3217 orig_len = fold_convert (integer_type_node, orig_len);
3218 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
35770bb2
RB
3219 gimple_seq_add_stmt_without_update (&stmts, repl);
3220 gsi_replace_with_seq_vops (gsi, stmts);
3221 /* gsi now points at the assignment to the lhs, get a
3222 stmt iterator to the memcpy call.
3223 ??? We can't use gsi_for_stmt as that doesn't work when the
3224 CFG isn't built yet. */
3225 gimple_stmt_iterator gsi2 = *gsi;
3226 gsi_prev (&gsi2);
3227 fold_stmt (&gsi2);
3228 }
3229 else
3230 {
3231 gsi_replace_with_seq_vops (gsi, stmts);
3232 fold_stmt (gsi);
3233 }
3234 return true;
3235 }
3236 return false;
3237}
3238
d7e78447
RB
3239/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3240 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3241 attempt to simplify calls with more than 4 arguments.
35770bb2 3242
a104bd88 3243 Return true if simplification was possible, otherwise false. */
d7e78447 3244
a104bd88 3245bool
dcb7fae2 3246gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
d7e78447 3247{
538dd0b7 3248 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
d7e78447
RB
3249 tree dest = gimple_call_arg (stmt, 0);
3250 tree destsize = gimple_call_arg (stmt, 1);
3251 tree fmt = gimple_call_arg (stmt, 2);
3252 tree orig = NULL_TREE;
3253 const char *fmt_str = NULL;
3254
3255 if (gimple_call_num_args (stmt) > 4)
3256 return false;
3257
3258 if (gimple_call_num_args (stmt) == 4)
3259 orig = gimple_call_arg (stmt, 3);
3260
3261 if (!tree_fits_uhwi_p (destsize))
3262 return false;
3263 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3264
3265 /* Check whether the format is a literal string constant. */
3266 fmt_str = c_getstr (fmt);
3267 if (fmt_str == NULL)
3268 return false;
3269
3270 if (!init_target_chars ())
3271 return false;
3272
3273 /* If the format doesn't contain % args or %%, use strcpy. */
3274 if (strchr (fmt_str, target_percent) == NULL)
3275 {
3276 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3277 if (!fn)
3278 return false;
3279
3280 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3281 if (orig)
3282 return false;
3283
3284 /* We could expand this as
3285 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3286 or to
3287 memcpy (str, fmt_with_nul_at_cstm1, cst);
3288 but in the former case that might increase code size
3289 and in the latter case grow .rodata section too much.
3290 So punt for now. */
3291 size_t len = strlen (fmt_str);
3292 if (len >= destlen)
3293 return false;
3294
3295 gimple_seq stmts = NULL;
355fe088 3296 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
d7e78447
RB
3297 gimple_seq_add_stmt_without_update (&stmts, repl);
3298 if (gimple_call_lhs (stmt))
3299 {
3300 repl = gimple_build_assign (gimple_call_lhs (stmt),
3301 build_int_cst (integer_type_node, len));
3302 gimple_seq_add_stmt_without_update (&stmts, repl);
3303 gsi_replace_with_seq_vops (gsi, stmts);
3304 /* gsi now points at the assignment to the lhs, get a
3305 stmt iterator to the memcpy call.
3306 ??? We can't use gsi_for_stmt as that doesn't work when the
3307 CFG isn't built yet. */
3308 gimple_stmt_iterator gsi2 = *gsi;
3309 gsi_prev (&gsi2);
3310 fold_stmt (&gsi2);
3311 }
3312 else
3313 {
3314 gsi_replace_with_seq_vops (gsi, stmts);
3315 fold_stmt (gsi);
3316 }
3317 return true;
3318 }
3319
3320 /* If the format is "%s", use strcpy; the result, if used, becomes the length of ORIG. */
3321 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3322 {
3323 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3324 if (!fn)
3325 return false;
3326
3327 /* Don't crash on snprintf (str1, cst, "%s"). */
3328 if (!orig)
3329 return false;
3330
598f7235 3331 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
af9db3a7 3332 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
dcb7fae2 3333 return false;
d7e78447
RB
3334
3335 /* We could expand this as
3336 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3337 or to
3338 memcpy (str1, str2_with_nul_at_cstm1, cst);
3339 but in the former case that might increase code size
3340 and in the latter case grow .rodata section too much.
3341 So punt for now. */
3342 if (compare_tree_int (orig_len, destlen) >= 0)
3343 return false;
3344
3345 /* Convert snprintf (str1, cst, "%s", str2) into
3346 strcpy (str1, str2) if strlen (str2) < cst. */
3347 gimple_seq stmts = NULL;
355fe088 3348 gimple *repl = gimple_build_call (fn, 2, dest, orig);
d7e78447
RB
3349 gimple_seq_add_stmt_without_update (&stmts, repl);
3350 if (gimple_call_lhs (stmt))
3351 {
3352 if (!useless_type_conversion_p (integer_type_node,
3353 TREE_TYPE (orig_len)))
3354 orig_len = fold_convert (integer_type_node, orig_len);
3355 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3356 gimple_seq_add_stmt_without_update (&stmts, repl);
3357 gsi_replace_with_seq_vops (gsi, stmts);
3358 /* gsi now points at the assignment to the lhs, get a
3359 stmt iterator to the strcpy call.
3360 ??? We can't use gsi_for_stmt as that doesn't work when the
3361 CFG isn't built yet. */
3362 gimple_stmt_iterator gsi2 = *gsi;
3363 gsi_prev (&gsi2);
3364 fold_stmt (&gsi2);
3365 }
3366 else
3367 {
3368 gsi_replace_with_seq_vops (gsi, stmts);
3369 fold_stmt (gsi);
3370 }
3371 return true;
3372 }
3373 return false;
3374}
35770bb2 3375
edd7ae68
RB
3376/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
3377 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3378 more than 3 arguments, and ARG may be null in the 2-argument case.
3379
3380 Return true if simplification was possible, otherwise false.
3381 FCODE is the BUILT_IN_* code of the function to be
3382 simplified. */
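/* A sketch of the folds implemented below, with fp, str and c as
   illustrative names; all of them require the call's result to be unused:
     fprintf (fp, "abc")       ->  fputs ("abc", fp)
     fprintf (fp, "%s", str)   ->  fputs (str, fp)
     fprintf (fp, "%c", c)     ->  fputc (c, fp)
   and fprintf (fp, "") is removed altogether.  */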
3383
3384static bool
3385gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3386 tree fp, tree fmt, tree arg,
3387 enum built_in_function fcode)
3388{
3389 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3390 tree fn_fputc, fn_fputs;
3391 const char *fmt_str = NULL;
3392
3393 /* If the return value is used, don't do the transformation. */
3394 if (gimple_call_lhs (stmt) != NULL_TREE)
3395 return false;
3396
3397 /* Check whether the format is a literal string constant. */
3398 fmt_str = c_getstr (fmt);
3399 if (fmt_str == NULL)
3400 return false;
3401
3402 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3403 {
3404 /* If we're using an unlocked function, assume the other
3405 unlocked functions exist explicitly. */
3406 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3407 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3408 }
3409 else
3410 {
3411 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3412 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3413 }
3414
3415 if (!init_target_chars ())
3416 return false;
3417
3418 /* If the format doesn't contain % args or %%, use fputs. */
3419 if (strchr (fmt_str, target_percent) == NULL)
3420 {
3421 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3422 && arg)
3423 return false;
3424
3425 /* If the format specifier was "", fprintf does nothing. */
3426 if (fmt_str[0] == '\0')
3427 {
3428 replace_call_with_value (gsi, NULL_TREE);
3429 return true;
3430 }
3431
3432 /* When "string" doesn't contain %, replace all cases of
3433 fprintf (fp, string) with fputs (string, fp). The fputs
3434 builtin will take care of special cases like length == 1. */
3435 if (fn_fputs)
3436 {
3437 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3438 replace_call_with_call_and_fold (gsi, repl);
3439 return true;
3440 }
3441 }
3442
3443 /* The other optimizations can be done only on the non-va_list variants. */
3444 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3445 return false;
3446
3447 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3448 else if (strcmp (fmt_str, target_percent_s) == 0)
3449 {
3450 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3451 return false;
3452 if (fn_fputs)
3453 {
3454 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3455 replace_call_with_call_and_fold (gsi, repl);
3456 return true;
3457 }
3458 }
3459
3460 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3461 else if (strcmp (fmt_str, target_percent_c) == 0)
3462 {
3463 if (!arg
3464 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3465 return false;
3466 if (fn_fputc)
3467 {
3468 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3469 replace_call_with_call_and_fold (gsi, repl);
3470 return true;
3471 }
3472 }
3473
3474 return false;
3475}
3476
ad03a744
RB
3477/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3478 FMT and ARG are the arguments to the call; we don't fold cases with
3479 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3480
3481 Return true if simplification was possible, otherwise false.
3482 FCODE is the BUILT_IN_* code of the function to be
3483 simplified. */
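/* A sketch of the folds implemented below, with str and c as illustrative
   names; all of them require the call's result to be unused:
     printf ("x")           ->  putchar ('x')
     printf ("abc\n")       ->  puts ("abc")
     printf ("%s\n", str)   ->  puts (str)
     printf ("%c", c)       ->  putchar (c)
   and printf ("") is removed altogether.  */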
3484
3485static bool
3486gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3487 tree arg, enum built_in_function fcode)
3488{
3489 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3490 tree fn_putchar, fn_puts, newarg;
3491 const char *fmt_str = NULL;
3492
3493 /* If the return value is used, don't do the transformation. */
3494 if (gimple_call_lhs (stmt) != NULL_TREE)
3495 return false;
3496
3497 /* Check whether the format is a literal string constant. */
3498 fmt_str = c_getstr (fmt);
3499 if (fmt_str == NULL)
3500 return false;
3501
3502 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3503 {
3504 /* If we're using an unlocked function, assume the other
3505 unlocked functions exist explicitly. */
3506 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3507 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3508 }
3509 else
3510 {
3511 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3512 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3513 }
3514
3515 if (!init_target_chars ())
3516 return false;
3517
3518 if (strcmp (fmt_str, target_percent_s) == 0
3519 || strchr (fmt_str, target_percent) == NULL)
3520 {
3521 const char *str;
3522
3523 if (strcmp (fmt_str, target_percent_s) == 0)
3524 {
3525 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3526 return false;
3527
3528 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3529 return false;
3530
3531 str = c_getstr (arg);
3532 if (str == NULL)
3533 return false;
3534 }
3535 else
3536 {
3537 /* The format specifier doesn't contain any '%' characters. */
3538 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3539 && arg)
3540 return false;
3541 str = fmt_str;
3542 }
3543
3544 /* If the string was "", printf does nothing. */
3545 if (str[0] == '\0')
3546 {
3547 replace_call_with_value (gsi, NULL_TREE);
3548 return true;
3549 }
3550
3551 /* If the string has length of 1, call putchar. */
3552 if (str[1] == '\0')
3553 {
3554 /* Given printf ("c"), where c is any single character,
3555 convert "c"[0] to an int and pass that to the replacement
3556 function. */
3557 newarg = build_int_cst (integer_type_node, str[0]);
3558 if (fn_putchar)
3559 {
3560 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3561 replace_call_with_call_and_fold (gsi, repl);
3562 return true;
3563 }
3564 }
3565 else
3566 {
3567 /* If the string was "string\n", call puts("string"). */
3568 size_t len = strlen (str);
3569 if ((unsigned char)str[len - 1] == target_newline
3570 && (size_t) (int) len == len
3571 && (int) len > 0)
3572 {
3573 char *newstr;
ad03a744
RB
3574
3575 /* Create a NUL-terminated string that's one char shorter
3576 than the original, stripping off the trailing '\n'. */
a353fec4 3577 newstr = xstrdup (str);
ad03a744 3578 newstr[len - 1] = '\0';
a353fec4
BE
3579 newarg = build_string_literal (len, newstr);
3580 free (newstr);
ad03a744
RB
3581 if (fn_puts)
3582 {
3583 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3584 replace_call_with_call_and_fold (gsi, repl);
3585 return true;
3586 }
3587 }
3588 else
3589 /* We'd like to arrange to call fputs(string,stdout) here,
3590 but we need stdout and don't have a way to get it yet. */
3591 return false;
3592 }
3593 }
3594
3595 /* The other optimizations can be done only on the non-va_list variants. */
3596 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3597 return false;
3598
3599 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3600 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3601 {
3602 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3603 return false;
3604 if (fn_puts)
3605 {
3606 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3607 replace_call_with_call_and_fold (gsi, repl);
3608 return true;
3609 }
3610 }
3611
3612 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3613 else if (strcmp (fmt_str, target_percent_c) == 0)
3614 {
3615 if (!arg || ! useless_type_conversion_p (integer_type_node,
3616 TREE_TYPE (arg)))
3617 return false;
3618 if (fn_putchar)
3619 {
3620 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3621 replace_call_with_call_and_fold (gsi, repl);
3622 return true;
3623 }
3624 }
3625
3626 return false;
3627}
3628
edd7ae68 3629
fef5a0d9
RB
3630
3631/* Fold a call to __builtin_strlen when its result can be computed or bounded. */
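/* For example, strlen (p) folds to the constant 3 when p is known to point
   to "abc"; when only a range of possible lengths is known, the call is
   kept but the range is recorded on its lhs via set_range_info.  */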
3632
3633static bool
dcb7fae2 3634gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 3635{
355fe088 3636 gimple *stmt = gsi_stmt (*gsi);
e08341bb 3637 tree arg = gimple_call_arg (stmt, 0);
c42d0aa0
MS
3638
3639 wide_int minlen;
3640 wide_int maxlen;
3641
e08341bb
MS
3642 /* Set to non-null if ARG refers to an unterminated array. */
3643 tree nonstr;
c42d0aa0 3644 tree lenrange[2];
e08341bb 3645 if (!get_range_strlen (arg, lenrange, 1, true, &nonstr)
78125561 3646 && !nonstr
c42d0aa0
MS
3647 && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
3648 && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
3649 {
3650 /* The range of lengths refers to either a single constant
3651 string or to the longest and shortest constant string
3652 referenced by the argument of the strlen() call, or to
3653 the strings that can possibly be stored in the arrays
3654 the argument refers to. */
3655 minlen = wi::to_wide (lenrange[0]);
3656 maxlen = wi::to_wide (lenrange[1]);
3657 }
3658 else
3659 {
3660 unsigned prec = TYPE_PRECISION (sizetype);
3661
3662 minlen = wi::shwi (0, prec);
3663 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3664 }
3665
3666 if (minlen == maxlen)
3667 {
3668 lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
3669 true, GSI_SAME_STMT);
3670 replace_call_with_value (gsi, lenrange[0]);
3671 return true;
3672 }
3673
a7bf6c08
MS
3674 if (tree lhs = gimple_call_lhs (stmt))
3675 if (TREE_CODE (lhs) == SSA_NAME
3676 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
3677 set_range_info (lhs, VR_RANGE, minlen, maxlen);
c42d0aa0
MS
3678
3679 return false;
cbdd87d4
RG
3680}
3681
48126138
NS
3682/* Fold a call to __builtin_acc_on_device. */
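/* A sketch of the expansion performed below, where val_host and val_dev
   are the values selected for the host or accelerator compiler:
     acc_on_device (arg)  ->  (arg == val_host) | (arg == val_dev)  */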
3683
3684static bool
3685gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3686{
3687 /* Defer folding until we know which compiler we're in. */
3688 if (symtab->state != EXPANSION)
3689 return false;
3690
3691 unsigned val_host = GOMP_DEVICE_HOST;
3692 unsigned val_dev = GOMP_DEVICE_NONE;
3693
3694#ifdef ACCEL_COMPILER
3695 val_host = GOMP_DEVICE_NOT_HOST;
3696 val_dev = ACCEL_COMPILER_acc_device;
3697#endif
3698
3699 location_t loc = gimple_location (gsi_stmt (*gsi));
3700
3701 tree host_eq = make_ssa_name (boolean_type_node);
3702 gimple *host_ass = gimple_build_assign
3703 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3704 gimple_set_location (host_ass, loc);
3705 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3706
3707 tree dev_eq = make_ssa_name (boolean_type_node);
3708 gimple *dev_ass = gimple_build_assign
3709 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3710 gimple_set_location (dev_ass, loc);
3711 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3712
3713 tree result = make_ssa_name (boolean_type_node);
3714 gimple *result_ass = gimple_build_assign
3715 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3716 gimple_set_location (result_ass, loc);
3717 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3718
3719 replace_call_with_value (gsi, result);
3720
3721 return true;
3722}
cbdd87d4 3723
fe75f732
PK
3724/* Fold realloc (0, n) -> malloc (n). */
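/* For example, p = realloc (NULL, n) becomes p = malloc (n); this is safe
   because realloc with a null pointer is specified to behave like malloc.  */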
3725
3726static bool
3727gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3728{
3729 gimple *stmt = gsi_stmt (*gsi);
3730 tree arg = gimple_call_arg (stmt, 0);
3731 tree size = gimple_call_arg (stmt, 1);
3732
3733 if (operand_equal_p (arg, null_pointer_node, 0))
3734 {
3735 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3736 if (fn_malloc)
3737 {
3738 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3739 replace_call_with_call_and_fold (gsi, repl);
3740 return true;
3741 }
3742 }
3743 return false;
3744}
3745
dcb7fae2
RB
3746/* Fold the non-target builtin at *GSI and return whether any simplification
3747 was made. */
cbdd87d4 3748
fef5a0d9 3749static bool
dcb7fae2 3750gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 3751{
538dd0b7 3752 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 3753 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 3754
dcb7fae2
RB
3755 /* Give up for always_inline inline builtins until they are
3756 inlined. */
3757 if (avoid_folding_inline_builtin (callee))
3758 return false;
cbdd87d4 3759
edd7ae68
RB
3760 unsigned n = gimple_call_num_args (stmt);
3761 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3762 switch (fcode)
cbdd87d4 3763 {
b3d8d88e
MS
3764 case BUILT_IN_BCMP:
3765 return gimple_fold_builtin_bcmp (gsi);
3766 case BUILT_IN_BCOPY:
3767 return gimple_fold_builtin_bcopy (gsi);
dcb7fae2 3768 case BUILT_IN_BZERO:
b3d8d88e
MS
3769 return gimple_fold_builtin_bzero (gsi);
3770
dcb7fae2
RB
3771 case BUILT_IN_MEMSET:
3772 return gimple_fold_builtin_memset (gsi,
3773 gimple_call_arg (stmt, 1),
3774 gimple_call_arg (stmt, 2));
dcb7fae2
RB
3775 case BUILT_IN_MEMCPY:
3776 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3777 gimple_call_arg (stmt, 1), 0);
3778 case BUILT_IN_MEMPCPY:
3779 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3780 gimple_call_arg (stmt, 1), 1);
3781 case BUILT_IN_MEMMOVE:
3782 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3783 gimple_call_arg (stmt, 1), 3);
3784 case BUILT_IN_SPRINTF_CHK:
3785 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 3786 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
3787 case BUILT_IN_STRCAT_CHK:
3788 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
3789 case BUILT_IN_STRNCAT_CHK:
3790 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 3791 case BUILT_IN_STRLEN:
dcb7fae2 3792 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 3793 case BUILT_IN_STRCPY:
dcb7fae2 3794 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 3795 gimple_call_arg (stmt, 0),
dcb7fae2 3796 gimple_call_arg (stmt, 1));
cbdd87d4 3797 case BUILT_IN_STRNCPY:
dcb7fae2 3798 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
3799 gimple_call_arg (stmt, 0),
3800 gimple_call_arg (stmt, 1),
dcb7fae2 3801 gimple_call_arg (stmt, 2));
9a7eefec 3802 case BUILT_IN_STRCAT:
dcb7fae2
RB
3803 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3804 gimple_call_arg (stmt, 1));
ad03a744
RB
3805 case BUILT_IN_STRNCAT:
3806 return gimple_fold_builtin_strncat (gsi);
71dea1dd 3807 case BUILT_IN_INDEX:
912d9ec3 3808 case BUILT_IN_STRCHR:
71dea1dd
WD
3809 return gimple_fold_builtin_strchr (gsi, false);
3810 case BUILT_IN_RINDEX:
3811 case BUILT_IN_STRRCHR:
3812 return gimple_fold_builtin_strchr (gsi, true);
c8952930
JJ
3813 case BUILT_IN_STRSTR:
3814 return gimple_fold_builtin_strstr (gsi);
a918bfbf 3815 case BUILT_IN_STRCMP:
8b0b334a 3816 case BUILT_IN_STRCMP_EQ:
a918bfbf
ML
3817 case BUILT_IN_STRCASECMP:
3818 case BUILT_IN_STRNCMP:
8b0b334a 3819 case BUILT_IN_STRNCMP_EQ:
a918bfbf
ML
3820 case BUILT_IN_STRNCASECMP:
3821 return gimple_fold_builtin_string_compare (gsi);
488c6247
ML
3822 case BUILT_IN_MEMCHR:
3823 return gimple_fold_builtin_memchr (gsi);
cbdd87d4 3824 case BUILT_IN_FPUTS:
dcb7fae2
RB
3825 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3826 gimple_call_arg (stmt, 1), false);
cbdd87d4 3827 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
3828 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3829 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
3830 case BUILT_IN_MEMCPY_CHK:
3831 case BUILT_IN_MEMPCPY_CHK:
3832 case BUILT_IN_MEMMOVE_CHK:
3833 case BUILT_IN_MEMSET_CHK:
dcb7fae2 3834 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
3835 gimple_call_arg (stmt, 0),
3836 gimple_call_arg (stmt, 1),
3837 gimple_call_arg (stmt, 2),
3838 gimple_call_arg (stmt, 3),
edd7ae68 3839 fcode);
2625bb5d
RB
3840 case BUILT_IN_STPCPY:
3841 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
3842 case BUILT_IN_STRCPY_CHK:
3843 case BUILT_IN_STPCPY_CHK:
dcb7fae2 3844 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
3845 gimple_call_arg (stmt, 0),
3846 gimple_call_arg (stmt, 1),
3847 gimple_call_arg (stmt, 2),
edd7ae68 3848 fcode);
cbdd87d4 3849 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 3850 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
3851 return gimple_fold_builtin_stxncpy_chk (gsi,
3852 gimple_call_arg (stmt, 0),
3853 gimple_call_arg (stmt, 1),
3854 gimple_call_arg (stmt, 2),
3855 gimple_call_arg (stmt, 3),
edd7ae68 3856 fcode);
cbdd87d4
RG
3857 case BUILT_IN_SNPRINTF_CHK:
3858 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 3859 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
a104bd88 3860
edd7ae68
RB
3861 case BUILT_IN_FPRINTF:
3862 case BUILT_IN_FPRINTF_UNLOCKED:
3863 case BUILT_IN_VFPRINTF:
3864 if (n == 2 || n == 3)
3865 return gimple_fold_builtin_fprintf (gsi,
3866 gimple_call_arg (stmt, 0),
3867 gimple_call_arg (stmt, 1),
3868 n == 3
3869 ? gimple_call_arg (stmt, 2)
3870 : NULL_TREE,
3871 fcode);
3872 break;
3873 case BUILT_IN_FPRINTF_CHK:
3874 case BUILT_IN_VFPRINTF_CHK:
3875 if (n == 3 || n == 4)
3876 return gimple_fold_builtin_fprintf (gsi,
3877 gimple_call_arg (stmt, 0),
3878 gimple_call_arg (stmt, 2),
3879 n == 4
3880 ? gimple_call_arg (stmt, 3)
3881 : NULL_TREE,
3882 fcode);
3883 break;
ad03a744
RB
3884 case BUILT_IN_PRINTF:
3885 case BUILT_IN_PRINTF_UNLOCKED:
3886 case BUILT_IN_VPRINTF:
3887 if (n == 1 || n == 2)
3888 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3889 n == 2
3890 ? gimple_call_arg (stmt, 1)
3891 : NULL_TREE, fcode);
3892 break;
3893 case BUILT_IN_PRINTF_CHK:
3894 case BUILT_IN_VPRINTF_CHK:
3895 if (n == 2 || n == 3)
3896 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3897 n == 3
3898 ? gimple_call_arg (stmt, 2)
3899 : NULL_TREE, fcode);
242a37f1 3900 break;
48126138
NS
3901 case BUILT_IN_ACC_ON_DEVICE:
3902 return gimple_fold_builtin_acc_on_device (gsi,
3903 gimple_call_arg (stmt, 0));
fe75f732
PK
3904 case BUILT_IN_REALLOC:
3905 return gimple_fold_builtin_realloc (gsi);
3906
fef5a0d9
RB
3907 default:;
3908 }
3909
3910 /* Try the generic builtin folder. */
3911 bool ignore = (gimple_call_lhs (stmt) == NULL);
3912 tree result = fold_call_stmt (stmt, ignore);
3913 if (result)
3914 {
3915 if (ignore)
3916 STRIP_NOPS (result);
3917 else
3918 result = fold_convert (gimple_call_return_type (stmt), result);
3919 if (!update_call_from_tree (gsi, result))
3920 gimplify_and_update_call_from_tree (gsi, result);
3921 return true;
3922 }
3923
3924 return false;
3925}
3926
451e8dae
NS
3927/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3928 function calls to constants, where possible. */
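/* For example, IFN_GOACC_DIM_POS (axis) folds to 0 when that axis is known
   to have size 1, and IFN_GOACC_DIM_SIZE (axis) folds to the compile-time
   size when the size is not dynamic.  */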
3929
3930static tree
3931fold_internal_goacc_dim (const gimple *call)
3932{
629b3d75
MJ
3933 int axis = oacc_get_ifn_dim_arg (call);
3934 int size = oacc_get_fn_dim_size (current_function_decl, axis);
451e8dae 3935 tree result = NULL_TREE;
67d2229e 3936 tree type = TREE_TYPE (gimple_call_lhs (call));
451e8dae 3937
67d2229e 3938 switch (gimple_call_internal_fn (call))
451e8dae 3939 {
67d2229e
TV
3940 case IFN_GOACC_DIM_POS:
3941 /* If the size is 1, we know the answer. */
3942 if (size == 1)
3943 result = build_int_cst (type, 0);
3944 break;
3945 case IFN_GOACC_DIM_SIZE:
3946 /* If the size is not dynamic, we know the answer. */
3947 if (size)
3948 result = build_int_cst (type, size);
3949 break;
3950 default:
3951 break;
451e8dae
NS
3952 }
3953
3954 return result;
3955}
3956
849a76a5
JJ
3957/* Return true if STMT is an __atomic_compare_exchange_N call which is
3958 suitable for conversion into ATOMIC_COMPARE_EXCHANGE when the second
3959 argument is &var where var is only addressable because of such calls. */
3960
3961bool
3962optimize_atomic_compare_exchange_p (gimple *stmt)
3963{
3964 if (gimple_call_num_args (stmt) != 6
3965 || !flag_inline_atomics
3966 || !optimize
45b2222a 3967 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
849a76a5
JJ
3968 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3969 || !gimple_vdef (stmt)
3970 || !gimple_vuse (stmt))
3971 return false;
3972
3973 tree fndecl = gimple_call_fndecl (stmt);
3974 switch (DECL_FUNCTION_CODE (fndecl))
3975 {
3976 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3977 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3978 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3979 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3980 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3981 break;
3982 default:
3983 return false;
3984 }
3985
3986 tree expected = gimple_call_arg (stmt, 1);
3987 if (TREE_CODE (expected) != ADDR_EXPR
1de3c940
JJ
3988 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3989 return false;
3990
3991 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3992 if (!is_gimple_reg_type (etype)
849a76a5 3993 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
1de3c940
JJ
3994 || TREE_THIS_VOLATILE (etype)
3995 || VECTOR_TYPE_P (etype)
3996 || TREE_CODE (etype) == COMPLEX_TYPE
3997 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3998 might not preserve all the bits. See PR71716. */
3999 || SCALAR_FLOAT_TYPE_P (etype)
73a699ae
RS
4000 || maybe_ne (TYPE_PRECISION (etype),
4001 GET_MODE_BITSIZE (TYPE_MODE (etype))))
849a76a5
JJ
4002 return false;
4003
4004 tree weak = gimple_call_arg (stmt, 3);
4005 if (!integer_zerop (weak) && !integer_onep (weak))
4006 return false;
4007
4008 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4009 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4010 machine_mode mode = TYPE_MODE (itype);
4011
4012 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
4013 == CODE_FOR_nothing
4014 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
4015 return false;
4016
cf098191 4017 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
849a76a5
JJ
4018 return false;
4019
4020 return true;
4021}
4022
4023/* Fold
4024 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
4025 into
4026 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
4027 i = IMAGPART_EXPR <t>;
4028 r = (_Bool) i;
4029 e = REALPART_EXPR <t>; */
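/* At the source level this corresponds to calls such as
     bool ok = __atomic_compare_exchange_n (p, &e, d, weak, success, failure);
   (names illustrative), where &e must satisfy the conditions checked in
   optimize_atomic_compare_exchange_p above, e.g. e being a local scalar
   that is only addressable because of such calls.  */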
4030
4031void
4032fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
4033{
4034 gimple *stmt = gsi_stmt (*gsi);
4035 tree fndecl = gimple_call_fndecl (stmt);
4036 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4037 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4038 tree ctype = build_complex_type (itype);
4039 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
cc195d46
JJ
4040 bool throws = false;
4041 edge e = NULL;
849a76a5
JJ
4042 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4043 expected);
4044 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4045 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4046 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
4047 {
4048 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4049 build1 (VIEW_CONVERT_EXPR, itype,
4050 gimple_assign_lhs (g)));
4051 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4052 }
4053 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4054 + int_size_in_bytes (itype);
4055 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4056 gimple_call_arg (stmt, 0),
4057 gimple_assign_lhs (g),
4058 gimple_call_arg (stmt, 2),
4059 build_int_cst (integer_type_node, flag),
4060 gimple_call_arg (stmt, 4),
4061 gimple_call_arg (stmt, 5));
4062 tree lhs = make_ssa_name (ctype);
4063 gimple_call_set_lhs (g, lhs);
4064 gimple_set_vdef (g, gimple_vdef (stmt));
4065 gimple_set_vuse (g, gimple_vuse (stmt));
4066 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
cc195d46 4067 tree oldlhs = gimple_call_lhs (stmt);
36bbc05d 4068 if (stmt_can_throw_internal (cfun, stmt))
cc195d46
JJ
4069 {
4070 throws = true;
4071 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4072 }
4073 gimple_call_set_nothrow (as_a <gcall *> (g),
4074 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4075 gimple_call_set_lhs (stmt, NULL_TREE);
4076 gsi_replace (gsi, g, true);
4077 if (oldlhs)
849a76a5 4078 {
849a76a5
JJ
4079 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4080 build1 (IMAGPART_EXPR, itype, lhs));
cc195d46
JJ
4081 if (throws)
4082 {
4083 gsi_insert_on_edge_immediate (e, g);
4084 *gsi = gsi_for_stmt (g);
4085 }
4086 else
4087 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4088 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4089 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5 4090 }
849a76a5
JJ
4091 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4092 build1 (REALPART_EXPR, itype, lhs));
cc195d46
JJ
4093 if (throws && oldlhs == NULL_TREE)
4094 {
4095 gsi_insert_on_edge_immediate (e, g);
4096 *gsi = gsi_for_stmt (g);
4097 }
4098 else
4099 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5
JJ
4100 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4101 {
4102 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4103 VIEW_CONVERT_EXPR,
4104 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4105 gimple_assign_lhs (g)));
4106 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4107 }
4108 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4109 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4110 *gsi = gsiret;
4111}
4112
1304953e
JJ
4113/* Return true if ARG0 CODE ARG1, computed in infinite signed precision,
4114 doesn't fit into TYPE. The overflow test is done regardless of
4115 -fwrapv, and even for unsigned types. */
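/* For example, with TYPE = unsigned char, 200 + 100 = 300 needs nine bits
   and so overflows, and 10 - 20 is negative, which also counts as overflow
   for an unsigned TYPE.  */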
4116
4117bool
4118arith_overflowed_p (enum tree_code code, const_tree type,
4119 const_tree arg0, const_tree arg1)
4120{
1304953e
JJ
4121 widest2_int warg0 = widest2_int_cst (arg0);
4122 widest2_int warg1 = widest2_int_cst (arg1);
4123 widest2_int wres;
4124 switch (code)
4125 {
4126 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4127 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4128 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4129 default: gcc_unreachable ();
4130 }
4131 signop sign = TYPE_SIGN (type);
4132 if (sign == UNSIGNED && wi::neg_p (wres))
4133 return true;
4134 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4135}
4136
cbdd87d4
RG
4137/* Attempt to fold a call statement referenced by the statement iterator GSI.
4138 The statement may be replaced by another statement, e.g., if the call
4139 simplifies to a constant value. Return true if any changes were made.
4140 It is assumed that the operands have been previously folded. */
4141
e021c122 4142static bool
ceeffab0 4143gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 4144{
538dd0b7 4145 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 4146 tree callee;
e021c122
RG
4147 bool changed = false;
4148 unsigned i;
cbdd87d4 4149
e021c122
RG
4150 /* Fold *& in call arguments. */
4151 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4152 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4153 {
4154 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4155 if (tmp)
4156 {
4157 gimple_call_set_arg (stmt, i, tmp);
4158 changed = true;
4159 }
4160 }
3b45a007
RG
4161
4162 /* Check for virtual calls that became direct calls. */
4163 callee = gimple_call_fn (stmt);
25583c4f 4164 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 4165 {
49c471e3
MJ
4166 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4167 {
450ad0cd
JH
4168 if (dump_file && virtual_method_call_p (callee)
4169 && !possible_polymorphic_call_target_p
6f8091fc
JH
4170 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4171 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
4172 {
4173 fprintf (dump_file,
a70e9985 4174 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
4175 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4176 fprintf (dump_file, " to ");
4177 print_generic_expr (dump_file, callee, TDF_SLIM);
4178 fprintf (dump_file, "\n");
4179 }
4180
49c471e3 4181 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
4182 changed = true;
4183 }
a70e9985 4184 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 4185 {
61dd6a2e
JH
4186 bool final;
4187 vec <cgraph_node *>targets
058d0a90 4188 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 4189 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 4190 {
a70e9985 4191 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
4192 if (dump_enabled_p ())
4193 {
4f5b9c80 4194 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
2b5f0895
XDL
4195 "folding virtual function call to %s\n",
4196 targets.length () == 1
4197 ? targets[0]->name ()
4198 : "__builtin_unreachable");
4199 }
61dd6a2e 4200 if (targets.length () == 1)
cf3e5a89 4201 {
18954840
JJ
4202 tree fndecl = targets[0]->decl;
4203 gimple_call_set_fndecl (stmt, fndecl);
cf3e5a89 4204 changed = true;
18954840
JJ
4205 /* If changing the call to __cxa_pure_virtual
4206 or similar noreturn function, adjust gimple_call_fntype
4207 too. */
865f7046 4208 if (gimple_call_noreturn_p (stmt)
18954840
JJ
4209 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4210 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4211 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4212 == void_type_node))
4213 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
a70e9985 4214 /* If the call becomes noreturn, remove the lhs. */
3cee7e4e
MP
4215 if (lhs
4216 && gimple_call_noreturn_p (stmt)
18954840 4217 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
abd3a68c 4218 || should_remove_lhs_p (lhs)))
a70e9985
JJ
4219 {
4220 if (TREE_CODE (lhs) == SSA_NAME)
4221 {
b731b390 4222 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 4223 tree def = get_or_create_ssa_default_def (cfun, var);
355fe088 4224 gimple *new_stmt = gimple_build_assign (lhs, def);
a70e9985
JJ
4225 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4226 }
4227 gimple_call_set_lhs (stmt, NULL_TREE);
4228 }
0b986c6a 4229 maybe_remove_unused_call_args (cfun, stmt);
cf3e5a89 4230 }
a70e9985 4231 else
cf3e5a89
JJ
4232 {
4233 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
355fe088 4234 gimple *new_stmt = gimple_build_call (fndecl, 0);
cf3e5a89 4235 gimple_set_location (new_stmt, gimple_location (stmt));
2da6996c
RB
4236 /* If the call had a SSA name as lhs morph that into
4237 an uninitialized value. */
a70e9985
JJ
4238 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4239 {
b731b390 4240 tree var = create_tmp_var (TREE_TYPE (lhs));
2da6996c
RB
4241 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4242 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4243 set_ssa_default_def (cfun, var, lhs);
42e52a51 4244 }
2da6996c
RB
4245 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4246 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4247 gsi_replace (gsi, new_stmt, false);
cf3e5a89
JJ
4248 return true;
4249 }
e021c122 4250 }
49c471e3 4251 }
e021c122 4252 }
49c471e3 4253
f2d3d07e
RH
4254 /* Check for indirect calls that became direct calls, and then
4255 no longer require a static chain. */
4256 if (gimple_call_chain (stmt))
4257 {
4258 tree fn = gimple_call_fndecl (stmt);
4259 if (fn && !DECL_STATIC_CHAIN (fn))
4260 {
4261 gimple_call_set_chain (stmt, NULL);
4262 changed = true;
4263 }
4264 else
4265 {
4266 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4267 if (tmp)
4268 {
4269 gimple_call_set_chain (stmt, tmp);
4270 changed = true;
4271 }
4272 }
4273 }
4274
e021c122
RG
4275 if (inplace)
4276 return changed;
4277
4278 /* Check for builtins that CCP can handle using information not
4279 available in the generic fold routines. */
fef5a0d9
RB
4280 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4281 {
4282 if (gimple_fold_builtin (gsi))
4283 changed = true;
4284 }
4285 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 4286 {
ea679d55 4287 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 4288 }
368b454d 4289 else if (gimple_call_internal_p (stmt))
ed9c79e1 4290 {
368b454d
JJ
4291 enum tree_code subcode = ERROR_MARK;
4292 tree result = NULL_TREE;
1304953e
JJ
4293 bool cplx_result = false;
4294 tree overflow = NULL_TREE;
368b454d
JJ
4295 switch (gimple_call_internal_fn (stmt))
4296 {
4297 case IFN_BUILTIN_EXPECT:
4298 result = fold_builtin_expect (gimple_location (stmt),
4299 gimple_call_arg (stmt, 0),
4300 gimple_call_arg (stmt, 1),
1e9168b2
ML
4301 gimple_call_arg (stmt, 2),
4302 NULL_TREE);
368b454d 4303 break;
0e82f089 4304 case IFN_UBSAN_OBJECT_SIZE:
ca1150f0
JJ
4305 {
4306 tree offset = gimple_call_arg (stmt, 1);
4307 tree objsize = gimple_call_arg (stmt, 2);
4308 if (integer_all_onesp (objsize)
4309 || (TREE_CODE (offset) == INTEGER_CST
4310 && TREE_CODE (objsize) == INTEGER_CST
4311 && tree_int_cst_le (offset, objsize)))
4312 {
4313 replace_call_with_value (gsi, NULL_TREE);
4314 return true;
4315 }
4316 }
4317 break;
4318 case IFN_UBSAN_PTR:
4319 if (integer_zerop (gimple_call_arg (stmt, 1)))
0e82f089 4320 {
ca1150f0 4321 replace_call_with_value (gsi, NULL_TREE);
0e82f089
MP
4322 return true;
4323 }
4324 break;
ca1150f0
JJ
4325 case IFN_UBSAN_BOUNDS:
4326 {
4327 tree index = gimple_call_arg (stmt, 1);
4328 tree bound = gimple_call_arg (stmt, 2);
4329 if (TREE_CODE (index) == INTEGER_CST
4330 && TREE_CODE (bound) == INTEGER_CST)
4331 {
4332 index = fold_convert (TREE_TYPE (bound), index);
4333 if (TREE_CODE (index) == INTEGER_CST
4334 && tree_int_cst_le (index, bound))
4335 {
4336 replace_call_with_value (gsi, NULL_TREE);
4337 return true;
4338 }
4339 }
4340 }
4341 break;
451e8dae
NS
4342 case IFN_GOACC_DIM_SIZE:
4343 case IFN_GOACC_DIM_POS:
4344 result = fold_internal_goacc_dim (stmt);
4345 break;
368b454d
JJ
4346 case IFN_UBSAN_CHECK_ADD:
4347 subcode = PLUS_EXPR;
4348 break;
4349 case IFN_UBSAN_CHECK_SUB:
4350 subcode = MINUS_EXPR;
4351 break;
4352 case IFN_UBSAN_CHECK_MUL:
4353 subcode = MULT_EXPR;
4354 break;
1304953e
JJ
4355 case IFN_ADD_OVERFLOW:
4356 subcode = PLUS_EXPR;
4357 cplx_result = true;
4358 break;
4359 case IFN_SUB_OVERFLOW:
4360 subcode = MINUS_EXPR;
4361 cplx_result = true;
4362 break;
4363 case IFN_MUL_OVERFLOW:
4364 subcode = MULT_EXPR;
4365 cplx_result = true;
4366 break;
368b454d
JJ
4367 default:
4368 break;
4369 }
4370 if (subcode != ERROR_MARK)
4371 {
4372 tree arg0 = gimple_call_arg (stmt, 0);
4373 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
4374 tree type = TREE_TYPE (arg0);
4375 if (cplx_result)
4376 {
4377 tree lhs = gimple_call_lhs (stmt);
4378 if (lhs == NULL_TREE)
4379 type = NULL_TREE;
4380 else
4381 type = TREE_TYPE (TREE_TYPE (lhs));
4382 }
4383 if (type == NULL_TREE)
4384 ;
368b454d 4385 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
4386 else if (integer_zerop (arg1))
4387 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
4388 /* x = 0 + y; x = 0 * y; */
4389 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 4390 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
4391 /* x = y - y; */
4392 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 4393 result = integer_zero_node;
368b454d 4394 /* x = y * 1; x = 1 * y; */
1304953e
JJ
4395 else if (subcode == MULT_EXPR && integer_onep (arg1))
4396 result = arg0;
4397 else if (subcode == MULT_EXPR && integer_onep (arg0))
4398 result = arg1;
4399 else if (TREE_CODE (arg0) == INTEGER_CST
4400 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 4401 {
1304953e
JJ
4402 if (cplx_result)
4403 result = int_const_binop (subcode, fold_convert (type, arg0),
4404 fold_convert (type, arg1));
4405 else
4406 result = int_const_binop (subcode, arg0, arg1);
4407 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4408 {
4409 if (cplx_result)
4410 overflow = build_one_cst (type);
4411 else
4412 result = NULL_TREE;
4413 }
4414 }
4415 if (result)
4416 {
4417 if (result == integer_zero_node)
4418 result = build_zero_cst (type);
4419 else if (cplx_result && TREE_TYPE (result) != type)
4420 {
4421 if (TREE_CODE (result) == INTEGER_CST)
4422 {
4423 if (arith_overflowed_p (PLUS_EXPR, type, result,
4424 integer_zero_node))
4425 overflow = build_one_cst (type);
4426 }
4427 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4428 && TYPE_UNSIGNED (type))
4429 || (TYPE_PRECISION (type)
4430 < (TYPE_PRECISION (TREE_TYPE (result))
4431 + (TYPE_UNSIGNED (TREE_TYPE (result))
4432 && !TYPE_UNSIGNED (type)))))
4433 result = NULL_TREE;
4434 if (result)
4435 result = fold_convert (type, result);
4436 }
368b454d
JJ
4437 }
4438 }
1304953e 4439
ed9c79e1
JJ
4440 if (result)
4441 {
1304953e
JJ
4442 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4443 result = drop_tree_overflow (result);
4444 if (cplx_result)
4445 {
4446 if (overflow == NULL_TREE)
4447 overflow = build_zero_cst (TREE_TYPE (result));
4448 tree ctype = build_complex_type (TREE_TYPE (result));
4449 if (TREE_CODE (result) == INTEGER_CST
4450 && TREE_CODE (overflow) == INTEGER_CST)
4451 result = build_complex (ctype, result, overflow);
4452 else
4453 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4454 ctype, result, overflow);
4455 }
ed9c79e1
JJ
4456 if (!update_call_from_tree (gsi, result))
4457 gimplify_and_update_call_from_tree (gsi, result);
4458 changed = true;
4459 }
4460 }
3b45a007 4461
e021c122 4462 return changed;
cbdd87d4
RG
4463}
4464
e0ee10ed 4465
89a79e96
RB
4466/* Return true if NAME has a use on STMT. */
4467
4468static bool
355fe088 4469has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
4470{
4471 imm_use_iterator iter;
4472 use_operand_p use_p;
4473 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4474 if (USE_STMT (use_p) == stmt)
4475 return true;
4476 return false;
4477}
4478
e0ee10ed
RB
4479/* Worker for fold_stmt_1 dispatch to pattern based folding with
4480 gimple_simplify.
4481
4482 Replaces *GSI with the simplification result in *RES_OP
4483 and the associated statements in *SEQ. Does the replacement
4484 according to INPLACE and returns true if the operation succeeded. */
4485
4486static bool
4487replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5d75ad95 4488 gimple_match_op *res_op,
e0ee10ed
RB
4489 gimple_seq *seq, bool inplace)
4490{
355fe088 4491 gimple *stmt = gsi_stmt (*gsi);
5d75ad95
RS
4492 tree *ops = res_op->ops;
4493 unsigned int num_ops = res_op->num_ops;
e0ee10ed
RB
4494
4495 /* Play safe and do not allow abnormals to be mentioned in
89a79e96
RB
4496 newly created statements. See also maybe_push_res_to_seq.
4497 As an exception allow such uses if there was a use of the
4498 same SSA name on the old stmt. */
5d75ad95
RS
4499 for (unsigned int i = 0; i < num_ops; ++i)
4500 if (TREE_CODE (ops[i]) == SSA_NAME
4501 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4502 && !has_use_on_stmt (ops[i], stmt))
4503 return false;
4504
4505 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4506 for (unsigned int i = 0; i < 2; ++i)
4507 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4508 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4509 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4510 return false;
e0ee10ed 4511
fec40d06
RS
4512 /* Don't insert new statements when INPLACE is true, even if we could
4513 reuse STMT for the final statement. */
4514 if (inplace && !gimple_seq_empty_p (*seq))
4515 return false;
4516
538dd0b7 4517 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
e0ee10ed 4518 {
5d75ad95
RS
4519 gcc_assert (res_op->code.is_tree_code ());
4520 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
e0ee10ed
RB
4521 /* GIMPLE_CONDs condition may not throw. */
4522 && (!flag_exceptions
4523 || !cfun->can_throw_non_call_exceptions
5d75ad95 4524 || !operation_could_trap_p (res_op->code,
e0ee10ed
RB
4525 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4526 false, NULL_TREE)))
5d75ad95
RS
4527 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4528 else if (res_op->code == SSA_NAME)
538dd0b7 4529 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
e0ee10ed 4530 build_zero_cst (TREE_TYPE (ops[0])));
5d75ad95 4531 else if (res_op->code == INTEGER_CST)
e0ee10ed
RB
4532 {
4533 if (integer_zerop (ops[0]))
538dd0b7 4534 gimple_cond_make_false (cond_stmt);
e0ee10ed 4535 else
538dd0b7 4536 gimple_cond_make_true (cond_stmt);
e0ee10ed
RB
4537 }
4538 else if (!inplace)
4539 {
5d75ad95 4540 tree res = maybe_push_res_to_seq (res_op, seq);
e0ee10ed
RB
4541 if (!res)
4542 return false;
538dd0b7 4543 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
e0ee10ed
RB
4544 build_zero_cst (TREE_TYPE (res)));
4545 }
4546 else
4547 return false;
4548 if (dump_file && (dump_flags & TDF_DETAILS))
4549 {
4550 fprintf (dump_file, "gimple_simplified to ");
4551 if (!gimple_seq_empty_p (*seq))
4552 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4553 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4554 0, TDF_SLIM);
4555 }
4556 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4557 return true;
4558 }
4559 else if (is_gimple_assign (stmt)
5d75ad95 4560 && res_op->code.is_tree_code ())
e0ee10ed
RB
4561 {
4562 if (!inplace
5d75ad95 4563 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
e0ee10ed 4564 {
5d75ad95
RS
4565 maybe_build_generic_op (res_op);
4566 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4567 res_op->op_or_null (0),
4568 res_op->op_or_null (1),
4569 res_op->op_or_null (2));
e0ee10ed
RB
4570 if (dump_file && (dump_flags & TDF_DETAILS))
4571 {
4572 fprintf (dump_file, "gimple_simplified to ");
4573 if (!gimple_seq_empty_p (*seq))
4574 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4575 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4576 0, TDF_SLIM);
4577 }
4578 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4579 return true;
4580 }
4581 }
5d75ad95
RS
4582 else if (res_op->code.is_fn_code ()
4583 && gimple_call_combined_fn (stmt) == res_op->code)
37d486ab 4584 {
5d75ad95
RS
4585 gcc_assert (num_ops == gimple_call_num_args (stmt));
4586 for (unsigned int i = 0; i < num_ops; ++i)
4587 gimple_call_set_arg (stmt, i, ops[i]);
fec40d06
RS
4588 if (dump_file && (dump_flags & TDF_DETAILS))
4589 {
4590 fprintf (dump_file, "gimple_simplified to ");
4591 if (!gimple_seq_empty_p (*seq))
4592 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4593 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4594 }
4595 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
37d486ab
RB
4596 return true;
4597 }
e0ee10ed
RB
4598 else if (!inplace)
4599 {
4600 if (gimple_has_lhs (stmt))
4601 {
4602 tree lhs = gimple_get_lhs (stmt);
5d75ad95 4603 if (!maybe_push_res_to_seq (res_op, seq, lhs))
de665bbd 4604 return false;
e0ee10ed
RB
4605 if (dump_file && (dump_flags & TDF_DETAILS))
4606 {
4607 fprintf (dump_file, "gimple_simplified to ");
4608 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4609 }
4610 gsi_replace_with_seq_vops (gsi, *seq);
4611 return true;
4612 }
4613 else
4614 gcc_unreachable ();
4615 }
4616
4617 return false;
4618}
4619
040292e7
RB
4620/* Canonicalize MEM_REFs invariant address operand after propagation. */
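/* Typical canonicalizations performed here, shown on GIMPLE-like forms with
   illustrative names:
     MEM[&foo.bar, 0]  ->  MEM[&foo, byte offset of bar]
     MEM[&x, 0]        ->  x   (when x is a decl with compatible type,
                                volatility and alignment)
   and TARGET_MEM_REFs with constant indexes are folded via maybe_fold_tmr.  */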
4621
4622static bool
4623maybe_canonicalize_mem_ref_addr (tree *t)
4624{
4625 bool res = false;
4626
4627 if (TREE_CODE (*t) == ADDR_EXPR)
4628 t = &TREE_OPERAND (*t, 0);
4629
f17a223d
RB
4630 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4631 generic vector extension. The actual vector referenced is
4632 view-converted to an array type for this purpose. If the index
4633 is constant the canonical representation in the middle-end is a
4634 BIT_FIELD_REF so re-write the former to the latter here. */
4635 if (TREE_CODE (*t) == ARRAY_REF
4636 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4637 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4638 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4639 {
4640 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4641 if (VECTOR_TYPE_P (vtype))
4642 {
4643 tree low = array_ref_low_bound (*t);
4644 if (TREE_CODE (low) == INTEGER_CST)
4645 {
4646 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4647 {
4648 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4649 wi::to_widest (low));
4650 idx = wi::mul (idx, wi::to_widest
4651 (TYPE_SIZE (TREE_TYPE (*t))));
4652 widest_int ext
4653 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4654 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4655 {
4656 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4657 TREE_TYPE (*t),
4658 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4659 TYPE_SIZE (TREE_TYPE (*t)),
92e29a5e 4660 wide_int_to_tree (bitsizetype, idx));
f17a223d
RB
4661 res = true;
4662 }
4663 }
4664 }
4665 }
4666 }
4667
040292e7
RB
4668 while (handled_component_p (*t))
4669 t = &TREE_OPERAND (*t, 0);
4670
4671 /* Canonicalize MEM [&foo.bar, 0], which appears after propagation
4672 of invariant addresses into a SSA name MEM_REF address. */
4673 if (TREE_CODE (*t) == MEM_REF
4674 || TREE_CODE (*t) == TARGET_MEM_REF)
4675 {
4676 tree addr = TREE_OPERAND (*t, 0);
4677 if (TREE_CODE (addr) == ADDR_EXPR
4678 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4679 || handled_component_p (TREE_OPERAND (addr, 0))))
4680 {
4681 tree base;
a90c8804 4682 poly_int64 coffset;
040292e7
RB
4683 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4684 &coffset);
4685 if (!base)
4686 gcc_unreachable ();
4687
4688 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4689 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4690 TREE_OPERAND (*t, 1),
4691 size_int (coffset));
4692 res = true;
4693 }
4694 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4695 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4696 }
4697
4698 /* Canonicalize back MEM_REFs to plain reference trees if the object
4699 accessed is a decl that has the same access semantics as the MEM_REF. */
4700 if (TREE_CODE (*t) == MEM_REF
4701 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
f3dccf50
RB
4702 && integer_zerop (TREE_OPERAND (*t, 1))
4703 && MR_DEPENDENCE_CLIQUE (*t) == 0)
040292e7
RB
4704 {
4705 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4706 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4707 if (/* Same volatile qualification. */
4708 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4709 /* Same TBAA behavior with -fstrict-aliasing. */
4710 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4711 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4712 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4713 /* Same alignment. */
4714 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4715 /* We have to look out here to not drop a required conversion
4716 from the rhs to the lhs if *t appears on the lhs or vice-versa
4717 if it appears on the rhs. Thus require strict type
4718 compatibility. */
4719 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4720 {
4721 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4722 res = true;
4723 }
4724 }
4725
4726 /* Canonicalize TARGET_MEM_REF in particular with respect to
4727 the indexes becoming constant. */
4728 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4729 {
4730 tree tem = maybe_fold_tmr (*t);
4731 if (tem)
4732 {
4733 *t = tem;
4734 res = true;
4735 }
4736 }
4737
4738 return res;
4739}
4740
cbdd87d4
RG
4741/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4742 distinguishes both cases. */
4743
4744static bool
e0ee10ed 4745fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
cbdd87d4
RG
4746{
4747 bool changed = false;
355fe088 4748 gimple *stmt = gsi_stmt (*gsi);
a8b85ce9 4749 bool nowarning = gimple_no_warning_p (stmt);
cbdd87d4 4750 unsigned i;
a8b85ce9 4751 fold_defer_overflow_warnings ();
cbdd87d4 4752
040292e7
RB
4753 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4754 after propagation.
4755 ??? This shouldn't be done in generic folding but in the
4756 propagation helpers which also know whether an address was
89a79e96
RB
4757 propagated.
4758 Also canonicalize operand order. */
040292e7
RB
4759 switch (gimple_code (stmt))
4760 {
4761 case GIMPLE_ASSIGN:
4762 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4763 {
4764 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4765 if ((REFERENCE_CLASS_P (*rhs)
4766 || TREE_CODE (*rhs) == ADDR_EXPR)
4767 && maybe_canonicalize_mem_ref_addr (rhs))
4768 changed = true;
4769 tree *lhs = gimple_assign_lhs_ptr (stmt);
4770 if (REFERENCE_CLASS_P (*lhs)
4771 && maybe_canonicalize_mem_ref_addr (lhs))
4772 changed = true;
4773 }
89a79e96
RB
4774 else
4775 {
4776 /* Canonicalize operand order. */
4777 enum tree_code code = gimple_assign_rhs_code (stmt);
4778 if (TREE_CODE_CLASS (code) == tcc_comparison
4779 || commutative_tree_code (code)
4780 || commutative_ternary_tree_code (code))
4781 {
4782 tree rhs1 = gimple_assign_rhs1 (stmt);
4783 tree rhs2 = gimple_assign_rhs2 (stmt);
14e72812 4784 if (tree_swap_operands_p (rhs1, rhs2))
89a79e96
RB
4785 {
4786 gimple_assign_set_rhs1 (stmt, rhs2);
4787 gimple_assign_set_rhs2 (stmt, rhs1);
4788 if (TREE_CODE_CLASS (code) == tcc_comparison)
4789 gimple_assign_set_rhs_code (stmt,
4790 swap_tree_comparison (code));
4791 changed = true;
4792 }
4793 }
4794 }
040292e7
RB
4795 break;
4796 case GIMPLE_CALL:
4797 {
4798 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4799 {
4800 tree *arg = gimple_call_arg_ptr (stmt, i);
4801 if (REFERENCE_CLASS_P (*arg)
4802 && maybe_canonicalize_mem_ref_addr (arg))
4803 changed = true;
4804 }
4805 tree *lhs = gimple_call_lhs_ptr (stmt);
4806 if (*lhs
4807 && REFERENCE_CLASS_P (*lhs)
4808 && maybe_canonicalize_mem_ref_addr (lhs))
4809 changed = true;
4810 break;
4811 }
4812 case GIMPLE_ASM:
4813 {
538dd0b7
DM
4814 gasm *asm_stmt = as_a <gasm *> (stmt);
4815 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
040292e7 4816 {
538dd0b7 4817 tree link = gimple_asm_output_op (asm_stmt, i);
040292e7
RB
4818 tree op = TREE_VALUE (link);
4819 if (REFERENCE_CLASS_P (op)
4820 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4821 changed = true;
4822 }
538dd0b7 4823 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
040292e7 4824 {
538dd0b7 4825 tree link = gimple_asm_input_op (asm_stmt, i);
040292e7
RB
4826 tree op = TREE_VALUE (link);
4827 if ((REFERENCE_CLASS_P (op)
4828 || TREE_CODE (op) == ADDR_EXPR)
4829 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4830 changed = true;
4831 }
4832 }
4833 break;
4834 case GIMPLE_DEBUG:
4835 if (gimple_debug_bind_p (stmt))
4836 {
4837 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4838 if (*val
4839 && (REFERENCE_CLASS_P (*val)
4840 || TREE_CODE (*val) == ADDR_EXPR)
4841 && maybe_canonicalize_mem_ref_addr (val))
4842 changed = true;
4843 }
4844 break;
89a79e96
RB
4845 case GIMPLE_COND:
4846 {
4847 /* Canonicalize operand order. */
4848 tree lhs = gimple_cond_lhs (stmt);
4849 tree rhs = gimple_cond_rhs (stmt);
14e72812 4850 if (tree_swap_operands_p (lhs, rhs))
89a79e96
RB
4851 {
4852 gcond *gc = as_a <gcond *> (stmt);
4853 gimple_cond_set_lhs (gc, rhs);
4854 gimple_cond_set_rhs (gc, lhs);
4855 gimple_cond_set_code (gc,
4856 swap_tree_comparison (gimple_cond_code (gc)));
4857 changed = true;
4858 }
4859 }
040292e7
RB
4860 default:;
4861 }
4862
e0ee10ed
RB
4863 /* Dispatch to pattern-based folding. */
4864 if (!inplace
4865 || is_gimple_assign (stmt)
4866 || gimple_code (stmt) == GIMPLE_COND)
4867 {
4868 gimple_seq seq = NULL;
5d75ad95
RS
4869 gimple_match_op res_op;
4870 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
0ff093d8 4871 valueize, valueize))
e0ee10ed 4872 {
5d75ad95 4873 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
e0ee10ed
RB
4874 changed = true;
4875 else
4876 gimple_seq_discard (seq);
4877 }
4878 }
4879
4880 stmt = gsi_stmt (*gsi);
4881
cbdd87d4
RG
4882 /* Fold the main computation performed by the statement. */
4883 switch (gimple_code (stmt))
4884 {
4885 case GIMPLE_ASSIGN:
4886 {
819ec64c
RB
4887 /* Try to canonicalize for boolean-typed X the comparisons
4888 X == 0, X == 1, X != 0, and X != 1. */
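/* For example, for a boolean-typed x:
     tmp = (x == 1) or tmp = (x != 0)  becomes  tmp = x
     tmp = (x == 0) or tmp = (x != 1)  becomes  tmp = ~x     (one-bit x)
                                       or       tmp = x ^ 1  (wider x)  */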
4889 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4890 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5fbcc0ed 4891 {
819ec64c
RB
4892 tree lhs = gimple_assign_lhs (stmt);
4893 tree op1 = gimple_assign_rhs1 (stmt);
4894 tree op2 = gimple_assign_rhs2 (stmt);
4895 tree type = TREE_TYPE (op1);
4896
4897 /* Check whether the comparison operands are of the same boolean
4898 type as the result type is.
4899 Check that second operand is an integer-constant with value
4900 one or zero. */
4901 if (TREE_CODE (op2) == INTEGER_CST
4902 && (integer_zerop (op2) || integer_onep (op2))
4903 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4904 {
4905 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4906 bool is_logical_not = false;
4907
4908 /* X == 0 and X != 1 is a logical-not of X;
4909 X == 1 and X != 0 is X. */
4910 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4911 || (cmp_code == NE_EXPR && integer_onep (op2)))
4912 is_logical_not = true;
4913
4914 if (is_logical_not == false)
4915 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4916 /* The transformation !X -> ~X is valid only for
4917 one-bit precision typed X. */
4918 else if (TYPE_PRECISION (type) == 1)
4919 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4920 /* Otherwise we use !X -> X ^ 1. */
4921 else
4922 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4923 build_int_cst (type, 1));
4924 changed = true;
4925 break;
4926 }
5fbcc0ed 4927 }
819ec64c
RB
4928
4929 unsigned old_num_ops = gimple_num_ops (stmt);
4930 tree lhs = gimple_assign_lhs (stmt);
4931 tree new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
4932 if (new_rhs
4933 && !useless_type_conversion_p (TREE_TYPE (lhs),
4934 TREE_TYPE (new_rhs)))
4935 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4936 if (new_rhs
4937 && (!inplace
4938 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4939 {
4940 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4941 changed = true;
4942 }
4943 break;
4944 }
4945
cbdd87d4 4946 case GIMPLE_CALL:
ceeffab0 4947 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
4948 break;
4949
4950 case GIMPLE_ASM:
4951 /* Fold *& in asm operands. */
38384150 4952 {
538dd0b7 4953 gasm *asm_stmt = as_a <gasm *> (stmt);
38384150
JJ
4954 size_t noutputs;
4955 const char **oconstraints;
4956 const char *constraint;
4957 bool allows_mem, allows_reg;
4958
538dd0b7 4959 noutputs = gimple_asm_noutputs (asm_stmt);
38384150
JJ
4960 oconstraints = XALLOCAVEC (const char *, noutputs);
4961
538dd0b7 4962 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
38384150 4963 {
538dd0b7 4964 tree link = gimple_asm_output_op (asm_stmt, i);
38384150
JJ
4965 tree op = TREE_VALUE (link);
4966 oconstraints[i]
4967 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4968 if (REFERENCE_CLASS_P (op)
4969 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4970 {
4971 TREE_VALUE (link) = op;
4972 changed = true;
4973 }
4974 }
538dd0b7 4975 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
38384150 4976 {
538dd0b7 4977 tree link = gimple_asm_input_op (asm_stmt, i);
38384150
JJ
4978 tree op = TREE_VALUE (link);
4979 constraint
4980 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4981 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4982 oconstraints, &allows_mem, &allows_reg);
4983 if (REFERENCE_CLASS_P (op)
4984 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4985 != NULL_TREE)
4986 {
4987 TREE_VALUE (link) = op;
4988 changed = true;
4989 }
4990 }
4991 }
cbdd87d4
RG
4992 break;
4993
bd422c4a
RG
4994 case GIMPLE_DEBUG:
4995 if (gimple_debug_bind_p (stmt))
4996 {
4997 tree val = gimple_debug_bind_get_value (stmt);
4998 if (val
4999 && REFERENCE_CLASS_P (val))
5000 {
5001 tree tem = maybe_fold_reference (val, false);
5002 if (tem)
5003 {
5004 gimple_debug_bind_set_value (stmt, tem);
5005 changed = true;
5006 }
5007 }
3e888a5e
RG
5008 else if (val
5009 && TREE_CODE (val) == ADDR_EXPR)
5010 {
5011 tree ref = TREE_OPERAND (val, 0);
5012 tree tem = maybe_fold_reference (ref, false);
5013 if (tem)
5014 {
5015 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
5016 gimple_debug_bind_set_value (stmt, tem);
5017 changed = true;
5018 }
5019 }
bd422c4a
RG
5020 }
5021 break;
5022
cfe3d653
PK
5023 case GIMPLE_RETURN:
5024 {
5025 greturn *ret_stmt = as_a<greturn *> (stmt);
 5026	tree ret = gimple_return_retval (ret_stmt);
5027
5028 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
5029 {
5030 tree val = valueize (ret);
1af928db
RB
5031 if (val && val != ret
5032 && may_propagate_copy (ret, val))
cfe3d653
PK
5033 {
5034 gimple_return_set_retval (ret_stmt, val);
5035 changed = true;
5036 }
5037 }
5038 }
5039 break;
5040
cbdd87d4
RG
5041 default:;
5042 }
5043
5044 stmt = gsi_stmt (*gsi);
5045
37376165
RB
5046 /* Fold *& on the lhs. */
5047 if (gimple_has_lhs (stmt))
cbdd87d4
RG
5048 {
5049 tree lhs = gimple_get_lhs (stmt);
5050 if (lhs && REFERENCE_CLASS_P (lhs))
5051 {
5052 tree new_lhs = maybe_fold_reference (lhs, true);
5053 if (new_lhs)
5054 {
5055 gimple_set_lhs (stmt, new_lhs);
5056 changed = true;
5057 }
5058 }
5059 }
5060
a8b85ce9 5061 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
cbdd87d4
RG
5062 return changed;
5063}
5064
e0ee10ed
RB
 5065/* Valueization callback that ends up not following SSA edges. */
5066
5067tree
5068no_follow_ssa_edges (tree)
5069{
5070 return NULL_TREE;
5071}
5072
45cc9f96
RB
5073/* Valueization callback that ends up following single-use SSA edges only. */
5074
5075tree
5076follow_single_use_edges (tree val)
5077{
5078 if (TREE_CODE (val) == SSA_NAME
5079 && !has_single_use (val))
5080 return NULL_TREE;
5081 return val;
5082}
5083
c566cc9f
RS
5084/* Valueization callback that follows all SSA edges. */
5085
5086tree
5087follow_all_ssa_edges (tree val)
5088{
5089 return val;
5090}
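
/* Editorial note (not part of the original source): the three callbacks
   above are intended to be passed as the VALUEIZE argument of fold_stmt
   and gimple_simplify in order to control how far folding may look
   through SSA use-def chains: not at all, only through single-use
   definitions, or through every definition.  */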
5091
cbdd87d4
RG
5092/* Fold the statement pointed to by GSI. In some cases, this function may
5093 replace the whole statement with a new one. Returns true iff folding
5094 makes any changes.
5095 The statement pointed to by GSI should be in valid gimple form but may
 5096   be in an unfolded state resulting from, for example, constant propagation,
5097 which can produce *&x = 0. */
5098
5099bool
5100fold_stmt (gimple_stmt_iterator *gsi)
5101{
e0ee10ed
RB
5102 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5103}
5104
5105bool
5106fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5107{
5108 return fold_stmt_1 (gsi, false, valueize);
cbdd87d4
RG
5109}
5110
59401b92 5111/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
5112 *&x created by constant propagation are handled. The statement cannot
5113 be replaced with a new one. Return true if the statement was
5114 changed, false otherwise.
59401b92 5115 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
 5116   be in an unfolded state resulting from, for example, constant propagation,
5117 which can produce *&x = 0. */
5118
5119bool
59401b92 5120fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 5121{
355fe088 5122 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 5123 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 5124 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
5125 return changed;
5126}
5127
e89065a1
SL
5128/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5129 if EXPR is null or we don't know how.
5130 If non-null, the result always has boolean type. */
5131
5132static tree
5133canonicalize_bool (tree expr, bool invert)
5134{
5135 if (!expr)
5136 return NULL_TREE;
5137 else if (invert)
5138 {
5139 if (integer_nonzerop (expr))
5140 return boolean_false_node;
5141 else if (integer_zerop (expr))
5142 return boolean_true_node;
5143 else if (TREE_CODE (expr) == SSA_NAME)
5144 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5145 build_int_cst (TREE_TYPE (expr), 0));
98209db3 5146 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
5147 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5148 boolean_type_node,
5149 TREE_OPERAND (expr, 0),
5150 TREE_OPERAND (expr, 1));
5151 else
5152 return NULL_TREE;
5153 }
5154 else
5155 {
5156 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5157 return expr;
5158 if (integer_nonzerop (expr))
5159 return boolean_true_node;
5160 else if (integer_zerop (expr))
5161 return boolean_false_node;
5162 else if (TREE_CODE (expr) == SSA_NAME)
5163 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5164 build_int_cst (TREE_TYPE (expr), 0));
98209db3 5165 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
5166 return fold_build2 (TREE_CODE (expr),
5167 boolean_type_node,
5168 TREE_OPERAND (expr, 0),
5169 TREE_OPERAND (expr, 1));
5170 else
5171 return NULL_TREE;
5172 }
5173}
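
/* Editorial examples (not part of the original source), assuming integer
   operands: canonicalize_bool (a < b, false) yields the boolean comparison
   a < b, canonicalize_bool (a < b, true) yields a >= b, and for a
   non-boolean SSA name N, canonicalize_bool (N, false) yields N != 0.  */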
5174
5175/* Check to see if a boolean expression EXPR is logically equivalent to the
5176 comparison (OP1 CODE OP2). Check for various identities involving
5177 SSA_NAMEs. */
5178
5179static bool
5180same_bool_comparison_p (const_tree expr, enum tree_code code,
5181 const_tree op1, const_tree op2)
5182{
355fe088 5183 gimple *s;
e89065a1
SL
5184
5185 /* The obvious case. */
5186 if (TREE_CODE (expr) == code
5187 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5188 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5189 return true;
5190
5191 /* Check for comparing (name, name != 0) and the case where expr
5192 is an SSA_NAME with a definition matching the comparison. */
5193 if (TREE_CODE (expr) == SSA_NAME
5194 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5195 {
5196 if (operand_equal_p (expr, op1, 0))
5197 return ((code == NE_EXPR && integer_zerop (op2))
5198 || (code == EQ_EXPR && integer_nonzerop (op2)));
5199 s = SSA_NAME_DEF_STMT (expr);
5200 if (is_gimple_assign (s)
5201 && gimple_assign_rhs_code (s) == code
5202 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5203 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5204 return true;
5205 }
5206
5207 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5208 of name is a comparison, recurse. */
5209 if (TREE_CODE (op1) == SSA_NAME
5210 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5211 {
5212 s = SSA_NAME_DEF_STMT (op1);
5213 if (is_gimple_assign (s)
5214 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5215 {
5216 enum tree_code c = gimple_assign_rhs_code (s);
5217 if ((c == NE_EXPR && integer_zerop (op2))
5218 || (c == EQ_EXPR && integer_nonzerop (op2)))
5219 return same_bool_comparison_p (expr, c,
5220 gimple_assign_rhs1 (s),
5221 gimple_assign_rhs2 (s));
5222 if ((c == EQ_EXPR && integer_zerop (op2))
5223 || (c == NE_EXPR && integer_nonzerop (op2)))
5224 return same_bool_comparison_p (expr,
5225 invert_tree_comparison (c, false),
5226 gimple_assign_rhs1 (s),
5227 gimple_assign_rhs2 (s));
5228 }
5229 }
5230 return false;
5231}
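
/* Editorial example (not part of the original source): if the boolean
   SSA name _1 is defined by "_1 = x < y", then
   same_bool_comparison_p (_1, LT_EXPR, x, y) returns true, as does
   same_bool_comparison_p (_1, NE_EXPR, _1, integer_zero_node).  */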
5232
5233/* Check to see if two boolean expressions OP1 and OP2 are logically
5234 equivalent. */
5235
5236static bool
5237same_bool_result_p (const_tree op1, const_tree op2)
5238{
5239 /* Simple cases first. */
5240 if (operand_equal_p (op1, op2, 0))
5241 return true;
5242
5243 /* Check the cases where at least one of the operands is a comparison.
5244 These are a bit smarter than operand_equal_p in that they apply some
 5245      identities on SSA_NAMEs. */
98209db3 5246 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
5247 && same_bool_comparison_p (op1, TREE_CODE (op2),
5248 TREE_OPERAND (op2, 0),
5249 TREE_OPERAND (op2, 1)))
5250 return true;
98209db3 5251 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
5252 && same_bool_comparison_p (op2, TREE_CODE (op1),
5253 TREE_OPERAND (op1, 0),
5254 TREE_OPERAND (op1, 1)))
5255 return true;
5256
5257 /* Default case. */
5258 return false;
5259}
5260
5261/* Forward declarations for some mutually recursive functions. */
5262
5263static tree
5264and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5265 enum tree_code code2, tree op2a, tree op2b);
5266static tree
5267and_var_with_comparison (tree var, bool invert,
5268 enum tree_code code2, tree op2a, tree op2b);
5269static tree
355fe088 5270and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5271 enum tree_code code2, tree op2a, tree op2b);
5272static tree
5273or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5274 enum tree_code code2, tree op2a, tree op2b);
5275static tree
5276or_var_with_comparison (tree var, bool invert,
5277 enum tree_code code2, tree op2a, tree op2b);
5278static tree
355fe088 5279or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5280 enum tree_code code2, tree op2a, tree op2b);
5281
5282/* Helper function for and_comparisons_1: try to simplify the AND of the
5283 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5284 If INVERT is true, invert the value of the VAR before doing the AND.
 5285   Return NULL_TREE if we can't simplify this to a single expression. */
5286
5287static tree
5288and_var_with_comparison (tree var, bool invert,
5289 enum tree_code code2, tree op2a, tree op2b)
5290{
5291 tree t;
355fe088 5292 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5293
5294 /* We can only deal with variables whose definitions are assignments. */
5295 if (!is_gimple_assign (stmt))
5296 return NULL_TREE;
5297
5298 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5299 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5300 Then we only have to consider the simpler non-inverted cases. */
5301 if (invert)
5302 t = or_var_with_comparison_1 (stmt,
5303 invert_tree_comparison (code2, false),
5304 op2a, op2b);
5305 else
5306 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5307 return canonicalize_bool (t, invert);
5308}
5309
5310/* Try to simplify the AND of the ssa variable defined by the assignment
5311 STMT with the comparison specified by (OP2A CODE2 OP2B).
 5312   Return NULL_TREE if we can't simplify this to a single expression. */
5313
5314static tree
355fe088 5315and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5316 enum tree_code code2, tree op2a, tree op2b)
5317{
5318 tree var = gimple_assign_lhs (stmt);
5319 tree true_test_var = NULL_TREE;
5320 tree false_test_var = NULL_TREE;
5321 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5322
5323 /* Check for identities like (var AND (var == 0)) => false. */
5324 if (TREE_CODE (op2a) == SSA_NAME
5325 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5326 {
5327 if ((code2 == NE_EXPR && integer_zerop (op2b))
5328 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5329 {
5330 true_test_var = op2a;
5331 if (var == true_test_var)
5332 return var;
5333 }
5334 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5335 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5336 {
5337 false_test_var = op2a;
5338 if (var == false_test_var)
5339 return boolean_false_node;
5340 }
5341 }
5342
5343 /* If the definition is a comparison, recurse on it. */
5344 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5345 {
5346 tree t = and_comparisons_1 (innercode,
5347 gimple_assign_rhs1 (stmt),
5348 gimple_assign_rhs2 (stmt),
5349 code2,
5350 op2a,
5351 op2b);
5352 if (t)
5353 return t;
5354 }
5355
5356 /* If the definition is an AND or OR expression, we may be able to
5357 simplify by reassociating. */
eb9820c0
KT
5358 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5359 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
5360 {
5361 tree inner1 = gimple_assign_rhs1 (stmt);
5362 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 5363 gimple *s;
e89065a1
SL
5364 tree t;
5365 tree partial = NULL_TREE;
eb9820c0 5366 bool is_and = (innercode == BIT_AND_EXPR);
e89065a1
SL
5367
5368 /* Check for boolean identities that don't require recursive examination
5369 of inner1/inner2:
5370 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5371 inner1 AND (inner1 OR inner2) => inner1
5372 !inner1 AND (inner1 AND inner2) => false
5373 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5374 Likewise for similar cases involving inner2. */
5375 if (inner1 == true_test_var)
5376 return (is_and ? var : inner1);
5377 else if (inner2 == true_test_var)
5378 return (is_and ? var : inner2);
5379 else if (inner1 == false_test_var)
5380 return (is_and
5381 ? boolean_false_node
5382 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5383 else if (inner2 == false_test_var)
5384 return (is_and
5385 ? boolean_false_node
5386 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5387
5388 /* Next, redistribute/reassociate the AND across the inner tests.
5389 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5390 if (TREE_CODE (inner1) == SSA_NAME
5391 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5392 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5393 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5394 gimple_assign_rhs1 (s),
5395 gimple_assign_rhs2 (s),
5396 code2, op2a, op2b)))
5397 {
5398 /* Handle the AND case, where we are reassociating:
5399 (inner1 AND inner2) AND (op2a code2 op2b)
5400 => (t AND inner2)
5401 If the partial result t is a constant, we win. Otherwise
5402 continue on to try reassociating with the other inner test. */
5403 if (is_and)
5404 {
5405 if (integer_onep (t))
5406 return inner2;
5407 else if (integer_zerop (t))
5408 return boolean_false_node;
5409 }
5410
5411 /* Handle the OR case, where we are redistributing:
5412 (inner1 OR inner2) AND (op2a code2 op2b)
5413 => (t OR (inner2 AND (op2a code2 op2b))) */
8236c8eb
JJ
5414 else if (integer_onep (t))
5415 return boolean_true_node;
5416
5417 /* Save partial result for later. */
5418 partial = t;
e89065a1
SL
5419 }
5420
5421 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5422 if (TREE_CODE (inner2) == SSA_NAME
5423 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5424 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5425 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5426 gimple_assign_rhs1 (s),
5427 gimple_assign_rhs2 (s),
5428 code2, op2a, op2b)))
5429 {
5430 /* Handle the AND case, where we are reassociating:
5431 (inner1 AND inner2) AND (op2a code2 op2b)
5432 => (inner1 AND t) */
5433 if (is_and)
5434 {
5435 if (integer_onep (t))
5436 return inner1;
5437 else if (integer_zerop (t))
5438 return boolean_false_node;
8236c8eb
JJ
5439 /* If both are the same, we can apply the identity
5440 (x AND x) == x. */
5441 else if (partial && same_bool_result_p (t, partial))
5442 return t;
e89065a1
SL
5443 }
5444
 5445	  /* Handle the OR case, where we are redistributing:
5446 (inner1 OR inner2) AND (op2a code2 op2b)
5447 => (t OR (inner1 AND (op2a code2 op2b)))
5448 => (t OR partial) */
5449 else
5450 {
5451 if (integer_onep (t))
5452 return boolean_true_node;
5453 else if (partial)
5454 {
5455 /* We already got a simplification for the other
5456 operand to the redistributed OR expression. The
5457 interesting case is when at least one is false.
5458 Or, if both are the same, we can apply the identity
5459 (x OR x) == x. */
5460 if (integer_zerop (partial))
5461 return t;
5462 else if (integer_zerop (t))
5463 return partial;
5464 else if (same_bool_result_p (t, partial))
5465 return t;
5466 }
5467 }
5468 }
5469 }
5470 return NULL_TREE;
5471}
5472
5473/* Try to simplify the AND of two comparisons defined by
5474 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5475 If this can be done without constructing an intermediate value,
5476 return the resulting tree; otherwise NULL_TREE is returned.
5477 This function is deliberately asymmetric as it recurses on SSA_DEFs
5478 in the first comparison but not the second. */
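
/* Editorial examples of the constant-merging cases below (not part of the
   original source): (x == 5) AND (x < 10) simplifies to x == 5,
   (x <= 5) AND (x >= 5) to the singleton test x == 5, and
   (x < 3) AND (x > 7) to false because the ranges are disjoint.  */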
5479
5480static tree
5481and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5482 enum tree_code code2, tree op2a, tree op2b)
5483{
ae22ac3c 5484 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 5485
e89065a1
SL
5486 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5487 if (operand_equal_p (op1a, op2a, 0)
5488 && operand_equal_p (op1b, op2b, 0))
5489 {
eb9820c0 5490 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5491 tree t = combine_comparisons (UNKNOWN_LOCATION,
5492 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 5493 truth_type, op1a, op1b);
e89065a1
SL
5494 if (t)
5495 return t;
5496 }
5497
5498 /* Likewise the swapped case of the above. */
5499 if (operand_equal_p (op1a, op2b, 0)
5500 && operand_equal_p (op1b, op2a, 0))
5501 {
eb9820c0 5502 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5503 tree t = combine_comparisons (UNKNOWN_LOCATION,
5504 TRUTH_ANDIF_EXPR, code1,
5505 swap_tree_comparison (code2),
31ed6226 5506 truth_type, op1a, op1b);
e89065a1
SL
5507 if (t)
5508 return t;
5509 }
5510
5511 /* If both comparisons are of the same value against constants, we might
5512 be able to merge them. */
5513 if (operand_equal_p (op1a, op2a, 0)
5514 && TREE_CODE (op1b) == INTEGER_CST
5515 && TREE_CODE (op2b) == INTEGER_CST)
5516 {
5517 int cmp = tree_int_cst_compare (op1b, op2b);
5518
5519 /* If we have (op1a == op1b), we should either be able to
5520 return that or FALSE, depending on whether the constant op1b
5521 also satisfies the other comparison against op2b. */
5522 if (code1 == EQ_EXPR)
5523 {
5524 bool done = true;
5525 bool val;
5526 switch (code2)
5527 {
5528 case EQ_EXPR: val = (cmp == 0); break;
5529 case NE_EXPR: val = (cmp != 0); break;
5530 case LT_EXPR: val = (cmp < 0); break;
5531 case GT_EXPR: val = (cmp > 0); break;
5532 case LE_EXPR: val = (cmp <= 0); break;
5533 case GE_EXPR: val = (cmp >= 0); break;
5534 default: done = false;
5535 }
5536 if (done)
5537 {
5538 if (val)
5539 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5540 else
5541 return boolean_false_node;
5542 }
5543 }
5544 /* Likewise if the second comparison is an == comparison. */
5545 else if (code2 == EQ_EXPR)
5546 {
5547 bool done = true;
5548 bool val;
5549 switch (code1)
5550 {
5551 case EQ_EXPR: val = (cmp == 0); break;
5552 case NE_EXPR: val = (cmp != 0); break;
5553 case LT_EXPR: val = (cmp > 0); break;
5554 case GT_EXPR: val = (cmp < 0); break;
5555 case LE_EXPR: val = (cmp >= 0); break;
5556 case GE_EXPR: val = (cmp <= 0); break;
5557 default: done = false;
5558 }
5559 if (done)
5560 {
5561 if (val)
5562 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5563 else
5564 return boolean_false_node;
5565 }
5566 }
5567
5568 /* Same business with inequality tests. */
5569 else if (code1 == NE_EXPR)
5570 {
5571 bool val;
5572 switch (code2)
5573 {
5574 case EQ_EXPR: val = (cmp != 0); break;
5575 case NE_EXPR: val = (cmp == 0); break;
5576 case LT_EXPR: val = (cmp >= 0); break;
5577 case GT_EXPR: val = (cmp <= 0); break;
5578 case LE_EXPR: val = (cmp > 0); break;
5579 case GE_EXPR: val = (cmp < 0); break;
5580 default:
5581 val = false;
5582 }
5583 if (val)
5584 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5585 }
5586 else if (code2 == NE_EXPR)
5587 {
5588 bool val;
5589 switch (code1)
5590 {
5591 case EQ_EXPR: val = (cmp == 0); break;
5592 case NE_EXPR: val = (cmp != 0); break;
5593 case LT_EXPR: val = (cmp <= 0); break;
5594 case GT_EXPR: val = (cmp >= 0); break;
5595 case LE_EXPR: val = (cmp < 0); break;
5596 case GE_EXPR: val = (cmp > 0); break;
5597 default:
5598 val = false;
5599 }
5600 if (val)
5601 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5602 }
5603
 5604      /* Choose the more restrictive of two < or <= comparisons. */
5605 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5606 && (code2 == LT_EXPR || code2 == LE_EXPR))
5607 {
5608 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5609 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5610 else
5611 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5612 }
5613
 5614      /* Likewise choose the more restrictive of two > or >= comparisons. */
5615 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5616 && (code2 == GT_EXPR || code2 == GE_EXPR))
5617 {
5618 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5619 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5620 else
5621 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5622 }
5623
5624 /* Check for singleton ranges. */
5625 else if (cmp == 0
5626 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5627 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5628 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5629
5630 /* Check for disjoint ranges. */
5631 else if (cmp <= 0
5632 && (code1 == LT_EXPR || code1 == LE_EXPR)
5633 && (code2 == GT_EXPR || code2 == GE_EXPR))
5634 return boolean_false_node;
5635 else if (cmp >= 0
5636 && (code1 == GT_EXPR || code1 == GE_EXPR)
5637 && (code2 == LT_EXPR || code2 == LE_EXPR))
5638 return boolean_false_node;
5639 }
5640
5641 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5642 NAME's definition is a truth value. See if there are any simplifications
5643 that can be done against the NAME's definition. */
5644 if (TREE_CODE (op1a) == SSA_NAME
5645 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5646 && (integer_zerop (op1b) || integer_onep (op1b)))
5647 {
5648 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5649 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 5650 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
5651 switch (gimple_code (stmt))
5652 {
5653 case GIMPLE_ASSIGN:
5654 /* Try to simplify by copy-propagating the definition. */
5655 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5656
5657 case GIMPLE_PHI:
5658 /* If every argument to the PHI produces the same result when
5659 ANDed with the second comparison, we win.
5660 Do not do this unless the type is bool since we need a bool
5661 result here anyway. */
5662 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5663 {
5664 tree result = NULL_TREE;
5665 unsigned i;
5666 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5667 {
5668 tree arg = gimple_phi_arg_def (stmt, i);
5669
5670 /* If this PHI has itself as an argument, ignore it.
5671 If all the other args produce the same result,
5672 we're still OK. */
5673 if (arg == gimple_phi_result (stmt))
5674 continue;
5675 else if (TREE_CODE (arg) == INTEGER_CST)
5676 {
5677 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5678 {
5679 if (!result)
5680 result = boolean_false_node;
5681 else if (!integer_zerop (result))
5682 return NULL_TREE;
5683 }
5684 else if (!result)
5685 result = fold_build2 (code2, boolean_type_node,
5686 op2a, op2b);
5687 else if (!same_bool_comparison_p (result,
5688 code2, op2a, op2b))
5689 return NULL_TREE;
5690 }
0e8b84ec
JJ
5691 else if (TREE_CODE (arg) == SSA_NAME
5692 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 5693 {
6c66f733 5694 tree temp;
355fe088 5695 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
5696 /* In simple cases we can look through PHI nodes,
5697 but we have to be careful with loops.
5698 See PR49073. */
5699 if (! dom_info_available_p (CDI_DOMINATORS)
5700 || gimple_bb (def_stmt) == gimple_bb (stmt)
5701 || dominated_by_p (CDI_DOMINATORS,
5702 gimple_bb (def_stmt),
5703 gimple_bb (stmt)))
5704 return NULL_TREE;
5705 temp = and_var_with_comparison (arg, invert, code2,
5706 op2a, op2b);
e89065a1
SL
5707 if (!temp)
5708 return NULL_TREE;
5709 else if (!result)
5710 result = temp;
5711 else if (!same_bool_result_p (result, temp))
5712 return NULL_TREE;
5713 }
5714 else
5715 return NULL_TREE;
5716 }
5717 return result;
5718 }
5719
5720 default:
5721 break;
5722 }
5723 }
5724 return NULL_TREE;
5725}
5726
5727/* Try to simplify the AND of two comparisons, specified by
 5728   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5729 If this can be simplified to a single expression (without requiring
5730 introducing more SSA variables to hold intermediate values),
5731 return the resulting tree. Otherwise return NULL_TREE.
5732 If the result expression is non-null, it has boolean type. */
5733
5734tree
5735maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5736 enum tree_code code2, tree op2a, tree op2b)
5737{
5738 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5739 if (t)
5740 return t;
5741 else
5742 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5743}
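
/* Editorial usage example (not part of the original source):
   maybe_fold_and_comparisons (GT_EXPR, x, two, EQ_EXPR, x, five), where
   two and five are INTEGER_CSTs, returns the tree for x == 5, since
   x > 2 && x == 5 is equivalent to x == 5.  */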
5744
5745/* Helper function for or_comparisons_1: try to simplify the OR of the
5746 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5747 If INVERT is true, invert the value of VAR before doing the OR.
 5748   Return NULL_TREE if we can't simplify this to a single expression. */
5749
5750static tree
5751or_var_with_comparison (tree var, bool invert,
5752 enum tree_code code2, tree op2a, tree op2b)
5753{
5754 tree t;
355fe088 5755 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5756
5757 /* We can only deal with variables whose definitions are assignments. */
5758 if (!is_gimple_assign (stmt))
5759 return NULL_TREE;
5760
5761 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5762 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5763 Then we only have to consider the simpler non-inverted cases. */
5764 if (invert)
5765 t = and_var_with_comparison_1 (stmt,
5766 invert_tree_comparison (code2, false),
5767 op2a, op2b);
5768 else
5769 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5770 return canonicalize_bool (t, invert);
5771}
5772
5773/* Try to simplify the OR of the ssa variable defined by the assignment
5774 STMT with the comparison specified by (OP2A CODE2 OP2B).
 5775   Return NULL_TREE if we can't simplify this to a single expression. */
5776
5777static tree
355fe088 5778or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5779 enum tree_code code2, tree op2a, tree op2b)
5780{
5781 tree var = gimple_assign_lhs (stmt);
5782 tree true_test_var = NULL_TREE;
5783 tree false_test_var = NULL_TREE;
5784 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5785
5786 /* Check for identities like (var OR (var != 0)) => true . */
5787 if (TREE_CODE (op2a) == SSA_NAME
5788 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5789 {
5790 if ((code2 == NE_EXPR && integer_zerop (op2b))
5791 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5792 {
5793 true_test_var = op2a;
5794 if (var == true_test_var)
5795 return var;
5796 }
5797 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5798 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5799 {
5800 false_test_var = op2a;
5801 if (var == false_test_var)
5802 return boolean_true_node;
5803 }
5804 }
5805
5806 /* If the definition is a comparison, recurse on it. */
5807 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5808 {
5809 tree t = or_comparisons_1 (innercode,
5810 gimple_assign_rhs1 (stmt),
5811 gimple_assign_rhs2 (stmt),
5812 code2,
5813 op2a,
5814 op2b);
5815 if (t)
5816 return t;
5817 }
5818
5819 /* If the definition is an AND or OR expression, we may be able to
5820 simplify by reassociating. */
eb9820c0
KT
5821 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5822 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
5823 {
5824 tree inner1 = gimple_assign_rhs1 (stmt);
5825 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 5826 gimple *s;
e89065a1
SL
5827 tree t;
5828 tree partial = NULL_TREE;
eb9820c0 5829 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
5830
5831 /* Check for boolean identities that don't require recursive examination
5832 of inner1/inner2:
5833 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5834 inner1 OR (inner1 AND inner2) => inner1
5835 !inner1 OR (inner1 OR inner2) => true
5836 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5837 */
5838 if (inner1 == true_test_var)
5839 return (is_or ? var : inner1);
5840 else if (inner2 == true_test_var)
5841 return (is_or ? var : inner2);
5842 else if (inner1 == false_test_var)
5843 return (is_or
5844 ? boolean_true_node
5845 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5846 else if (inner2 == false_test_var)
5847 return (is_or
5848 ? boolean_true_node
5849 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5850
5851 /* Next, redistribute/reassociate the OR across the inner tests.
5852 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5853 if (TREE_CODE (inner1) == SSA_NAME
5854 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5855 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5856 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5857 gimple_assign_rhs1 (s),
5858 gimple_assign_rhs2 (s),
5859 code2, op2a, op2b)))
5860 {
5861 /* Handle the OR case, where we are reassociating:
5862 (inner1 OR inner2) OR (op2a code2 op2b)
5863 => (t OR inner2)
5864 If the partial result t is a constant, we win. Otherwise
5865 continue on to try reassociating with the other inner test. */
8236c8eb 5866 if (is_or)
e89065a1
SL
5867 {
5868 if (integer_onep (t))
5869 return boolean_true_node;
5870 else if (integer_zerop (t))
5871 return inner2;
5872 }
5873
5874 /* Handle the AND case, where we are redistributing:
5875 (inner1 AND inner2) OR (op2a code2 op2b)
5876 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
5877 else if (integer_zerop (t))
5878 return boolean_false_node;
5879
5880 /* Save partial result for later. */
5881 partial = t;
e89065a1
SL
5882 }
5883
5884 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5885 if (TREE_CODE (inner2) == SSA_NAME
5886 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5887 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5888 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5889 gimple_assign_rhs1 (s),
5890 gimple_assign_rhs2 (s),
5891 code2, op2a, op2b)))
5892 {
5893 /* Handle the OR case, where we are reassociating:
5894 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
5895 => (inner1 OR t)
5896 => (t OR partial) */
5897 if (is_or)
e89065a1
SL
5898 {
5899 if (integer_zerop (t))
5900 return inner1;
5901 else if (integer_onep (t))
5902 return boolean_true_node;
8236c8eb
JJ
5903 /* If both are the same, we can apply the identity
5904 (x OR x) == x. */
5905 else if (partial && same_bool_result_p (t, partial))
5906 return t;
e89065a1
SL
5907 }
5908
5909 /* Handle the AND case, where we are redistributing:
5910 (inner1 AND inner2) OR (op2a code2 op2b)
5911 => (t AND (inner1 OR (op2a code2 op2b)))
5912 => (t AND partial) */
5913 else
5914 {
5915 if (integer_zerop (t))
5916 return boolean_false_node;
5917 else if (partial)
5918 {
5919 /* We already got a simplification for the other
5920 operand to the redistributed AND expression. The
5921 interesting case is when at least one is true.
5922 Or, if both are the same, we can apply the identity
8236c8eb 5923 (x AND x) == x. */
e89065a1
SL
5924 if (integer_onep (partial))
5925 return t;
5926 else if (integer_onep (t))
5927 return partial;
5928 else if (same_bool_result_p (t, partial))
8236c8eb 5929 return t;
e89065a1
SL
5930 }
5931 }
5932 }
5933 }
5934 return NULL_TREE;
5935}
5936
5937/* Try to simplify the OR of two comparisons defined by
5938 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5939 If this can be done without constructing an intermediate value,
5940 return the resulting tree; otherwise NULL_TREE is returned.
5941 This function is deliberately asymmetric as it recurses on SSA_DEFs
5942 in the first comparison but not the second. */
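
/* Editorial examples of the constant-merging cases below (not part of the
   original source): (x < 7) OR (x > 3) simplifies to true,
   (x < 5) OR (x < 10) to the less restrictive test x < 10, and
   (x < 5) OR (x > 5) to the inequality x != 5.  */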
5943
5944static tree
5945or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5946 enum tree_code code2, tree op2a, tree op2b)
5947{
ae22ac3c 5948 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 5949
e89065a1
SL
5950 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5951 if (operand_equal_p (op1a, op2a, 0)
5952 && operand_equal_p (op1b, op2b, 0))
5953 {
eb9820c0 5954 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5955 tree t = combine_comparisons (UNKNOWN_LOCATION,
5956 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 5957 truth_type, op1a, op1b);
e89065a1
SL
5958 if (t)
5959 return t;
5960 }
5961
5962 /* Likewise the swapped case of the above. */
5963 if (operand_equal_p (op1a, op2b, 0)
5964 && operand_equal_p (op1b, op2a, 0))
5965 {
eb9820c0 5966 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5967 tree t = combine_comparisons (UNKNOWN_LOCATION,
5968 TRUTH_ORIF_EXPR, code1,
5969 swap_tree_comparison (code2),
31ed6226 5970 truth_type, op1a, op1b);
e89065a1
SL
5971 if (t)
5972 return t;
5973 }
5974
5975 /* If both comparisons are of the same value against constants, we might
5976 be able to merge them. */
5977 if (operand_equal_p (op1a, op2a, 0)
5978 && TREE_CODE (op1b) == INTEGER_CST
5979 && TREE_CODE (op2b) == INTEGER_CST)
5980 {
5981 int cmp = tree_int_cst_compare (op1b, op2b);
5982
5983 /* If we have (op1a != op1b), we should either be able to
5984 return that or TRUE, depending on whether the constant op1b
5985 also satisfies the other comparison against op2b. */
5986 if (code1 == NE_EXPR)
5987 {
5988 bool done = true;
5989 bool val;
5990 switch (code2)
5991 {
5992 case EQ_EXPR: val = (cmp == 0); break;
5993 case NE_EXPR: val = (cmp != 0); break;
5994 case LT_EXPR: val = (cmp < 0); break;
5995 case GT_EXPR: val = (cmp > 0); break;
5996 case LE_EXPR: val = (cmp <= 0); break;
5997 case GE_EXPR: val = (cmp >= 0); break;
5998 default: done = false;
5999 }
6000 if (done)
6001 {
6002 if (val)
6003 return boolean_true_node;
6004 else
6005 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6006 }
6007 }
6008 /* Likewise if the second comparison is a != comparison. */
6009 else if (code2 == NE_EXPR)
6010 {
6011 bool done = true;
6012 bool val;
6013 switch (code1)
6014 {
6015 case EQ_EXPR: val = (cmp == 0); break;
6016 case NE_EXPR: val = (cmp != 0); break;
6017 case LT_EXPR: val = (cmp > 0); break;
6018 case GT_EXPR: val = (cmp < 0); break;
6019 case LE_EXPR: val = (cmp >= 0); break;
6020 case GE_EXPR: val = (cmp <= 0); break;
6021 default: done = false;
6022 }
6023 if (done)
6024 {
6025 if (val)
6026 return boolean_true_node;
6027 else
6028 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6029 }
6030 }
6031
6032 /* See if an equality test is redundant with the other comparison. */
6033 else if (code1 == EQ_EXPR)
6034 {
6035 bool val;
6036 switch (code2)
6037 {
6038 case EQ_EXPR: val = (cmp == 0); break;
6039 case NE_EXPR: val = (cmp != 0); break;
6040 case LT_EXPR: val = (cmp < 0); break;
6041 case GT_EXPR: val = (cmp > 0); break;
6042 case LE_EXPR: val = (cmp <= 0); break;
6043 case GE_EXPR: val = (cmp >= 0); break;
6044 default:
6045 val = false;
6046 }
6047 if (val)
6048 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6049 }
6050 else if (code2 == EQ_EXPR)
6051 {
6052 bool val;
6053 switch (code1)
6054 {
6055 case EQ_EXPR: val = (cmp == 0); break;
6056 case NE_EXPR: val = (cmp != 0); break;
6057 case LT_EXPR: val = (cmp > 0); break;
6058 case GT_EXPR: val = (cmp < 0); break;
6059 case LE_EXPR: val = (cmp >= 0); break;
6060 case GE_EXPR: val = (cmp <= 0); break;
6061 default:
6062 val = false;
6063 }
6064 if (val)
6065 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6066 }
6067
 6068      /* Choose the less restrictive of two < or <= comparisons. */
6069 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
6070 && (code2 == LT_EXPR || code2 == LE_EXPR))
6071 {
6072 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
6073 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6074 else
6075 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6076 }
6077
 6078      /* Likewise choose the less restrictive of two > or >= comparisons. */
6079 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
6080 && (code2 == GT_EXPR || code2 == GE_EXPR))
6081 {
6082 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
6083 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6084 else
6085 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6086 }
6087
6088 /* Check for singleton ranges. */
6089 else if (cmp == 0
6090 && ((code1 == LT_EXPR && code2 == GT_EXPR)
6091 || (code1 == GT_EXPR && code2 == LT_EXPR)))
6092 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
6093
6094 /* Check for less/greater pairs that don't restrict the range at all. */
6095 else if (cmp >= 0
6096 && (code1 == LT_EXPR || code1 == LE_EXPR)
6097 && (code2 == GT_EXPR || code2 == GE_EXPR))
6098 return boolean_true_node;
6099 else if (cmp <= 0
6100 && (code1 == GT_EXPR || code1 == GE_EXPR)
6101 && (code2 == LT_EXPR || code2 == LE_EXPR))
6102 return boolean_true_node;
6103 }
6104
6105 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6106 NAME's definition is a truth value. See if there are any simplifications
6107 that can be done against the NAME's definition. */
6108 if (TREE_CODE (op1a) == SSA_NAME
6109 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6110 && (integer_zerop (op1b) || integer_onep (op1b)))
6111 {
6112 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6113 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 6114 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
6115 switch (gimple_code (stmt))
6116 {
6117 case GIMPLE_ASSIGN:
6118 /* Try to simplify by copy-propagating the definition. */
6119 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
6120
6121 case GIMPLE_PHI:
6122 /* If every argument to the PHI produces the same result when
6123 ORed with the second comparison, we win.
6124 Do not do this unless the type is bool since we need a bool
6125 result here anyway. */
6126 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6127 {
6128 tree result = NULL_TREE;
6129 unsigned i;
6130 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6131 {
6132 tree arg = gimple_phi_arg_def (stmt, i);
6133
6134 /* If this PHI has itself as an argument, ignore it.
6135 If all the other args produce the same result,
6136 we're still OK. */
6137 if (arg == gimple_phi_result (stmt))
6138 continue;
6139 else if (TREE_CODE (arg) == INTEGER_CST)
6140 {
6141 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6142 {
6143 if (!result)
6144 result = boolean_true_node;
6145 else if (!integer_onep (result))
6146 return NULL_TREE;
6147 }
6148 else if (!result)
6149 result = fold_build2 (code2, boolean_type_node,
6150 op2a, op2b);
6151 else if (!same_bool_comparison_p (result,
6152 code2, op2a, op2b))
6153 return NULL_TREE;
6154 }
0e8b84ec
JJ
6155 else if (TREE_CODE (arg) == SSA_NAME
6156 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 6157 {
6c66f733 6158 tree temp;
355fe088 6159 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
6160 /* In simple cases we can look through PHI nodes,
6161 but we have to be careful with loops.
6162 See PR49073. */
6163 if (! dom_info_available_p (CDI_DOMINATORS)
6164 || gimple_bb (def_stmt) == gimple_bb (stmt)
6165 || dominated_by_p (CDI_DOMINATORS,
6166 gimple_bb (def_stmt),
6167 gimple_bb (stmt)))
6168 return NULL_TREE;
6169 temp = or_var_with_comparison (arg, invert, code2,
6170 op2a, op2b);
e89065a1
SL
6171 if (!temp)
6172 return NULL_TREE;
6173 else if (!result)
6174 result = temp;
6175 else if (!same_bool_result_p (result, temp))
6176 return NULL_TREE;
6177 }
6178 else
6179 return NULL_TREE;
6180 }
6181 return result;
6182 }
6183
6184 default:
6185 break;
6186 }
6187 }
6188 return NULL_TREE;
6189}
6190
6191/* Try to simplify the OR of two comparisons, specified by
 6192   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6193 If this can be simplified to a single expression (without requiring
6194 introducing more SSA variables to hold intermediate values),
6195 return the resulting tree. Otherwise return NULL_TREE.
6196 If the result expression is non-null, it has boolean type. */
6197
6198tree
6199maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6200 enum tree_code code2, tree op2a, tree op2b)
6201{
6202 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6203 if (t)
6204 return t;
6205 else
6206 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6207}
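
/* Editorial usage example (not part of the original source):
   maybe_fold_or_comparisons (EQ_EXPR, x, zero, NE_EXPR, x, zero), with
   zero an INTEGER_CST, returns boolean_true_node, since x == 0 || x != 0
   always holds.  */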
cfef45c8
RG
6208
6209
6210/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6211
6212 Either NULL_TREE, a simplified but non-constant or a constant
6213 is returned.
6214
6215 ??? This should go into a gimple-fold-inline.h file to be eventually
6216 privatized with the single valueize function used in the various TUs
6217 to avoid the indirect function call overhead. */
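
/* Editorial note (not part of the original source): VALUEIZE is expected
   to map an SSA name to a known constant or equivalent value, returning
   the name itself when nothing better is known, while GVALUEIZE (used by
   gimple_simplify) may return NULL_TREE to indicate that an SSA use-def
   edge must not be followed, as no_follow_ssa_edges does above.  */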
6218
6219tree
355fe088 6220gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
d2a85801 6221 tree (*gvalueize) (tree))
cfef45c8 6222{
5d75ad95 6223 gimple_match_op res_op;
45cc9f96
RB
6224 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6225 edges if there are intermediate VARYING defs. For this reason
6226 do not follow SSA edges here even though SCCVN can technically
6227 just deal fine with that. */
5d75ad95 6228 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
45cc9f96 6229 {
34050b6b 6230 tree res = NULL_TREE;
5d75ad95
RS
6231 if (gimple_simplified_result_is_gimple_val (&res_op))
6232 res = res_op.ops[0];
34050b6b 6233 else if (mprts_hook)
5d75ad95 6234 res = mprts_hook (&res_op);
34050b6b 6235 if (res)
45cc9f96 6236 {
34050b6b
RB
6237 if (dump_file && dump_flags & TDF_DETAILS)
6238 {
6239 fprintf (dump_file, "Match-and-simplified ");
6240 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6241 fprintf (dump_file, " to ");
ef6cb4c7 6242 print_generic_expr (dump_file, res);
34050b6b
RB
6243 fprintf (dump_file, "\n");
6244 }
6245 return res;
45cc9f96 6246 }
45cc9f96
RB
6247 }
6248
cfef45c8
RG
6249 location_t loc = gimple_location (stmt);
6250 switch (gimple_code (stmt))
6251 {
6252 case GIMPLE_ASSIGN:
6253 {
6254 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6255
6256 switch (get_gimple_rhs_class (subcode))
6257 {
6258 case GIMPLE_SINGLE_RHS:
6259 {
6260 tree rhs = gimple_assign_rhs1 (stmt);
6261 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6262
6263 if (TREE_CODE (rhs) == SSA_NAME)
6264 {
6265 /* If the RHS is an SSA_NAME, return its known constant value,
6266 if any. */
6267 return (*valueize) (rhs);
6268 }
6269 /* Handle propagating invariant addresses into address
6270 operations. */
6271 else if (TREE_CODE (rhs) == ADDR_EXPR
6272 && !is_gimple_min_invariant (rhs))
6273 {
a90c8804 6274 poly_int64 offset = 0;
cfef45c8
RG
6275 tree base;
6276 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6277 &offset,
6278 valueize);
6279 if (base
6280 && (CONSTANT_CLASS_P (base)
6281 || decl_address_invariant_p (base)))
6282 return build_invariant_address (TREE_TYPE (rhs),
6283 base, offset);
6284 }
6285 else if (TREE_CODE (rhs) == CONSTRUCTOR
6286 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
928686b1
RS
6287 && known_eq (CONSTRUCTOR_NELTS (rhs),
6288 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
cfef45c8 6289 {
794e3180
RS
6290 unsigned i, nelts;
6291 tree val;
cfef45c8 6292
928686b1 6293 nelts = CONSTRUCTOR_NELTS (rhs);
5ebaa477 6294 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
cfef45c8
RG
6295 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6296 {
6297 val = (*valueize) (val);
6298 if (TREE_CODE (val) == INTEGER_CST
6299 || TREE_CODE (val) == REAL_CST
6300 || TREE_CODE (val) == FIXED_CST)
794e3180 6301 vec.quick_push (val);
cfef45c8
RG
6302 else
6303 return NULL_TREE;
6304 }
6305
5ebaa477 6306 return vec.build ();
cfef45c8 6307 }
bdf37f7a
JH
6308 if (subcode == OBJ_TYPE_REF)
6309 {
6310 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6311 /* If callee is constant, we can fold away the wrapper. */
6312 if (is_gimple_min_invariant (val))
6313 return val;
6314 }
cfef45c8
RG
6315
6316 if (kind == tcc_reference)
6317 {
6318 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6319 || TREE_CODE (rhs) == REALPART_EXPR
6320 || TREE_CODE (rhs) == IMAGPART_EXPR)
6321 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6322 {
6323 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6324 return fold_unary_loc (EXPR_LOCATION (rhs),
6325 TREE_CODE (rhs),
6326 TREE_TYPE (rhs), val);
6327 }
6328 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6329 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6330 {
6331 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6332 return fold_ternary_loc (EXPR_LOCATION (rhs),
6333 TREE_CODE (rhs),
6334 TREE_TYPE (rhs), val,
6335 TREE_OPERAND (rhs, 1),
6336 TREE_OPERAND (rhs, 2));
6337 }
6338 else if (TREE_CODE (rhs) == MEM_REF
6339 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6340 {
6341 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6342 if (TREE_CODE (val) == ADDR_EXPR
6343 && is_gimple_min_invariant (val))
6344 {
6345 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6346 unshare_expr (val),
6347 TREE_OPERAND (rhs, 1));
6348 if (tem)
6349 rhs = tem;
6350 }
6351 }
6352 return fold_const_aggregate_ref_1 (rhs, valueize);
6353 }
6354 else if (kind == tcc_declaration)
6355 return get_symbol_constant_value (rhs);
6356 return rhs;
6357 }
6358
6359 case GIMPLE_UNARY_RHS:
f3582e54 6360 return NULL_TREE;
cfef45c8
RG
6361
6362 case GIMPLE_BINARY_RHS:
4b1b9e64
RB
6363 /* Translate &x + CST into an invariant form suitable for
6364 further propagation. */
6365 if (subcode == POINTER_PLUS_EXPR)
6366 {
4b1b9e64
RB
6367 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6368 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4b1b9e64
RB
6369 if (TREE_CODE (op0) == ADDR_EXPR
6370 && TREE_CODE (op1) == INTEGER_CST)
6371 {
6372 tree off = fold_convert (ptr_type_node, op1);
6373 return build_fold_addr_expr_loc
6374 (loc,
6375 fold_build2 (MEM_REF,
6376 TREE_TYPE (TREE_TYPE (op0)),
6377 unshare_expr (op0), off));
6378 }
6379 }
59c20dc7
RB
6380 /* Canonicalize bool != 0 and bool == 0 appearing after
6381 valueization. While gimple_simplify handles this
6382 it can get confused by the ~X == 1 -> X == 0 transform
 6383	     which we can't reduce to an SSA name or a constant
6384 (and we have no way to tell gimple_simplify to not
6385 consider those transforms in the first place). */
6386 else if (subcode == EQ_EXPR
6387 || subcode == NE_EXPR)
6388 {
6389 tree lhs = gimple_assign_lhs (stmt);
6390 tree op0 = gimple_assign_rhs1 (stmt);
6391 if (useless_type_conversion_p (TREE_TYPE (lhs),
6392 TREE_TYPE (op0)))
6393 {
6394 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6395 op0 = (*valueize) (op0);
8861704d
RB
6396 if (TREE_CODE (op0) == INTEGER_CST)
6397 std::swap (op0, op1);
6398 if (TREE_CODE (op1) == INTEGER_CST
6399 && ((subcode == NE_EXPR && integer_zerop (op1))
6400 || (subcode == EQ_EXPR && integer_onep (op1))))
6401 return op0;
59c20dc7
RB
6402 }
6403 }
4b1b9e64 6404 return NULL_TREE;
cfef45c8
RG
6405
6406 case GIMPLE_TERNARY_RHS:
6407 {
6408 /* Handle ternary operators that can appear in GIMPLE form. */
6409 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6410 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6411 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
cfef45c8
RG
6412 return fold_ternary_loc (loc, subcode,
6413 gimple_expr_type (stmt), op0, op1, op2);
6414 }
6415
6416 default:
6417 gcc_unreachable ();
6418 }
6419 }
6420
6421 case GIMPLE_CALL:
6422 {
25583c4f 6423 tree fn;
538dd0b7 6424 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
6425
6426 if (gimple_call_internal_p (stmt))
31e071ae
MP
6427 {
6428 enum tree_code subcode = ERROR_MARK;
6429 switch (gimple_call_internal_fn (stmt))
6430 {
6431 case IFN_UBSAN_CHECK_ADD:
6432 subcode = PLUS_EXPR;
6433 break;
6434 case IFN_UBSAN_CHECK_SUB:
6435 subcode = MINUS_EXPR;
6436 break;
6437 case IFN_UBSAN_CHECK_MUL:
6438 subcode = MULT_EXPR;
6439 break;
68fa96d6
ML
6440 case IFN_BUILTIN_EXPECT:
6441 {
6442 tree arg0 = gimple_call_arg (stmt, 0);
6443 tree op0 = (*valueize) (arg0);
6444 if (TREE_CODE (op0) == INTEGER_CST)
6445 return op0;
6446 return NULL_TREE;
6447 }
31e071ae
MP
6448 default:
6449 return NULL_TREE;
6450 }
368b454d
JJ
6451 tree arg0 = gimple_call_arg (stmt, 0);
6452 tree arg1 = gimple_call_arg (stmt, 1);
6453 tree op0 = (*valueize) (arg0);
6454 tree op1 = (*valueize) (arg1);
31e071ae
MP
6455
6456 if (TREE_CODE (op0) != INTEGER_CST
6457 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
6458 {
6459 switch (subcode)
6460 {
6461 case MULT_EXPR:
6462 /* x * 0 = 0 * x = 0 without overflow. */
6463 if (integer_zerop (op0) || integer_zerop (op1))
6464 return build_zero_cst (TREE_TYPE (arg0));
6465 break;
6466 case MINUS_EXPR:
6467 /* y - y = 0 without overflow. */
6468 if (operand_equal_p (op0, op1, 0))
6469 return build_zero_cst (TREE_TYPE (arg0));
6470 break;
6471 default:
6472 break;
6473 }
6474 }
6475 tree res
6476 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
6477 if (res
6478 && TREE_CODE (res) == INTEGER_CST
6479 && !TREE_OVERFLOW (res))
6480 return res;
6481 return NULL_TREE;
6482 }
25583c4f
RS
6483
6484 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8 6485 if (TREE_CODE (fn) == ADDR_EXPR
3d78e008 6486 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
5c944c6c
RB
6487 && gimple_builtin_call_types_compatible_p (stmt,
6488 TREE_OPERAND (fn, 0)))
cfef45c8
RG
6489 {
6490 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 6491 tree retval;
cfef45c8
RG
6492 unsigned i;
6493 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6494 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 6495 retval = fold_builtin_call_array (loc,
538dd0b7 6496 gimple_call_return_type (call_stmt),
cfef45c8 6497 fn, gimple_call_num_args (stmt), args);
cfef45c8 6498 if (retval)
5c944c6c
RB
6499 {
6500 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6501 STRIP_NOPS (retval);
538dd0b7
DM
6502 retval = fold_convert (gimple_call_return_type (call_stmt),
6503 retval);
5c944c6c 6504 }
cfef45c8
RG
6505 return retval;
6506 }
6507 return NULL_TREE;
6508 }
6509
6510 default:
6511 return NULL_TREE;
6512 }
6513}
6514
6515/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6516 Returns NULL_TREE if folding to a constant is not possible, otherwise
6517 returns a constant according to is_gimple_min_invariant. */
6518
6519tree
355fe088 6520gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
6521{
6522 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6523 if (res && is_gimple_min_invariant (res))
6524 return res;
6525 return NULL_TREE;
6526}
6527
6528
6529/* The following set of functions are supposed to fold references using
6530 their constant initializers. */
6531
cfef45c8
RG
 6532/* See if we can find the constructor defining the value of BASE.
 6533   When we know the constructor with a constant offset (such as when
 6534   BASE is array[40] and we do know the constructor of the array), then
6535 BIT_OFFSET is adjusted accordingly.
6536
6537 As a special case, return error_mark_node when constructor
6538 is not explicitly available, but it is known to be zero
6539 such as 'static const int a;'. */
6540static tree
588db50c 6541get_base_constructor (tree base, poly_int64_pod *bit_offset,
cfef45c8
RG
6542 tree (*valueize)(tree))
6543{
588db50c 6544 poly_int64 bit_offset2, size, max_size;
ee45a32d
EB
6545 bool reverse;
6546
cfef45c8
RG
6547 if (TREE_CODE (base) == MEM_REF)
6548 {
6a5aca53
ML
6549 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6550 if (!boff.to_shwi (bit_offset))
6551 return NULL_TREE;
cfef45c8
RG
6552
6553 if (valueize
6554 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6555 base = valueize (TREE_OPERAND (base, 0));
6556 if (!base || TREE_CODE (base) != ADDR_EXPR)
6557 return NULL_TREE;
6558 base = TREE_OPERAND (base, 0);
6559 }
13e88953
RB
6560 else if (valueize
6561 && TREE_CODE (base) == SSA_NAME)
6562 base = valueize (base);
cfef45c8
RG
6563
6564 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6565 DECL_INITIAL. If BASE is a nested reference into another
6566 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6567 the inner reference. */
6568 switch (TREE_CODE (base))
6569 {
6570 case VAR_DECL:
cfef45c8 6571 case CONST_DECL:
6a6dac52
JH
6572 {
6573 tree init = ctor_for_folding (base);
6574
688010ba 6575	/* Our semantics are the exact opposite of ctor_for_folding's;
6a6dac52
JH
6576 NULL means unknown, while error_mark_node is 0. */
6577 if (init == error_mark_node)
6578 return NULL_TREE;
6579 if (!init)
6580 return error_mark_node;
6581 return init;
6582 }
cfef45c8 6583
13e88953
RB
6584 case VIEW_CONVERT_EXPR:
6585 return get_base_constructor (TREE_OPERAND (base, 0),
6586 bit_offset, valueize);
6587
cfef45c8
RG
6588 case ARRAY_REF:
6589 case COMPONENT_REF:
ee45a32d
EB
6590 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6591 &reverse);
588db50c 6592 if (!known_size_p (max_size) || maybe_ne (size, max_size))
cfef45c8
RG
6593 return NULL_TREE;
6594 *bit_offset += bit_offset2;
6595 return get_base_constructor (base, bit_offset, valueize);
6596
cfef45c8
RG
6597 case CONSTRUCTOR:
6598 return base;
6599
6600 default:
13e88953
RB
6601 if (CONSTANT_CLASS_P (base))
6602 return base;
6603
cfef45c8
RG
6604 return NULL_TREE;
6605 }
6606}
6607
35b4d3a6
MS
6608/* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6609 to the memory at bit OFFSET. When non-null, TYPE is the expected
6610 type of the reference; otherwise the type of the referenced element
6611 is used instead. When SIZE is zero, attempt to fold a reference to
6612 the entire element which OFFSET refers to. Increment *SUBOFF by
6613 the bit offset of the accessed element. */
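
/* Editorial example (not part of the original source), assuming a 32-bit
   int: given "static const int a[4] = { 1, 2, 3, 4 };" and a 32-bit access
   at bit offset 64 into the CONSTRUCTOR of a, the reference folds to the
   constant 3; a reference to memory the constructor does not mention
   folds to zero of the requested type.  */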
cfef45c8
RG
6614
6615static tree
6616fold_array_ctor_reference (tree type, tree ctor,
6617 unsigned HOST_WIDE_INT offset,
c44c2088 6618 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
6619 tree from_decl,
6620 unsigned HOST_WIDE_INT *suboff)
cfef45c8 6621{
807e902e
KZ
6622 offset_int low_bound;
6623 offset_int elt_size;
807e902e 6624 offset_int access_index;
6a636014 6625 tree domain_type = NULL_TREE;
cfef45c8
RG
6626 HOST_WIDE_INT inner_offset;
6627
6628 /* Compute low bound and elt size. */
eb8f1123
RG
6629 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6630 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
6631 if (domain_type && TYPE_MIN_VALUE (domain_type))
6632 {
 6633      /* Static constructors for variably sized objects make no sense. */
9ef2eff0
RB
6634 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6635 return NULL_TREE;
807e902e 6636 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
6637 }
6638 else
807e902e 6639 low_bound = 0;
cfef45c8 6640  /* Static constructors for variably sized objects make no sense. */
9ef2eff0
RB
6641 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6642 return NULL_TREE;
807e902e 6643 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8 6644
35b4d3a6
MS
 6645  /* When TYPE is non-null, verify that it specifies a constant-sized
 6646     access not larger than the size of the array element. */
6647 if (type
6648 && (!TYPE_SIZE_UNIT (type)
6649 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6650 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6651 || elt_size == 0))
cfef45c8
RG
6652 return NULL_TREE;
6653
6654 /* Compute the array index we look for. */
807e902e
KZ
6655 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6656 elt_size);
27bcd47c 6657 access_index += low_bound;
cfef45c8
RG
6658
6659 /* And offset within the access. */
27bcd47c 6660 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8
RG
6661
 6662  /* See if the array field is large enough to span the whole access. We do not
6663 care to fold accesses spanning multiple array indexes. */
27bcd47c 6664 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
cfef45c8 6665 return NULL_TREE;
6a636014 6666 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
35b4d3a6
MS
6667 {
6668 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6669 {
6670 /* For the final reference to the entire accessed element
 6671	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
6672 may be null) in favor of the type of the element, and set
6673 SIZE to the size of the accessed element. */
6674 inner_offset = 0;
6675 type = TREE_TYPE (val);
6676 size = elt_size.to_uhwi () * BITS_PER_UNIT;
6677 }
6678
6679 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6680 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6681 suboff);
6682 }
cfef45c8 6683
35b4d3a6
MS
6684 /* Memory not explicitly mentioned in constructor is 0 (or
6685 the reference is out of range). */
6686 return type ? build_zero_cst (type) : NULL_TREE;
cfef45c8
RG
6687}
6688
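/* Illustrative sketch (not part of the original sources): for

       static const int a[4] = { 10, 20, 30, 40 };

   a read of a[2] reaches fold_array_ctor_reference with OFFSET = 64 and
   SIZE = 32 (assuming 32-bit int and BITS_PER_UNIT == 8).  With
   low_bound = 0 and elt_size = 4 the access_index is 64 / 8 / 4 = 2,
   inner_offset is 0, and the CONSTRUCTOR element at index 2 folds the
   reference to the constant 30.  */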
35b4d3a6
MS
6689/* CTOR is a CONSTRUCTOR of an aggregate or vector. Fold a reference
6690 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6691 is the expected type of the reference; otherwise the type of
6692 the referenced member is used instead. When SIZE is zero,
6693 attempt to fold a reference to the entire member which OFFSET
6694 refers to. Increment *SUBOFF by the bit offset
6695 of the accessed member. */
cfef45c8
RG
6696
6697static tree
6698fold_nonarray_ctor_reference (tree type, tree ctor,
6699 unsigned HOST_WIDE_INT offset,
c44c2088 6700 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
6701 tree from_decl,
6702 unsigned HOST_WIDE_INT *suboff)
cfef45c8
RG
6703{
6704 unsigned HOST_WIDE_INT cnt;
6705 tree cfield, cval;
6706
6707 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6708 cval)
6709 {
6710 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6711 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6712 tree field_size = DECL_SIZE (cfield);
35b4d3a6
MS
6713
6714 if (!field_size)
6715 {
6716 /* Determine the size of the flexible array member from
6717 the size of the initializer provided for it. */
6718 field_size = TYPE_SIZE (TREE_TYPE (cval));
6719 }
cfef45c8
RG
6720
6721 /* Variable sized objects in static constructors make no sense,
6722 but field_size can be NULL for flexible array members. */
6723 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6724 && TREE_CODE (byte_offset) == INTEGER_CST
6725 && (field_size != NULL_TREE
6726 ? TREE_CODE (field_size) == INTEGER_CST
6727 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6728
6729 /* Compute bit offset of the field. */
35b4d3a6
MS
6730 offset_int bitoffset
6731 = (wi::to_offset (field_offset)
6732 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
cfef45c8 6733 /* Compute bit offset where the field ends. */
35b4d3a6 6734 offset_int bitoffset_end;
cfef45c8 6735 if (field_size != NULL_TREE)
807e902e 6736 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 6737 else
807e902e 6738 bitoffset_end = 0;
cfef45c8 6739
35b4d3a6
MS
6740 /* Compute the bit offset of the end of the desired access.
6741 As a special case, if the size of the desired access is
6742 zero, assume the access is to the entire field (and let
6743 the caller make any necessary adjustments by storing
6744 the actual bounds of the field in FIELDBOUNDS). */
6745 offset_int access_end = offset_int (offset);
6746 if (size)
6747 access_end += size;
6748 else
6749 access_end = bitoffset_end;
b8b2b009 6750
35b4d3a6
MS
6751 /* Is there any overlap between the desired access at
6752 [OFFSET, OFFSET+SIZE) and the offset of the field within
6753 the object at [BITOFFSET, BITOFFSET_END)? */
807e902e 6754 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 6755 && (field_size == NULL_TREE
807e902e 6756 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 6757 {
35b4d3a6
MS
6758 *suboff += bitoffset.to_uhwi ();
6759
6760 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6761 {
6762 /* For the final reference to the entire accessed member
6763 (SIZE is zero), reset OFFSET, disregard TYPE (which may
6764 be null) in favor of the type of the member, and set
6765 SIZE to the size of the accessed member. */
6766 offset = bitoffset.to_uhwi ();
6767 type = TREE_TYPE (cval);
6768 size = (bitoffset_end - bitoffset).to_uhwi ();
6769 }
6770
6771 /* We do have overlap. Now see if the field is large enough
6772 to cover the access. Give up for accesses that extend
6773 beyond the end of the object or that span multiple fields. */
807e902e 6774 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 6775 return NULL_TREE;
032c80e9 6776 if (offset < bitoffset)
b8b2b009 6777 return NULL_TREE;
35b4d3a6
MS
6778
6779 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8 6780 return fold_ctor_reference (type, cval,
27bcd47c 6781 inner_offset.to_uhwi (), size,
35b4d3a6 6782 from_decl, suboff);
cfef45c8
RG
6783 }
6784 }
35b4d3a6
MS
6785 /* Memory not explicitly mentioned in constructor is 0. */
6786 return type ? build_zero_cst (type) : NULL_TREE;
cfef45c8
RG
6787}
6788
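/* Illustrative sketch (not part of the original sources): for

       struct s { char c; int i; };
       static const struct s x = { 'a', 7 };

   a read of x.i arrives with OFFSET = 32 and SIZE = 32 on a typical
   ILP32/LP64 target.  Walking the CONSTRUCTOR, field `i' has
   bitoffset = 32 and bitoffset_end = 64, the desired access [32, 64)
   overlaps it exactly, inner_offset is 0, and the recursive
   fold_ctor_reference call returns the constant 7.  */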
35b4d3a6
MS
6789/* CTOR is the value initializing memory. Fold a reference of TYPE and
6790 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
6791 is zero, attempt to fold a reference to the entire subobject
6792 which POLY_OFFSET refers to. This is used when folding accesses to
6793 string members of aggregates. When non-null, set *SUBOFF to
6794 the bit offset of the accessed subobject. */
cfef45c8 6795
8403c2cf 6796tree
35b4d3a6
MS
6797fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6798 const poly_uint64 &poly_size, tree from_decl,
6799 unsigned HOST_WIDE_INT *suboff /* = NULL */)
cfef45c8
RG
6800{
6801 tree ret;
6802
6803 /* We found the field with exact match. */
35b4d3a6
MS
6804 if (type
6805 && useless_type_conversion_p (type, TREE_TYPE (ctor))
30acf282 6806 && known_eq (poly_offset, 0U))
9d60be38 6807 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 6808
30acf282
RS
6809 /* The remaining optimizations need a constant size and offset. */
6810 unsigned HOST_WIDE_INT size, offset;
6811 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6812 return NULL_TREE;
6813
cfef45c8
RG
6814 /* We are at the end of walk, see if we can view convert the
6815 result. */
6816 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6817 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3d8208ce
TP
6818 && !compare_tree_int (TYPE_SIZE (type), size)
6819 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
cfef45c8 6820 {
9d60be38 6821 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 6822 if (ret)
672d9f8e
RB
6823 {
6824 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6825 if (ret)
6826 STRIP_USELESS_TYPE_CONVERSION (ret);
6827 }
cfef45c8
RG
6828 return ret;
6829 }
b2505143
RB
6830 /* For constants and byte-aligned/sized reads try to go through
6831 native_encode/interpret. */
6832 if (CONSTANT_CLASS_P (ctor)
6833 && BITS_PER_UNIT == 8
6834 && offset % BITS_PER_UNIT == 0
6835 && size % BITS_PER_UNIT == 0
6836 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6837 {
6838 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1ff0a84c
JJ
6839 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6840 offset / BITS_PER_UNIT);
6841 if (len > 0)
6842 return native_interpret_expr (type, buf, len);
b2505143 6843 }
cfef45c8
RG
6844 if (TREE_CODE (ctor) == CONSTRUCTOR)
6845 {
35b4d3a6
MS
6846 unsigned HOST_WIDE_INT dummy = 0;
6847 if (!suboff)
6848 suboff = &dummy;
cfef45c8 6849
eb8f1123
RG
6850 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6851 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
c44c2088 6852 return fold_array_ctor_reference (type, ctor, offset, size,
35b4d3a6
MS
6853 from_decl, suboff);
6854
6855 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6856 from_decl, suboff);
cfef45c8
RG
6857 }
6858
6859 return NULL_TREE;
6860}
6861
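/* A minimal usage sketch (hypothetical helper, not part of gimple-fold.c):
   read SIZE bits at bit OFFSET from the constant initializer of DECL as a
   value of type TYPE, or return NULL_TREE if that cannot be done.  */

static tree
example_read_from_initializer (tree type, tree decl,
			       unsigned HOST_WIDE_INT offset,
			       unsigned HOST_WIDE_INT size)
{
  tree init = ctor_for_folding (decl);
  if (!init || init == error_mark_node)
    return NULL_TREE;
  /* SUBOFF is left at its default of NULL; only the folded value is
     wanted here.  */
  return fold_ctor_reference (type, init, offset, size, decl);
}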
6862/* Return the tree representing the element referenced by T if T is an
6863 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
6864 names using VALUEIZE. Return NULL_TREE otherwise. */
6865
6866tree
6867fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6868{
6869 tree ctor, idx, base;
588db50c 6870 poly_int64 offset, size, max_size;
cfef45c8 6871 tree tem;
ee45a32d 6872 bool reverse;
cfef45c8 6873
f8a7df45
RG
6874 if (TREE_THIS_VOLATILE (t))
6875 return NULL_TREE;
6876
3a65ee74 6877 if (DECL_P (t))
cfef45c8
RG
6878 return get_symbol_constant_value (t);
6879
6880 tem = fold_read_from_constant_string (t);
6881 if (tem)
6882 return tem;
6883
6884 switch (TREE_CODE (t))
6885 {
6886 case ARRAY_REF:
6887 case ARRAY_RANGE_REF:
6888 /* Constant indexes are handled well by get_base_constructor.
6889 Only special case variable offsets.
6890 FIXME: This code can't handle nested references with variable indexes
6891 (they will be handled only by iteration of ccp). Perhaps we can bring
6892 get_ref_base_and_extent here and make it use a valueize callback. */
6893 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6894 && valueize
6895 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
588db50c 6896 && poly_int_tree_p (idx))
cfef45c8
RG
6897 {
6898 tree low_bound, unit_size;
6899
6900 /* If the resulting bit-offset is constant, track it. */
6901 if ((low_bound = array_ref_low_bound (t),
588db50c 6902 poly_int_tree_p (low_bound))
cfef45c8 6903 && (unit_size = array_ref_element_size (t),
807e902e 6904 tree_fits_uhwi_p (unit_size)))
cfef45c8 6905 {
588db50c
RS
6906 poly_offset_int woffset
6907 = wi::sext (wi::to_poly_offset (idx)
6908 - wi::to_poly_offset (low_bound),
807e902e
KZ
6909 TYPE_PRECISION (TREE_TYPE (idx)));
6910
588db50c 6911 if (woffset.to_shwi (&offset))
807e902e 6912 {
807e902e
KZ
6913 /* TODO: This code seems wrong, multiply then check
6914 to see if it fits. */
6915 offset *= tree_to_uhwi (unit_size);
6916 offset *= BITS_PER_UNIT;
6917
6918 base = TREE_OPERAND (t, 0);
6919 ctor = get_base_constructor (base, &offset, valueize);
6920 /* Empty constructor. Always fold to 0. */
6921 if (ctor == error_mark_node)
6922 return build_zero_cst (TREE_TYPE (t));
6923 /* Out of bound array access. Value is undefined,
6924 but don't fold. */
588db50c 6925 if (maybe_lt (offset, 0))
807e902e
KZ
6926 return NULL_TREE;
6927 /* We cannot determine the ctor. */
6928 if (!ctor)
6929 return NULL_TREE;
6930 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6931 tree_to_uhwi (unit_size)
6932 * BITS_PER_UNIT,
6933 base);
6934 }
cfef45c8
RG
6935 }
6936 }
6937 /* Fallthru. */
6938
6939 case COMPONENT_REF:
6940 case BIT_FIELD_REF:
6941 case TARGET_MEM_REF:
6942 case MEM_REF:
ee45a32d 6943 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
cfef45c8
RG
6944 ctor = get_base_constructor (base, &offset, valueize);
6945
6946 /* Empty constructor. Always fold to 0. */
6947 if (ctor == error_mark_node)
6948 return build_zero_cst (TREE_TYPE (t));
6949 /* We do not know precise address. */
588db50c 6950 if (!known_size_p (max_size) || maybe_ne (max_size, size))
cfef45c8
RG
6951 return NULL_TREE;
6952 /* We cannot determine the ctor. */
6953 if (!ctor)
6954 return NULL_TREE;
6955
6956 /* Out of bound array access. Value is undefined, but don't fold. */
588db50c 6957 if (maybe_lt (offset, 0))
cfef45c8
RG
6958 return NULL_TREE;
6959
c44c2088
JH
6960 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6961 base);
cfef45c8
RG
6962
6963 case REALPART_EXPR:
6964 case IMAGPART_EXPR:
6965 {
6966 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6967 if (c && TREE_CODE (c) == COMPLEX_CST)
6968 return fold_build1_loc (EXPR_LOCATION (t),
6969 TREE_CODE (t), TREE_TYPE (t), c);
6970 break;
6971 }
6972
6973 default:
6974 break;
6975 }
6976
6977 return NULL_TREE;
6978}
6979
6980tree
6981fold_const_aggregate_ref (tree t)
6982{
6983 return fold_const_aggregate_ref_1 (t, NULL);
6984}
06bc3ec7 6985
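/* Illustrative sketch (not part of the original sources): with

       static const int a[2][3] = { { 1, 2, 3 }, { 4, 5, 6 } };

   passing the ARRAY_REF tree for a[1][2] to fold_const_aggregate_ref
   yields the INTEGER_CST 6: get_ref_base_and_extent reduces the
   reference to base `a' at a constant bit offset, get_base_constructor
   finds the initializer, and fold_ctor_reference walks the nested
   CONSTRUCTORs.  */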
85942f45 6986/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
6987 at OFFSET.
6988 If CAN_REFER is non-NULL, set it to false if the method
6989 is not referable or if the virtual table is ill-formed (such as rewritten
6990 by a non-C++ produced symbol); otherwise just return NULL in that case. */
81fa35bd
MJ
6991
6992tree
85942f45
JH
6993gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6994 tree v,
ec77d61f
JH
6995 unsigned HOST_WIDE_INT offset,
6996 bool *can_refer)
81fa35bd 6997{
85942f45
JH
6998 tree vtable = v, init, fn;
6999 unsigned HOST_WIDE_INT size;
8c311b50
JH
7000 unsigned HOST_WIDE_INT elt_size, access_index;
7001 tree domain_type;
81fa35bd 7002
ec77d61f
JH
7003 if (can_refer)
7004 *can_refer = true;
7005
9de2f554 7006 /* First of all, double check that we have a virtual table. */
8813a647 7007 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
ec77d61f 7008 {
ec77d61f
JH
7009 /* Pass down that we lost track of the target. */
7010 if (can_refer)
7011 *can_refer = false;
7012 return NULL_TREE;
7013 }
9de2f554 7014
2aa3da06
JH
7015 init = ctor_for_folding (v);
7016
9de2f554 7017 /* The virtual tables should always be born with constructors
2aa3da06
JH
7018 and we should always assume that they are available for
7019 folding. At the moment we do not stream them in all cases,
7020 but it should never happen that the ctor seems unreachable. */
7021 gcc_assert (init);
7022 if (init == error_mark_node)
7023 {
ec77d61f
JH
7024 /* Pass down that we lost track of the target. */
7025 if (can_refer)
7026 *can_refer = false;
2aa3da06
JH
7027 return NULL_TREE;
7028 }
81fa35bd 7029 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 7030 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 7031 offset *= BITS_PER_UNIT;
81fa35bd 7032 offset += token * size;
9de2f554 7033
8c311b50
JH
7034 /* Look up the value in the constructor, which is assumed to be an array.
7035 This is equivalent to
7036 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
7037 offset, size, NULL);
7038 but in constant time. We expect that the frontend produced a simple
7039 array without indexed initializers. */
7040
7041 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
7042 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
7043 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
7044 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
7045
7046 access_index = offset / BITS_PER_UNIT / elt_size;
7047 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
7048
bf8d8309
MP
7049 /* The C++ FE can now produce indexed fields, and we check if the indexes
7050 match. */
8c311b50
JH
7051 if (access_index < CONSTRUCTOR_NELTS (init))
7052 {
7053 fn = CONSTRUCTOR_ELT (init, access_index)->value;
bf8d8309
MP
7054 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7055 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8c311b50
JH
7056 STRIP_NOPS (fn);
7057 }
7058 else
7059 fn = NULL;
9de2f554
JH
7060
7061 /* For type inconsistent program we may end up looking up virtual method
7062 in virtual table that does not contain TOKEN entries. We may overrun
7063 the virtual table and pick up a constant or RTTI info pointer.
7064 In any case the call is undefined. */
7065 if (!fn
7066 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7067 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7068 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7069 else
7070 {
7071 fn = TREE_OPERAND (fn, 0);
7072
7073 /* When the cgraph node is missing and the function is not public, we cannot
7074 devirtualize. This can happen in WHOPR when the actual method
7075 ends up in another partition, because we found the devirtualization
7076 possibility too late. */
7077 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
7078 {
7079 if (can_refer)
7080 {
7081 *can_refer = false;
7082 return fn;
7083 }
7084 return NULL_TREE;
7085 }
9de2f554 7086 }
81fa35bd 7087
7501ca28
RG
7088 /* Make sure we create a cgraph node for functions we'll reference.
7089 They can be non-existent if the reference comes from an entry
7090 of an external vtable for example. */
d52f5295 7091 cgraph_node::get_create (fn);
7501ca28 7092
81fa35bd
MJ
7093 return fn;
7094}
7095
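/* Illustrative sketch (not part of the original sources): on an LP64
   target each vtable slot is 8 bytes (64 bits), so with TOKEN = 2 and
   OFFSET = 16 (the address point past the usual two header slots) the
   code above computes

       offset = 16 * 8 + 2 * 64 = 256 bits
       access_index = 256 / 8 / 8 = 4

   and fetches CONSTRUCTOR_ELT (init, 4), i.e. the entry for the third
   virtual method after the two header entries.  */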
85942f45
JH
7096/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7097 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7098 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
7099 OBJ_TYPE_REF_OBJECT(REF).
7100 If CAN_REFER is non-NULL, set it to false if the method
7101 is not referable or if the virtual table is ill-formed (such as rewritten
7102 by a non-C++ produced symbol); otherwise just return NULL in that case. */
85942f45
JH
7103
7104tree
ec77d61f
JH
7105gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7106 bool *can_refer)
85942f45
JH
7107{
7108 unsigned HOST_WIDE_INT offset;
7109 tree v;
7110
7111 v = BINFO_VTABLE (known_binfo);
7112 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
7113 if (!v)
7114 return NULL_TREE;
7115
7116 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
7117 {
7118 if (can_refer)
7119 *can_refer = false;
7120 return NULL_TREE;
7121 }
7122 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
7123}
7124
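/* Illustrative sketch (not part of the original sources): under the
   Itanium C++ ABI, BINFO_VTABLE (KNOWN_BINFO) is typically an address
   like &_ZTV1A + 16, i.e. the vtable symbol plus the offset of its
   address point past the offset-to-top and RTTI slots.
   vtable_pointer_value_to_vtable splits that into V = _ZTV1A and
   OFFSET = 16, and gimple_get_virt_method_for_vtable then combines
   OFFSET with TOKEN to index the vtable's CONSTRUCTOR.  */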
737f500a
RB
7125/* Given a pointer value T, return a simplified version of an
7126 indirection through T, or NULL_TREE if no simplification is
b184c8f1
AM
7127 possible. Note that the resulting type may be different from
7128 the type pointed to in the sense that it is still compatible
7129 from the langhooks point of view. */
7130
7131tree
7132gimple_fold_indirect_ref (tree t)
7133{
7134 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
7135 tree sub = t;
7136 tree subtype;
7137
7138 STRIP_NOPS (sub);
7139 subtype = TREE_TYPE (sub);
737f500a
RB
7140 if (!POINTER_TYPE_P (subtype)
7141 || TYPE_REF_CAN_ALIAS_ALL (ptype))
b184c8f1
AM
7142 return NULL_TREE;
7143
7144 if (TREE_CODE (sub) == ADDR_EXPR)
7145 {
7146 tree op = TREE_OPERAND (sub, 0);
7147 tree optype = TREE_TYPE (op);
7148 /* *&p => p */
7149 if (useless_type_conversion_p (type, optype))
7150 return op;
7151
7152 /* *(foo *)&fooarray => fooarray[0] */
7153 if (TREE_CODE (optype) == ARRAY_TYPE
7154 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7155 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7156 {
7157 tree type_domain = TYPE_DOMAIN (optype);
7158 tree min_val = size_zero_node;
7159 if (type_domain && TYPE_MIN_VALUE (type_domain))
7160 min_val = TYPE_MIN_VALUE (type_domain);
7161 if (TREE_CODE (min_val) == INTEGER_CST)
7162 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7163 }
7164 /* *(foo *)&complexfoo => __real__ complexfoo */
7165 else if (TREE_CODE (optype) == COMPLEX_TYPE
7166 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7167 return fold_build1 (REALPART_EXPR, type, op);
7168 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7169 else if (TREE_CODE (optype) == VECTOR_TYPE
7170 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7171 {
7172 tree part_width = TYPE_SIZE (type);
7173 tree index = bitsize_int (0);
7174 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7175 }
7176 }
7177
7178 /* *(p + CST) -> ... */
7179 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7180 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7181 {
7182 tree addr = TREE_OPERAND (sub, 0);
7183 tree off = TREE_OPERAND (sub, 1);
7184 tree addrtype;
7185
7186 STRIP_NOPS (addr);
7187 addrtype = TREE_TYPE (addr);
7188
7189 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7190 if (TREE_CODE (addr) == ADDR_EXPR
7191 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7192 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
cc269bb6 7193 && tree_fits_uhwi_p (off))
b184c8f1 7194 {
ae7e9ddd 7195 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
b184c8f1
AM
7196 tree part_width = TYPE_SIZE (type);
7197 unsigned HOST_WIDE_INT part_widthi
9439e9a1 7198 = tree_to_shwi (part_width) / BITS_PER_UNIT;
b184c8f1
AM
7199 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7200 tree index = bitsize_int (indexi);
928686b1
RS
7201 if (known_lt (offset / part_widthi,
7202 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
b184c8f1
AM
7203 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7204 part_width, index);
7205 }
7206
7207 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7208 if (TREE_CODE (addr) == ADDR_EXPR
7209 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7210 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7211 {
7212 tree size = TYPE_SIZE_UNIT (type);
7213 if (tree_int_cst_equal (size, off))
7214 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7215 }
7216
7217 /* *(p + CST) -> MEM_REF <p, CST>. */
7218 if (TREE_CODE (addr) != ADDR_EXPR
7219 || DECL_P (TREE_OPERAND (addr, 0)))
7220 return fold_build2 (MEM_REF, type,
7221 addr,
8e6cdc90 7222 wide_int_to_tree (ptype, wi::to_wide (off)));
b184c8f1
AM
7223 }
7224
7225 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7226 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7227 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7228 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7229 {
7230 tree type_domain;
7231 tree min_val = size_zero_node;
7232 tree osub = sub;
7233 sub = gimple_fold_indirect_ref (sub);
7234 if (! sub)
7235 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7236 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7237 if (type_domain && TYPE_MIN_VALUE (type_domain))
7238 min_val = TYPE_MIN_VALUE (type_domain);
7239 if (TREE_CODE (min_val) == INTEGER_CST)
7240 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7241 }
7242
7243 return NULL_TREE;
7244}
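/* Illustrative sketch (not part of the original sources): given

       static _Complex double z;

   the tree *(double *)((char *) &z + 8) takes the POINTER_PLUS_EXPR
   path above (assuming 8-byte doubles): after stripping conversions
   ADDR is &z, whose pointed-to type is a COMPLEX_TYPE, the constant
   offset equals TYPE_SIZE_UNIT (double), and the whole indirection
   folds to __imag__ z.  */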
19e51b40
JJ
7245
7246/* Return true if CODE is an operation that when operating on signed
7247 integer types involves undefined behavior on overflow and the
7248 operation can be expressed with unsigned arithmetic. */
7249
7250bool
7251arith_code_with_undefined_signed_overflow (tree_code code)
7252{
7253 switch (code)
7254 {
7255 case PLUS_EXPR:
7256 case MINUS_EXPR:
7257 case MULT_EXPR:
7258 case NEGATE_EXPR:
7259 case POINTER_PLUS_EXPR:
7260 return true;
7261 default:
7262 return false;
7263 }
7264}
7265
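/* A minimal usage sketch (hypothetical helper, not part of gimple-fold.c):
   callers typically gate the rewrite below on a test like this for
   integer assignments.  */

static bool
example_needs_overflow_rewrite (gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  return (INTEGRAL_TYPE_P (type)
	  && TYPE_OVERFLOW_UNDEFINED (type)
	  && arith_code_with_undefined_signed_overflow
	       (gimple_assign_rhs_code (stmt)));
}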
7266/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7267 operation that can be transformed to unsigned arithmetic by converting
7268 its operand, carrying out the operation in the corresponding unsigned
7269 type and converting the result back to the original type.
7270
7271 Returns a sequence of statements that replace STMT and also contain
7272 a modified form of STMT itself. */
7273
7274gimple_seq
355fe088 7275rewrite_to_defined_overflow (gimple *stmt)
19e51b40
JJ
7276{
7277 if (dump_file && (dump_flags & TDF_DETAILS))
7278 {
7279 fprintf (dump_file, "rewriting stmt with undefined signed "
7280 "overflow ");
7281 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7282 }
7283
7284 tree lhs = gimple_assign_lhs (stmt);
7285 tree type = unsigned_type_for (TREE_TYPE (lhs));
7286 gimple_seq stmts = NULL;
7287 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7288 {
74e3c262
RB
7289 tree op = gimple_op (stmt, i);
7290 op = gimple_convert (&stmts, type, op);
7291 gimple_set_op (stmt, i, op);
19e51b40
JJ
7292 }
7293 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7294 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7295 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7296 gimple_seq_add_stmt (&stmts, stmt);
355fe088 7297 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
19e51b40
JJ
7298 gimple_seq_add_stmt (&stmts, cvt);
7299
7300 return stmts;
7301}
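/* For illustration (not from the original sources): given a statement

       x_3 = a_1 + b_2;    <-- signed int addition, overflow undefined

   rewrite_to_defined_overflow returns a sequence along the lines of

       _4 = (unsigned int) a_1;
       _5 = (unsigned int) b_2;
       _6 = _4 + _5;       <-- unsigned addition, wraps
       x_3 = (int) _6;

   where the third statement is STMT itself with converted operands and
   a fresh unsigned LHS.  The temporary names above are made up.  */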
d4f5cd5e 7302
3d2cf79f 7303
c26de36d
RB
7304/* The valueization hook we use for the gimple_build API simplification.
7305 This makes us match fold_buildN behavior by only combining with
7306 statements in the sequence(s) we are currently building. */
7307
7308static tree
7309gimple_build_valueize (tree op)
7310{
7311 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7312 return op;
7313 return NULL_TREE;
7314}
7315
3d2cf79f 7316/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 7317 simplifying it first if possible. Returns the built
3d2cf79f
RB
7318 expression value and appends statements possibly defining it
7319 to SEQ. */
7320
7321tree
7322gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7323 enum tree_code code, tree type, tree op0)
3d2cf79f 7324{
c26de36d 7325 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
7326 if (!res)
7327 {
a15ebbcd 7328 res = create_tmp_reg_or_ssa_name (type);
355fe088 7329 gimple *stmt;
3d2cf79f
RB
7330 if (code == REALPART_EXPR
7331 || code == IMAGPART_EXPR
7332 || code == VIEW_CONVERT_EXPR)
0d0e4a03 7333 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 7334 else
0d0e4a03 7335 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
7336 gimple_set_location (stmt, loc);
7337 gimple_seq_add_stmt_without_update (seq, stmt);
7338 }
7339 return res;
7340}
7341
7342/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 7343 simplifying it first if possible. Returns the built
3d2cf79f
RB
7344 expression value and appends statements possibly defining it
7345 to SEQ. */
7346
7347tree
7348gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7349 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 7350{
c26de36d 7351 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
7352 if (!res)
7353 {
a15ebbcd 7354 res = create_tmp_reg_or_ssa_name (type);
355fe088 7355 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
7356 gimple_set_location (stmt, loc);
7357 gimple_seq_add_stmt_without_update (seq, stmt);
7358 }
7359 return res;
7360}
7361
7362/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 7363 simplifying it first if possible. Returns the built
3d2cf79f
RB
7364 expression value and appends statements possibly defining it
7365 to SEQ. */
7366
7367tree
7368gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7369 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
7370{
7371 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 7372 seq, gimple_build_valueize);
3d2cf79f
RB
7373 if (!res)
7374 {
a15ebbcd 7375 res = create_tmp_reg_or_ssa_name (type);
355fe088 7376 gimple *stmt;
3d2cf79f 7377 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
7378 stmt = gimple_build_assign (res, code,
7379 build3 (code, type, op0, op1, op2));
3d2cf79f 7380 else
0d0e4a03 7381 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
7382 gimple_set_location (stmt, loc);
7383 gimple_seq_add_stmt_without_update (seq, stmt);
7384 }
7385 return res;
7386}
7387
7388/* Build the call FN (ARG0) with a result of type TYPE
7389 (or no result if TYPE is void) with location LOC,
c26de36d 7390 simplifying it first if possible. Returns the built
3d2cf79f
RB
7391 expression value (or NULL_TREE if TYPE is void) and appends
7392 statements possibly defining it to SEQ. */
7393
7394tree
eb69361d
RS
7395gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7396 tree type, tree arg0)
3d2cf79f 7397{
c26de36d 7398 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
7399 if (!res)
7400 {
eb69361d
RS
7401 gcall *stmt;
7402 if (internal_fn_p (fn))
7403 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7404 else
7405 {
7406 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7407 stmt = gimple_build_call (decl, 1, arg0);
7408 }
3d2cf79f
RB
7409 if (!VOID_TYPE_P (type))
7410 {
a15ebbcd 7411 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7412 gimple_call_set_lhs (stmt, res);
7413 }
7414 gimple_set_location (stmt, loc);
7415 gimple_seq_add_stmt_without_update (seq, stmt);
7416 }
7417 return res;
7418}
7419
7420/* Build the call FN (ARG0, ARG1) with a result of type TYPE
7421 (or no result if TYPE is void) with location LOC,
c26de36d 7422 simplifying it first if possible. Returns the built
3d2cf79f
RB
7423 expression value (or NULL_TREE if TYPE is void) and appends
7424 statements possibly defining it to SEQ. */
7425
7426tree
eb69361d
RS
7427gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7428 tree type, tree arg0, tree arg1)
3d2cf79f 7429{
c26de36d 7430 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
7431 if (!res)
7432 {
eb69361d
RS
7433 gcall *stmt;
7434 if (internal_fn_p (fn))
7435 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7436 else
7437 {
7438 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7439 stmt = gimple_build_call (decl, 2, arg0, arg1);
7440 }
3d2cf79f
RB
7441 if (!VOID_TYPE_P (type))
7442 {
a15ebbcd 7443 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7444 gimple_call_set_lhs (stmt, res);
7445 }
7446 gimple_set_location (stmt, loc);
7447 gimple_seq_add_stmt_without_update (seq, stmt);
7448 }
7449 return res;
7450}
7451
7452/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7453 (or no result if TYPE is void) with location LOC,
c26de36d 7454 simplifying it first if possible. Returns the built
3d2cf79f
RB
7455 expression value (or NULL_TREE if TYPE is void) and appends
7456 statements possibly defining it to SEQ. */
7457
7458tree
eb69361d
RS
7459gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7460 tree type, tree arg0, tree arg1, tree arg2)
3d2cf79f 7461{
c26de36d
RB
7462 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7463 seq, gimple_build_valueize);
3d2cf79f
RB
7464 if (!res)
7465 {
eb69361d
RS
7466 gcall *stmt;
7467 if (internal_fn_p (fn))
7468 stmt = gimple_build_call_internal (as_internal_fn (fn),
7469 3, arg0, arg1, arg2);
7470 else
7471 {
7472 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7473 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7474 }
3d2cf79f
RB
7475 if (!VOID_TYPE_P (type))
7476 {
a15ebbcd 7477 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7478 gimple_call_set_lhs (stmt, res);
7479 }
7480 gimple_set_location (stmt, loc);
7481 gimple_seq_add_stmt_without_update (seq, stmt);
7482 }
7483 return res;
7484}
7485
7486/* Build the conversion (TYPE) OP with a result of type TYPE
7487 with location LOC if such a conversion is necessary in GIMPLE,
7488 simplifying it first.
7489 Returns the built expression value and appends
7490 statements possibly defining it to SEQ. */
d4f5cd5e
RB
7491
7492tree
7493gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7494{
7495 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7496 return op;
3d2cf79f 7497 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 7498}
68e57f04 7499
74e3c262
RB
7500/* Build the conversion (ptrofftype) OP with a result of a type
7501 compatible with ptrofftype with location LOC if such a conversion
7502 is necessary in GIMPLE, simplifying it first.
7503 Returns the built expression value and appends
7504 statements possibly defining it to SEQ. */
7505
7506tree
7507gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7508{
7509 if (ptrofftype_p (TREE_TYPE (op)))
7510 return op;
7511 return gimple_convert (seq, loc, sizetype, op);
7512}
7513
e7c45b66
RS
7514/* Build a vector of type TYPE in which each element has the value OP.
7515 Return a gimple value for the result, appending any new statements
7516 to SEQ. */
7517
7518tree
7519gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7520 tree op)
7521{
928686b1
RS
7522 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7523 && !CONSTANT_CLASS_P (op))
7524 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7525
e7c45b66
RS
7526 tree res, vec = build_vector_from_val (type, op);
7527 if (is_gimple_val (vec))
7528 return vec;
7529 if (gimple_in_ssa_p (cfun))
7530 res = make_ssa_name (type);
7531 else
7532 res = create_tmp_reg (type);
7533 gimple *stmt = gimple_build_assign (res, vec);
7534 gimple_set_location (stmt, loc);
7535 gimple_seq_add_stmt_without_update (seq, stmt);
7536 return res;
7537}
7538
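/* Illustrative note (not from the original sources): for a fixed-width
   vector type such as V4SI with a constant OP, build_vector_from_val
   produces a VECTOR_CST and no statement is appended to SEQ; for a
   variable-length vector type with a non-constant OP, a
   VEC_DUPLICATE_EXPR assignment is emitted instead.  */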
abe73c3d
RS
7539/* Build a vector from BUILDER, handling the case in which some elements
7540 are non-constant. Return a gimple value for the result, appending any
7541 new instructions to SEQ.
7542
7543 BUILDER must not have a stepped encoding on entry. This is because
7544 the function is not geared up to handle the arithmetic that would
7545 be needed in the variable case, and any code building a vector that
7546 is known to be constant should use BUILDER->build () directly. */
e7c45b66
RS
7547
7548tree
abe73c3d
RS
7549gimple_build_vector (gimple_seq *seq, location_t loc,
7550 tree_vector_builder *builder)
e7c45b66 7551{
abe73c3d
RS
7552 gcc_assert (builder->nelts_per_pattern () <= 2);
7553 unsigned int encoded_nelts = builder->encoded_nelts ();
7554 for (unsigned int i = 0; i < encoded_nelts; ++i)
7555 if (!TREE_CONSTANT ((*builder)[i]))
e7c45b66 7556 {
abe73c3d 7557 tree type = builder->type ();
928686b1 7558 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
e7c45b66
RS
7559 vec<constructor_elt, va_gc> *v;
7560 vec_alloc (v, nelts);
7561 for (i = 0; i < nelts; ++i)
abe73c3d 7562 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
e7c45b66
RS
7563
7564 tree res;
7565 if (gimple_in_ssa_p (cfun))
7566 res = make_ssa_name (type);
7567 else
7568 res = create_tmp_reg (type);
7569 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7570 gimple_set_location (stmt, loc);
7571 gimple_seq_add_stmt_without_update (seq, stmt);
7572 return res;
7573 }
abe73c3d 7574 return builder->build ();
e7c45b66
RS
7575}
7576
68e57f04
RS
7577/* Return true if the result of assignment STMT is known to be non-negative.
7578 If the return value is based on the assumption that signed overflow is
7579 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7580 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7581
7582static bool
7583gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7584 int depth)
7585{
7586 enum tree_code code = gimple_assign_rhs_code (stmt);
7587 switch (get_gimple_rhs_class (code))
7588 {
7589 case GIMPLE_UNARY_RHS:
7590 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7591 gimple_expr_type (stmt),
7592 gimple_assign_rhs1 (stmt),
7593 strict_overflow_p, depth);
7594 case GIMPLE_BINARY_RHS:
7595 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7596 gimple_expr_type (stmt),
7597 gimple_assign_rhs1 (stmt),
7598 gimple_assign_rhs2 (stmt),
7599 strict_overflow_p, depth);
7600 case GIMPLE_TERNARY_RHS:
7601 return false;
7602 case GIMPLE_SINGLE_RHS:
7603 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7604 strict_overflow_p, depth);
7605 case GIMPLE_INVALID_RHS:
7606 break;
7607 }
7608 gcc_unreachable ();
7609}
7610
7611/* Return true if return value of call STMT is known to be non-negative.
7612 If the return value is based on the assumption that signed overflow is
7613 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7614 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7615
7616static bool
7617gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7618 int depth)
7619{
7620 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7621 gimple_call_arg (stmt, 0) : NULL_TREE;
7622 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7623 gimple_call_arg (stmt, 1) : NULL_TREE;
7624
7625 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
1d9da71f 7626 gimple_call_combined_fn (stmt),
68e57f04
RS
7627 arg0,
7628 arg1,
7629 strict_overflow_p, depth);
7630}
7631
4534c203
RB
7632/* Return true if return value of call STMT is known to be non-negative.
7633 If the return value is based on the assumption that signed overflow is
7634 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7635 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7636
7637static bool
7638gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7639 int depth)
7640{
7641 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7642 {
7643 tree arg = gimple_phi_arg_def (stmt, i);
7644 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7645 return false;
7646 }
7647 return true;
7648}
7649
68e57f04
RS
7650/* Return true if STMT is known to compute a non-negative value.
7651 If the return value is based on the assumption that signed overflow is
7652 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7653 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7654
7655bool
7656gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7657 int depth)
7658{
7659 switch (gimple_code (stmt))
7660 {
7661 case GIMPLE_ASSIGN:
7662 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7663 depth);
7664 case GIMPLE_CALL:
7665 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7666 depth);
4534c203
RB
7667 case GIMPLE_PHI:
7668 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7669 depth);
68e57f04
RS
7670 default:
7671 return false;
7672 }
7673}
67dbe582
RS
7674
7675/* Return true if the floating-point value computed by assignment STMT
7676 is known to have an integer value. We also allow +Inf, -Inf and NaN
5a00b0aa 7677 to be considered integer values. Return false for signaling NaN.
67dbe582
RS
7678
7679 DEPTH is the current nesting depth of the query. */
7680
7681static bool
7682gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7683{
7684 enum tree_code code = gimple_assign_rhs_code (stmt);
7685 switch (get_gimple_rhs_class (code))
7686 {
7687 case GIMPLE_UNARY_RHS:
7688 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7689 gimple_assign_rhs1 (stmt), depth);
7690 case GIMPLE_BINARY_RHS:
7691 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7692 gimple_assign_rhs1 (stmt),
7693 gimple_assign_rhs2 (stmt), depth);
7694 case GIMPLE_TERNARY_RHS:
7695 return false;
7696 case GIMPLE_SINGLE_RHS:
7697 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7698 case GIMPLE_INVALID_RHS:
7699 break;
7700 }
7701 gcc_unreachable ();
7702}
7703
7704/* Return true if the floating-point value computed by call STMT is known
7705 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7706 considered integer values. Return false for signaling NaN.
67dbe582
RS
7707
7708 DEPTH is the current nesting depth of the query. */
7709
7710static bool
7711gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7712{
7713 tree arg0 = (gimple_call_num_args (stmt) > 0
7714 ? gimple_call_arg (stmt, 0)
7715 : NULL_TREE);
7716 tree arg1 = (gimple_call_num_args (stmt) > 1
7717 ? gimple_call_arg (stmt, 1)
7718 : NULL_TREE);
1d9da71f 7719 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
7720 arg0, arg1, depth);
7721}
7722
7723/* Return true if the floating-point result of phi STMT is known to have
7724 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 7725 integer values. Return false for signaling NaN.
67dbe582
RS
7726
7727 DEPTH is the current nesting depth of the query. */
7728
7729static bool
7730gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7731{
7732 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7733 {
7734 tree arg = gimple_phi_arg_def (stmt, i);
7735 if (!integer_valued_real_single_p (arg, depth + 1))
7736 return false;
7737 }
7738 return true;
7739}
7740
7741/* Return true if the floating-point value computed by STMT is known
7742 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7743 considered integer values. Return false for signaling NaN.
67dbe582
RS
7744
7745 DEPTH is the current nesting depth of the query. */
7746
7747bool
7748gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7749{
7750 switch (gimple_code (stmt))
7751 {
7752 case GIMPLE_ASSIGN:
7753 return gimple_assign_integer_valued_real_p (stmt, depth);
7754 case GIMPLE_CALL:
7755 return gimple_call_integer_valued_real_p (stmt, depth);
7756 case GIMPLE_PHI:
7757 return gimple_phi_integer_valued_real_p (stmt, depth);
7758 default:
7759 return false;
7760 }
7761}