]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
Fix filesystem::equivalent for mingw
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
a5544970 2 Copyright (C) 2010-2019 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5
AM
25#include "target.h"
26#include "rtl.h"
cbdd87d4 27#include "tree.h"
c7131fb2 28#include "gimple.h"
957060b5 29#include "predict.h"
c7131fb2 30#include "ssa.h"
957060b5
AM
31#include "cgraph.h"
32#include "gimple-pretty-print.h"
cc8bea0a 33#include "gimple-ssa-warn-restrict.h"
c7131fb2 34#include "fold-const.h"
36566b39
PK
35#include "stmt.h"
36#include "expr.h"
37#include "stor-layout.h"
7ee2468b 38#include "dumpfile.h"
2fb9a547 39#include "gimple-fold.h"
45b0be94 40#include "gimplify.h"
5be5c238 41#include "gimple-iterator.h"
442b4905
AM
42#include "tree-into-ssa.h"
43#include "tree-dfa.h"
025d57f0 44#include "tree-object-size.h"
7a300452 45#include "tree-ssa.h"
cbdd87d4 46#include "tree-ssa-propagate.h"
450ad0cd 47#include "ipa-utils.h"
4484a35a 48#include "tree-ssa-address.h"
862d0b35 49#include "langhooks.h"
19e51b40 50#include "gimplify-me.h"
2b5f0895 51#include "dbgcnt.h"
9b2b7279 52#include "builtins.h"
e0ee10ed
RB
53#include "tree-eh.h"
54#include "gimple-match.h"
48126138 55#include "gomp-constants.h"
f869c12f 56#include "optabs-query.h"
629b3d75 57#include "omp-general.h"
abd3a68c 58#include "tree-cfg.h"
a918bfbf 59#include "fold-const-call.h"
314e6352
ML
60#include "stringpool.h"
61#include "attribs.h"
45b2222a 62#include "asan.h"
025d57f0
MS
63#include "diagnostic-core.h"
64#include "intl.h"
6a33d0ff 65#include "calls.h"
5ebaa477 66#include "tree-vector-builder.h"
5d0d5d68 67#include "tree-ssa-strlen.h"
cbdd87d4 68
598f7235
MS
69enum strlen_range_kind {
70 /* Compute the exact constant string length. */
71 SRK_STRLEN,
72 /* Compute the maximum constant string length. */
73 SRK_STRLENMAX,
74 /* Compute a range of string lengths bounded by object sizes. When
75 the length of a string cannot be determined, consider as the upper
76 bound the size of the enclosing object the string may be a member
77 or element of. Also determine the size of the largest character
78 array the string may refer to. */
79 SRK_LENRANGE,
598f7235
MS
80 /* Determine the integer value of the argument (not string length). */
81 SRK_INT_VALUE
82};
83
03c4a945
MS
84static bool
85get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
fb471a13 86
b3b9f3d0 87/* Return true when DECL can be referenced from current unit.
c44c2088
JH
88 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
89 We can get declarations that are not possible to reference for various
90 reasons:
1389294c 91
1389294c
JH
92 1) When analyzing C++ virtual tables.
93 C++ virtual tables do have known constructors even
94 when they are keyed to other compilation unit.
95 Those tables can contain pointers to methods and vars
96 in other units. Those methods have both STATIC and EXTERNAL
97 set.
98 2) In WHOPR mode devirtualization might lead to reference
99 to method that was partitioned elsehwere.
100 In this case we have static VAR_DECL or FUNCTION_DECL
101 that has no corresponding callgraph/varpool node
b3b9f3d0
JH
102 declaring the body.
103 3) COMDAT functions referred by external vtables that
3e89949e 104 we devirtualize only during final compilation stage.
b3b9f3d0
JH
105 At this time we already decided that we will not output
106 the function body and thus we can't reference the symbol
107 directly. */
108
1389294c 109static bool
c44c2088 110can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
1389294c 111{
2c8326a5 112 varpool_node *vnode;
1389294c 113 struct cgraph_node *node;
5e20cdc9 114 symtab_node *snode;
c44c2088 115
00de328a 116 if (DECL_ABSTRACT_P (decl))
1632a686
JH
117 return false;
118
119 /* We are concerned only about static/external vars and functions. */
120 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
8813a647 121 || !VAR_OR_FUNCTION_DECL_P (decl))
1632a686
JH
122 return true;
123
124 /* Static objects can be referred only if they was not optimized out yet. */
125 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
126 {
3aaf0529
JH
127 /* Before we start optimizing unreachable code we can be sure all
128 static objects are defined. */
3dafb85c 129 if (symtab->function_flags_ready)
3aaf0529 130 return true;
d52f5295 131 snode = symtab_node::get (decl);
3aaf0529 132 if (!snode || !snode->definition)
1632a686 133 return false;
7de90a6c 134 node = dyn_cast <cgraph_node *> (snode);
1632a686
JH
135 return !node || !node->global.inlined_to;
136 }
137
6da8be89 138 /* We will later output the initializer, so we can refer to it.
c44c2088 139 So we are concerned only when DECL comes from initializer of
3aaf0529 140 external var or var that has been optimized out. */
c44c2088 141 if (!from_decl
8813a647 142 || !VAR_P (from_decl)
3aaf0529 143 || (!DECL_EXTERNAL (from_decl)
9041d2e6 144 && (vnode = varpool_node::get (from_decl)) != NULL
3aaf0529 145 && vnode->definition)
6da8be89 146 || (flag_ltrans
9041d2e6 147 && (vnode = varpool_node::get (from_decl)) != NULL
6adda80b 148 && vnode->in_other_partition))
c44c2088 149 return true;
c44c2088
JH
150 /* We are folding reference from external vtable. The vtable may reffer
151 to a symbol keyed to other compilation unit. The other compilation
152 unit may be in separate DSO and the symbol may be hidden. */
153 if (DECL_VISIBILITY_SPECIFIED (decl)
154 && DECL_EXTERNAL (decl)
a33a931b 155 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
d52f5295 156 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
c44c2088 157 return false;
b3b9f3d0
JH
158 /* When function is public, we always can introduce new reference.
159 Exception are the COMDAT functions where introducing a direct
160 reference imply need to include function body in the curren tunit. */
161 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
162 return true;
3aaf0529
JH
163 /* We have COMDAT. We are going to check if we still have definition
164 or if the definition is going to be output in other partition.
165 Bypass this when gimplifying; all needed functions will be produced.
c44c2088
JH
166
167 As observed in PR20991 for already optimized out comdat virtual functions
073a8998 168 it may be tempting to not necessarily give up because the copy will be
c44c2088
JH
169 output elsewhere when corresponding vtable is output.
170 This is however not possible - ABI specify that COMDATs are output in
171 units where they are used and when the other unit was compiled with LTO
172 it is possible that vtable was kept public while the function itself
173 was privatized. */
3dafb85c 174 if (!symtab->function_flags_ready)
b3b9f3d0 175 return true;
c44c2088 176
d52f5295 177 snode = symtab_node::get (decl);
3aaf0529
JH
178 if (!snode
179 || ((!snode->definition || DECL_EXTERNAL (decl))
180 && (!snode->in_other_partition
181 || (!snode->forced_by_abi && !snode->force_output))))
182 return false;
183 node = dyn_cast <cgraph_node *> (snode);
184 return !node || !node->global.inlined_to;
1389294c
JH
185}
186
a15ebbcd
ML
187/* Create a temporary for TYPE for a statement STMT. If the current function
188 is in SSA form, a SSA name is created. Otherwise a temporary register
189 is made. */
190
edc19e03
WS
191tree
192create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
a15ebbcd
ML
193{
194 if (gimple_in_ssa_p (cfun))
195 return make_ssa_name (type, stmt);
196 else
197 return create_tmp_reg (type);
198}
199
0038d4e0 200/* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
c44c2088
JH
201 acceptable form for is_gimple_min_invariant.
202 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
17f39a39
JH
203
204tree
c44c2088 205canonicalize_constructor_val (tree cval, tree from_decl)
17f39a39 206{
50619002
EB
207 tree orig_cval = cval;
208 STRIP_NOPS (cval);
315f5f1b
RG
209 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
210 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
17f39a39 211 {
315f5f1b
RG
212 tree ptr = TREE_OPERAND (cval, 0);
213 if (is_gimple_min_invariant (ptr))
214 cval = build1_loc (EXPR_LOCATION (cval),
215 ADDR_EXPR, TREE_TYPE (ptr),
216 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
217 ptr,
218 fold_convert (ptr_type_node,
219 TREE_OPERAND (cval, 1))));
17f39a39
JH
220 }
221 if (TREE_CODE (cval) == ADDR_EXPR)
222 {
5a27a197
RG
223 tree base = NULL_TREE;
224 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
ca5f4331
MM
225 {
226 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
227 if (base)
228 TREE_OPERAND (cval, 0) = base;
229 }
5a27a197
RG
230 else
231 base = get_base_address (TREE_OPERAND (cval, 0));
7501ca28
RG
232 if (!base)
233 return NULL_TREE;
b3b9f3d0 234
8813a647 235 if (VAR_OR_FUNCTION_DECL_P (base)
c44c2088 236 && !can_refer_decl_in_current_unit_p (base, from_decl))
1389294c 237 return NULL_TREE;
13f92e8d
JJ
238 if (TREE_TYPE (base) == error_mark_node)
239 return NULL_TREE;
8813a647 240 if (VAR_P (base))
46eb666a 241 TREE_ADDRESSABLE (base) = 1;
7501ca28
RG
242 else if (TREE_CODE (base) == FUNCTION_DECL)
243 {
244 /* Make sure we create a cgraph node for functions we'll reference.
245 They can be non-existent if the reference comes from an entry
246 of an external vtable for example. */
d52f5295 247 cgraph_node::get_create (base);
7501ca28 248 }
0038d4e0 249 /* Fixup types in global initializers. */
73aef89e
RG
250 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
251 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
50619002
EB
252
253 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
254 cval = fold_convert (TREE_TYPE (orig_cval), cval);
255 return cval;
17f39a39 256 }
846abd0d
RB
257 if (TREE_OVERFLOW_P (cval))
258 return drop_tree_overflow (cval);
50619002 259 return orig_cval;
17f39a39 260}
cbdd87d4
RG
261
262/* If SYM is a constant variable with known value, return the value.
263 NULL_TREE is returned otherwise. */
264
265tree
266get_symbol_constant_value (tree sym)
267{
6a6dac52
JH
268 tree val = ctor_for_folding (sym);
269 if (val != error_mark_node)
cbdd87d4 270 {
cbdd87d4
RG
271 if (val)
272 {
9d60be38 273 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 274 if (val && is_gimple_min_invariant (val))
17f39a39 275 return val;
1389294c
JH
276 else
277 return NULL_TREE;
cbdd87d4
RG
278 }
279 /* Variables declared 'const' without an initializer
280 have zero as the initializer if they may not be
281 overridden at link or run time. */
282 if (!val
b8a8c472 283 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 284 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
285 }
286
287 return NULL_TREE;
288}
289
290
cbdd87d4
RG
291
292/* Subroutine of fold_stmt. We perform several simplifications of the
293 memory reference tree EXPR and make sure to re-gimplify them properly
294 after propagation of constant addresses. IS_LHS is true if the
295 reference is supposed to be an lvalue. */
296
297static tree
298maybe_fold_reference (tree expr, bool is_lhs)
299{
17f39a39 300 tree result;
cbdd87d4 301
f0eddb90
RG
302 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
303 || TREE_CODE (expr) == REALPART_EXPR
304 || TREE_CODE (expr) == IMAGPART_EXPR)
305 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
306 return fold_unary_loc (EXPR_LOCATION (expr),
307 TREE_CODE (expr),
308 TREE_TYPE (expr),
309 TREE_OPERAND (expr, 0));
310 else if (TREE_CODE (expr) == BIT_FIELD_REF
311 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
312 return fold_ternary_loc (EXPR_LOCATION (expr),
313 TREE_CODE (expr),
314 TREE_TYPE (expr),
315 TREE_OPERAND (expr, 0),
316 TREE_OPERAND (expr, 1),
317 TREE_OPERAND (expr, 2));
318
f0eddb90
RG
319 if (!is_lhs
320 && (result = fold_const_aggregate_ref (expr))
321 && is_gimple_min_invariant (result))
322 return result;
cbdd87d4 323
cbdd87d4
RG
324 return NULL_TREE;
325}
326
327
328/* Attempt to fold an assignment statement pointed-to by SI. Returns a
329 replacement rhs for the statement or NULL_TREE if no simplification
330 could be made. It is assumed that the operands have been previously
331 folded. */
332
333static tree
334fold_gimple_assign (gimple_stmt_iterator *si)
335{
355fe088 336 gimple *stmt = gsi_stmt (*si);
cbdd87d4
RG
337 enum tree_code subcode = gimple_assign_rhs_code (stmt);
338 location_t loc = gimple_location (stmt);
339
340 tree result = NULL_TREE;
341
342 switch (get_gimple_rhs_class (subcode))
343 {
344 case GIMPLE_SINGLE_RHS:
345 {
346 tree rhs = gimple_assign_rhs1 (stmt);
347
8c00ba08
JW
348 if (TREE_CLOBBER_P (rhs))
349 return NULL_TREE;
350
4e71066d 351 if (REFERENCE_CLASS_P (rhs))
cbdd87d4
RG
352 return maybe_fold_reference (rhs, false);
353
bdf37f7a
JH
354 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
355 {
356 tree val = OBJ_TYPE_REF_EXPR (rhs);
357 if (is_gimple_min_invariant (val))
358 return val;
f8a39967 359 else if (flag_devirtualize && virtual_method_call_p (rhs))
bdf37f7a
JH
360 {
361 bool final;
362 vec <cgraph_node *>targets
f8a39967 363 = possible_polymorphic_call_targets (rhs, stmt, &final);
2b5f0895 364 if (final && targets.length () <= 1 && dbg_cnt (devirt))
bdf37f7a 365 {
2b5f0895
XDL
366 if (dump_enabled_p ())
367 {
4f5b9c80 368 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
2b5f0895
XDL
369 "resolving virtual function address "
370 "reference to function %s\n",
371 targets.length () == 1
372 ? targets[0]->name ()
3ef276e4 373 : "NULL");
2b5f0895 374 }
3ef276e4
RB
375 if (targets.length () == 1)
376 {
377 val = fold_convert (TREE_TYPE (val),
378 build_fold_addr_expr_loc
379 (loc, targets[0]->decl));
380 STRIP_USELESS_TYPE_CONVERSION (val);
381 }
382 else
67914693
SL
383 /* We cannot use __builtin_unreachable here because it
384 cannot have address taken. */
3ef276e4 385 val = build_int_cst (TREE_TYPE (val), 0);
bdf37f7a
JH
386 return val;
387 }
388 }
bdf37f7a 389 }
7524f419 390
cbdd87d4
RG
391 else if (TREE_CODE (rhs) == ADDR_EXPR)
392 {
70f34814
RG
393 tree ref = TREE_OPERAND (rhs, 0);
394 tree tem = maybe_fold_reference (ref, true);
395 if (tem
396 && TREE_CODE (tem) == MEM_REF
397 && integer_zerop (TREE_OPERAND (tem, 1)))
398 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
399 else if (tem)
cbdd87d4
RG
400 result = fold_convert (TREE_TYPE (rhs),
401 build_fold_addr_expr_loc (loc, tem));
70f34814
RG
402 else if (TREE_CODE (ref) == MEM_REF
403 && integer_zerop (TREE_OPERAND (ref, 1)))
404 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
7524f419
RB
405
406 if (result)
407 {
408 /* Strip away useless type conversions. Both the
409 NON_LVALUE_EXPR that may have been added by fold, and
410 "useless" type conversions that might now be apparent
411 due to propagation. */
412 STRIP_USELESS_TYPE_CONVERSION (result);
413
414 if (result != rhs && valid_gimple_rhs_p (result))
415 return result;
416 }
cbdd87d4
RG
417 }
418
419 else if (TREE_CODE (rhs) == CONSTRUCTOR
7524f419 420 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
cbdd87d4
RG
421 {
422 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
423 unsigned i;
424 tree val;
425
426 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7524f419 427 if (! CONSTANT_CLASS_P (val))
cbdd87d4
RG
428 return NULL_TREE;
429
430 return build_vector_from_ctor (TREE_TYPE (rhs),
431 CONSTRUCTOR_ELTS (rhs));
432 }
433
434 else if (DECL_P (rhs))
9d60be38 435 return get_symbol_constant_value (rhs);
cbdd87d4
RG
436 }
437 break;
438
439 case GIMPLE_UNARY_RHS:
cbdd87d4
RG
440 break;
441
442 case GIMPLE_BINARY_RHS:
cbdd87d4
RG
443 break;
444
0354c0c7 445 case GIMPLE_TERNARY_RHS:
5c099d40
RB
446 result = fold_ternary_loc (loc, subcode,
447 TREE_TYPE (gimple_assign_lhs (stmt)),
448 gimple_assign_rhs1 (stmt),
449 gimple_assign_rhs2 (stmt),
450 gimple_assign_rhs3 (stmt));
0354c0c7
BS
451
452 if (result)
453 {
454 STRIP_USELESS_TYPE_CONVERSION (result);
455 if (valid_gimple_rhs_p (result))
456 return result;
0354c0c7
BS
457 }
458 break;
459
cbdd87d4
RG
460 case GIMPLE_INVALID_RHS:
461 gcc_unreachable ();
462 }
463
464 return NULL_TREE;
465}
466
fef5a0d9
RB
467
468/* Replace a statement at *SI_P with a sequence of statements in STMTS,
469 adjusting the replacement stmts location and virtual operands.
470 If the statement has a lhs the last stmt in the sequence is expected
471 to assign to that lhs. */
472
473static void
474gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
475{
355fe088 476 gimple *stmt = gsi_stmt (*si_p);
fef5a0d9
RB
477
478 if (gimple_has_location (stmt))
479 annotate_all_with_location (stmts, gimple_location (stmt));
480
481 /* First iterate over the replacement statements backward, assigning
482 virtual operands to their defining statements. */
355fe088 483 gimple *laststore = NULL;
fef5a0d9
RB
484 for (gimple_stmt_iterator i = gsi_last (stmts);
485 !gsi_end_p (i); gsi_prev (&i))
486 {
355fe088 487 gimple *new_stmt = gsi_stmt (i);
fef5a0d9
RB
488 if ((gimple_assign_single_p (new_stmt)
489 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
490 || (is_gimple_call (new_stmt)
491 && (gimple_call_flags (new_stmt)
492 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
493 {
494 tree vdef;
495 if (!laststore)
496 vdef = gimple_vdef (stmt);
497 else
498 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
499 gimple_set_vdef (new_stmt, vdef);
500 if (vdef && TREE_CODE (vdef) == SSA_NAME)
501 SSA_NAME_DEF_STMT (vdef) = new_stmt;
502 laststore = new_stmt;
503 }
504 }
505
506 /* Second iterate over the statements forward, assigning virtual
507 operands to their uses. */
508 tree reaching_vuse = gimple_vuse (stmt);
509 for (gimple_stmt_iterator i = gsi_start (stmts);
510 !gsi_end_p (i); gsi_next (&i))
511 {
355fe088 512 gimple *new_stmt = gsi_stmt (i);
fef5a0d9
RB
513 /* If the new statement possibly has a VUSE, update it with exact SSA
514 name we know will reach this one. */
515 if (gimple_has_mem_ops (new_stmt))
516 gimple_set_vuse (new_stmt, reaching_vuse);
517 gimple_set_modified (new_stmt, true);
518 if (gimple_vdef (new_stmt))
519 reaching_vuse = gimple_vdef (new_stmt);
520 }
521
522 /* If the new sequence does not do a store release the virtual
523 definition of the original statement. */
524 if (reaching_vuse
525 && reaching_vuse == gimple_vuse (stmt))
526 {
527 tree vdef = gimple_vdef (stmt);
528 if (vdef
529 && TREE_CODE (vdef) == SSA_NAME)
530 {
531 unlink_stmt_vdef (stmt);
532 release_ssa_name (vdef);
533 }
534 }
535
536 /* Finally replace the original statement with the sequence. */
537 gsi_replace_with_seq (si_p, stmts, false);
538}
539
cbdd87d4
RG
540/* Convert EXPR into a GIMPLE value suitable for substitution on the
541 RHS of an assignment. Insert the necessary statements before
542 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
543 is replaced. If the call is expected to produces a result, then it
544 is replaced by an assignment of the new RHS to the result variable.
545 If the result is to be ignored, then the call is replaced by a
fe2ef088
MM
546 GIMPLE_NOP. A proper VDEF chain is retained by making the first
547 VUSE and the last VDEF of the whole sequence be the same as the replaced
548 statement and using new SSA names for stores in between. */
cbdd87d4
RG
549
550void
551gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
552{
553 tree lhs;
355fe088 554 gimple *stmt, *new_stmt;
cbdd87d4 555 gimple_stmt_iterator i;
355a7673 556 gimple_seq stmts = NULL;
cbdd87d4
RG
557
558 stmt = gsi_stmt (*si_p);
559
560 gcc_assert (is_gimple_call (stmt));
561
45852dcc 562 push_gimplify_context (gimple_in_ssa_p (cfun));
cbdd87d4 563
e256dfce 564 lhs = gimple_call_lhs (stmt);
cbdd87d4 565 if (lhs == NULL_TREE)
6e572326
RG
566 {
567 gimplify_and_add (expr, &stmts);
568 /* We can end up with folding a memcpy of an empty class assignment
569 which gets optimized away by C++ gimplification. */
570 if (gimple_seq_empty_p (stmts))
571 {
9fdc58de 572 pop_gimplify_context (NULL);
6e572326
RG
573 if (gimple_in_ssa_p (cfun))
574 {
575 unlink_stmt_vdef (stmt);
576 release_defs (stmt);
577 }
f6b4dc28 578 gsi_replace (si_p, gimple_build_nop (), false);
6e572326
RG
579 return;
580 }
581 }
cbdd87d4 582 else
e256dfce 583 {
381cdae4 584 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
e256dfce
RG
585 new_stmt = gimple_build_assign (lhs, tmp);
586 i = gsi_last (stmts);
587 gsi_insert_after_without_update (&i, new_stmt,
588 GSI_CONTINUE_LINKING);
589 }
cbdd87d4
RG
590
591 pop_gimplify_context (NULL);
592
fef5a0d9
RB
593 gsi_replace_with_seq_vops (si_p, stmts);
594}
cbdd87d4 595
fef5a0d9
RB
596
597/* Replace the call at *GSI with the gimple value VAL. */
598
e3174bdf 599void
fef5a0d9
RB
600replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
601{
355fe088 602 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 603 tree lhs = gimple_call_lhs (stmt);
355fe088 604 gimple *repl;
fef5a0d9 605 if (lhs)
e256dfce 606 {
fef5a0d9
RB
607 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
608 val = fold_convert (TREE_TYPE (lhs), val);
609 repl = gimple_build_assign (lhs, val);
610 }
611 else
612 repl = gimple_build_nop ();
613 tree vdef = gimple_vdef (stmt);
614 if (vdef && TREE_CODE (vdef) == SSA_NAME)
615 {
616 unlink_stmt_vdef (stmt);
617 release_ssa_name (vdef);
618 }
f6b4dc28 619 gsi_replace (gsi, repl, false);
fef5a0d9
RB
620}
621
622/* Replace the call at *GSI with the new call REPL and fold that
623 again. */
624
625static void
355fe088 626replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
fef5a0d9 627{
355fe088 628 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
629 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
630 gimple_set_location (repl, gimple_location (stmt));
631 if (gimple_vdef (stmt)
632 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
633 {
634 gimple_set_vdef (repl, gimple_vdef (stmt));
fef5a0d9
RB
635 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
636 }
00296d7f
JJ
637 if (gimple_vuse (stmt))
638 gimple_set_vuse (repl, gimple_vuse (stmt));
f6b4dc28 639 gsi_replace (gsi, repl, false);
fef5a0d9
RB
640 fold_stmt (gsi);
641}
642
643/* Return true if VAR is a VAR_DECL or a component thereof. */
644
645static bool
646var_decl_component_p (tree var)
647{
648 tree inner = var;
649 while (handled_component_p (inner))
650 inner = TREE_OPERAND (inner, 0);
47cac108
RB
651 return (DECL_P (inner)
652 || (TREE_CODE (inner) == MEM_REF
653 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
fef5a0d9
RB
654}
655
c89af696
AH
656/* Return TRUE if the SIZE argument, representing the size of an
657 object, is in a range of values of which exactly zero is valid. */
6512c0f1
MS
658
659static bool
660size_must_be_zero_p (tree size)
661{
662 if (integer_zerop (size))
663 return true;
664
3f27391f 665 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
6512c0f1
MS
666 return false;
667
6512c0f1
MS
668 tree type = TREE_TYPE (size);
669 int prec = TYPE_PRECISION (type);
670
6512c0f1
MS
671 /* Compute the value of SSIZE_MAX, the largest positive value that
672 can be stored in ssize_t, the signed counterpart of size_t. */
673 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
c89af696
AH
674 value_range valid_range (VR_RANGE,
675 build_int_cst (type, 0),
676 wide_int_to_tree (type, ssize_max));
677 value_range vr;
678 get_range_info (size, vr);
679 vr.intersect (&valid_range);
680 return vr.zero_p ();
6512c0f1
MS
681}
682
cc8bea0a
MS
683/* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
684 diagnose (otherwise undefined) overlapping copies without preventing
685 folding. When folded, GCC guarantees that overlapping memcpy has
686 the same semantics as memmove. Call to the library memcpy need not
687 provide the same guarantee. Return false if no simplification can
688 be made. */
fef5a0d9
RB
689
690static bool
691gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
692 tree dest, tree src, int endp)
693{
355fe088 694 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
695 tree lhs = gimple_call_lhs (stmt);
696 tree len = gimple_call_arg (stmt, 2);
697 tree destvar, srcvar;
698 location_t loc = gimple_location (stmt);
699
cc8bea0a 700 bool nowarn = gimple_no_warning_p (stmt);
cc8bea0a 701
6512c0f1
MS
702 /* If the LEN parameter is a constant zero or in range where
703 the only valid value is zero, return DEST. */
704 if (size_must_be_zero_p (len))
fef5a0d9 705 {
355fe088 706 gimple *repl;
fef5a0d9
RB
707 if (gimple_call_lhs (stmt))
708 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
709 else
710 repl = gimple_build_nop ();
711 tree vdef = gimple_vdef (stmt);
712 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 713 {
fef5a0d9
RB
714 unlink_stmt_vdef (stmt);
715 release_ssa_name (vdef);
716 }
f6b4dc28 717 gsi_replace (gsi, repl, false);
fef5a0d9
RB
718 return true;
719 }
720
721 /* If SRC and DEST are the same (and not volatile), return
722 DEST{,+LEN,+LEN-1}. */
723 if (operand_equal_p (src, dest, 0))
724 {
cc8bea0a
MS
725 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
726 It's safe and may even be emitted by GCC itself (see bug
e9b9fa4c 727 32667). */
fef5a0d9
RB
728 unlink_stmt_vdef (stmt);
729 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
730 release_ssa_name (gimple_vdef (stmt));
731 if (!lhs)
732 {
f6b4dc28 733 gsi_replace (gsi, gimple_build_nop (), false);
fef5a0d9
RB
734 return true;
735 }
736 goto done;
737 }
738 else
739 {
740 tree srctype, desttype;
741 unsigned int src_align, dest_align;
742 tree off0;
d01b568a
BE
743 const char *tmp_str;
744 unsigned HOST_WIDE_INT tmp_len;
fef5a0d9
RB
745
746 /* Build accesses at offset zero with a ref-all character type. */
747 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
748 ptr_mode, true), 0);
749
750 /* If we can perform the copy efficiently with first doing all loads
751 and then all stores inline it that way. Currently efficiently
752 means that we can load all the memory into a single integer
753 register which is what MOVE_MAX gives us. */
754 src_align = get_pointer_alignment (src);
755 dest_align = get_pointer_alignment (dest);
756 if (tree_fits_uhwi_p (len)
757 && compare_tree_int (len, MOVE_MAX) <= 0
758 /* ??? Don't transform copies from strings with known length this
759 confuses the tree-ssa-strlen.c. This doesn't handle
760 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
761 reason. */
d01b568a
BE
762 && !c_strlen (src, 2)
763 && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
764 && memchr (tmp_str, 0, tmp_len) == NULL))
fef5a0d9
RB
765 {
766 unsigned ilen = tree_to_uhwi (len);
146ec50f 767 if (pow2p_hwi (ilen))
fef5a0d9 768 {
cc8bea0a
MS
769 /* Detect invalid bounds and overlapping copies and issue
770 either -Warray-bounds or -Wrestrict. */
771 if (!nowarn
772 && check_bounds_or_overlap (as_a <gcall *>(stmt),
773 dest, src, len, len))
774 gimple_set_no_warning (stmt, true);
775
64ab8765 776 scalar_int_mode mode;
fef5a0d9
RB
777 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
778 if (type
64ab8765
RS
779 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
780 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
fef5a0d9
RB
781 /* If the destination pointer is not aligned we must be able
782 to emit an unaligned store. */
64ab8765 783 && (dest_align >= GET_MODE_ALIGNMENT (mode)
e0bd6c9f 784 || !targetm.slow_unaligned_access (mode, dest_align)
64ab8765 785 || (optab_handler (movmisalign_optab, mode)
f869c12f 786 != CODE_FOR_nothing)))
fef5a0d9
RB
787 {
788 tree srctype = type;
789 tree desttype = type;
64ab8765 790 if (src_align < GET_MODE_ALIGNMENT (mode))
fef5a0d9
RB
791 srctype = build_aligned_type (type, src_align);
792 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
793 tree tem = fold_const_aggregate_ref (srcmem);
794 if (tem)
795 srcmem = tem;
64ab8765 796 else if (src_align < GET_MODE_ALIGNMENT (mode)
e0bd6c9f 797 && targetm.slow_unaligned_access (mode, src_align)
64ab8765 798 && (optab_handler (movmisalign_optab, mode)
f869c12f 799 == CODE_FOR_nothing))
fef5a0d9
RB
800 srcmem = NULL_TREE;
801 if (srcmem)
802 {
355fe088 803 gimple *new_stmt;
fef5a0d9
RB
804 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
805 {
806 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
a15ebbcd
ML
807 srcmem
808 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
809 new_stmt);
fef5a0d9
RB
810 gimple_assign_set_lhs (new_stmt, srcmem);
811 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
812 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
813 }
64ab8765 814 if (dest_align < GET_MODE_ALIGNMENT (mode))
fef5a0d9
RB
815 desttype = build_aligned_type (type, dest_align);
816 new_stmt
817 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
818 dest, off0),
819 srcmem);
820 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
821 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
822 if (gimple_vdef (new_stmt)
823 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
824 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
825 if (!lhs)
826 {
f6b4dc28 827 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
828 return true;
829 }
830 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
831 goto done;
832 }
833 }
834 }
835 }
836
837 if (endp == 3)
838 {
839 /* Both DEST and SRC must be pointer types.
840 ??? This is what old code did. Is the testing for pointer types
841 really mandatory?
842
843 If either SRC is readonly or length is 1, we can use memcpy. */
844 if (!dest_align || !src_align)
845 return false;
846 if (readonly_data_expr (src)
847 || (tree_fits_uhwi_p (len)
848 && (MIN (src_align, dest_align) / BITS_PER_UNIT
849 >= tree_to_uhwi (len))))
850 {
851 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
852 if (!fn)
853 return false;
854 gimple_call_set_fndecl (stmt, fn);
855 gimple_call_set_arg (stmt, 0, dest);
856 gimple_call_set_arg (stmt, 1, src);
857 fold_stmt (gsi);
858 return true;
859 }
860
861 /* If *src and *dest can't overlap, optimize into memcpy as well. */
862 if (TREE_CODE (src) == ADDR_EXPR
863 && TREE_CODE (dest) == ADDR_EXPR)
864 {
865 tree src_base, dest_base, fn;
a90c8804
RS
866 poly_int64 src_offset = 0, dest_offset = 0;
867 poly_uint64 maxsize;
fef5a0d9
RB
868
869 srcvar = TREE_OPERAND (src, 0);
4fda19ef
JJ
870 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
871 if (src_base == NULL)
872 src_base = srcvar;
fef5a0d9 873 destvar = TREE_OPERAND (dest, 0);
4fda19ef
JJ
874 dest_base = get_addr_base_and_unit_offset (destvar,
875 &dest_offset);
876 if (dest_base == NULL)
877 dest_base = destvar;
a90c8804 878 if (!poly_int_tree_p (len, &maxsize))
fef5a0d9 879 maxsize = -1;
fef5a0d9
RB
880 if (SSA_VAR_P (src_base)
881 && SSA_VAR_P (dest_base))
882 {
883 if (operand_equal_p (src_base, dest_base, 0)
a90c8804
RS
884 && ranges_maybe_overlap_p (src_offset, maxsize,
885 dest_offset, maxsize))
fef5a0d9
RB
886 return false;
887 }
888 else if (TREE_CODE (src_base) == MEM_REF
889 && TREE_CODE (dest_base) == MEM_REF)
890 {
891 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
892 TREE_OPERAND (dest_base, 0), 0))
893 return false;
a90c8804
RS
894 poly_offset_int full_src_offset
895 = mem_ref_offset (src_base) + src_offset;
896 poly_offset_int full_dest_offset
897 = mem_ref_offset (dest_base) + dest_offset;
898 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
899 full_dest_offset, maxsize))
fef5a0d9
RB
900 return false;
901 }
902 else
903 return false;
904
905 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
906 if (!fn)
907 return false;
908 gimple_call_set_fndecl (stmt, fn);
909 gimple_call_set_arg (stmt, 0, dest);
910 gimple_call_set_arg (stmt, 1, src);
911 fold_stmt (gsi);
912 return true;
913 }
914
915 /* If the destination and source do not alias optimize into
916 memcpy as well. */
917 if ((is_gimple_min_invariant (dest)
918 || TREE_CODE (dest) == SSA_NAME)
919 && (is_gimple_min_invariant (src)
920 || TREE_CODE (src) == SSA_NAME))
921 {
922 ao_ref destr, srcr;
923 ao_ref_init_from_ptr_and_size (&destr, dest, len);
924 ao_ref_init_from_ptr_and_size (&srcr, src, len);
925 if (!refs_may_alias_p_1 (&destr, &srcr, false))
926 {
927 tree fn;
928 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
929 if (!fn)
930 return false;
931 gimple_call_set_fndecl (stmt, fn);
932 gimple_call_set_arg (stmt, 0, dest);
933 gimple_call_set_arg (stmt, 1, src);
934 fold_stmt (gsi);
935 return true;
936 }
937 }
938
939 return false;
940 }
941
942 if (!tree_fits_shwi_p (len))
943 return false;
fef5a0d9
RB
944 if (!POINTER_TYPE_P (TREE_TYPE (src))
945 || !POINTER_TYPE_P (TREE_TYPE (dest)))
946 return false;
947 /* In the following try to find a type that is most natural to be
948 used for the memcpy source and destination and that allows
949 the most optimization when memcpy is turned into a plain assignment
950 using that type. In theory we could always use a char[len] type
951 but that only gains us that the destination and source possibly
952 no longer will have their address taken. */
fef5a0d9
RB
953 srctype = TREE_TYPE (TREE_TYPE (src));
954 if (TREE_CODE (srctype) == ARRAY_TYPE
955 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
42f74245 956 srctype = TREE_TYPE (srctype);
fef5a0d9
RB
957 desttype = TREE_TYPE (TREE_TYPE (dest));
958 if (TREE_CODE (desttype) == ARRAY_TYPE
959 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
42f74245 960 desttype = TREE_TYPE (desttype);
fef5a0d9
RB
961 if (TREE_ADDRESSABLE (srctype)
962 || TREE_ADDRESSABLE (desttype))
963 return false;
964
965 /* Make sure we are not copying using a floating-point mode or
966 a type whose size possibly does not match its precision. */
967 if (FLOAT_MODE_P (TYPE_MODE (desttype))
968 || TREE_CODE (desttype) == BOOLEAN_TYPE
969 || TREE_CODE (desttype) == ENUMERAL_TYPE)
970 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
971 if (FLOAT_MODE_P (TYPE_MODE (srctype))
972 || TREE_CODE (srctype) == BOOLEAN_TYPE
973 || TREE_CODE (srctype) == ENUMERAL_TYPE)
974 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
975 if (!srctype)
976 srctype = desttype;
977 if (!desttype)
978 desttype = srctype;
979 if (!srctype)
980 return false;
981
982 src_align = get_pointer_alignment (src);
983 dest_align = get_pointer_alignment (dest);
984 if (dest_align < TYPE_ALIGN (desttype)
985 || src_align < TYPE_ALIGN (srctype))
986 return false;
987
42f74245
RB
988 destvar = NULL_TREE;
989 if (TREE_CODE (dest) == ADDR_EXPR
990 && var_decl_component_p (TREE_OPERAND (dest, 0))
fef5a0d9 991 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
42f74245 992 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
fef5a0d9 993
42f74245
RB
994 srcvar = NULL_TREE;
995 if (TREE_CODE (src) == ADDR_EXPR
996 && var_decl_component_p (TREE_OPERAND (src, 0))
fef5a0d9
RB
997 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
998 {
999 if (!destvar
1000 || src_align >= TYPE_ALIGN (desttype))
1001 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
42f74245 1002 src, off0);
fef5a0d9
RB
1003 else if (!STRICT_ALIGNMENT)
1004 {
1005 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1006 src_align);
42f74245 1007 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
fef5a0d9 1008 }
fef5a0d9 1009 }
fef5a0d9
RB
1010
1011 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1012 return false;
1013
1014 if (srcvar == NULL_TREE)
1015 {
fef5a0d9
RB
1016 if (src_align >= TYPE_ALIGN (desttype))
1017 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1018 else
1019 {
1020 if (STRICT_ALIGNMENT)
1021 return false;
1022 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1023 src_align);
1024 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1025 }
1026 }
1027 else if (destvar == NULL_TREE)
1028 {
fef5a0d9
RB
1029 if (dest_align >= TYPE_ALIGN (srctype))
1030 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1031 else
1032 {
1033 if (STRICT_ALIGNMENT)
1034 return false;
1035 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1036 dest_align);
1037 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1038 }
1039 }
1040
cc8bea0a
MS
1041 /* Detect invalid bounds and overlapping copies and issue either
1042 -Warray-bounds or -Wrestrict. */
1043 if (!nowarn)
1044 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1045
355fe088 1046 gimple *new_stmt;
fef5a0d9
RB
1047 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1048 {
921b13d0
RB
1049 tree tem = fold_const_aggregate_ref (srcvar);
1050 if (tem)
1051 srcvar = tem;
1052 if (! is_gimple_min_invariant (srcvar))
1053 {
1054 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
a15ebbcd
ML
1055 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1056 new_stmt);
921b13d0
RB
1057 gimple_assign_set_lhs (new_stmt, srcvar);
1058 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1059 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1060 }
d7257171
RB
1061 new_stmt = gimple_build_assign (destvar, srcvar);
1062 goto set_vop_and_replace;
fef5a0d9 1063 }
d7257171
RB
1064
1065 /* We get an aggregate copy. Use an unsigned char[] type to
1066 perform the copying to preserve padding and to avoid any issues
1067 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1068 desttype = build_array_type_nelts (unsigned_char_type_node,
1069 tree_to_uhwi (len));
1070 srctype = desttype;
1071 if (src_align > TYPE_ALIGN (srctype))
1072 srctype = build_aligned_type (srctype, src_align);
1073 if (dest_align > TYPE_ALIGN (desttype))
1074 desttype = build_aligned_type (desttype, dest_align);
1075 new_stmt
1076 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1077 fold_build2 (MEM_REF, srctype, src, off0));
1078set_vop_and_replace:
fef5a0d9
RB
1079 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1080 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1081 if (gimple_vdef (new_stmt)
1082 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1083 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1084 if (!lhs)
1085 {
f6b4dc28 1086 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
1087 return true;
1088 }
1089 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1090 }
1091
1092done:
74e3c262 1093 gimple_seq stmts = NULL;
fef5a0d9
RB
1094 if (endp == 0 || endp == 3)
1095 len = NULL_TREE;
1096 else if (endp == 2)
74e3c262
RB
1097 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1098 ssize_int (1));
fef5a0d9 1099 if (endp == 2 || endp == 1)
74e3c262
RB
1100 {
1101 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1102 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1103 TREE_TYPE (dest), dest, len);
1104 }
fef5a0d9 1105
74e3c262 1106 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 1107 gimple *repl = gimple_build_assign (lhs, dest);
f6b4dc28 1108 gsi_replace (gsi, repl, false);
fef5a0d9
RB
1109 return true;
1110}
1111
b3d8d88e
MS
1112/* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1113 to built-in memcmp (a, b, len). */
1114
1115static bool
1116gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1117{
1118 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1119
1120 if (!fn)
1121 return false;
1122
1123 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1124
1125 gimple *stmt = gsi_stmt (*gsi);
1126 tree a = gimple_call_arg (stmt, 0);
1127 tree b = gimple_call_arg (stmt, 1);
1128 tree len = gimple_call_arg (stmt, 2);
1129
1130 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1131 replace_call_with_call_and_fold (gsi, repl);
1132
1133 return true;
1134}
1135
1136/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1137 to built-in memmove (dest, src, len). */
1138
1139static bool
1140gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1141{
1142 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1143
1144 if (!fn)
1145 return false;
1146
1147 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1148 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1149 len) into memmove (dest, src, len). */
1150
1151 gimple *stmt = gsi_stmt (*gsi);
1152 tree src = gimple_call_arg (stmt, 0);
1153 tree dest = gimple_call_arg (stmt, 1);
1154 tree len = gimple_call_arg (stmt, 2);
1155
1156 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1157 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1158 replace_call_with_call_and_fold (gsi, repl);
1159
1160 return true;
1161}
1162
1163/* Transform a call to built-in bzero (dest, len) at *GSI into one
1164 to built-in memset (dest, 0, len). */
1165
1166static bool
1167gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1168{
1169 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1170
1171 if (!fn)
1172 return false;
1173
1174 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1175
1176 gimple *stmt = gsi_stmt (*gsi);
1177 tree dest = gimple_call_arg (stmt, 0);
1178 tree len = gimple_call_arg (stmt, 1);
1179
1180 gimple_seq seq = NULL;
1181 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1182 gimple_seq_add_stmt_without_update (&seq, repl);
1183 gsi_replace_with_seq_vops (gsi, seq);
1184 fold_stmt (gsi);
1185
1186 return true;
1187}
1188
fef5a0d9
RB
1189/* Fold function call to builtin memset or bzero at *GSI setting the
1190 memory of size LEN to VAL. Return whether a simplification was made. */
1191
1192static bool
1193gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1194{
355fe088 1195 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1196 tree etype;
1197 unsigned HOST_WIDE_INT length, cval;
1198
1199 /* If the LEN parameter is zero, return DEST. */
1200 if (integer_zerop (len))
1201 {
1202 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1203 return true;
1204 }
1205
1206 if (! tree_fits_uhwi_p (len))
1207 return false;
1208
1209 if (TREE_CODE (c) != INTEGER_CST)
1210 return false;
1211
1212 tree dest = gimple_call_arg (stmt, 0);
1213 tree var = dest;
1214 if (TREE_CODE (var) != ADDR_EXPR)
1215 return false;
1216
1217 var = TREE_OPERAND (var, 0);
1218 if (TREE_THIS_VOLATILE (var))
1219 return false;
1220
1221 etype = TREE_TYPE (var);
1222 if (TREE_CODE (etype) == ARRAY_TYPE)
1223 etype = TREE_TYPE (etype);
1224
1225 if (!INTEGRAL_TYPE_P (etype)
1226 && !POINTER_TYPE_P (etype))
1227 return NULL_TREE;
1228
1229 if (! var_decl_component_p (var))
1230 return NULL_TREE;
1231
1232 length = tree_to_uhwi (len);
7a504f33 1233 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
fef5a0d9
RB
1234 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1235 return NULL_TREE;
1236
1237 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1238 return NULL_TREE;
1239
1240 if (integer_zerop (c))
1241 cval = 0;
1242 else
1243 {
1244 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1245 return NULL_TREE;
1246
1247 cval = TREE_INT_CST_LOW (c);
1248 cval &= 0xff;
1249 cval |= cval << 8;
1250 cval |= cval << 16;
1251 cval |= (cval << 31) << 1;
1252 }
1253
1254 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1255 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
fef5a0d9
RB
1256 gimple_set_vuse (store, gimple_vuse (stmt));
1257 tree vdef = gimple_vdef (stmt);
1258 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1259 {
1260 gimple_set_vdef (store, gimple_vdef (stmt));
1261 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1262 }
1263 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1264 if (gimple_call_lhs (stmt))
1265 {
355fe088 1266 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1267 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1268 }
1269 else
1270 {
1271 gimple_stmt_iterator gsi2 = *gsi;
1272 gsi_prev (gsi);
1273 gsi_remove (&gsi2, true);
1274 }
1275
1276 return true;
1277}
1278
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  Updates
   the length bounds in *PDATA implied by ARG and returns true on
   success.  VISITED, RKIND, and ELTSIZE are as for get_range_strlen.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  /* &(*p)[0]: recurse on the pointer P itself.  */
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array
	     LENDATA.DECL; use its minimum length and propagate the
	     offending declaration to the caller.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* No constant length yet: for SRK_LENRANGE fall back to bounds
     derived from the type of the referenced array, if any.  */
  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val || integer_zerop (val))
	    return false;

	  /* The longest string the array can hold is one less than
	     its size, to leave room for the terminating nul.  */
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (TREE_CODE (pdata->maxbound) == INTEGER_CST)
	    {
	      if (tree_int_cst_lt (pdata->maxbound, val))
		pdata->maxbound = val;
	    }
	  else
	    pdata->maxbound = build_all_ones_cst (size_type_node);
	}
      else
	pdata->maxbound = val;
    }
  else
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || !VAR_P (base) || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
1541
5d6655eb
MS
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

static bool
get_range_strlen (tree arg, bitmap *visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)
{

  /* Non-SSA_NAME arguments are handled by the tree-walking helper.  */
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Merge the ranges of both arms of the conditional.  */
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
	      {
		if (rkind != SRK_LENRANGE)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	 must have a constant length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
	    {
	      if (rkind != SRK_LENRANGE)
		return false;
	      /* Set the upper bound to the maximum to prevent
		 it from being adjusted in the next iteration but
		 leave MINLEN and the more conservative MAXBOUND
		 determined so far alone (or leave them null if
		 they haven't been set yet).  That the MINLEN is
		 in fact zero can be determined from MAXLEN being
		 unbounded but the discovered minimum is used for
		 diagnostics.  */
	      pdata->maxlen = build_all_ones_cst (size_type_node);
	    }
	}
      return true;

    default:
      return false;
    }
}
5d6655eb 1654
88d0c3f0
MS
/* Public entry point: determine the range of lengths of the string(s)
   referenced by ARG and store it in *PDATA, computing the range with
   SRK_LENRANGE semantics (PHIs and COND_EXPRs are handled
   optimistically; see the recursive overload above).  For expressions
   that point to strings of unknown lengths that are character arrays,
   the upper bound of the array is used as the maximum length.  For
   example, given an expression like 'x ? array : "xyz"' and array
   declared as 'char array[8]', PDATA->MINLEN is set to 0 and
   PDATA->MAXLEN to 7, the longest string that could be stored in array.
   On failure, PDATA->MINLEN is set to zero and PDATA->MAXLEN to an
   impossible maximum (all ones); PDATA->MAXBOUND is set to
   PDATA->MAXLEN only when it has not been computed already.
   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings (the size of wchar_t).
   Return true unless the computed maximum is unbounded.  */

bool
get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
{
  bitmap visited = NULL;

  if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
    {
      /* On failure extend the length range to an impossible maximum
	 (a valid MAXLEN must be less than PTRDIFF_MAX - 1).  Other
	 members can stay unchanged regardless.  */
      pdata->minlen = ssize_int (0);
      pdata->maxlen = build_all_ones_cst (size_type_node);
    }
  else if (!pdata->minlen)
    pdata->minlen = ssize_int (0);

  /* Unless its null, leave the more conservative MAXBOUND unchanged.  */
  if (!pdata->maxbound)
    pdata->maxbound = pdata->maxlen;

  if (visited)
    BITMAP_FREE (visited);

  return !integer_all_onesp (pdata->maxlen);
}
1705
5d6655eb
MS
1706/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1707 For ARG of pointer types, NONSTR indicates if the caller is prepared
1708 to handle unterminated strings. For integer ARG and when RKIND ==
1709 SRK_INT_VALUE, NONSTR must be null.
e08341bb 1710
5d6655eb
MS
1711 If an unterminated array is discovered and our caller handles
1712 unterminated arrays, then bubble up the offending DECL and
e08341bb
MS
1713 return the maximum size. Otherwise return NULL. */
1714
598f7235
MS
1715static tree
1716get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
dcb7fae2 1717{
598f7235
MS
1718 /* A non-null NONSTR is meaningless when determining the maximum
1719 value of an integer ARG. */
1720 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1721 /* ARG must have an integral type when RKIND says so. */
1722 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1723
dcb7fae2 1724 bitmap visited = NULL;
3f343040 1725
5d6655eb
MS
1726 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
1727 is unbounded. */
730832cd 1728 c_strlen_data lendata = { };
03c4a945 1729 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
730832cd 1730 lendata.maxlen = NULL_TREE;
5d6655eb
MS
1731 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
1732 lendata.maxlen = NULL_TREE;
1733
dcb7fae2
RB
1734 if (visited)
1735 BITMAP_FREE (visited);
1736
e08341bb
MS
1737 if (nonstr)
1738 {
1739 /* For callers prepared to handle unterminated arrays set
1740 *NONSTR to point to the declaration of the array and return
1741 the maximum length/size. */
730832cd
MS
1742 *nonstr = lendata.decl;
1743 return lendata.maxlen;
e08341bb
MS
1744 }
1745
1746 /* Fail if the constant array isn't nul-terminated. */
730832cd 1747 return lendata.decl ? NULL_TREE : lendata.maxlen;
dcb7fae2
RB
1748}
1749
fef5a0d9
RB
1750
/* Fold a call to the strcpy builtin with arguments DEST and SRC at
   *GSI: fold it away when SRC and DEST are the same pointer (warning
   with -Wrestrict unless the pointer is null), or turn it into a call
   to memcpy when the length of SRC is a known constant.  Return true
   if the statement was simplified and false otherwise.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The memcpy transformation trades a strcpy call for extra length
     computation; skip it when optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, "strcpy", src, nonstr);
      gimple_set_no_warning (stmt, true);
      return false;
    }

  if (!len)
    return false;

  /* Copy LEN + 1 bytes to include the terminating nul.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1814
1815/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1816 If SLEN is not NULL, it represents the length of the source string.
1817 Return NULL_TREE if no simplification can be made. */
1818
1819static bool
dcb7fae2
RB
1820gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1821 tree dest, tree src, tree len)
fef5a0d9 1822{
025d57f0
MS
1823 gimple *stmt = gsi_stmt (*gsi);
1824 location_t loc = gimple_location (stmt);
6a33d0ff 1825 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
fef5a0d9
RB
1826
1827 /* If the LEN parameter is zero, return DEST. */
1828 if (integer_zerop (len))
1829 {
6a33d0ff
MS
1830 /* Avoid warning if the destination refers to a an array/pointer
1831 decorate with attribute nonstring. */
1832 if (!nonstring)
1833 {
1834 tree fndecl = gimple_call_fndecl (stmt);
6a33d0ff
MS
1835
1836 /* Warn about the lack of nul termination: the result is not
1837 a (nul-terminated) string. */
598f7235 1838 tree slen = get_maxval_strlen (src, SRK_STRLEN);
6a33d0ff
MS
1839 if (slen && !integer_zerop (slen))
1840 warning_at (loc, OPT_Wstringop_truncation,
1841 "%G%qD destination unchanged after copying no bytes "
1842 "from a string of length %E",
8a45b051 1843 stmt, fndecl, slen);
6a33d0ff
MS
1844 else
1845 warning_at (loc, OPT_Wstringop_truncation,
1846 "%G%qD destination unchanged after copying no bytes",
8a45b051 1847 stmt, fndecl);
6a33d0ff 1848 }
025d57f0 1849
fef5a0d9
RB
1850 replace_call_with_value (gsi, dest);
1851 return true;
1852 }
1853
1854 /* We can't compare slen with len as constants below if len is not a
1855 constant. */
dcb7fae2 1856 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
1857 return false;
1858
fef5a0d9 1859 /* Now, we must be passed a constant src ptr parameter. */
598f7235 1860 tree slen = get_maxval_strlen (src, SRK_STRLEN);
dcb7fae2 1861 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
1862 return false;
1863
025d57f0
MS
1864 /* The size of the source string including the terminating nul. */
1865 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
fef5a0d9
RB
1866
1867 /* We do not support simplification of this case, though we do
1868 support it when expanding trees into RTL. */
1869 /* FIXME: generate a call to __builtin_memset. */
025d57f0 1870 if (tree_int_cst_lt (ssize, len))
fef5a0d9
RB
1871 return false;
1872
5d0d5d68
MS
1873 /* Diagnose truncation that leaves the copy unterminated. */
1874 maybe_diag_stxncpy_trunc (*gsi, src, len);
025d57f0 1875
fef5a0d9 1876 /* OK transform into builtin memcpy. */
025d57f0 1877 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
fef5a0d9
RB
1878 if (!fn)
1879 return false;
1880
1881 len = fold_convert_loc (loc, size_type_node, len);
1882 len = force_gimple_operand_gsi (gsi, len, true,
1883 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1884 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9 1885 replace_call_with_call_and_fold (gsi, repl);
025d57f0 1886
fef5a0d9
RB
1887 return true;
1888}
1889
71dea1dd
WD
1890/* Fold function call to builtin strchr or strrchr.
1891 If both arguments are constant, evaluate and fold the result,
1892 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
912d9ec3
WD
1893 In general strlen is significantly faster than strchr
1894 due to being a simpler operation. */
1895static bool
71dea1dd 1896gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
912d9ec3
WD
1897{
1898 gimple *stmt = gsi_stmt (*gsi);
1899 tree str = gimple_call_arg (stmt, 0);
1900 tree c = gimple_call_arg (stmt, 1);
1901 location_t loc = gimple_location (stmt);
71dea1dd
WD
1902 const char *p;
1903 char ch;
912d9ec3 1904
71dea1dd 1905 if (!gimple_call_lhs (stmt))
912d9ec3
WD
1906 return false;
1907
71dea1dd
WD
1908 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1909 {
1910 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1911
1912 if (p1 == NULL)
1913 {
1914 replace_call_with_value (gsi, integer_zero_node);
1915 return true;
1916 }
1917
1918 tree len = build_int_cst (size_type_node, p1 - p);
1919 gimple_seq stmts = NULL;
1920 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1921 POINTER_PLUS_EXPR, str, len);
1922 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1923 gsi_replace_with_seq_vops (gsi, stmts);
1924 return true;
1925 }
1926
1927 if (!integer_zerop (c))
912d9ec3
WD
1928 return false;
1929
71dea1dd 1930 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
c8952930 1931 if (is_strrchr && optimize_function_for_size_p (cfun))
71dea1dd
WD
1932 {
1933 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1934
c8952930 1935 if (strchr_fn)
71dea1dd
WD
1936 {
1937 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1938 replace_call_with_call_and_fold (gsi, repl);
1939 return true;
1940 }
1941
1942 return false;
1943 }
1944
912d9ec3
WD
1945 tree len;
1946 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1947
1948 if (!strlen_fn)
1949 return false;
1950
1951 /* Create newstr = strlen (str). */
1952 gimple_seq stmts = NULL;
1953 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1954 gimple_set_location (new_stmt, loc);
a15ebbcd 1955 len = create_tmp_reg_or_ssa_name (size_type_node);
912d9ec3
WD
1956 gimple_call_set_lhs (new_stmt, len);
1957 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1958
1959 /* Create (str p+ strlen (str)). */
1960 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1961 POINTER_PLUS_EXPR, str, len);
1962 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1963 gsi_replace_with_seq_vops (gsi, stmts);
1964 /* gsi now points at the assignment to the lhs, get a
1965 stmt iterator to the strlen.
1966 ??? We can't use gsi_for_stmt as that doesn't work when the
1967 CFG isn't built yet. */
1968 gimple_stmt_iterator gsi2 = *gsi;
1969 gsi_prev (&gsi2);
1970 fold_stmt (&gsi2);
1971 return true;
1972}
1973
c8952930
JJ
1974/* Fold function call to builtin strstr.
1975 If both arguments are constant, evaluate and fold the result,
1976 additionally fold strstr (x, "") into x and strstr (x, "c")
1977 into strchr (x, 'c'). */
1978static bool
1979gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1980{
1981 gimple *stmt = gsi_stmt (*gsi);
1982 tree haystack = gimple_call_arg (stmt, 0);
1983 tree needle = gimple_call_arg (stmt, 1);
1984 const char *p, *q;
1985
1986 if (!gimple_call_lhs (stmt))
1987 return false;
1988
1989 q = c_getstr (needle);
1990 if (q == NULL)
1991 return false;
1992
1993 if ((p = c_getstr (haystack)))
1994 {
1995 const char *r = strstr (p, q);
1996
1997 if (r == NULL)
1998 {
1999 replace_call_with_value (gsi, integer_zero_node);
2000 return true;
2001 }
2002
2003 tree len = build_int_cst (size_type_node, r - p);
2004 gimple_seq stmts = NULL;
2005 gimple *new_stmt
2006 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2007 haystack, len);
2008 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2009 gsi_replace_with_seq_vops (gsi, stmts);
2010 return true;
2011 }
2012
2013 /* For strstr (x, "") return x. */
2014 if (q[0] == '\0')
2015 {
2016 replace_call_with_value (gsi, haystack);
2017 return true;
2018 }
2019
2020 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2021 if (q[1] == '\0')
2022 {
2023 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2024 if (strchr_fn)
2025 {
2026 tree c = build_int_cst (integer_type_node, q[0]);
2027 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2028 replace_call_with_call_and_fold (gsi, repl);
2029 return true;
2030 }
2031 }
2032
2033 return false;
2034}
2035
fef5a0d9
RB
2036/* Simplify a call to the strcat builtin. DST and SRC are the arguments
2037 to the call.
2038
2039 Return NULL_TREE if no simplification was possible, otherwise return the
2040 simplified form of the call as a tree.
2041
2042 The simplified form may be a constant or other expression which
2043 computes the same value, but in a more efficient manner (including
2044 calls to other builtin functions).
2045
2046 The call may contain arguments which need to be evaluated, but
2047 which are not useful to determine the result of the call. In
2048 this case we return a chain of COMPOUND_EXPRs. The LHS of each
2049 COMPOUND_EXPR will be an argument which must be evaluated.
2050 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
2051 COMPOUND_EXPR in the chain will contain the tree for the simplified
2052 form of the builtin function call. */
2053
2054static bool
dcb7fae2 2055gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
fef5a0d9 2056{
355fe088 2057 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2058 location_t loc = gimple_location (stmt);
fef5a0d9
RB
2059
2060 const char *p = c_getstr (src);
2061
2062 /* If the string length is zero, return the dst parameter. */
2063 if (p && *p == '\0')
2064 {
2065 replace_call_with_value (gsi, dst);
2066 return true;
2067 }
2068
2069 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2070 return false;
2071
2072 /* See if we can store by pieces into (dst + strlen(dst)). */
2073 tree newdst;
2074 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2075 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2076
2077 if (!strlen_fn || !memcpy_fn)
2078 return false;
2079
2080 /* If the length of the source string isn't computable don't
2081 split strcat into strlen and memcpy. */
598f7235 2082 tree len = get_maxval_strlen (src, SRK_STRLEN);
fef5a0d9 2083 if (! len)
fef5a0d9
RB
2084 return false;
2085
2086 /* Create strlen (dst). */
2087 gimple_seq stmts = NULL, stmts2;
355fe088 2088 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
fef5a0d9 2089 gimple_set_location (repl, loc);
a15ebbcd 2090 newdst = create_tmp_reg_or_ssa_name (size_type_node);
fef5a0d9
RB
2091 gimple_call_set_lhs (repl, newdst);
2092 gimple_seq_add_stmt_without_update (&stmts, repl);
2093
2094 /* Create (dst p+ strlen (dst)). */
2095 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2096 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2097 gimple_seq_add_seq_without_update (&stmts, stmts2);
2098
2099 len = fold_convert_loc (loc, size_type_node, len);
2100 len = size_binop_loc (loc, PLUS_EXPR, len,
2101 build_int_cst (size_type_node, 1));
2102 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2103 gimple_seq_add_seq_without_update (&stmts, stmts2);
2104
2105 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2106 gimple_seq_add_stmt_without_update (&stmts, repl);
2107 if (gimple_call_lhs (stmt))
2108 {
2109 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2110 gimple_seq_add_stmt_without_update (&stmts, repl);
2111 gsi_replace_with_seq_vops (gsi, stmts);
2112 /* gsi now points at the assignment to the lhs, get a
2113 stmt iterator to the memcpy call.
2114 ??? We can't use gsi_for_stmt as that doesn't work when the
2115 CFG isn't built yet. */
2116 gimple_stmt_iterator gsi2 = *gsi;
2117 gsi_prev (&gsi2);
2118 fold_stmt (&gsi2);
2119 }
2120 else
2121 {
2122 gsi_replace_with_seq_vops (gsi, stmts);
2123 fold_stmt (gsi);
2124 }
2125 return true;
2126}
2127
07f1cf56
RB
2128/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2129 are the arguments to the call. */
2130
2131static bool
2132gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2133{
355fe088 2134 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
2135 tree dest = gimple_call_arg (stmt, 0);
2136 tree src = gimple_call_arg (stmt, 1);
2137 tree size = gimple_call_arg (stmt, 2);
2138 tree fn;
2139 const char *p;
2140
2141
2142 p = c_getstr (src);
2143 /* If the SRC parameter is "", return DEST. */
2144 if (p && *p == '\0')
2145 {
2146 replace_call_with_value (gsi, dest);
2147 return true;
2148 }
2149
2150 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2151 return false;
2152
2153 /* If __builtin_strcat_chk is used, assume strcat is available. */
2154 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2155 if (!fn)
2156 return false;
2157
355fe088 2158 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
2159 replace_call_with_call_and_fold (gsi, repl);
2160 return true;
2161}
2162
ad03a744
RB
2163/* Simplify a call to the strncat builtin. */
2164
2165static bool
2166gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2167{
8a45b051 2168 gimple *stmt = gsi_stmt (*gsi);
ad03a744
RB
2169 tree dst = gimple_call_arg (stmt, 0);
2170 tree src = gimple_call_arg (stmt, 1);
2171 tree len = gimple_call_arg (stmt, 2);
2172
2173 const char *p = c_getstr (src);
2174
2175 /* If the requested length is zero, or the src parameter string
2176 length is zero, return the dst parameter. */
2177 if (integer_zerop (len) || (p && *p == '\0'))
2178 {
2179 replace_call_with_value (gsi, dst);
2180 return true;
2181 }
2182
025d57f0
MS
2183 if (TREE_CODE (len) != INTEGER_CST || !p)
2184 return false;
2185
2186 unsigned srclen = strlen (p);
2187
2188 int cmpsrc = compare_tree_int (len, srclen);
2189
2190 /* Return early if the requested len is less than the string length.
2191 Warnings will be issued elsewhere later. */
2192 if (cmpsrc < 0)
2193 return false;
2194
2195 unsigned HOST_WIDE_INT dstsize;
2196
2197 bool nowarn = gimple_no_warning_p (stmt);
2198
2199 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
ad03a744 2200 {
025d57f0 2201 int cmpdst = compare_tree_int (len, dstsize);
ad03a744 2202
025d57f0
MS
2203 if (cmpdst >= 0)
2204 {
2205 tree fndecl = gimple_call_fndecl (stmt);
2206
2207 /* Strncat copies (at most) LEN bytes and always appends
2208 the terminating NUL so the specified bound should never
2209 be equal to (or greater than) the size of the destination.
2210 If it is, the copy could overflow. */
2211 location_t loc = gimple_location (stmt);
2212 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2213 cmpdst == 0
2214 ? G_("%G%qD specified bound %E equals "
2215 "destination size")
2216 : G_("%G%qD specified bound %E exceeds "
2217 "destination size %wu"),
2218 stmt, fndecl, len, dstsize);
2219 if (nowarn)
2220 gimple_set_no_warning (stmt, true);
2221 }
2222 }
ad03a744 2223
025d57f0
MS
2224 if (!nowarn && cmpsrc == 0)
2225 {
2226 tree fndecl = gimple_call_fndecl (stmt);
025d57f0 2227 location_t loc = gimple_location (stmt);
eec5f615
MS
2228
2229 /* To avoid possible overflow the specified bound should also
2230 not be equal to the length of the source, even when the size
2231 of the destination is unknown (it's not an uncommon mistake
2232 to specify as the bound to strncpy the length of the source). */
025d57f0
MS
2233 if (warning_at (loc, OPT_Wstringop_overflow_,
2234 "%G%qD specified bound %E equals source length",
2235 stmt, fndecl, len))
2236 gimple_set_no_warning (stmt, true);
ad03a744
RB
2237 }
2238
025d57f0
MS
2239 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2240
2241 /* If the replacement _DECL isn't initialized, don't do the
2242 transformation. */
2243 if (!fn)
2244 return false;
2245
2246 /* Otherwise, emit a call to strcat. */
2247 gcall *repl = gimple_build_call (fn, 2, dst, src);
2248 replace_call_with_call_and_fold (gsi, repl);
2249 return true;
ad03a744
RB
2250}
2251
745583f9
RB
2252/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2253 LEN, and SIZE. */
2254
2255static bool
2256gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2257{
355fe088 2258 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
2259 tree dest = gimple_call_arg (stmt, 0);
2260 tree src = gimple_call_arg (stmt, 1);
2261 tree len = gimple_call_arg (stmt, 2);
2262 tree size = gimple_call_arg (stmt, 3);
2263 tree fn;
2264 const char *p;
2265
2266 p = c_getstr (src);
2267 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2268 if ((p && *p == '\0')
2269 || integer_zerop (len))
2270 {
2271 replace_call_with_value (gsi, dest);
2272 return true;
2273 }
2274
2275 if (! tree_fits_uhwi_p (size))
2276 return false;
2277
2278 if (! integer_all_onesp (size))
2279 {
2280 tree src_len = c_strlen (src, 1);
2281 if (src_len
2282 && tree_fits_uhwi_p (src_len)
2283 && tree_fits_uhwi_p (len)
2284 && ! tree_int_cst_lt (len, src_len))
2285 {
2286 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2287 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2288 if (!fn)
2289 return false;
2290
355fe088 2291 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
2292 replace_call_with_call_and_fold (gsi, repl);
2293 return true;
2294 }
2295 return false;
2296 }
2297
2298 /* If __builtin_strncat_chk is used, assume strncat is available. */
2299 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2300 if (!fn)
2301 return false;
2302
355fe088 2303 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
2304 replace_call_with_call_and_fold (gsi, repl);
2305 return true;
2306}
2307
a918bfbf
ML
2308/* Build and append gimple statements to STMTS that would load a first
2309 character of a memory location identified by STR. LOC is location
2310 of the statement. */
2311
2312static tree
2313gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2314{
2315 tree var;
2316
2317 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2318 tree cst_uchar_ptr_node
2319 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2320 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2321
2322 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2323 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2324 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2325
2326 gimple_assign_set_lhs (stmt, var);
2327 gimple_seq_add_stmt_without_update (stmts, stmt);
2328
2329 return var;
2330}
2331
2332/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2333 FCODE is the name of the builtin. */
2334
2335static bool
2336gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2337{
2338 gimple *stmt = gsi_stmt (*gsi);
2339 tree callee = gimple_call_fndecl (stmt);
2340 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2341
2342 tree type = integer_type_node;
2343 tree str1 = gimple_call_arg (stmt, 0);
2344 tree str2 = gimple_call_arg (stmt, 1);
2345 tree lhs = gimple_call_lhs (stmt);
2346 HOST_WIDE_INT length = -1;
2347
2348 /* Handle strncmp and strncasecmp functions. */
2349 if (gimple_call_num_args (stmt) == 3)
2350 {
2351 tree len = gimple_call_arg (stmt, 2);
2352 if (tree_fits_uhwi_p (len))
2353 length = tree_to_uhwi (len);
2354 }
2355
2356 /* If the LEN parameter is zero, return zero. */
2357 if (length == 0)
2358 {
2359 replace_call_with_value (gsi, integer_zero_node);
2360 return true;
2361 }
2362
2363 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2364 if (operand_equal_p (str1, str2, 0))
2365 {
2366 replace_call_with_value (gsi, integer_zero_node);
2367 return true;
2368 }
2369
2370 const char *p1 = c_getstr (str1);
2371 const char *p2 = c_getstr (str2);
2372
2373 /* For known strings, return an immediate value. */
2374 if (p1 && p2)
2375 {
2376 int r = 0;
2377 bool known_result = false;
2378
2379 switch (fcode)
2380 {
2381 case BUILT_IN_STRCMP:
8b0b334a 2382 case BUILT_IN_STRCMP_EQ:
a918bfbf
ML
2383 {
2384 r = strcmp (p1, p2);
2385 known_result = true;
2386 break;
2387 }
2388 case BUILT_IN_STRNCMP:
8b0b334a 2389 case BUILT_IN_STRNCMP_EQ:
a918bfbf
ML
2390 {
2391 if (length == -1)
2392 break;
2393 r = strncmp (p1, p2, length);
2394 known_result = true;
2395 break;
2396 }
2397 /* Only handleable situation is where the string are equal (result 0),
2398 which is already handled by operand_equal_p case. */
2399 case BUILT_IN_STRCASECMP:
2400 break;
2401 case BUILT_IN_STRNCASECMP:
2402 {
2403 if (length == -1)
2404 break;
2405 r = strncmp (p1, p2, length);
2406 if (r == 0)
2407 known_result = true;
5de73c05 2408 break;
a918bfbf
ML
2409 }
2410 default:
2411 gcc_unreachable ();
2412 }
2413
2414 if (known_result)
2415 {
2416 replace_call_with_value (gsi, build_cmp_result (type, r));
2417 return true;
2418 }
2419 }
2420
2421 bool nonzero_length = length >= 1
2422 || fcode == BUILT_IN_STRCMP
8b0b334a 2423 || fcode == BUILT_IN_STRCMP_EQ
a918bfbf
ML
2424 || fcode == BUILT_IN_STRCASECMP;
2425
2426 location_t loc = gimple_location (stmt);
2427
2428 /* If the second arg is "", return *(const unsigned char*)arg1. */
2429 if (p2 && *p2 == '\0' && nonzero_length)
2430 {
2431 gimple_seq stmts = NULL;
2432 tree var = gimple_load_first_char (loc, str1, &stmts);
2433 if (lhs)
2434 {
2435 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2436 gimple_seq_add_stmt_without_update (&stmts, stmt);
2437 }
2438
2439 gsi_replace_with_seq_vops (gsi, stmts);
2440 return true;
2441 }
2442
2443 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2444 if (p1 && *p1 == '\0' && nonzero_length)
2445 {
2446 gimple_seq stmts = NULL;
2447 tree var = gimple_load_first_char (loc, str2, &stmts);
2448
2449 if (lhs)
2450 {
2451 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2452 stmt = gimple_build_assign (c, NOP_EXPR, var);
2453 gimple_seq_add_stmt_without_update (&stmts, stmt);
2454
2455 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2456 gimple_seq_add_stmt_without_update (&stmts, stmt);
2457 }
2458
2459 gsi_replace_with_seq_vops (gsi, stmts);
2460 return true;
2461 }
2462
2463 /* If len parameter is one, return an expression corresponding to
2464 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2465 if (fcode == BUILT_IN_STRNCMP && length == 1)
2466 {
2467 gimple_seq stmts = NULL;
2468 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2469 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2470
2471 if (lhs)
2472 {
2473 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2474 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2475 gimple_seq_add_stmt_without_update (&stmts, convert1);
2476
2477 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2478 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2479 gimple_seq_add_stmt_without_update (&stmts, convert2);
2480
2481 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2482 gimple_seq_add_stmt_without_update (&stmts, stmt);
2483 }
2484
2485 gsi_replace_with_seq_vops (gsi, stmts);
2486 return true;
2487 }
2488
caed5c92
QZ
2489 /* If length is larger than the length of one constant string,
2490 replace strncmp with corresponding strcmp */
2491 if (fcode == BUILT_IN_STRNCMP
2492 && length > 0
2493 && ((p2 && (size_t) length > strlen (p2))
2494 || (p1 && (size_t) length > strlen (p1))))
2495 {
2496 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2497 if (!fn)
2498 return false;
2499 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2500 replace_call_with_call_and_fold (gsi, repl);
2501 return true;
2502 }
2503
a918bfbf
ML
2504 return false;
2505}
2506
488c6247
ML
2507/* Fold a call to the memchr pointed by GSI iterator. */
2508
2509static bool
2510gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2511{
2512 gimple *stmt = gsi_stmt (*gsi);
2513 tree lhs = gimple_call_lhs (stmt);
2514 tree arg1 = gimple_call_arg (stmt, 0);
2515 tree arg2 = gimple_call_arg (stmt, 1);
2516 tree len = gimple_call_arg (stmt, 2);
2517
2518 /* If the LEN parameter is zero, return zero. */
2519 if (integer_zerop (len))
2520 {
2521 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2522 return true;
2523 }
2524
2525 char c;
2526 if (TREE_CODE (arg2) != INTEGER_CST
2527 || !tree_fits_uhwi_p (len)
2528 || !target_char_cst_p (arg2, &c))
2529 return false;
2530
2531 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2532 unsigned HOST_WIDE_INT string_length;
2533 const char *p1 = c_getstr (arg1, &string_length);
2534
2535 if (p1)
2536 {
2537 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2538 if (r == NULL)
2539 {
2540 if (length <= string_length)
2541 {
2542 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2543 return true;
2544 }
2545 }
2546 else
2547 {
2548 unsigned HOST_WIDE_INT offset = r - p1;
2549 gimple_seq stmts = NULL;
2550 if (lhs != NULL_TREE)
2551 {
2552 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2553 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2554 arg1, offset_cst);
2555 gimple_seq_add_stmt_without_update (&stmts, stmt);
2556 }
2557 else
2558 gimple_seq_add_stmt_without_update (&stmts,
2559 gimple_build_nop ());
2560
2561 gsi_replace_with_seq_vops (gsi, stmts);
2562 return true;
2563 }
2564 }
2565
2566 return false;
2567}
a918bfbf 2568
fef5a0d9
RB
2569/* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2570 to the call. IGNORE is true if the value returned
2571 by the builtin will be ignored. UNLOCKED is true is true if this
2572 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2573 the known length of the string. Return NULL_TREE if no simplification
2574 was possible. */
2575
2576static bool
2577gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
fef5a0d9 2578 tree arg0, tree arg1,
dcb7fae2 2579 bool unlocked)
fef5a0d9 2580{
355fe088 2581 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2582
fef5a0d9
RB
2583 /* If we're using an unlocked function, assume the other unlocked
2584 functions exist explicitly. */
2585 tree const fn_fputc = (unlocked
2586 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2587 : builtin_decl_implicit (BUILT_IN_FPUTC));
2588 tree const fn_fwrite = (unlocked
2589 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2590 : builtin_decl_implicit (BUILT_IN_FWRITE));
2591
2592 /* If the return value is used, don't do the transformation. */
dcb7fae2 2593 if (gimple_call_lhs (stmt))
fef5a0d9
RB
2594 return false;
2595
fef5a0d9
RB
2596 /* Get the length of the string passed to fputs. If the length
2597 can't be determined, punt. */
598f7235 2598 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
fef5a0d9
RB
2599 if (!len
2600 || TREE_CODE (len) != INTEGER_CST)
2601 return false;
2602
2603 switch (compare_tree_int (len, 1))
2604 {
2605 case -1: /* length is 0, delete the call entirely . */
2606 replace_call_with_value (gsi, integer_zero_node);
2607 return true;
2608
2609 case 0: /* length is 1, call fputc. */
2610 {
2611 const char *p = c_getstr (arg0);
2612 if (p != NULL)
2613 {
2614 if (!fn_fputc)
2615 return false;
2616
355fe088 2617 gimple *repl = gimple_build_call (fn_fputc, 2,
fef5a0d9
RB
2618 build_int_cst
2619 (integer_type_node, p[0]), arg1);
2620 replace_call_with_call_and_fold (gsi, repl);
2621 return true;
2622 }
2623 }
2624 /* FALLTHROUGH */
2625 case 1: /* length is greater than 1, call fwrite. */
2626 {
2627 /* If optimizing for size keep fputs. */
2628 if (optimize_function_for_size_p (cfun))
2629 return false;
2630 /* New argument list transforming fputs(string, stream) to
2631 fwrite(string, 1, len, stream). */
2632 if (!fn_fwrite)
2633 return false;
2634
355fe088 2635 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
fef5a0d9
RB
2636 size_one_node, len, arg1);
2637 replace_call_with_call_and_fold (gsi, repl);
2638 return true;
2639 }
2640 default:
2641 gcc_unreachable ();
2642 }
2643 return false;
2644}
2645
2646/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2647 DEST, SRC, LEN, and SIZE are the arguments to the call.
2648 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2649 code of the builtin. If MAXLEN is not NULL, it is maximum length
2650 passed as third argument. */
2651
2652static bool
2653gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
fef5a0d9 2654 tree dest, tree src, tree len, tree size,
fef5a0d9
RB
2655 enum built_in_function fcode)
2656{
355fe088 2657 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
2658 location_t loc = gimple_location (stmt);
2659 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2660 tree fn;
2661
2662 /* If SRC and DEST are the same (and not volatile), return DEST
2663 (resp. DEST+LEN for __mempcpy_chk). */
2664 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2665 {
2666 if (fcode != BUILT_IN_MEMPCPY_CHK)
2667 {
2668 replace_call_with_value (gsi, dest);
2669 return true;
2670 }
2671 else
2672 {
74e3c262
RB
2673 gimple_seq stmts = NULL;
2674 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
22518428
JJ
2675 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2676 TREE_TYPE (dest), dest, len);
74e3c262 2677 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
fef5a0d9
RB
2678 replace_call_with_value (gsi, temp);
2679 return true;
2680 }
2681 }
2682
2683 if (! tree_fits_uhwi_p (size))
2684 return false;
2685
598f7235 2686 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9
RB
2687 if (! integer_all_onesp (size))
2688 {
2689 if (! tree_fits_uhwi_p (len))
2690 {
2691 /* If LEN is not constant, try MAXLEN too.
2692 For MAXLEN only allow optimizing into non-_ocs function
2693 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2694 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2695 {
2696 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2697 {
2698 /* (void) __mempcpy_chk () can be optimized into
2699 (void) __memcpy_chk (). */
2700 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2701 if (!fn)
2702 return false;
2703
355fe088 2704 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2705 replace_call_with_call_and_fold (gsi, repl);
2706 return true;
2707 }
2708 return false;
2709 }
2710 }
2711 else
2712 maxlen = len;
2713
2714 if (tree_int_cst_lt (size, maxlen))
2715 return false;
2716 }
2717
2718 fn = NULL_TREE;
2719 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2720 mem{cpy,pcpy,move,set} is available. */
2721 switch (fcode)
2722 {
2723 case BUILT_IN_MEMCPY_CHK:
2724 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2725 break;
2726 case BUILT_IN_MEMPCPY_CHK:
2727 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2728 break;
2729 case BUILT_IN_MEMMOVE_CHK:
2730 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2731 break;
2732 case BUILT_IN_MEMSET_CHK:
2733 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2734 break;
2735 default:
2736 break;
2737 }
2738
2739 if (!fn)
2740 return false;
2741
355fe088 2742 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
2743 replace_call_with_call_and_fold (gsi, repl);
2744 return true;
2745}
2746
2747/* Fold a call to the __st[rp]cpy_chk builtin.
2748 DEST, SRC, and SIZE are the arguments to the call.
2749 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2750 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2751 strings passed as second argument. */
2752
2753static bool
2754gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
dcb7fae2 2755 tree dest,
fef5a0d9 2756 tree src, tree size,
fef5a0d9
RB
2757 enum built_in_function fcode)
2758{
355fe088 2759 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
2760 location_t loc = gimple_location (stmt);
2761 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2762 tree len, fn;
2763
2764 /* If SRC and DEST are the same (and not volatile), return DEST. */
2765 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2766 {
8cd95cec
MS
2767 /* Issue -Wrestrict unless the pointers are null (those do
2768 not point to objects and so do not indicate an overlap;
2769 such calls could be the result of sanitization and jump
2770 threading). */
2771 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
e9b9fa4c
MS
2772 {
2773 tree func = gimple_call_fndecl (stmt);
cc8bea0a 2774
e9b9fa4c
MS
2775 warning_at (loc, OPT_Wrestrict,
2776 "%qD source argument is the same as destination",
2777 func);
2778 }
cc8bea0a 2779
fef5a0d9
RB
2780 replace_call_with_value (gsi, dest);
2781 return true;
2782 }
2783
2784 if (! tree_fits_uhwi_p (size))
2785 return false;
2786
598f7235 2787 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
fef5a0d9
RB
2788 if (! integer_all_onesp (size))
2789 {
2790 len = c_strlen (src, 1);
2791 if (! len || ! tree_fits_uhwi_p (len))
2792 {
2793 /* If LEN is not constant, try MAXLEN too.
2794 For MAXLEN only allow optimizing into non-_ocs function
2795 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2796 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2797 {
2798 if (fcode == BUILT_IN_STPCPY_CHK)
2799 {
2800 if (! ignore)
2801 return false;
2802
2803 /* If return value of __stpcpy_chk is ignored,
2804 optimize into __strcpy_chk. */
2805 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2806 if (!fn)
2807 return false;
2808
355fe088 2809 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
fef5a0d9
RB
2810 replace_call_with_call_and_fold (gsi, repl);
2811 return true;
2812 }
2813
2814 if (! len || TREE_SIDE_EFFECTS (len))
2815 return false;
2816
2817 /* If c_strlen returned something, but not a constant,
2818 transform __strcpy_chk into __memcpy_chk. */
2819 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2820 if (!fn)
2821 return false;
2822
74e3c262 2823 gimple_seq stmts = NULL;
770fe3a3 2824 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
74e3c262
RB
2825 len = gimple_convert (&stmts, loc, size_type_node, len);
2826 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2827 build_int_cst (size_type_node, 1));
2828 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 2829 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2830 replace_call_with_call_and_fold (gsi, repl);
2831 return true;
2832 }
e256dfce 2833 }
fef5a0d9
RB
2834 else
2835 maxlen = len;
2836
2837 if (! tree_int_cst_lt (maxlen, size))
2838 return false;
e256dfce
RG
2839 }
2840
fef5a0d9
RB
2841 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2842 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2843 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2844 if (!fn)
2845 return false;
2846
355fe088 2847 gimple *repl = gimple_build_call (fn, 2, dest, src);
fef5a0d9
RB
2848 replace_call_with_call_and_fold (gsi, repl);
2849 return true;
2850}
2851
2852/* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2853 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2854 length passed as third argument. IGNORE is true if return value can be
2855 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2856
2857static bool
2858gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2859 tree dest, tree src,
dcb7fae2 2860 tree len, tree size,
fef5a0d9
RB
2861 enum built_in_function fcode)
2862{
355fe088 2863 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2864 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2865 tree fn;
2866
2867 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
cbdd87d4 2868 {
fef5a0d9
RB
2869 /* If return value of __stpncpy_chk is ignored,
2870 optimize into __strncpy_chk. */
2871 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2872 if (fn)
2873 {
355fe088 2874 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2875 replace_call_with_call_and_fold (gsi, repl);
2876 return true;
2877 }
cbdd87d4
RG
2878 }
2879
fef5a0d9
RB
2880 if (! tree_fits_uhwi_p (size))
2881 return false;
2882
598f7235 2883 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9 2884 if (! integer_all_onesp (size))
cbdd87d4 2885 {
fef5a0d9 2886 if (! tree_fits_uhwi_p (len))
fe2ef088 2887 {
fef5a0d9
RB
2888 /* If LEN is not constant, try MAXLEN too.
2889 For MAXLEN only allow optimizing into non-_ocs function
2890 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2891 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2892 return false;
8a1561bc 2893 }
fef5a0d9
RB
2894 else
2895 maxlen = len;
2896
2897 if (tree_int_cst_lt (size, maxlen))
2898 return false;
cbdd87d4
RG
2899 }
2900
fef5a0d9
RB
2901 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2902 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2903 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2904 if (!fn)
2905 return false;
2906
355fe088 2907 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
2908 replace_call_with_call_and_fold (gsi, repl);
2909 return true;
cbdd87d4
RG
2910}
2911
2625bb5d
RB
2912/* Fold function call to builtin stpcpy with arguments DEST and SRC.
2913 Return NULL_TREE if no simplification can be made. */
2914
2915static bool
2916gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2917{
2918 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2919 location_t loc = gimple_location (stmt);
2920 tree dest = gimple_call_arg (stmt, 0);
2921 tree src = gimple_call_arg (stmt, 1);
01b0acb7 2922 tree fn, lenp1;
2625bb5d
RB
2923
2924 /* If the result is unused, replace stpcpy with strcpy. */
2925 if (gimple_call_lhs (stmt) == NULL_TREE)
2926 {
2927 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2928 if (!fn)
2929 return false;
2930 gimple_call_set_fndecl (stmt, fn);
2931 fold_stmt (gsi);
2932 return true;
2933 }
2934
01b0acb7 2935 /* Set to non-null if ARG refers to an unterminated array. */
3f46ef1f 2936 c_strlen_data data = { };
7d583f42 2937 tree len = c_strlen (src, 1, &data, 1);
2625bb5d
RB
2938 if (!len
2939 || TREE_CODE (len) != INTEGER_CST)
01b0acb7 2940 {
7d583f42
JL
2941 data.decl = unterminated_array (src);
2942 if (!data.decl)
01b0acb7
MS
2943 return false;
2944 }
2945
7d583f42 2946 if (data.decl)
01b0acb7
MS
2947 {
2948 /* Avoid folding calls with unterminated arrays. */
2949 if (!gimple_no_warning_p (stmt))
7d583f42 2950 warn_string_no_nul (loc, "stpcpy", src, data.decl);
01b0acb7
MS
2951 gimple_set_no_warning (stmt, true);
2952 return false;
2953 }
2625bb5d
RB
2954
2955 if (optimize_function_for_size_p (cfun)
2956 /* If length is zero it's small enough. */
2957 && !integer_zerop (len))
2958 return false;
2959
2960 /* If the source has a known length replace stpcpy with memcpy. */
2961 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2962 if (!fn)
2963 return false;
2964
2965 gimple_seq stmts = NULL;
2966 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2967 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2968 tem, build_int_cst (size_type_node, 1));
2969 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2970 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2971 gimple_set_vuse (repl, gimple_vuse (stmt));
2972 gimple_set_vdef (repl, gimple_vdef (stmt));
2973 if (gimple_vdef (repl)
2974 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2975 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2976 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2977 /* Replace the result with dest + len. */
2978 stmts = NULL;
2979 tem = gimple_convert (&stmts, loc, sizetype, len);
2980 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2981 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2982 POINTER_PLUS_EXPR, dest, tem);
f6b4dc28 2983 gsi_replace (gsi, ret, false);
2625bb5d
RB
2984 /* Finally fold the memcpy call. */
2985 gimple_stmt_iterator gsi2 = *gsi;
2986 gsi_prev (&gsi2);
2987 fold_stmt (&gsi2);
2988 return true;
2989}
2990
fef5a0d9
RB
/* Fold a call to __{,v}snprintf_chk at *GSI.  FCODE is either
   BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.

   Returns true if the checked call was rewritten in place into a plain
   {,v}snprintf (the object-size and flag arguments are dropped),
   false if a normal call should be emitted instead.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     (dest, len, flag, size, fmt, ...).  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  /* Without a constant object size nothing can be proven.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* SIZE of all-ones means "unknown object size": fold unconditionally.  */
  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* The output fits only when SIZE >= MAXLEN.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  /* Shift the variadic tail down over the dropped flag/size slots.  */
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
cbdd87d4 3071
fef5a0d9
RB
/* Fold a call to __{,v}sprintf_chk at *GSI.  FCODE is either
   BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.

   Returns true if the checked call was rewritten in place into a plain
   {,v}sprintf (the flag and object-size arguments are dropped), false
   if a normal call should be emitted instead.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call:
     (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  /* Without a constant object size nothing can be proven.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* SIZE of all-ones means "unknown object size": fold unconditionally.
     Otherwise require a known output length strictly below SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  /* Shift the variadic tail down over the dropped flag/size slots.  */
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3167
35770bb2
RB
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      if (gimple_no_warning_p (stmt))
	gimple_set_no_warning (repl, true);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, here
	     the constant strlen (fmt_str).  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* The return value (length of ORIG) is only needed when the
	 call's lhs is used.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      if (gimple_no_warning_p (stmt))
	gimple_set_no_warning (repl, true);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3302
d7e78447
RB
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a known constant, or truncation
     cannot be ruled out.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters that would have
	     been written, here the constant LEN.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      /* The length of ORIG must be a known constant to prove the copy
	 fits and to materialize the return value.  */
      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
35770bb2 3439
edd7ae68
RB
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Returns true if the call was simplified into fputs/fputc, false if a
   normal call should be emitted.  FCODE is the BUILT_IN_* code of the
   function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A %-free format with extra arguments is suspicious; leave it
	 alone (except for the va_list variants, where ARG is the
	 va_list, not a format argument).  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3540
ad03a744
RB
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Returns true if the call was simplified into puts/putchar, false if a
   normal call should be emitted.  FCODE is the BUILT_IN_* code of the
   function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle "%s" and %-free formats: the output is a known string.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  /* STR is the literal the "%s" expands to.  */
	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newstr = xstrdup (str);
	      newstr[len - 1] = '\0';
	      newarg = build_string_literal (len, newstr);
	      free (newstr);
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3692
edd7ae68 3693
fef5a0d9
RB
3694
/* Fold a call to __builtin_strlen at *GSI.

   Returns true when the argument's length range collapses to a single
   constant and the call is replaced by that constant.  Otherwise
   returns false, after recording the [0, MAXLEN] range on the call's
   lhs for later range-based optimizations.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);

  wide_int minlen;
  wide_int maxlen;

  c_strlen_data lendata = { };
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
      && !lendata.decl
      && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
      && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
    }
  else
    {
      /* No usable range: fall back to [0, max object size - 2]
	 (the largest possible string length).  */
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  if (minlen == maxlen)
    {
      /* Fold the strlen call to a constant.  */
      tree type = TREE_TYPE (lendata.minlen);
      tree len = force_gimple_operand_gsi (gsi,
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
      return true;
    }

  /* Set the strlen() range to [0, MAXLEN].  */
  if (tree lhs = gimple_call_lhs (stmt))
    set_strlen_range (lhs, maxlen);

  return false;
}
3745
48126138
NS
3746/* Fold a call to __builtin_acc_on_device. */
3747
3748static bool
3749gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3750{
3751 /* Defer folding until we know which compiler we're in. */
3752 if (symtab->state != EXPANSION)
3753 return false;
3754
3755 unsigned val_host = GOMP_DEVICE_HOST;
3756 unsigned val_dev = GOMP_DEVICE_NONE;
3757
3758#ifdef ACCEL_COMPILER
3759 val_host = GOMP_DEVICE_NOT_HOST;
3760 val_dev = ACCEL_COMPILER_acc_device;
3761#endif
3762
3763 location_t loc = gimple_location (gsi_stmt (*gsi));
3764
3765 tree host_eq = make_ssa_name (boolean_type_node);
3766 gimple *host_ass = gimple_build_assign
3767 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3768 gimple_set_location (host_ass, loc);
3769 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3770
3771 tree dev_eq = make_ssa_name (boolean_type_node);
3772 gimple *dev_ass = gimple_build_assign
3773 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3774 gimple_set_location (dev_ass, loc);
3775 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3776
3777 tree result = make_ssa_name (boolean_type_node);
3778 gimple *result_ass = gimple_build_assign
3779 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3780 gimple_set_location (result_ass, loc);
3781 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3782
3783 replace_call_with_value (gsi, result);
3784
3785 return true;
3786}
cbdd87d4 3787
fe75f732
PK
3788/* Fold realloc (0, n) -> malloc (n). */
3789
3790static bool
3791gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3792{
3793 gimple *stmt = gsi_stmt (*gsi);
3794 tree arg = gimple_call_arg (stmt, 0);
3795 tree size = gimple_call_arg (stmt, 1);
3796
3797 if (operand_equal_p (arg, null_pointer_node, 0))
3798 {
3799 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3800 if (fn_malloc)
3801 {
3802 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3803 replace_call_with_call_and_fold (gsi, repl);
3804 return true;
3805 }
3806 }
3807 return false;
3808}
3809
dcb7fae2
RB
3810/* Fold the non-target builtin at *GSI and return whether any simplification
3811 was made. */
cbdd87d4 3812
fef5a0d9 3813static bool
dcb7fae2 3814gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 3815{
538dd0b7 3816 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 3817 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 3818
dcb7fae2
RB
3819 /* Give up for always_inline inline builtins until they are
3820 inlined. */
3821 if (avoid_folding_inline_builtin (callee))
3822 return false;
cbdd87d4 3823
edd7ae68
RB
3824 unsigned n = gimple_call_num_args (stmt);
3825 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3826 switch (fcode)
cbdd87d4 3827 {
b3d8d88e
MS
3828 case BUILT_IN_BCMP:
3829 return gimple_fold_builtin_bcmp (gsi);
3830 case BUILT_IN_BCOPY:
3831 return gimple_fold_builtin_bcopy (gsi);
dcb7fae2 3832 case BUILT_IN_BZERO:
b3d8d88e
MS
3833 return gimple_fold_builtin_bzero (gsi);
3834
dcb7fae2
RB
3835 case BUILT_IN_MEMSET:
3836 return gimple_fold_builtin_memset (gsi,
3837 gimple_call_arg (stmt, 1),
3838 gimple_call_arg (stmt, 2));
dcb7fae2
RB
3839 case BUILT_IN_MEMCPY:
3840 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3841 gimple_call_arg (stmt, 1), 0);
3842 case BUILT_IN_MEMPCPY:
3843 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3844 gimple_call_arg (stmt, 1), 1);
3845 case BUILT_IN_MEMMOVE:
3846 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3847 gimple_call_arg (stmt, 1), 3);
3848 case BUILT_IN_SPRINTF_CHK:
3849 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 3850 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
3851 case BUILT_IN_STRCAT_CHK:
3852 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
3853 case BUILT_IN_STRNCAT_CHK:
3854 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 3855 case BUILT_IN_STRLEN:
dcb7fae2 3856 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 3857 case BUILT_IN_STRCPY:
dcb7fae2 3858 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 3859 gimple_call_arg (stmt, 0),
dcb7fae2 3860 gimple_call_arg (stmt, 1));
cbdd87d4 3861 case BUILT_IN_STRNCPY:
dcb7fae2 3862 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
3863 gimple_call_arg (stmt, 0),
3864 gimple_call_arg (stmt, 1),
dcb7fae2 3865 gimple_call_arg (stmt, 2));
9a7eefec 3866 case BUILT_IN_STRCAT:
dcb7fae2
RB
3867 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3868 gimple_call_arg (stmt, 1));
ad03a744
RB
3869 case BUILT_IN_STRNCAT:
3870 return gimple_fold_builtin_strncat (gsi);
71dea1dd 3871 case BUILT_IN_INDEX:
912d9ec3 3872 case BUILT_IN_STRCHR:
71dea1dd
WD
3873 return gimple_fold_builtin_strchr (gsi, false);
3874 case BUILT_IN_RINDEX:
3875 case BUILT_IN_STRRCHR:
3876 return gimple_fold_builtin_strchr (gsi, true);
c8952930
JJ
3877 case BUILT_IN_STRSTR:
3878 return gimple_fold_builtin_strstr (gsi);
a918bfbf 3879 case BUILT_IN_STRCMP:
8b0b334a 3880 case BUILT_IN_STRCMP_EQ:
a918bfbf
ML
3881 case BUILT_IN_STRCASECMP:
3882 case BUILT_IN_STRNCMP:
8b0b334a 3883 case BUILT_IN_STRNCMP_EQ:
a918bfbf
ML
3884 case BUILT_IN_STRNCASECMP:
3885 return gimple_fold_builtin_string_compare (gsi);
488c6247
ML
3886 case BUILT_IN_MEMCHR:
3887 return gimple_fold_builtin_memchr (gsi);
cbdd87d4 3888 case BUILT_IN_FPUTS:
dcb7fae2
RB
3889 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3890 gimple_call_arg (stmt, 1), false);
cbdd87d4 3891 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
3892 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3893 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
3894 case BUILT_IN_MEMCPY_CHK:
3895 case BUILT_IN_MEMPCPY_CHK:
3896 case BUILT_IN_MEMMOVE_CHK:
3897 case BUILT_IN_MEMSET_CHK:
dcb7fae2 3898 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
3899 gimple_call_arg (stmt, 0),
3900 gimple_call_arg (stmt, 1),
3901 gimple_call_arg (stmt, 2),
3902 gimple_call_arg (stmt, 3),
edd7ae68 3903 fcode);
2625bb5d
RB
3904 case BUILT_IN_STPCPY:
3905 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
3906 case BUILT_IN_STRCPY_CHK:
3907 case BUILT_IN_STPCPY_CHK:
dcb7fae2 3908 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
3909 gimple_call_arg (stmt, 0),
3910 gimple_call_arg (stmt, 1),
3911 gimple_call_arg (stmt, 2),
edd7ae68 3912 fcode);
cbdd87d4 3913 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 3914 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
3915 return gimple_fold_builtin_stxncpy_chk (gsi,
3916 gimple_call_arg (stmt, 0),
3917 gimple_call_arg (stmt, 1),
3918 gimple_call_arg (stmt, 2),
3919 gimple_call_arg (stmt, 3),
edd7ae68 3920 fcode);
cbdd87d4
RG
3921 case BUILT_IN_SNPRINTF_CHK:
3922 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 3923 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
a104bd88 3924
edd7ae68
RB
3925 case BUILT_IN_FPRINTF:
3926 case BUILT_IN_FPRINTF_UNLOCKED:
3927 case BUILT_IN_VFPRINTF:
3928 if (n == 2 || n == 3)
3929 return gimple_fold_builtin_fprintf (gsi,
3930 gimple_call_arg (stmt, 0),
3931 gimple_call_arg (stmt, 1),
3932 n == 3
3933 ? gimple_call_arg (stmt, 2)
3934 : NULL_TREE,
3935 fcode);
3936 break;
3937 case BUILT_IN_FPRINTF_CHK:
3938 case BUILT_IN_VFPRINTF_CHK:
3939 if (n == 3 || n == 4)
3940 return gimple_fold_builtin_fprintf (gsi,
3941 gimple_call_arg (stmt, 0),
3942 gimple_call_arg (stmt, 2),
3943 n == 4
3944 ? gimple_call_arg (stmt, 3)
3945 : NULL_TREE,
3946 fcode);
3947 break;
ad03a744
RB
3948 case BUILT_IN_PRINTF:
3949 case BUILT_IN_PRINTF_UNLOCKED:
3950 case BUILT_IN_VPRINTF:
3951 if (n == 1 || n == 2)
3952 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3953 n == 2
3954 ? gimple_call_arg (stmt, 1)
3955 : NULL_TREE, fcode);
3956 break;
3957 case BUILT_IN_PRINTF_CHK:
3958 case BUILT_IN_VPRINTF_CHK:
3959 if (n == 2 || n == 3)
3960 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3961 n == 3
3962 ? gimple_call_arg (stmt, 2)
3963 : NULL_TREE, fcode);
242a37f1 3964 break;
48126138
NS
3965 case BUILT_IN_ACC_ON_DEVICE:
3966 return gimple_fold_builtin_acc_on_device (gsi,
3967 gimple_call_arg (stmt, 0));
fe75f732
PK
3968 case BUILT_IN_REALLOC:
3969 return gimple_fold_builtin_realloc (gsi);
3970
fef5a0d9
RB
3971 default:;
3972 }
3973
3974 /* Try the generic builtin folder. */
3975 bool ignore = (gimple_call_lhs (stmt) == NULL);
3976 tree result = fold_call_stmt (stmt, ignore);
3977 if (result)
3978 {
3979 if (ignore)
3980 STRIP_NOPS (result);
3981 else
3982 result = fold_convert (gimple_call_return_type (stmt), result);
3983 if (!update_call_from_tree (gsi, result))
3984 gimplify_and_update_call_from_tree (gsi, result);
3985 return true;
3986 }
3987
3988 return false;
3989}
3990
451e8dae
NS
3991/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3992 function calls to constants, where possible. */
3993
3994static tree
3995fold_internal_goacc_dim (const gimple *call)
3996{
629b3d75
MJ
3997 int axis = oacc_get_ifn_dim_arg (call);
3998 int size = oacc_get_fn_dim_size (current_function_decl, axis);
451e8dae 3999 tree result = NULL_TREE;
67d2229e 4000 tree type = TREE_TYPE (gimple_call_lhs (call));
451e8dae 4001
67d2229e 4002 switch (gimple_call_internal_fn (call))
451e8dae 4003 {
67d2229e
TV
4004 case IFN_GOACC_DIM_POS:
4005 /* If the size is 1, we know the answer. */
4006 if (size == 1)
4007 result = build_int_cst (type, 0);
4008 break;
4009 case IFN_GOACC_DIM_SIZE:
4010 /* If the size is not dynamic, we know the answer. */
4011 if (size)
4012 result = build_int_cst (type, size);
4013 break;
4014 default:
4015 break;
451e8dae
NS
4016 }
4017
4018 return result;
4019}
4020
849a76a5
JJ
4021/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
4022 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
4023 &var where var is only addressable because of such calls. */
4024
4025bool
4026optimize_atomic_compare_exchange_p (gimple *stmt)
4027{
4028 if (gimple_call_num_args (stmt) != 6
4029 || !flag_inline_atomics
4030 || !optimize
45b2222a 4031 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
849a76a5
JJ
4032 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
4033 || !gimple_vdef (stmt)
4034 || !gimple_vuse (stmt))
4035 return false;
4036
4037 tree fndecl = gimple_call_fndecl (stmt);
4038 switch (DECL_FUNCTION_CODE (fndecl))
4039 {
4040 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
4041 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
4042 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
4043 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
4044 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
4045 break;
4046 default:
4047 return false;
4048 }
4049
4050 tree expected = gimple_call_arg (stmt, 1);
4051 if (TREE_CODE (expected) != ADDR_EXPR
1de3c940
JJ
4052 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
4053 return false;
4054
4055 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
4056 if (!is_gimple_reg_type (etype)
849a76a5 4057 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
1de3c940
JJ
4058 || TREE_THIS_VOLATILE (etype)
4059 || VECTOR_TYPE_P (etype)
4060 || TREE_CODE (etype) == COMPLEX_TYPE
4061 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
4062 might not preserve all the bits. See PR71716. */
4063 || SCALAR_FLOAT_TYPE_P (etype)
73a699ae
RS
4064 || maybe_ne (TYPE_PRECISION (etype),
4065 GET_MODE_BITSIZE (TYPE_MODE (etype))))
849a76a5
JJ
4066 return false;
4067
4068 tree weak = gimple_call_arg (stmt, 3);
4069 if (!integer_zerop (weak) && !integer_onep (weak))
4070 return false;
4071
4072 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4073 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4074 machine_mode mode = TYPE_MODE (itype);
4075
4076 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
4077 == CODE_FOR_nothing
4078 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
4079 return false;
4080
cf098191 4081 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
849a76a5
JJ
4082 return false;
4083
4084 return true;
4085}
4086
4087/* Fold
4088 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
4089 into
4090 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
4091 i = IMAGPART_EXPR <t>;
4092 r = (_Bool) i;
4093 e = REALPART_EXPR <t>; */
4094
4095void
4096fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
4097{
4098 gimple *stmt = gsi_stmt (*gsi);
4099 tree fndecl = gimple_call_fndecl (stmt);
4100 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4101 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4102 tree ctype = build_complex_type (itype);
4103 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
cc195d46
JJ
4104 bool throws = false;
4105 edge e = NULL;
849a76a5
JJ
4106 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4107 expected);
4108 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4109 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4110 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
4111 {
4112 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4113 build1 (VIEW_CONVERT_EXPR, itype,
4114 gimple_assign_lhs (g)));
4115 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4116 }
4117 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4118 + int_size_in_bytes (itype);
4119 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4120 gimple_call_arg (stmt, 0),
4121 gimple_assign_lhs (g),
4122 gimple_call_arg (stmt, 2),
4123 build_int_cst (integer_type_node, flag),
4124 gimple_call_arg (stmt, 4),
4125 gimple_call_arg (stmt, 5));
4126 tree lhs = make_ssa_name (ctype);
4127 gimple_call_set_lhs (g, lhs);
4128 gimple_set_vdef (g, gimple_vdef (stmt));
4129 gimple_set_vuse (g, gimple_vuse (stmt));
4130 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
cc195d46 4131 tree oldlhs = gimple_call_lhs (stmt);
36bbc05d 4132 if (stmt_can_throw_internal (cfun, stmt))
cc195d46
JJ
4133 {
4134 throws = true;
4135 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4136 }
4137 gimple_call_set_nothrow (as_a <gcall *> (g),
4138 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4139 gimple_call_set_lhs (stmt, NULL_TREE);
4140 gsi_replace (gsi, g, true);
4141 if (oldlhs)
849a76a5 4142 {
849a76a5
JJ
4143 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4144 build1 (IMAGPART_EXPR, itype, lhs));
cc195d46
JJ
4145 if (throws)
4146 {
4147 gsi_insert_on_edge_immediate (e, g);
4148 *gsi = gsi_for_stmt (g);
4149 }
4150 else
4151 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4152 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4153 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5 4154 }
849a76a5
JJ
4155 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4156 build1 (REALPART_EXPR, itype, lhs));
cc195d46
JJ
4157 if (throws && oldlhs == NULL_TREE)
4158 {
4159 gsi_insert_on_edge_immediate (e, g);
4160 *gsi = gsi_for_stmt (g);
4161 }
4162 else
4163 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5
JJ
4164 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4165 {
4166 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4167 VIEW_CONVERT_EXPR,
4168 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4169 gimple_assign_lhs (g)));
4170 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4171 }
4172 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4173 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4174 *gsi = gsiret;
4175}
4176
1304953e
JJ
4177/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
4178 doesn't fit into TYPE. The test for overflow should be regardless of
4179 -fwrapv, and even for unsigned types. */
4180
4181bool
4182arith_overflowed_p (enum tree_code code, const_tree type,
4183 const_tree arg0, const_tree arg1)
4184{
1304953e
JJ
4185 widest2_int warg0 = widest2_int_cst (arg0);
4186 widest2_int warg1 = widest2_int_cst (arg1);
4187 widest2_int wres;
4188 switch (code)
4189 {
4190 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4191 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4192 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4193 default: gcc_unreachable ();
4194 }
4195 signop sign = TYPE_SIGN (type);
4196 if (sign == UNSIGNED && wi::neg_p (wres))
4197 return true;
4198 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4199}
4200
cbdd87d4
RG
4201/* Attempt to fold a call statement referenced by the statement iterator GSI.
4202 The statement may be replaced by another statement, e.g., if the call
4203 simplifies to a constant value. Return true if any changes were made.
4204 It is assumed that the operands have been previously folded. */
4205
e021c122 4206static bool
ceeffab0 4207gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 4208{
538dd0b7 4209 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 4210 tree callee;
e021c122
RG
4211 bool changed = false;
4212 unsigned i;
cbdd87d4 4213
e021c122
RG
4214 /* Fold *& in call arguments. */
4215 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4216 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4217 {
4218 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4219 if (tmp)
4220 {
4221 gimple_call_set_arg (stmt, i, tmp);
4222 changed = true;
4223 }
4224 }
3b45a007
RG
4225
4226 /* Check for virtual calls that became direct calls. */
4227 callee = gimple_call_fn (stmt);
25583c4f 4228 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 4229 {
49c471e3
MJ
4230 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4231 {
450ad0cd
JH
4232 if (dump_file && virtual_method_call_p (callee)
4233 && !possible_polymorphic_call_target_p
6f8091fc
JH
4234 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4235 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
4236 {
4237 fprintf (dump_file,
a70e9985 4238 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
4239 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4240 fprintf (dump_file, " to ");
4241 print_generic_expr (dump_file, callee, TDF_SLIM);
4242 fprintf (dump_file, "\n");
4243 }
4244
49c471e3 4245 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
4246 changed = true;
4247 }
a70e9985 4248 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 4249 {
61dd6a2e
JH
4250 bool final;
4251 vec <cgraph_node *>targets
058d0a90 4252 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 4253 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 4254 {
a70e9985 4255 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
4256 if (dump_enabled_p ())
4257 {
4f5b9c80 4258 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
2b5f0895
XDL
4259 "folding virtual function call to %s\n",
4260 targets.length () == 1
4261 ? targets[0]->name ()
4262 : "__builtin_unreachable");
4263 }
61dd6a2e 4264 if (targets.length () == 1)
cf3e5a89 4265 {
18954840
JJ
4266 tree fndecl = targets[0]->decl;
4267 gimple_call_set_fndecl (stmt, fndecl);
cf3e5a89 4268 changed = true;
18954840
JJ
4269 /* If changing the call to __cxa_pure_virtual
4270 or similar noreturn function, adjust gimple_call_fntype
4271 too. */
865f7046 4272 if (gimple_call_noreturn_p (stmt)
18954840
JJ
4273 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4274 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4275 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4276 == void_type_node))
4277 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
a70e9985 4278 /* If the call becomes noreturn, remove the lhs. */
3cee7e4e
MP
4279 if (lhs
4280 && gimple_call_noreturn_p (stmt)
18954840 4281 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
abd3a68c 4282 || should_remove_lhs_p (lhs)))
a70e9985
JJ
4283 {
4284 if (TREE_CODE (lhs) == SSA_NAME)
4285 {
b731b390 4286 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 4287 tree def = get_or_create_ssa_default_def (cfun, var);
355fe088 4288 gimple *new_stmt = gimple_build_assign (lhs, def);
a70e9985
JJ
4289 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4290 }
4291 gimple_call_set_lhs (stmt, NULL_TREE);
4292 }
0b986c6a 4293 maybe_remove_unused_call_args (cfun, stmt);
cf3e5a89 4294 }
a70e9985 4295 else
cf3e5a89
JJ
4296 {
4297 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
355fe088 4298 gimple *new_stmt = gimple_build_call (fndecl, 0);
cf3e5a89 4299 gimple_set_location (new_stmt, gimple_location (stmt));
2da6996c
RB
4300 /* If the call had a SSA name as lhs morph that into
4301 an uninitialized value. */
a70e9985
JJ
4302 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4303 {
b731b390 4304 tree var = create_tmp_var (TREE_TYPE (lhs));
2da6996c
RB
4305 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4306 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4307 set_ssa_default_def (cfun, var, lhs);
42e52a51 4308 }
2da6996c
RB
4309 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4310 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4311 gsi_replace (gsi, new_stmt, false);
cf3e5a89
JJ
4312 return true;
4313 }
e021c122 4314 }
49c471e3 4315 }
e021c122 4316 }
49c471e3 4317
f2d3d07e
RH
4318 /* Check for indirect calls that became direct calls, and then
4319 no longer require a static chain. */
4320 if (gimple_call_chain (stmt))
4321 {
4322 tree fn = gimple_call_fndecl (stmt);
4323 if (fn && !DECL_STATIC_CHAIN (fn))
4324 {
4325 gimple_call_set_chain (stmt, NULL);
4326 changed = true;
4327 }
4328 else
4329 {
4330 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4331 if (tmp)
4332 {
4333 gimple_call_set_chain (stmt, tmp);
4334 changed = true;
4335 }
4336 }
4337 }
4338
e021c122
RG
4339 if (inplace)
4340 return changed;
4341
4342 /* Check for builtins that CCP can handle using information not
4343 available in the generic fold routines. */
fef5a0d9
RB
4344 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4345 {
4346 if (gimple_fold_builtin (gsi))
4347 changed = true;
4348 }
4349 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 4350 {
ea679d55 4351 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 4352 }
368b454d 4353 else if (gimple_call_internal_p (stmt))
ed9c79e1 4354 {
368b454d
JJ
4355 enum tree_code subcode = ERROR_MARK;
4356 tree result = NULL_TREE;
1304953e
JJ
4357 bool cplx_result = false;
4358 tree overflow = NULL_TREE;
368b454d
JJ
4359 switch (gimple_call_internal_fn (stmt))
4360 {
4361 case IFN_BUILTIN_EXPECT:
4362 result = fold_builtin_expect (gimple_location (stmt),
4363 gimple_call_arg (stmt, 0),
4364 gimple_call_arg (stmt, 1),
1e9168b2
ML
4365 gimple_call_arg (stmt, 2),
4366 NULL_TREE);
368b454d 4367 break;
0e82f089 4368 case IFN_UBSAN_OBJECT_SIZE:
ca1150f0
JJ
4369 {
4370 tree offset = gimple_call_arg (stmt, 1);
4371 tree objsize = gimple_call_arg (stmt, 2);
4372 if (integer_all_onesp (objsize)
4373 || (TREE_CODE (offset) == INTEGER_CST
4374 && TREE_CODE (objsize) == INTEGER_CST
4375 && tree_int_cst_le (offset, objsize)))
4376 {
4377 replace_call_with_value (gsi, NULL_TREE);
4378 return true;
4379 }
4380 }
4381 break;
4382 case IFN_UBSAN_PTR:
4383 if (integer_zerop (gimple_call_arg (stmt, 1)))
0e82f089 4384 {
ca1150f0 4385 replace_call_with_value (gsi, NULL_TREE);
0e82f089
MP
4386 return true;
4387 }
4388 break;
ca1150f0
JJ
4389 case IFN_UBSAN_BOUNDS:
4390 {
4391 tree index = gimple_call_arg (stmt, 1);
4392 tree bound = gimple_call_arg (stmt, 2);
4393 if (TREE_CODE (index) == INTEGER_CST
4394 && TREE_CODE (bound) == INTEGER_CST)
4395 {
4396 index = fold_convert (TREE_TYPE (bound), index);
4397 if (TREE_CODE (index) == INTEGER_CST
4398 && tree_int_cst_le (index, bound))
4399 {
4400 replace_call_with_value (gsi, NULL_TREE);
4401 return true;
4402 }
4403 }
4404 }
4405 break;
451e8dae
NS
4406 case IFN_GOACC_DIM_SIZE:
4407 case IFN_GOACC_DIM_POS:
4408 result = fold_internal_goacc_dim (stmt);
4409 break;
368b454d
JJ
4410 case IFN_UBSAN_CHECK_ADD:
4411 subcode = PLUS_EXPR;
4412 break;
4413 case IFN_UBSAN_CHECK_SUB:
4414 subcode = MINUS_EXPR;
4415 break;
4416 case IFN_UBSAN_CHECK_MUL:
4417 subcode = MULT_EXPR;
4418 break;
1304953e
JJ
4419 case IFN_ADD_OVERFLOW:
4420 subcode = PLUS_EXPR;
4421 cplx_result = true;
4422 break;
4423 case IFN_SUB_OVERFLOW:
4424 subcode = MINUS_EXPR;
4425 cplx_result = true;
4426 break;
4427 case IFN_MUL_OVERFLOW:
4428 subcode = MULT_EXPR;
4429 cplx_result = true;
4430 break;
368b454d
JJ
4431 default:
4432 break;
4433 }
4434 if (subcode != ERROR_MARK)
4435 {
4436 tree arg0 = gimple_call_arg (stmt, 0);
4437 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
4438 tree type = TREE_TYPE (arg0);
4439 if (cplx_result)
4440 {
4441 tree lhs = gimple_call_lhs (stmt);
4442 if (lhs == NULL_TREE)
4443 type = NULL_TREE;
4444 else
4445 type = TREE_TYPE (TREE_TYPE (lhs));
4446 }
4447 if (type == NULL_TREE)
4448 ;
368b454d 4449 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
4450 else if (integer_zerop (arg1))
4451 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
4452 /* x = 0 + y; x = 0 * y; */
4453 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 4454 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
4455 /* x = y - y; */
4456 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 4457 result = integer_zero_node;
368b454d 4458 /* x = y * 1; x = 1 * y; */
1304953e
JJ
4459 else if (subcode == MULT_EXPR && integer_onep (arg1))
4460 result = arg0;
4461 else if (subcode == MULT_EXPR && integer_onep (arg0))
4462 result = arg1;
4463 else if (TREE_CODE (arg0) == INTEGER_CST
4464 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 4465 {
1304953e
JJ
4466 if (cplx_result)
4467 result = int_const_binop (subcode, fold_convert (type, arg0),
4468 fold_convert (type, arg1));
4469 else
4470 result = int_const_binop (subcode, arg0, arg1);
4471 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4472 {
4473 if (cplx_result)
4474 overflow = build_one_cst (type);
4475 else
4476 result = NULL_TREE;
4477 }
4478 }
4479 if (result)
4480 {
4481 if (result == integer_zero_node)
4482 result = build_zero_cst (type);
4483 else if (cplx_result && TREE_TYPE (result) != type)
4484 {
4485 if (TREE_CODE (result) == INTEGER_CST)
4486 {
4487 if (arith_overflowed_p (PLUS_EXPR, type, result,
4488 integer_zero_node))
4489 overflow = build_one_cst (type);
4490 }
4491 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4492 && TYPE_UNSIGNED (type))
4493 || (TYPE_PRECISION (type)
4494 < (TYPE_PRECISION (TREE_TYPE (result))
4495 + (TYPE_UNSIGNED (TREE_TYPE (result))
4496 && !TYPE_UNSIGNED (type)))))
4497 result = NULL_TREE;
4498 if (result)
4499 result = fold_convert (type, result);
4500 }
368b454d
JJ
4501 }
4502 }
1304953e 4503
ed9c79e1
JJ
4504 if (result)
4505 {
1304953e
JJ
4506 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4507 result = drop_tree_overflow (result);
4508 if (cplx_result)
4509 {
4510 if (overflow == NULL_TREE)
4511 overflow = build_zero_cst (TREE_TYPE (result));
4512 tree ctype = build_complex_type (TREE_TYPE (result));
4513 if (TREE_CODE (result) == INTEGER_CST
4514 && TREE_CODE (overflow) == INTEGER_CST)
4515 result = build_complex (ctype, result, overflow);
4516 else
4517 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4518 ctype, result, overflow);
4519 }
ed9c79e1
JJ
4520 if (!update_call_from_tree (gsi, result))
4521 gimplify_and_update_call_from_tree (gsi, result);
4522 changed = true;
4523 }
4524 }
3b45a007 4525
e021c122 4526 return changed;
cbdd87d4
RG
4527}
4528
e0ee10ed 4529
89a79e96
RB
4530/* Return true whether NAME has a use on STMT. */
4531
4532static bool
355fe088 4533has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
4534{
4535 imm_use_iterator iter;
4536 use_operand_p use_p;
4537 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4538 if (USE_STMT (use_p) == stmt)
4539 return true;
4540 return false;
4541}
4542
e0ee10ed
RB
4543/* Worker for fold_stmt_1 dispatch to pattern based folding with
4544 gimple_simplify.
4545
4546 Replaces *GSI with the simplification result in RCODE and OPS
4547 and the associated statements in *SEQ. Does the replacement
4548 according to INPLACE and returns true if the operation succeeded. */
4549
4550static bool
4551replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5d75ad95 4552 gimple_match_op *res_op,
e0ee10ed
RB
4553 gimple_seq *seq, bool inplace)
4554{
355fe088 4555 gimple *stmt = gsi_stmt (*gsi);
5d75ad95
RS
4556 tree *ops = res_op->ops;
4557 unsigned int num_ops = res_op->num_ops;
e0ee10ed
RB
4558
4559 /* Play safe and do not allow abnormals to be mentioned in
89a79e96
RB
4560 newly created statements. See also maybe_push_res_to_seq.
4561 As an exception allow such uses if there was a use of the
4562 same SSA name on the old stmt. */
5d75ad95
RS
4563 for (unsigned int i = 0; i < num_ops; ++i)
4564 if (TREE_CODE (ops[i]) == SSA_NAME
4565 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4566 && !has_use_on_stmt (ops[i], stmt))
4567 return false;
4568
4569 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4570 for (unsigned int i = 0; i < 2; ++i)
4571 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4572 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4573 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4574 return false;
e0ee10ed 4575
fec40d06
RS
4576 /* Don't insert new statements when INPLACE is true, even if we could
4577 reuse STMT for the final statement. */
4578 if (inplace && !gimple_seq_empty_p (*seq))
4579 return false;
4580
538dd0b7 4581 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
e0ee10ed 4582 {
5d75ad95
RS
4583 gcc_assert (res_op->code.is_tree_code ());
4584 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
e0ee10ed
RB
4585 /* GIMPLE_CONDs condition may not throw. */
4586 && (!flag_exceptions
4587 || !cfun->can_throw_non_call_exceptions
5d75ad95 4588 || !operation_could_trap_p (res_op->code,
e0ee10ed
RB
4589 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4590 false, NULL_TREE)))
5d75ad95
RS
4591 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4592 else if (res_op->code == SSA_NAME)
538dd0b7 4593 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
e0ee10ed 4594 build_zero_cst (TREE_TYPE (ops[0])));
5d75ad95 4595 else if (res_op->code == INTEGER_CST)
e0ee10ed
RB
4596 {
4597 if (integer_zerop (ops[0]))
538dd0b7 4598 gimple_cond_make_false (cond_stmt);
e0ee10ed 4599 else
538dd0b7 4600 gimple_cond_make_true (cond_stmt);
e0ee10ed
RB
4601 }
4602 else if (!inplace)
4603 {
5d75ad95 4604 tree res = maybe_push_res_to_seq (res_op, seq);
e0ee10ed
RB
4605 if (!res)
4606 return false;
538dd0b7 4607 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
e0ee10ed
RB
4608 build_zero_cst (TREE_TYPE (res)));
4609 }
4610 else
4611 return false;
4612 if (dump_file && (dump_flags & TDF_DETAILS))
4613 {
4614 fprintf (dump_file, "gimple_simplified to ");
4615 if (!gimple_seq_empty_p (*seq))
4616 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4617 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4618 0, TDF_SLIM);
4619 }
4620 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4621 return true;
4622 }
4623 else if (is_gimple_assign (stmt)
5d75ad95 4624 && res_op->code.is_tree_code ())
e0ee10ed
RB
4625 {
4626 if (!inplace
5d75ad95 4627 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
e0ee10ed 4628 {
5d75ad95
RS
4629 maybe_build_generic_op (res_op);
4630 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4631 res_op->op_or_null (0),
4632 res_op->op_or_null (1),
4633 res_op->op_or_null (2));
e0ee10ed
RB
4634 if (dump_file && (dump_flags & TDF_DETAILS))
4635 {
4636 fprintf (dump_file, "gimple_simplified to ");
4637 if (!gimple_seq_empty_p (*seq))
4638 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4639 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4640 0, TDF_SLIM);
4641 }
4642 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4643 return true;
4644 }
4645 }
5d75ad95
RS
4646 else if (res_op->code.is_fn_code ()
4647 && gimple_call_combined_fn (stmt) == res_op->code)
37d486ab 4648 {
5d75ad95
RS
4649 gcc_assert (num_ops == gimple_call_num_args (stmt));
4650 for (unsigned int i = 0; i < num_ops; ++i)
4651 gimple_call_set_arg (stmt, i, ops[i]);
fec40d06
RS
4652 if (dump_file && (dump_flags & TDF_DETAILS))
4653 {
4654 fprintf (dump_file, "gimple_simplified to ");
4655 if (!gimple_seq_empty_p (*seq))
4656 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4657 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4658 }
4659 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
37d486ab
RB
4660 return true;
4661 }
e0ee10ed
RB
4662 else if (!inplace)
4663 {
4664 if (gimple_has_lhs (stmt))
4665 {
4666 tree lhs = gimple_get_lhs (stmt);
5d75ad95 4667 if (!maybe_push_res_to_seq (res_op, seq, lhs))
de665bbd 4668 return false;
e0ee10ed
RB
4669 if (dump_file && (dump_flags & TDF_DETAILS))
4670 {
4671 fprintf (dump_file, "gimple_simplified to ");
4672 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4673 }
4674 gsi_replace_with_seq_vops (gsi, *seq);
4675 return true;
4676 }
4677 else
4678 gcc_unreachable ();
4679 }
4680
4681 return false;
4682}
4683
040292e7
RB
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the reference tree to canonicalize (it is rewritten in
   place).  Returns true if *T (or one of its operands) was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through an outer ADDR_EXPR so the rules below apply to the
     address' operand.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the accessed element:
		     (index - low_bound) * element_size.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  /* Only rewrite when the access is fully within the
		     vector; otherwise keep the ARRAY_REF.  */
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip handled components so the checks below see the base
     [TARGET_]MEM_REF, if any.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  /* Fold the component reference in the address into a plain
	     base plus a constant offset and move the offset into the
	     MEM_REF's offset operand.  */
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
4804
cbdd87d4
RG
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases: when INPLACE is true the statement at *GSI
   must not be replaced and no new statements may be emitted.  VALUEIZE
   is used to look up SSA name definitions during pattern-based
   simplification.  Returns true if any change was made.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  /* Defer overflow warnings until we know whether folding changed
     anything; see the matching fold_undefer_overflow_warnings below.  */
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires swapping the
		     comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	/* Canonicalize memory references in asm outputs and inputs.  */
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* FALLTHRU to the empty default.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  Folding that may emit new
     statements (SEQ) is only possible when not folding in-place.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* Re-fetch the statement; the simplification above may have replaced
     it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X;
		   X == 1 and X != 0 is X.  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding must not grow the number of operands.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    /* An input is folded as an lvalue only when the constraint
	       allows memory but not a register.  */
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference behind the ADDR_EXPR and rebuild the
	         address with the original pointer type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	/* Propagate a valueized copy into the return value when legal.  */
	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  /* Emit any deferred overflow warnings only when we changed something
     and the statement was not marked no-warning.  */
  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
5128
e0ee10ed
RB
5129/* Valueziation callback that ends up not following SSA edges. */
5130
5131tree
5132no_follow_ssa_edges (tree)
5133{
5134 return NULL_TREE;
5135}
5136
45cc9f96
RB
5137/* Valueization callback that ends up following single-use SSA edges only. */
5138
5139tree
5140follow_single_use_edges (tree val)
5141{
5142 if (TREE_CODE (val) == SSA_NAME
5143 && !has_single_use (val))
5144 return NULL_TREE;
5145 return val;
5146}
5147
c566cc9f
RS
/* Valueization callback that follows all SSA edges.  Always returns
   VAL unchanged, i.e. never blocks an edge.  */

tree
follow_all_ssa_edges (tree val)
{
  return val;
}
5155
cbdd87d4
RG
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Do not look through SSA definitions while simplifying.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
5168
/* Likewise, but use VALUEIZE to look up SSA name definitions while
   simplifying.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
5174
59401b92 5175/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
5176 *&x created by constant propagation are handled. The statement cannot
5177 be replaced with a new one. Return true if the statement was
5178 changed, false otherwise.
59401b92 5179 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
5180 be in unfolded state as resulting from for example constant propagation
5181 which can produce *&x = 0. */
5182
5183bool
59401b92 5184fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 5185{
355fe088 5186 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 5187 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 5188 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
5189 return changed;
5190}
5191
e89065a1
SL
5192/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5193 if EXPR is null or we don't know how.
5194 If non-null, the result always has boolean type. */
5195
5196static tree
5197canonicalize_bool (tree expr, bool invert)
5198{
5199 if (!expr)
5200 return NULL_TREE;
5201 else if (invert)
5202 {
5203 if (integer_nonzerop (expr))
5204 return boolean_false_node;
5205 else if (integer_zerop (expr))
5206 return boolean_true_node;
5207 else if (TREE_CODE (expr) == SSA_NAME)
5208 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5209 build_int_cst (TREE_TYPE (expr), 0));
98209db3 5210 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
5211 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5212 boolean_type_node,
5213 TREE_OPERAND (expr, 0),
5214 TREE_OPERAND (expr, 1));
5215 else
5216 return NULL_TREE;
5217 }
5218 else
5219 {
5220 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5221 return expr;
5222 if (integer_nonzerop (expr))
5223 return boolean_true_node;
5224 else if (integer_zerop (expr))
5225 return boolean_false_node;
5226 else if (TREE_CODE (expr) == SSA_NAME)
5227 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5228 build_int_cst (TREE_TYPE (expr), 0));
98209db3 5229 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
5230 return fold_build2 (TREE_CODE (expr),
5231 boolean_type_node,
5232 TREE_OPERAND (expr, 0),
5233 TREE_OPERAND (expr, 1));
5234 else
5235 return NULL_TREE;
5236 }
5237}
5238
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  Returns true only when equivalence can be proven; false
   means "unknown", not "different".  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) and (name == 1) both mean "name": recurse with
	     the defining comparison directly.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) and (name != 1) both mean "!name": recurse with
	     the inverted defining comparison.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
5296
5297/* Check to see if two boolean expressions OP1 and OP2 are logically
5298 equivalent. */
5299
5300static bool
5301same_bool_result_p (const_tree op1, const_tree op2)
5302{
5303 /* Simple cases first. */
5304 if (operand_equal_p (op1, op2, 0))
5305 return true;
5306
5307 /* Check the cases where at least one of the operands is a comparison.
5308 These are a bit smarter than operand_equal_p in that they apply some
5309 identifies on SSA_NAMEs. */
98209db3 5310 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
5311 && same_bool_comparison_p (op1, TREE_CODE (op2),
5312 TREE_OPERAND (op2, 0),
5313 TREE_OPERAND (op2, 1)))
5314 return true;
98209db3 5315 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
5316 && same_bool_comparison_p (op2, TREE_CODE (op1),
5317 TREE_OPERAND (op1, 0),
5318 TREE_OPERAND (op1, 1)))
5319 return true;
5320
5321 /* Default case. */
5322 return false;
5323}
5324
5325/* Forward declarations for some mutually recursive functions. */
5326
5327static tree
5328and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5329 enum tree_code code2, tree op2a, tree op2b);
5330static tree
5331and_var_with_comparison (tree var, bool invert,
5332 enum tree_code code2, tree op2a, tree op2b);
5333static tree
355fe088 5334and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5335 enum tree_code code2, tree op2a, tree op2b);
5336static tree
5337or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5338 enum tree_code code2, tree op2a, tree op2b);
5339static tree
5340or_var_with_comparison (tree var, bool invert,
5341 enum tree_code code2, tree op2a, tree op2b);
5342static tree
355fe088 5343or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5344 enum tree_code code2, tree op2a, tree op2b);
5345
5346/* Helper function for and_comparisons_1: try to simplify the AND of the
5347 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5348 If INVERT is true, invert the value of the VAR before doing the AND.
5349 Return NULL_EXPR if we can't simplify this to a single expression. */
5350
5351static tree
5352and_var_with_comparison (tree var, bool invert,
5353 enum tree_code code2, tree op2a, tree op2b)
5354{
5355 tree t;
355fe088 5356 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5357
5358 /* We can only deal with variables whose definitions are assignments. */
5359 if (!is_gimple_assign (stmt))
5360 return NULL_TREE;
5361
5362 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5363 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5364 Then we only have to consider the simpler non-inverted cases. */
5365 if (invert)
5366 t = or_var_with_comparison_1 (stmt,
5367 invert_tree_comparison (code2, false),
5368 op2a, op2b);
5369 else
5370 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5371 return canonicalize_bool (t, invert);
5372}
5373
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) and (op2a == 1) test op2a's truth directly.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* (op2a == 0) and (op2a != 1) test op2a's falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* PARTIAL records a simplification of one inner operand ANDed
	 with the comparison, for combining with the other's result.  */
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
5536
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp < 0 iff op1b < op2b, cmp == 0 iff op1b == op2b, etc.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* If the second comparison implies the first, the conjunction
	     reduces to the second comparison alone.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges: (x <= c AND x >= c) == (x == c).  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges; their conjunction is trivially false.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for falsehood,
	 i.e. (NAME == 0) or (NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  /* This argument makes the conjunction false;
			     all other arguments must agree.  */
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
5790
5791/* Try to simplify the AND of two comparisons, specified by
5792 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5793 If this can be simplified to a single expression (without requiring
5794 introducing more SSA variables to hold intermediate values),
5795 return the resulting tree. Otherwise return NULL_TREE.
5796 If the result expression is non-null, it has boolean type. */
5797
5798tree
5799maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5800 enum tree_code code2, tree op2a, tree op2b)
5801{
5802 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5803 if (t)
5804 return t;
5805 else
5806 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5807}
5808
5809/* Helper function for or_comparisons_1: try to simplify the OR of the
5810 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5811 If INVERT is true, invert the value of VAR before doing the OR.
5812 Return NULL_EXPR if we can't simplify this to a single expression. */
5813
5814static tree
5815or_var_with_comparison (tree var, bool invert,
5816 enum tree_code code2, tree op2a, tree op2b)
5817{
5818 tree t;
355fe088 5819 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5820
5821 /* We can only deal with variables whose definitions are assignments. */
5822 if (!is_gimple_assign (stmt))
5823 return NULL_TREE;
5824
5825 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5826 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5827 Then we only have to consider the simpler non-inverted cases. */
5828 if (invert)
5829 t = and_var_with_comparison_1 (stmt,
5830 invert_tree_comparison (code2, false),
5831 op2a, op2b);
5832 else
5833 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5834 return canonicalize_bool (t, invert);
5835}
5836
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* TRUE_TEST_VAR / FALSE_TEST_VAR record an SSA name that the second
     comparison tests for truth resp. falsehood, when it has that form.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* PARTIAL holds the first partial result when it could not be
	 resolved to a constant on its own.  */
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
6000
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp < 0 iff op1b < op2b, cmp == 0 iff op1b == op2b, etc.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  /* The equality test is subsumed by the second comparison.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges: (x < c OR x > c) == (x != c).  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for falsehood,
	 i.e. (NAME == 0) or (NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  /* This argument makes the disjunction true;
			     all other arguments must agree.  */
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
6254
6255/* Try to simplify the OR of two comparisons, specified by
6256 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6257 If this can be simplified to a single expression (without requiring
6258 introducing more SSA variables to hold intermediate values),
6259 return the resulting tree. Otherwise return NULL_TREE.
6260 If the result expression is non-null, it has boolean type. */
6261
6262tree
6263maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6264 enum tree_code code2, tree op2a, tree op2b)
6265{
6266 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6267 if (t)
6268 return t;
6269 else
6270 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6271}
cfef45c8
RG
6272
6273
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  gimple_match_op res_op;
  /* ??? The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (&res_op))
	res = res_op.ops[0];
      else if (mprts_hook)
	res = mprts_hook (&res_op);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  /* Fall back to statement-kind specific folding.  */
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  poly_int64 offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      /* A vector CONSTRUCTOR with one value per element folds to a
		 VECTOR_CST if all valueized elements are constants.  */
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && known_eq (CONSTRUCTOR_NELTS (rhs),
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i, nelts;
		  tree val;

		  nelts = CONSTRUCTOR_NELTS (rhs);
		  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec.quick_push (val);
		      else
			return NULL_TREE;
		    }

		  return vec.build ();
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build_fold_addr_expr_loc
			(loc,
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we can't reduce to a SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    /* Put the constant (if any) on the RHS.  */
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    /* Map the UBSAN checking internal functions to the tree code
	       of the arithmetic they guard; fold only when no overflow.  */
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		{
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree op0 = (*valueize) (arg0);
		  if (TREE_CODE (op0) == INTEGER_CST)
		    return op0;
		  return NULL_TREE;
		}
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && fndecl_built_in_p (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
6578
6579/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6580 Returns NULL_TREE if folding to a constant is not possible, otherwise
6581 returns a constant according to is_gimple_min_invariant. */
6582
6583tree
355fe088 6584gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
6585{
6586 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6587 if (res && is_gimple_min_invariant (res))
6588 return res;
6589 return NULL_TREE;
6590}
6591
6592
6593/* The following set of functions are supposed to fold references using
6594 their constant initializers. */
6595
cfef45c8
RG
6596/* See if we can find constructor defining value of BASE.
6597 When we know the consructor with constant offset (such as
6598 base is array[40] and we do know constructor of array), then
6599 BIT_OFFSET is adjusted accordingly.
6600
6601 As a special case, return error_mark_node when constructor
6602 is not explicitly available, but it is known to be zero
6603 such as 'static const int a;'. */
6604static tree
588db50c 6605get_base_constructor (tree base, poly_int64_pod *bit_offset,
cfef45c8
RG
6606 tree (*valueize)(tree))
6607{
588db50c 6608 poly_int64 bit_offset2, size, max_size;
ee45a32d
EB
6609 bool reverse;
6610
cfef45c8
RG
6611 if (TREE_CODE (base) == MEM_REF)
6612 {
6a5aca53
ML
6613 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6614 if (!boff.to_shwi (bit_offset))
6615 return NULL_TREE;
cfef45c8
RG
6616
6617 if (valueize
6618 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6619 base = valueize (TREE_OPERAND (base, 0));
6620 if (!base || TREE_CODE (base) != ADDR_EXPR)
6621 return NULL_TREE;
6622 base = TREE_OPERAND (base, 0);
6623 }
13e88953
RB
6624 else if (valueize
6625 && TREE_CODE (base) == SSA_NAME)
6626 base = valueize (base);
cfef45c8
RG
6627
6628 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6629 DECL_INITIAL. If BASE is a nested reference into another
6630 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6631 the inner reference. */
6632 switch (TREE_CODE (base))
6633 {
6634 case VAR_DECL:
cfef45c8 6635 case CONST_DECL:
6a6dac52
JH
6636 {
6637 tree init = ctor_for_folding (base);
6638
688010ba 6639 /* Our semantic is exact opposite of ctor_for_folding;
6a6dac52
JH
6640 NULL means unknown, while error_mark_node is 0. */
6641 if (init == error_mark_node)
6642 return NULL_TREE;
6643 if (!init)
6644 return error_mark_node;
6645 return init;
6646 }
cfef45c8 6647
13e88953
RB
6648 case VIEW_CONVERT_EXPR:
6649 return get_base_constructor (TREE_OPERAND (base, 0),
6650 bit_offset, valueize);
6651
cfef45c8
RG
6652 case ARRAY_REF:
6653 case COMPONENT_REF:
ee45a32d
EB
6654 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6655 &reverse);
588db50c 6656 if (!known_size_p (max_size) || maybe_ne (size, max_size))
cfef45c8
RG
6657 return NULL_TREE;
6658 *bit_offset += bit_offset2;
6659 return get_base_constructor (base, bit_offset, valueize);
6660
cfef45c8
RG
6661 case CONSTRUCTOR:
6662 return base;
6663
6664 default:
13e88953
RB
6665 if (CONSTANT_CLASS_P (base))
6666 return base;
6667
cfef45c8
RG
6668 return NULL_TREE;
6669 }
6670}
6671
35b4d3a6
MS
6672/* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6673 to the memory at bit OFFSET. When non-null, TYPE is the expected
6674 type of the reference; otherwise the type of the referenced element
6675 is used instead. When SIZE is zero, attempt to fold a reference to
6676 the entire element which OFFSET refers to. Increment *SUBOFF by
6677 the bit offset of the accessed element. */
cfef45c8
RG
6678
6679static tree
6680fold_array_ctor_reference (tree type, tree ctor,
6681 unsigned HOST_WIDE_INT offset,
c44c2088 6682 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
6683 tree from_decl,
6684 unsigned HOST_WIDE_INT *suboff)
cfef45c8 6685{
807e902e
KZ
6686 offset_int low_bound;
6687 offset_int elt_size;
807e902e 6688 offset_int access_index;
6a636014 6689 tree domain_type = NULL_TREE;
cfef45c8
RG
6690 HOST_WIDE_INT inner_offset;
6691
6692 /* Compute low bound and elt size. */
eb8f1123
RG
6693 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6694 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
6695 if (domain_type && TYPE_MIN_VALUE (domain_type))
6696 {
6697 /* Static constructors for variably sized objects makes no sense. */
9ef2eff0
RB
6698 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6699 return NULL_TREE;
807e902e 6700 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
6701 }
6702 else
807e902e 6703 low_bound = 0;
cfef45c8 6704 /* Static constructors for variably sized objects makes no sense. */
9ef2eff0
RB
6705 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6706 return NULL_TREE;
807e902e 6707 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8 6708
35b4d3a6
MS
6709 /* When TYPE is non-null, verify that it specifies a constant-sized
6710 accessed not larger than size of array element. */
6711 if (type
6712 && (!TYPE_SIZE_UNIT (type)
6713 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6714 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6715 || elt_size == 0))
cfef45c8
RG
6716 return NULL_TREE;
6717
6718 /* Compute the array index we look for. */
807e902e
KZ
6719 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6720 elt_size);
27bcd47c 6721 access_index += low_bound;
cfef45c8
RG
6722
6723 /* And offset within the access. */
27bcd47c 6724 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8
RG
6725
6726 /* See if the array field is large enough to span whole access. We do not
6727 care to fold accesses spanning multiple array indexes. */
27bcd47c 6728 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
cfef45c8 6729 return NULL_TREE;
6a636014 6730 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
35b4d3a6
MS
6731 {
6732 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6733 {
6734 /* For the final reference to the entire accessed element
6735 (SIZE is zero), reset INNER_OFFSET, disegard TYPE (which
6736 may be null) in favor of the type of the element, and set
6737 SIZE to the size of the accessed element. */
6738 inner_offset = 0;
6739 type = TREE_TYPE (val);
6740 size = elt_size.to_uhwi () * BITS_PER_UNIT;
6741 }
6742
6743 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6744 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6745 suboff);
6746 }
cfef45c8 6747
35b4d3a6
MS
6748 /* Memory not explicitly mentioned in constructor is 0 (or
6749 the reference is out of range). */
6750 return type ? build_zero_cst (type) : NULL_TREE;
cfef45c8
RG
6751}
6752
35b4d3a6
MS
6753/* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6754 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6755 is the expected type of the reference; otherwise the type of
6756 the referenced member is used instead. When SIZE is zero,
6757 attempt to fold a reference to the entire member which OFFSET
6758 refers to; in this case. Increment *SUBOFF by the bit offset
6759 of the accessed member. */
cfef45c8
RG
6760
6761static tree
6762fold_nonarray_ctor_reference (tree type, tree ctor,
6763 unsigned HOST_WIDE_INT offset,
c44c2088 6764 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
6765 tree from_decl,
6766 unsigned HOST_WIDE_INT *suboff)
cfef45c8
RG
6767{
6768 unsigned HOST_WIDE_INT cnt;
6769 tree cfield, cval;
6770
6771 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6772 cval)
6773 {
6774 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6775 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6776 tree field_size = DECL_SIZE (cfield);
35b4d3a6
MS
6777
6778 if (!field_size)
6779 {
6780 /* Determine the size of the flexible array member from
6781 the size of the initializer provided for it. */
6782 field_size = TYPE_SIZE (TREE_TYPE (cval));
6783 }
cfef45c8
RG
6784
6785 /* Variable sized objects in static constructors makes no sense,
6786 but field_size can be NULL for flexible array members. */
6787 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6788 && TREE_CODE (byte_offset) == INTEGER_CST
6789 && (field_size != NULL_TREE
6790 ? TREE_CODE (field_size) == INTEGER_CST
6791 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6792
6793 /* Compute bit offset of the field. */
35b4d3a6
MS
6794 offset_int bitoffset
6795 = (wi::to_offset (field_offset)
6796 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
cfef45c8 6797 /* Compute bit offset where the field ends. */
35b4d3a6 6798 offset_int bitoffset_end;
cfef45c8 6799 if (field_size != NULL_TREE)
807e902e 6800 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 6801 else
807e902e 6802 bitoffset_end = 0;
cfef45c8 6803
35b4d3a6
MS
6804 /* Compute the bit offset of the end of the desired access.
6805 As a special case, if the size of the desired access is
6806 zero, assume the access is to the entire field (and let
6807 the caller make any necessary adjustments by storing
6808 the actual bounds of the field in FIELDBOUNDS). */
6809 offset_int access_end = offset_int (offset);
6810 if (size)
6811 access_end += size;
6812 else
6813 access_end = bitoffset_end;
b8b2b009 6814
35b4d3a6
MS
6815 /* Is there any overlap between the desired access at
6816 [OFFSET, OFFSET+SIZE) and the offset of the field within
6817 the object at [BITOFFSET, BITOFFSET_END)? */
807e902e 6818 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 6819 && (field_size == NULL_TREE
807e902e 6820 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 6821 {
35b4d3a6
MS
6822 *suboff += bitoffset.to_uhwi ();
6823
6824 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6825 {
6826 /* For the final reference to the entire accessed member
6827 (SIZE is zero), reset OFFSET, disegard TYPE (which may
6828 be null) in favor of the type of the member, and set
6829 SIZE to the size of the accessed member. */
6830 offset = bitoffset.to_uhwi ();
6831 type = TREE_TYPE (cval);
6832 size = (bitoffset_end - bitoffset).to_uhwi ();
6833 }
6834
6835 /* We do have overlap. Now see if the field is large enough
6836 to cover the access. Give up for accesses that extend
6837 beyond the end of the object or that span multiple fields. */
807e902e 6838 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 6839 return NULL_TREE;
032c80e9 6840 if (offset < bitoffset)
b8b2b009 6841 return NULL_TREE;
35b4d3a6
MS
6842
6843 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8 6844 return fold_ctor_reference (type, cval,
27bcd47c 6845 inner_offset.to_uhwi (), size,
35b4d3a6 6846 from_decl, suboff);
cfef45c8
RG
6847 }
6848 }
35b4d3a6
MS
6849 /* Memory not explicitly mentioned in constructor is 0. */
6850 return type ? build_zero_cst (type) : NULL_TREE;
cfef45c8
RG
6851}
6852
35b4d3a6
MS
6853/* CTOR is value initializing memory. Fold a reference of TYPE and
6854 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When SIZE
6855 is zero, attempt to fold a reference to the entire subobject
6856 which OFFSET refers to. This is used when folding accesses to
6857 string members of aggregates. When non-null, set *SUBOFF to
6858 the bit offset of the accessed subobject. */
cfef45c8 6859
8403c2cf 6860tree
35b4d3a6
MS
6861fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6862 const poly_uint64 &poly_size, tree from_decl,
6863 unsigned HOST_WIDE_INT *suboff /* = NULL */)
cfef45c8
RG
6864{
6865 tree ret;
6866
6867 /* We found the field with exact match. */
35b4d3a6
MS
6868 if (type
6869 && useless_type_conversion_p (type, TREE_TYPE (ctor))
30acf282 6870 && known_eq (poly_offset, 0U))
9d60be38 6871 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 6872
30acf282
RS
6873 /* The remaining optimizations need a constant size and offset. */
6874 unsigned HOST_WIDE_INT size, offset;
6875 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6876 return NULL_TREE;
6877
cfef45c8
RG
6878 /* We are at the end of walk, see if we can view convert the
6879 result. */
6880 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6881 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3d8208ce
TP
6882 && !compare_tree_int (TYPE_SIZE (type), size)
6883 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
cfef45c8 6884 {
9d60be38 6885 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 6886 if (ret)
672d9f8e
RB
6887 {
6888 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6889 if (ret)
6890 STRIP_USELESS_TYPE_CONVERSION (ret);
6891 }
cfef45c8
RG
6892 return ret;
6893 }
b2505143
RB
6894 /* For constants and byte-aligned/sized reads try to go through
6895 native_encode/interpret. */
6896 if (CONSTANT_CLASS_P (ctor)
6897 && BITS_PER_UNIT == 8
6898 && offset % BITS_PER_UNIT == 0
6899 && size % BITS_PER_UNIT == 0
6900 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6901 {
6902 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1ff0a84c
JJ
6903 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6904 offset / BITS_PER_UNIT);
6905 if (len > 0)
6906 return native_interpret_expr (type, buf, len);
b2505143 6907 }
cfef45c8
RG
6908 if (TREE_CODE (ctor) == CONSTRUCTOR)
6909 {
35b4d3a6
MS
6910 unsigned HOST_WIDE_INT dummy = 0;
6911 if (!suboff)
6912 suboff = &dummy;
cfef45c8 6913
eb8f1123
RG
6914 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6915 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
c44c2088 6916 return fold_array_ctor_reference (type, ctor, offset, size,
35b4d3a6
MS
6917 from_decl, suboff);
6918
6919 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6920 from_decl, suboff);
cfef45c8
RG
6921 }
6922
6923 return NULL_TREE;
6924}
6925
6926/* Return the tree representing the element referenced by T if T is an
6927 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
6928 names using VALUEIZE. Return NULL_TREE otherwise. */
6929
6930tree
6931fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6932{
6933 tree ctor, idx, base;
588db50c 6934 poly_int64 offset, size, max_size;
cfef45c8 6935 tree tem;
ee45a32d 6936 bool reverse;
cfef45c8 6937
f8a7df45
RG
6938 if (TREE_THIS_VOLATILE (t))
6939 return NULL_TREE;
6940
3a65ee74 6941 if (DECL_P (t))
cfef45c8
RG
6942 return get_symbol_constant_value (t);
6943
6944 tem = fold_read_from_constant_string (t);
6945 if (tem)
6946 return tem;
6947
6948 switch (TREE_CODE (t))
6949 {
6950 case ARRAY_REF:
6951 case ARRAY_RANGE_REF:
6952 /* Constant indexes are handled well by get_base_constructor.
6953 Only special case variable offsets.
6954 FIXME: This code can't handle nested references with variable indexes
6955 (they will be handled only by iteration of ccp). Perhaps we can bring
6956 get_ref_base_and_extent here and make it use a valueize callback. */
6957 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6958 && valueize
6959 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
588db50c 6960 && poly_int_tree_p (idx))
cfef45c8
RG
6961 {
6962 tree low_bound, unit_size;
6963
6964 /* If the resulting bit-offset is constant, track it. */
6965 if ((low_bound = array_ref_low_bound (t),
588db50c 6966 poly_int_tree_p (low_bound))
cfef45c8 6967 && (unit_size = array_ref_element_size (t),
807e902e 6968 tree_fits_uhwi_p (unit_size)))
cfef45c8 6969 {
588db50c
RS
6970 poly_offset_int woffset
6971 = wi::sext (wi::to_poly_offset (idx)
6972 - wi::to_poly_offset (low_bound),
807e902e
KZ
6973 TYPE_PRECISION (TREE_TYPE (idx)));
6974
588db50c 6975 if (woffset.to_shwi (&offset))
807e902e 6976 {
807e902e
KZ
6977 /* TODO: This code seems wrong, multiply then check
6978 to see if it fits. */
6979 offset *= tree_to_uhwi (unit_size);
6980 offset *= BITS_PER_UNIT;
6981
6982 base = TREE_OPERAND (t, 0);
6983 ctor = get_base_constructor (base, &offset, valueize);
6984 /* Empty constructor. Always fold to 0. */
6985 if (ctor == error_mark_node)
6986 return build_zero_cst (TREE_TYPE (t));
6987 /* Out of bound array access. Value is undefined,
6988 but don't fold. */
588db50c 6989 if (maybe_lt (offset, 0))
807e902e 6990 return NULL_TREE;
67914693 6991 /* We cannot determine ctor. */
807e902e
KZ
6992 if (!ctor)
6993 return NULL_TREE;
6994 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6995 tree_to_uhwi (unit_size)
6996 * BITS_PER_UNIT,
6997 base);
6998 }
cfef45c8
RG
6999 }
7000 }
7001 /* Fallthru. */
7002
7003 case COMPONENT_REF:
7004 case BIT_FIELD_REF:
7005 case TARGET_MEM_REF:
7006 case MEM_REF:
ee45a32d 7007 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
cfef45c8
RG
7008 ctor = get_base_constructor (base, &offset, valueize);
7009
7010 /* Empty constructor. Always fold to 0. */
7011 if (ctor == error_mark_node)
7012 return build_zero_cst (TREE_TYPE (t));
7013 /* We do not know precise address. */
588db50c 7014 if (!known_size_p (max_size) || maybe_ne (max_size, size))
cfef45c8 7015 return NULL_TREE;
67914693 7016 /* We cannot determine ctor. */
cfef45c8
RG
7017 if (!ctor)
7018 return NULL_TREE;
7019
7020 /* Out of bound array access. Value is undefined, but don't fold. */
588db50c 7021 if (maybe_lt (offset, 0))
cfef45c8
RG
7022 return NULL_TREE;
7023
c44c2088
JH
7024 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
7025 base);
cfef45c8
RG
7026
7027 case REALPART_EXPR:
7028 case IMAGPART_EXPR:
7029 {
7030 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
7031 if (c && TREE_CODE (c) == COMPLEX_CST)
7032 return fold_build1_loc (EXPR_LOCATION (t),
7033 TREE_CODE (t), TREE_TYPE (t), c);
7034 break;
7035 }
7036
7037 default:
7038 break;
7039 }
7040
7041 return NULL_TREE;
7042}
7043
7044tree
7045fold_const_aggregate_ref (tree t)
7046{
7047 return fold_const_aggregate_ref_1 (t, NULL);
7048}
06bc3ec7 7049
85942f45 7050/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
7051 at OFFSET.
7052 Set CAN_REFER if non-NULL to false if method
7053 is not referable or if the virtual table is ill-formed (such as rewriten
7054 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
81fa35bd
MJ
7055
7056tree
85942f45
JH
7057gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
7058 tree v,
ec77d61f
JH
7059 unsigned HOST_WIDE_INT offset,
7060 bool *can_refer)
81fa35bd 7061{
85942f45
JH
7062 tree vtable = v, init, fn;
7063 unsigned HOST_WIDE_INT size;
8c311b50
JH
7064 unsigned HOST_WIDE_INT elt_size, access_index;
7065 tree domain_type;
81fa35bd 7066
ec77d61f
JH
7067 if (can_refer)
7068 *can_refer = true;
7069
9de2f554 7070 /* First of all double check we have virtual table. */
8813a647 7071 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
ec77d61f 7072 {
ec77d61f
JH
7073 /* Pass down that we lost track of the target. */
7074 if (can_refer)
7075 *can_refer = false;
7076 return NULL_TREE;
7077 }
9de2f554 7078
2aa3da06
JH
7079 init = ctor_for_folding (v);
7080
9de2f554 7081 /* The virtual tables should always be born with constructors
2aa3da06
JH
7082 and we always should assume that they are avaialble for
7083 folding. At the moment we do not stream them in all cases,
7084 but it should never happen that ctor seem unreachable. */
7085 gcc_assert (init);
7086 if (init == error_mark_node)
7087 {
ec77d61f
JH
7088 /* Pass down that we lost track of the target. */
7089 if (can_refer)
7090 *can_refer = false;
2aa3da06
JH
7091 return NULL_TREE;
7092 }
81fa35bd 7093 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 7094 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 7095 offset *= BITS_PER_UNIT;
81fa35bd 7096 offset += token * size;
9de2f554 7097
8c311b50
JH
7098 /* Lookup the value in the constructor that is assumed to be array.
7099 This is equivalent to
7100 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
7101 offset, size, NULL);
7102 but in a constant time. We expect that frontend produced a simple
7103 array without indexed initializers. */
7104
7105 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
7106 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
7107 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
7108 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
7109
7110 access_index = offset / BITS_PER_UNIT / elt_size;
7111 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
7112
bf8d8309
MP
7113 /* The C++ FE can now produce indexed fields, and we check if the indexes
7114 match. */
8c311b50
JH
7115 if (access_index < CONSTRUCTOR_NELTS (init))
7116 {
7117 fn = CONSTRUCTOR_ELT (init, access_index)->value;
bf8d8309
MP
7118 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7119 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8c311b50
JH
7120 STRIP_NOPS (fn);
7121 }
7122 else
7123 fn = NULL;
9de2f554
JH
7124
7125 /* For type inconsistent program we may end up looking up virtual method
7126 in virtual table that does not contain TOKEN entries. We may overrun
7127 the virtual table and pick up a constant or RTTI info pointer.
7128 In any case the call is undefined. */
7129 if (!fn
7130 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7131 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7132 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7133 else
7134 {
7135 fn = TREE_OPERAND (fn, 0);
7136
7137 /* When cgraph node is missing and function is not public, we cannot
7138 devirtualize. This can happen in WHOPR when the actual method
7139 ends up in other partition, because we found devirtualization
7140 possibility too late. */
7141 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
7142 {
7143 if (can_refer)
7144 {
7145 *can_refer = false;
7146 return fn;
7147 }
7148 return NULL_TREE;
7149 }
9de2f554 7150 }
81fa35bd 7151
7501ca28
RG
7152 /* Make sure we create a cgraph node for functions we'll reference.
7153 They can be non-existent if the reference comes from an entry
7154 of an external vtable for example. */
d52f5295 7155 cgraph_node::get_create (fn);
7501ca28 7156
81fa35bd
MJ
7157 return fn;
7158}
7159
85942f45
JH
7160/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7161 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7162 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
7163 OBJ_TYPE_REF_OBJECT(REF).
7164 Set CAN_REFER if non-NULL to false if method
7165 is not referable or if the virtual table is ill-formed (such as rewriten
7166 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
7167
7168tree
ec77d61f
JH
7169gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7170 bool *can_refer)
85942f45
JH
7171{
7172 unsigned HOST_WIDE_INT offset;
7173 tree v;
7174
7175 v = BINFO_VTABLE (known_binfo);
7176 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
7177 if (!v)
7178 return NULL_TREE;
7179
7180 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
7181 {
7182 if (can_refer)
7183 *can_refer = false;
7184 return NULL_TREE;
7185 }
7186 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
7187}
7188
737f500a
RB
7189/* Given a pointer value T, return a simplified version of an
7190 indirection through T, or NULL_TREE if no simplification is
b184c8f1
AM
7191 possible. Note that the resulting type may be different from
7192 the type pointed to in the sense that it is still compatible
7193 from the langhooks point of view. */
7194
7195tree
7196gimple_fold_indirect_ref (tree t)
7197{
7198 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
7199 tree sub = t;
7200 tree subtype;
7201
7202 STRIP_NOPS (sub);
7203 subtype = TREE_TYPE (sub);
737f500a
RB
7204 if (!POINTER_TYPE_P (subtype)
7205 || TYPE_REF_CAN_ALIAS_ALL (ptype))
b184c8f1
AM
7206 return NULL_TREE;
7207
7208 if (TREE_CODE (sub) == ADDR_EXPR)
7209 {
7210 tree op = TREE_OPERAND (sub, 0);
7211 tree optype = TREE_TYPE (op);
7212 /* *&p => p */
7213 if (useless_type_conversion_p (type, optype))
7214 return op;
7215
7216 /* *(foo *)&fooarray => fooarray[0] */
7217 if (TREE_CODE (optype) == ARRAY_TYPE
7218 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7219 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7220 {
7221 tree type_domain = TYPE_DOMAIN (optype);
7222 tree min_val = size_zero_node;
7223 if (type_domain && TYPE_MIN_VALUE (type_domain))
7224 min_val = TYPE_MIN_VALUE (type_domain);
7225 if (TREE_CODE (min_val) == INTEGER_CST)
7226 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7227 }
7228 /* *(foo *)&complexfoo => __real__ complexfoo */
7229 else if (TREE_CODE (optype) == COMPLEX_TYPE
7230 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7231 return fold_build1 (REALPART_EXPR, type, op);
7232 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7233 else if (TREE_CODE (optype) == VECTOR_TYPE
7234 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7235 {
7236 tree part_width = TYPE_SIZE (type);
7237 tree index = bitsize_int (0);
7238 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7239 }
7240 }
7241
7242 /* *(p + CST) -> ... */
7243 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7244 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7245 {
7246 tree addr = TREE_OPERAND (sub, 0);
7247 tree off = TREE_OPERAND (sub, 1);
7248 tree addrtype;
7249
7250 STRIP_NOPS (addr);
7251 addrtype = TREE_TYPE (addr);
7252
7253 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7254 if (TREE_CODE (addr) == ADDR_EXPR
7255 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7256 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
cc269bb6 7257 && tree_fits_uhwi_p (off))
b184c8f1 7258 {
ae7e9ddd 7259 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
b184c8f1
AM
7260 tree part_width = TYPE_SIZE (type);
7261 unsigned HOST_WIDE_INT part_widthi
9439e9a1 7262 = tree_to_shwi (part_width) / BITS_PER_UNIT;
b184c8f1
AM
7263 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7264 tree index = bitsize_int (indexi);
928686b1
RS
7265 if (known_lt (offset / part_widthi,
7266 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
b184c8f1
AM
7267 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7268 part_width, index);
7269 }
7270
7271 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7272 if (TREE_CODE (addr) == ADDR_EXPR
7273 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7274 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7275 {
7276 tree size = TYPE_SIZE_UNIT (type);
7277 if (tree_int_cst_equal (size, off))
7278 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7279 }
7280
7281 /* *(p + CST) -> MEM_REF <p, CST>. */
7282 if (TREE_CODE (addr) != ADDR_EXPR
7283 || DECL_P (TREE_OPERAND (addr, 0)))
7284 return fold_build2 (MEM_REF, type,
7285 addr,
8e6cdc90 7286 wide_int_to_tree (ptype, wi::to_wide (off)));
b184c8f1
AM
7287 }
7288
7289 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7290 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7291 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7292 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7293 {
7294 tree type_domain;
7295 tree min_val = size_zero_node;
7296 tree osub = sub;
7297 sub = gimple_fold_indirect_ref (sub);
7298 if (! sub)
7299 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7300 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7301 if (type_domain && TYPE_MIN_VALUE (type_domain))
7302 min_val = TYPE_MIN_VALUE (type_domain);
7303 if (TREE_CODE (min_val) == INTEGER_CST)
7304 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7305 }
7306
7307 return NULL_TREE;
7308}
19e51b40
JJ
7309
7310/* Return true if CODE is an operation that when operating on signed
7311 integer types involves undefined behavior on overflow and the
7312 operation can be expressed with unsigned arithmetic. */
7313
7314bool
7315arith_code_with_undefined_signed_overflow (tree_code code)
7316{
7317 switch (code)
7318 {
7319 case PLUS_EXPR:
7320 case MINUS_EXPR:
7321 case MULT_EXPR:
7322 case NEGATE_EXPR:
7323 case POINTER_PLUS_EXPR:
7324 return true;
7325 default:
7326 return false;
7327 }
7328}
7329
7330/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7331 operation that can be transformed to unsigned arithmetic by converting
7332 its operand, carrying out the operation in the corresponding unsigned
7333 type and converting the result back to the original type.
7334
7335 Returns a sequence of statements that replace STMT and also contain
7336 a modified form of STMT itself. */
7337
7338gimple_seq
355fe088 7339rewrite_to_defined_overflow (gimple *stmt)
19e51b40
JJ
7340{
7341 if (dump_file && (dump_flags & TDF_DETAILS))
7342 {
7343 fprintf (dump_file, "rewriting stmt with undefined signed "
7344 "overflow ");
7345 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7346 }
7347
7348 tree lhs = gimple_assign_lhs (stmt);
7349 tree type = unsigned_type_for (TREE_TYPE (lhs));
7350 gimple_seq stmts = NULL;
7351 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7352 {
74e3c262
RB
7353 tree op = gimple_op (stmt, i);
7354 op = gimple_convert (&stmts, type, op);
7355 gimple_set_op (stmt, i, op);
19e51b40
JJ
7356 }
7357 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7358 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7359 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7360 gimple_seq_add_stmt (&stmts, stmt);
355fe088 7361 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
19e51b40
JJ
7362 gimple_seq_add_stmt (&stmts, cvt);
7363
7364 return stmts;
7365}
d4f5cd5e 7366
3d2cf79f 7367
c26de36d
RB
7368/* The valueization hook we use for the gimple_build API simplification.
7369 This makes us match fold_buildN behavior by only combining with
7370 statements in the sequence(s) we are currently building. */
7371
7372static tree
7373gimple_build_valueize (tree op)
7374{
7375 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7376 return op;
7377 return NULL_TREE;
7378}
7379
3d2cf79f 7380/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 7381 simplifying it first if possible. Returns the built
3d2cf79f
RB
7382 expression value and appends statements possibly defining it
7383 to SEQ. */
7384
7385tree
7386gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7387 enum tree_code code, tree type, tree op0)
3d2cf79f 7388{
c26de36d 7389 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
7390 if (!res)
7391 {
a15ebbcd 7392 res = create_tmp_reg_or_ssa_name (type);
355fe088 7393 gimple *stmt;
3d2cf79f
RB
7394 if (code == REALPART_EXPR
7395 || code == IMAGPART_EXPR
7396 || code == VIEW_CONVERT_EXPR)
0d0e4a03 7397 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 7398 else
0d0e4a03 7399 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
7400 gimple_set_location (stmt, loc);
7401 gimple_seq_add_stmt_without_update (seq, stmt);
7402 }
7403 return res;
7404}
7405
7406/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 7407 simplifying it first if possible. Returns the built
3d2cf79f
RB
7408 expression value and appends statements possibly defining it
7409 to SEQ. */
7410
7411tree
7412gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7413 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 7414{
c26de36d 7415 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
7416 if (!res)
7417 {
a15ebbcd 7418 res = create_tmp_reg_or_ssa_name (type);
355fe088 7419 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
7420 gimple_set_location (stmt, loc);
7421 gimple_seq_add_stmt_without_update (seq, stmt);
7422 }
7423 return res;
7424}
7425
7426/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 7427 simplifying it first if possible. Returns the built
3d2cf79f
RB
7428 expression value and appends statements possibly defining it
7429 to SEQ. */
7430
7431tree
7432gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7433 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
7434{
7435 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 7436 seq, gimple_build_valueize);
3d2cf79f
RB
7437 if (!res)
7438 {
a15ebbcd 7439 res = create_tmp_reg_or_ssa_name (type);
355fe088 7440 gimple *stmt;
3d2cf79f 7441 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
7442 stmt = gimple_build_assign (res, code,
7443 build3 (code, type, op0, op1, op2));
3d2cf79f 7444 else
0d0e4a03 7445 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
7446 gimple_set_location (stmt, loc);
7447 gimple_seq_add_stmt_without_update (seq, stmt);
7448 }
7449 return res;
7450}
7451
7452/* Build the call FN (ARG0) with a result of type TYPE
7453 (or no result if TYPE is void) with location LOC,
c26de36d 7454 simplifying it first if possible. Returns the built
3d2cf79f
RB
7455 expression value (or NULL_TREE if TYPE is void) and appends
7456 statements possibly defining it to SEQ. */
7457
7458tree
eb69361d
RS
7459gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7460 tree type, tree arg0)
3d2cf79f 7461{
c26de36d 7462 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
7463 if (!res)
7464 {
eb69361d
RS
7465 gcall *stmt;
7466 if (internal_fn_p (fn))
7467 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7468 else
7469 {
7470 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7471 stmt = gimple_build_call (decl, 1, arg0);
7472 }
3d2cf79f
RB
7473 if (!VOID_TYPE_P (type))
7474 {
a15ebbcd 7475 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7476 gimple_call_set_lhs (stmt, res);
7477 }
7478 gimple_set_location (stmt, loc);
7479 gimple_seq_add_stmt_without_update (seq, stmt);
7480 }
7481 return res;
7482}
7483
7484/* Build the call FN (ARG0, ARG1) with a result of type TYPE
7485 (or no result if TYPE is void) with location LOC,
c26de36d 7486 simplifying it first if possible. Returns the built
3d2cf79f
RB
7487 expression value (or NULL_TREE if TYPE is void) and appends
7488 statements possibly defining it to SEQ. */
7489
7490tree
eb69361d
RS
7491gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7492 tree type, tree arg0, tree arg1)
3d2cf79f 7493{
c26de36d 7494 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
7495 if (!res)
7496 {
eb69361d
RS
7497 gcall *stmt;
7498 if (internal_fn_p (fn))
7499 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7500 else
7501 {
7502 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7503 stmt = gimple_build_call (decl, 2, arg0, arg1);
7504 }
3d2cf79f
RB
7505 if (!VOID_TYPE_P (type))
7506 {
a15ebbcd 7507 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7508 gimple_call_set_lhs (stmt, res);
7509 }
7510 gimple_set_location (stmt, loc);
7511 gimple_seq_add_stmt_without_update (seq, stmt);
7512 }
7513 return res;
7514}
7515
7516/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7517 (or no result if TYPE is void) with location LOC,
c26de36d 7518 simplifying it first if possible. Returns the built
3d2cf79f
RB
7519 expression value (or NULL_TREE if TYPE is void) and appends
7520 statements possibly defining it to SEQ. */
7521
7522tree
eb69361d
RS
7523gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7524 tree type, tree arg0, tree arg1, tree arg2)
3d2cf79f 7525{
c26de36d
RB
7526 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7527 seq, gimple_build_valueize);
3d2cf79f
RB
7528 if (!res)
7529 {
eb69361d
RS
7530 gcall *stmt;
7531 if (internal_fn_p (fn))
7532 stmt = gimple_build_call_internal (as_internal_fn (fn),
7533 3, arg0, arg1, arg2);
7534 else
7535 {
7536 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7537 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7538 }
3d2cf79f
RB
7539 if (!VOID_TYPE_P (type))
7540 {
a15ebbcd 7541 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7542 gimple_call_set_lhs (stmt, res);
7543 }
7544 gimple_set_location (stmt, loc);
7545 gimple_seq_add_stmt_without_update (seq, stmt);
7546 }
7547 return res;
7548}
7549
7550/* Build the conversion (TYPE) OP with a result of type TYPE
7551 with location LOC if such conversion is neccesary in GIMPLE,
7552 simplifying it first.
7553 Returns the built expression value and appends
7554 statements possibly defining it to SEQ. */
d4f5cd5e
RB
7555
7556tree
7557gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7558{
7559 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7560 return op;
3d2cf79f 7561 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 7562}
68e57f04 7563
74e3c262
RB
7564/* Build the conversion (ptrofftype) OP with a result of a type
7565 compatible with ptrofftype with location LOC if such conversion
7566 is neccesary in GIMPLE, simplifying it first.
7567 Returns the built expression value and appends
7568 statements possibly defining it to SEQ. */
7569
7570tree
7571gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7572{
7573 if (ptrofftype_p (TREE_TYPE (op)))
7574 return op;
7575 return gimple_convert (seq, loc, sizetype, op);
7576}
7577
e7c45b66
RS
7578/* Build a vector of type TYPE in which each element has the value OP.
7579 Return a gimple value for the result, appending any new statements
7580 to SEQ. */
7581
7582tree
7583gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7584 tree op)
7585{
928686b1
RS
7586 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7587 && !CONSTANT_CLASS_P (op))
7588 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7589
e7c45b66
RS
7590 tree res, vec = build_vector_from_val (type, op);
7591 if (is_gimple_val (vec))
7592 return vec;
7593 if (gimple_in_ssa_p (cfun))
7594 res = make_ssa_name (type);
7595 else
7596 res = create_tmp_reg (type);
7597 gimple *stmt = gimple_build_assign (res, vec);
7598 gimple_set_location (stmt, loc);
7599 gimple_seq_add_stmt_without_update (seq, stmt);
7600 return res;
7601}
7602
abe73c3d
RS
7603/* Build a vector from BUILDER, handling the case in which some elements
7604 are non-constant. Return a gimple value for the result, appending any
7605 new instructions to SEQ.
7606
7607 BUILDER must not have a stepped encoding on entry. This is because
7608 the function is not geared up to handle the arithmetic that would
7609 be needed in the variable case, and any code building a vector that
7610 is known to be constant should use BUILDER->build () directly. */
e7c45b66
RS
7611
7612tree
abe73c3d
RS
7613gimple_build_vector (gimple_seq *seq, location_t loc,
7614 tree_vector_builder *builder)
e7c45b66 7615{
abe73c3d
RS
7616 gcc_assert (builder->nelts_per_pattern () <= 2);
7617 unsigned int encoded_nelts = builder->encoded_nelts ();
7618 for (unsigned int i = 0; i < encoded_nelts; ++i)
7619 if (!TREE_CONSTANT ((*builder)[i]))
e7c45b66 7620 {
abe73c3d 7621 tree type = builder->type ();
928686b1 7622 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
e7c45b66
RS
7623 vec<constructor_elt, va_gc> *v;
7624 vec_alloc (v, nelts);
7625 for (i = 0; i < nelts; ++i)
abe73c3d 7626 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
e7c45b66
RS
7627
7628 tree res;
7629 if (gimple_in_ssa_p (cfun))
7630 res = make_ssa_name (type);
7631 else
7632 res = create_tmp_reg (type);
7633 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7634 gimple_set_location (stmt, loc);
7635 gimple_seq_add_stmt_without_update (seq, stmt);
7636 return res;
7637 }
abe73c3d 7638 return builder->build ();
e7c45b66
RS
7639}
7640
68e57f04
RS
7641/* Return true if the result of assignment STMT is known to be non-negative.
7642 If the return value is based on the assumption that signed overflow is
7643 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7644 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7645
7646static bool
7647gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7648 int depth)
7649{
7650 enum tree_code code = gimple_assign_rhs_code (stmt);
7651 switch (get_gimple_rhs_class (code))
7652 {
7653 case GIMPLE_UNARY_RHS:
7654 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7655 gimple_expr_type (stmt),
7656 gimple_assign_rhs1 (stmt),
7657 strict_overflow_p, depth);
7658 case GIMPLE_BINARY_RHS:
7659 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7660 gimple_expr_type (stmt),
7661 gimple_assign_rhs1 (stmt),
7662 gimple_assign_rhs2 (stmt),
7663 strict_overflow_p, depth);
7664 case GIMPLE_TERNARY_RHS:
7665 return false;
7666 case GIMPLE_SINGLE_RHS:
7667 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7668 strict_overflow_p, depth);
7669 case GIMPLE_INVALID_RHS:
7670 break;
7671 }
7672 gcc_unreachable ();
7673}
7674
7675/* Return true if return value of call STMT is known to be non-negative.
7676 If the return value is based on the assumption that signed overflow is
7677 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7678 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7679
7680static bool
7681gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7682 int depth)
7683{
7684 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7685 gimple_call_arg (stmt, 0) : NULL_TREE;
7686 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7687 gimple_call_arg (stmt, 1) : NULL_TREE;
7688
7689 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
1d9da71f 7690 gimple_call_combined_fn (stmt),
68e57f04
RS
7691 arg0,
7692 arg1,
7693 strict_overflow_p, depth);
7694}
7695
4534c203
RB
7696/* Return true if return value of call STMT is known to be non-negative.
7697 If the return value is based on the assumption that signed overflow is
7698 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7699 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7700
7701static bool
7702gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7703 int depth)
7704{
7705 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7706 {
7707 tree arg = gimple_phi_arg_def (stmt, i);
7708 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7709 return false;
7710 }
7711 return true;
7712}
7713
68e57f04
RS
7714/* Return true if STMT is known to compute a non-negative value.
7715 If the return value is based on the assumption that signed overflow is
7716 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7717 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7718
7719bool
7720gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7721 int depth)
7722{
7723 switch (gimple_code (stmt))
7724 {
7725 case GIMPLE_ASSIGN:
7726 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7727 depth);
7728 case GIMPLE_CALL:
7729 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7730 depth);
4534c203
RB
7731 case GIMPLE_PHI:
7732 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7733 depth);
68e57f04
RS
7734 default:
7735 return false;
7736 }
7737}
67dbe582
RS
7738
7739/* Return true if the floating-point value computed by assignment STMT
7740 is known to have an integer value. We also allow +Inf, -Inf and NaN
5a00b0aa 7741 to be considered integer values. Return false for signaling NaN.
67dbe582
RS
7742
7743 DEPTH is the current nesting depth of the query. */
7744
7745static bool
7746gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7747{
7748 enum tree_code code = gimple_assign_rhs_code (stmt);
7749 switch (get_gimple_rhs_class (code))
7750 {
7751 case GIMPLE_UNARY_RHS:
7752 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7753 gimple_assign_rhs1 (stmt), depth);
7754 case GIMPLE_BINARY_RHS:
7755 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7756 gimple_assign_rhs1 (stmt),
7757 gimple_assign_rhs2 (stmt), depth);
7758 case GIMPLE_TERNARY_RHS:
7759 return false;
7760 case GIMPLE_SINGLE_RHS:
7761 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7762 case GIMPLE_INVALID_RHS:
7763 break;
7764 }
7765 gcc_unreachable ();
7766}
7767
7768/* Return true if the floating-point value computed by call STMT is known
7769 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7770 considered integer values. Return false for signaling NaN.
67dbe582
RS
7771
7772 DEPTH is the current nesting depth of the query. */
7773
7774static bool
7775gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7776{
7777 tree arg0 = (gimple_call_num_args (stmt) > 0
7778 ? gimple_call_arg (stmt, 0)
7779 : NULL_TREE);
7780 tree arg1 = (gimple_call_num_args (stmt) > 1
7781 ? gimple_call_arg (stmt, 1)
7782 : NULL_TREE);
1d9da71f 7783 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
7784 arg0, arg1, depth);
7785}
7786
7787/* Return true if the floating-point result of phi STMT is known to have
7788 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 7789 integer values. Return false for signaling NaN.
67dbe582
RS
7790
7791 DEPTH is the current nesting depth of the query. */
7792
7793static bool
7794gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7795{
7796 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7797 {
7798 tree arg = gimple_phi_arg_def (stmt, i);
7799 if (!integer_valued_real_single_p (arg, depth + 1))
7800 return false;
7801 }
7802 return true;
7803}
7804
7805/* Return true if the floating-point value computed by STMT is known
7806 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7807 considered integer values. Return false for signaling NaN.
67dbe582
RS
7808
7809 DEPTH is the current nesting depth of the query. */
7810
7811bool
7812gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7813{
7814 switch (gimple_code (stmt))
7815 {
7816 case GIMPLE_ASSIGN:
7817 return gimple_assign_integer_valued_real_p (stmt, depth);
7818 case GIMPLE_CALL:
7819 return gimple_call_integer_valued_real_p (stmt, depth);
7820 case GIMPLE_PHI:
7821 return gimple_phi_integer_valued_real_p (stmt, depth);
7822 default:
7823 return false;
7824 }
7825}