/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2019 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Temporary until the rest of Martin's strlen range work is integrated.  */
  SRK_LENRANGE_2,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};

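/* As an illustration of the difference between the kinds: for a call
   like strlen (x ? array : "xyz") with array declared as char array[8],
   SRK_STRLEN cannot determine a single constant length, while
   SRK_LENRANGE can bound the result to the range [0, 7] based on the
   size of the array (see get_range_strlen below).  */
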
static bool get_range_strlen (tree, bitmap *, strlen_range_kind,
                              c_strlen_data *, bool *, unsigned);

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable that
   DECL was taken from.  We can get declarations that are not possible to
   reference for various reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to another compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to a reference
        to a method that was partitioned elsewhere.
        In this case we have a static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during the final compilation stage.
        At this time we already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
         static objects are defined.  */
      if (symtab->function_flags_ready)
        return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
        return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->definition)
      || (flag_ltrans
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may refer
     to a symbol keyed to another compilation unit.  The other compilation
     unit may be in a separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exceptions are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the current
     unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
          && (!snode->in_other_partition
              || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, an SSA name is created.  Otherwise a temporary register
   is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}

/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

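/* As an illustrative sketch (using a hypothetical global "int a[8];"):
   an initializer value of the form &a + 4, i.e. a POINTER_PLUS_EXPR of
   an ADDR_EXPR and the constant 4, is rewritten below into the address
   of a MEM_REF at offset 4 from &a, a form that is_gimple_min_invariant
   accepts.  */
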
206tree
c44c2088 207canonicalize_constructor_val (tree cval, tree from_decl)
17f39a39 208{
50619002
EB
209 tree orig_cval = cval;
210 STRIP_NOPS (cval);
315f5f1b
RG
211 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
212 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
17f39a39 213 {
315f5f1b
RG
214 tree ptr = TREE_OPERAND (cval, 0);
215 if (is_gimple_min_invariant (ptr))
216 cval = build1_loc (EXPR_LOCATION (cval),
217 ADDR_EXPR, TREE_TYPE (ptr),
218 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
219 ptr,
220 fold_convert (ptr_type_node,
221 TREE_OPERAND (cval, 1))));
17f39a39
JH
222 }
223 if (TREE_CODE (cval) == ADDR_EXPR)
224 {
5a27a197
RG
225 tree base = NULL_TREE;
226 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
ca5f4331
MM
227 {
228 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
229 if (base)
230 TREE_OPERAND (cval, 0) = base;
231 }
5a27a197
RG
232 else
233 base = get_base_address (TREE_OPERAND (cval, 0));
7501ca28
RG
234 if (!base)
235 return NULL_TREE;
b3b9f3d0 236
8813a647 237 if (VAR_OR_FUNCTION_DECL_P (base)
c44c2088 238 && !can_refer_decl_in_current_unit_p (base, from_decl))
1389294c 239 return NULL_TREE;
13f92e8d
JJ
240 if (TREE_TYPE (base) == error_mark_node)
241 return NULL_TREE;
8813a647 242 if (VAR_P (base))
46eb666a 243 TREE_ADDRESSABLE (base) = 1;
7501ca28
RG
244 else if (TREE_CODE (base) == FUNCTION_DECL)
245 {
246 /* Make sure we create a cgraph node for functions we'll reference.
247 They can be non-existent if the reference comes from an entry
248 of an external vtable for example. */
d52f5295 249 cgraph_node::get_create (base);
7501ca28 250 }
0038d4e0 251 /* Fixup types in global initializers. */
73aef89e
RG
252 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
253 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
50619002
EB
254
255 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
256 cval = fold_convert (TREE_TYPE (orig_cval), cval);
257 return cval;
17f39a39 258 }
846abd0d
RB
259 if (TREE_OVERFLOW_P (cval))
260 return drop_tree_overflow (cval);
50619002 261 return orig_cval;
17f39a39 262}
cbdd87d4
RG
263
264/* If SYM is a constant variable with known value, return the value.
265 NULL_TREE is returned otherwise. */
266
267tree
268get_symbol_constant_value (tree sym)
269{
6a6dac52
JH
270 tree val = ctor_for_folding (sym);
271 if (val != error_mark_node)
cbdd87d4 272 {
cbdd87d4
RG
273 if (val)
274 {
9d60be38 275 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 276 if (val && is_gimple_min_invariant (val))
17f39a39 277 return val;
1389294c
JH
278 else
279 return NULL_TREE;
cbdd87d4
RG
280 }
281 /* Variables declared 'const' without an initializer
282 have zero as the initializer if they may not be
283 overridden at link or run time. */
284 if (!val
b8a8c472 285 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 286 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
287 }
288
289 return NULL_TREE;
290}
291
292
cbdd87d4
RG
293
294/* Subroutine of fold_stmt. We perform several simplifications of the
295 memory reference tree EXPR and make sure to re-gimplify them properly
296 after propagation of constant addresses. IS_LHS is true if the
297 reference is supposed to be an lvalue. */
298
299static tree
300maybe_fold_reference (tree expr, bool is_lhs)
301{
17f39a39 302 tree result;
cbdd87d4 303
f0eddb90
RG
304 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
305 || TREE_CODE (expr) == REALPART_EXPR
306 || TREE_CODE (expr) == IMAGPART_EXPR)
307 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
308 return fold_unary_loc (EXPR_LOCATION (expr),
309 TREE_CODE (expr),
310 TREE_TYPE (expr),
311 TREE_OPERAND (expr, 0));
312 else if (TREE_CODE (expr) == BIT_FIELD_REF
313 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
314 return fold_ternary_loc (EXPR_LOCATION (expr),
315 TREE_CODE (expr),
316 TREE_TYPE (expr),
317 TREE_OPERAND (expr, 0),
318 TREE_OPERAND (expr, 1),
319 TREE_OPERAND (expr, 2));
320
f0eddb90
RG
321 if (!is_lhs
322 && (result = fold_const_aggregate_ref (expr))
323 && is_gimple_min_invariant (result))
324 return result;
cbdd87d4 325
cbdd87d4
RG
326 return NULL_TREE;
327}
328

/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

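/* Illustrative examples of the simplifications performed below: a
   GIMPLE_SINGLE_RHS whose right-hand side is a CONSTRUCTOR of a vector
   type with all-constant elements is folded to a VECTOR_CST, and a DECL
   right-hand side is replaced by its known constant value via
   get_symbol_constant_value.  */
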
335static tree
336fold_gimple_assign (gimple_stmt_iterator *si)
337{
355fe088 338 gimple *stmt = gsi_stmt (*si);
cbdd87d4
RG
339 enum tree_code subcode = gimple_assign_rhs_code (stmt);
340 location_t loc = gimple_location (stmt);
341
342 tree result = NULL_TREE;
343
344 switch (get_gimple_rhs_class (subcode))
345 {
346 case GIMPLE_SINGLE_RHS:
347 {
348 tree rhs = gimple_assign_rhs1 (stmt);
349
8c00ba08
JW
350 if (TREE_CLOBBER_P (rhs))
351 return NULL_TREE;
352
4e71066d 353 if (REFERENCE_CLASS_P (rhs))
cbdd87d4
RG
354 return maybe_fold_reference (rhs, false);
355
bdf37f7a
JH
356 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
357 {
358 tree val = OBJ_TYPE_REF_EXPR (rhs);
359 if (is_gimple_min_invariant (val))
360 return val;
f8a39967 361 else if (flag_devirtualize && virtual_method_call_p (rhs))
bdf37f7a
JH
362 {
363 bool final;
364 vec <cgraph_node *>targets
f8a39967 365 = possible_polymorphic_call_targets (rhs, stmt, &final);
2b5f0895 366 if (final && targets.length () <= 1 && dbg_cnt (devirt))
bdf37f7a 367 {
2b5f0895
XDL
368 if (dump_enabled_p ())
369 {
4f5b9c80 370 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
2b5f0895
XDL
371 "resolving virtual function address "
372 "reference to function %s\n",
373 targets.length () == 1
374 ? targets[0]->name ()
3ef276e4 375 : "NULL");
2b5f0895 376 }
3ef276e4
RB
377 if (targets.length () == 1)
378 {
379 val = fold_convert (TREE_TYPE (val),
380 build_fold_addr_expr_loc
381 (loc, targets[0]->decl));
382 STRIP_USELESS_TYPE_CONVERSION (val);
383 }
384 else
	    /* We cannot use __builtin_unreachable here because it
	       cannot have its address taken.  */
387 val = build_int_cst (TREE_TYPE (val), 0);
bdf37f7a
JH
388 return val;
389 }
390 }
bdf37f7a 391 }
7524f419 392
cbdd87d4
RG
393 else if (TREE_CODE (rhs) == ADDR_EXPR)
394 {
70f34814
RG
395 tree ref = TREE_OPERAND (rhs, 0);
396 tree tem = maybe_fold_reference (ref, true);
397 if (tem
398 && TREE_CODE (tem) == MEM_REF
399 && integer_zerop (TREE_OPERAND (tem, 1)))
400 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
401 else if (tem)
cbdd87d4
RG
402 result = fold_convert (TREE_TYPE (rhs),
403 build_fold_addr_expr_loc (loc, tem));
70f34814
RG
404 else if (TREE_CODE (ref) == MEM_REF
405 && integer_zerop (TREE_OPERAND (ref, 1)))
406 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
7524f419
RB
407
408 if (result)
409 {
410 /* Strip away useless type conversions. Both the
411 NON_LVALUE_EXPR that may have been added by fold, and
412 "useless" type conversions that might now be apparent
413 due to propagation. */
414 STRIP_USELESS_TYPE_CONVERSION (result);
415
416 if (result != rhs && valid_gimple_rhs_p (result))
417 return result;
418 }
cbdd87d4
RG
419 }
420
421 else if (TREE_CODE (rhs) == CONSTRUCTOR
7524f419 422 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
cbdd87d4
RG
423 {
424 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
425 unsigned i;
426 tree val;
427
428 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7524f419 429 if (! CONSTANT_CLASS_P (val))
cbdd87d4
RG
430 return NULL_TREE;
431
432 return build_vector_from_ctor (TREE_TYPE (rhs),
433 CONSTRUCTOR_ELTS (rhs));
434 }
435
436 else if (DECL_P (rhs))
9d60be38 437 return get_symbol_constant_value (rhs);
cbdd87d4
RG
438 }
439 break;
440
441 case GIMPLE_UNARY_RHS:
cbdd87d4
RG
442 break;
443
444 case GIMPLE_BINARY_RHS:
cbdd87d4
RG
445 break;
446
0354c0c7 447 case GIMPLE_TERNARY_RHS:
5c099d40
RB
448 result = fold_ternary_loc (loc, subcode,
449 TREE_TYPE (gimple_assign_lhs (stmt)),
450 gimple_assign_rhs1 (stmt),
451 gimple_assign_rhs2 (stmt),
452 gimple_assign_rhs3 (stmt));
0354c0c7
BS
453
454 if (result)
455 {
456 STRIP_USELESS_TYPE_CONVERSION (result);
457 if (valid_gimple_rhs_p (result))
458 return result;
0354c0c7
BS
459 }
460 break;
461
cbdd87d4
RG
462 case GIMPLE_INVALID_RHS:
463 gcc_unreachable ();
464 }
465
466 return NULL_TREE;
467}
468
fef5a0d9
RB
469
470/* Replace a statement at *SI_P with a sequence of statements in STMTS,
471 adjusting the replacement stmts location and virtual operands.
472 If the statement has a lhs the last stmt in the sequence is expected
473 to assign to that lhs. */
474
475static void
476gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
477{
355fe088 478 gimple *stmt = gsi_stmt (*si_p);
fef5a0d9
RB
479
480 if (gimple_has_location (stmt))
481 annotate_all_with_location (stmts, gimple_location (stmt));
482
483 /* First iterate over the replacement statements backward, assigning
484 virtual operands to their defining statements. */
355fe088 485 gimple *laststore = NULL;
fef5a0d9
RB
486 for (gimple_stmt_iterator i = gsi_last (stmts);
487 !gsi_end_p (i); gsi_prev (&i))
488 {
355fe088 489 gimple *new_stmt = gsi_stmt (i);
fef5a0d9
RB
490 if ((gimple_assign_single_p (new_stmt)
491 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
492 || (is_gimple_call (new_stmt)
493 && (gimple_call_flags (new_stmt)
494 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
495 {
496 tree vdef;
497 if (!laststore)
498 vdef = gimple_vdef (stmt);
499 else
500 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
501 gimple_set_vdef (new_stmt, vdef);
502 if (vdef && TREE_CODE (vdef) == SSA_NAME)
503 SSA_NAME_DEF_STMT (vdef) = new_stmt;
504 laststore = new_stmt;
505 }
506 }
507
508 /* Second iterate over the statements forward, assigning virtual
509 operands to their uses. */
510 tree reaching_vuse = gimple_vuse (stmt);
511 for (gimple_stmt_iterator i = gsi_start (stmts);
512 !gsi_end_p (i); gsi_next (&i))
513 {
355fe088 514 gimple *new_stmt = gsi_stmt (i);
fef5a0d9
RB
515 /* If the new statement possibly has a VUSE, update it with exact SSA
516 name we know will reach this one. */
517 if (gimple_has_mem_ops (new_stmt))
518 gimple_set_vuse (new_stmt, reaching_vuse);
519 gimple_set_modified (new_stmt, true);
520 if (gimple_vdef (new_stmt))
521 reaching_vuse = gimple_vdef (new_stmt);
522 }
523
524 /* If the new sequence does not do a store release the virtual
525 definition of the original statement. */
526 if (reaching_vuse
527 && reaching_vuse == gimple_vuse (stmt))
528 {
529 tree vdef = gimple_vdef (stmt);
530 if (vdef
531 && TREE_CODE (vdef) == SSA_NAME)
532 {
533 unlink_stmt_vdef (stmt);
534 release_ssa_name (vdef);
535 }
536 }
537
538 /* Finally replace the original statement with the sequence. */
539 gsi_replace_with_seq (si_p, stmts, false);
540}
541
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

552void
553gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
554{
555 tree lhs;
355fe088 556 gimple *stmt, *new_stmt;
cbdd87d4 557 gimple_stmt_iterator i;
355a7673 558 gimple_seq stmts = NULL;
cbdd87d4
RG
559
560 stmt = gsi_stmt (*si_p);
561
562 gcc_assert (is_gimple_call (stmt));
563
45852dcc 564 push_gimplify_context (gimple_in_ssa_p (cfun));
cbdd87d4 565
e256dfce 566 lhs = gimple_call_lhs (stmt);
cbdd87d4 567 if (lhs == NULL_TREE)
6e572326
RG
568 {
569 gimplify_and_add (expr, &stmts);
570 /* We can end up with folding a memcpy of an empty class assignment
571 which gets optimized away by C++ gimplification. */
572 if (gimple_seq_empty_p (stmts))
573 {
9fdc58de 574 pop_gimplify_context (NULL);
6e572326
RG
575 if (gimple_in_ssa_p (cfun))
576 {
577 unlink_stmt_vdef (stmt);
578 release_defs (stmt);
579 }
f6b4dc28 580 gsi_replace (si_p, gimple_build_nop (), false);
6e572326
RG
581 return;
582 }
583 }
cbdd87d4 584 else
e256dfce 585 {
381cdae4 586 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
e256dfce
RG
587 new_stmt = gimple_build_assign (lhs, tmp);
588 i = gsi_last (stmts);
589 gsi_insert_after_without_update (&i, new_stmt,
590 GSI_CONTINUE_LINKING);
591 }
cbdd87d4
RG
592
593 pop_gimplify_context (NULL);
594
fef5a0d9
RB
595 gsi_replace_with_seq_vops (si_p, stmts);
596}


/* Replace the call at *GSI with the gimple value VAL.  */

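/* Usage sketch (with a hypothetical SSA name n_4): a caller that has
   determined the value of a call, e.g. folding n_4 = strlen ("abc") to
   the constant 3, can invoke replace_call_with_value (gsi, val) with
   VAL built via build_int_cst; the call is then replaced by n_4 = 3 (or
   by a GIMPLE_NOP when the result is unused) and any virtual definition
   of the original call is released.  */
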
e3174bdf 601void
fef5a0d9
RB
602replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
603{
355fe088 604 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 605 tree lhs = gimple_call_lhs (stmt);
355fe088 606 gimple *repl;
fef5a0d9 607 if (lhs)
e256dfce 608 {
fef5a0d9
RB
609 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
610 val = fold_convert (TREE_TYPE (lhs), val);
611 repl = gimple_build_assign (lhs, val);
612 }
613 else
614 repl = gimple_build_nop ();
615 tree vdef = gimple_vdef (stmt);
616 if (vdef && TREE_CODE (vdef) == SSA_NAME)
617 {
618 unlink_stmt_vdef (stmt);
619 release_ssa_name (vdef);
620 }
f6b4dc28 621 gsi_replace (gsi, repl, false);
fef5a0d9
RB
622}
623
624/* Replace the call at *GSI with the new call REPL and fold that
625 again. */
626
627static void
355fe088 628replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
fef5a0d9 629{
355fe088 630 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
631 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
632 gimple_set_location (repl, gimple_location (stmt));
633 if (gimple_vdef (stmt)
634 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
635 {
636 gimple_set_vdef (repl, gimple_vdef (stmt));
fef5a0d9
RB
637 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
638 }
00296d7f
JJ
639 if (gimple_vuse (stmt))
640 gimple_set_vuse (repl, gimple_vuse (stmt));
f6b4dc28 641 gsi_replace (gsi, repl, false);
fef5a0d9
RB
642 fold_stmt (gsi);
643}
644
645/* Return true if VAR is a VAR_DECL or a component thereof. */
646
647static bool
648var_decl_component_p (tree var)
649{
650 tree inner = var;
651 while (handled_component_p (inner))
652 inner = TREE_OPERAND (inner, 0);
47cac108
RB
653 return (DECL_P (inner)
654 || (TREE_CODE (inner) == MEM_REF
655 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
fef5a0d9
RB
656}
657
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  value_range valid_range (VR_RANGE,
			   build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  get_range_info (size, vr);
  vr.intersect (&valid_range);
  return vr.zero_p ();
}

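/* For example, if SIZE is an SSA_NAME of type size_t whose recorded
   value range, once intersected with the valid range [0, SSIZE_MAX],
   contains only zero, then a mem{cpy,move} call taking that size can be
   folded exactly as if its length argument were the constant zero (see
   the use in gimple_fold_builtin_memory_op below).  */
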
/* Fold a function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */
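
/* Illustrative sketches of the folding done below: a copy whose length
   is known to be zero becomes a plain assignment of DEST to the LHS (if
   any); a small copy such as memcpy (d, s, 4) with sufficiently aligned
   (or cheaply misalignable) operands can become a single 4-byte
   load/store pair; and mempcpy-style calls (ENDP == 1) additionally
   return DEST advanced by LEN.  */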
fef5a0d9
RB
691
692static bool
693gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
694 tree dest, tree src, int endp)
695{
355fe088 696 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
697 tree lhs = gimple_call_lhs (stmt);
698 tree len = gimple_call_arg (stmt, 2);
699 tree destvar, srcvar;
700 location_t loc = gimple_location (stmt);
701
cc8bea0a 702 bool nowarn = gimple_no_warning_p (stmt);
cc8bea0a 703
6512c0f1
MS
704 /* If the LEN parameter is a constant zero or in range where
705 the only valid value is zero, return DEST. */
706 if (size_must_be_zero_p (len))
fef5a0d9 707 {
355fe088 708 gimple *repl;
fef5a0d9
RB
709 if (gimple_call_lhs (stmt))
710 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
711 else
712 repl = gimple_build_nop ();
713 tree vdef = gimple_vdef (stmt);
714 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 715 {
fef5a0d9
RB
716 unlink_stmt_vdef (stmt);
717 release_ssa_name (vdef);
718 }
f6b4dc28 719 gsi_replace (gsi, repl, false);
fef5a0d9
RB
720 return true;
721 }
722
723 /* If SRC and DEST are the same (and not volatile), return
724 DEST{,+LEN,+LEN-1}. */
725 if (operand_equal_p (src, dest, 0))
726 {
cc8bea0a
MS
727 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
728 It's safe and may even be emitted by GCC itself (see bug
e9b9fa4c 729 32667). */
fef5a0d9
RB
730 unlink_stmt_vdef (stmt);
731 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
732 release_ssa_name (gimple_vdef (stmt));
733 if (!lhs)
734 {
f6b4dc28 735 gsi_replace (gsi, gimple_build_nop (), false);
fef5a0d9
RB
736 return true;
737 }
738 goto done;
739 }
740 else
741 {
742 tree srctype, desttype;
743 unsigned int src_align, dest_align;
744 tree off0;
d01b568a
BE
745 const char *tmp_str;
746 unsigned HOST_WIDE_INT tmp_len;
fef5a0d9
RB
747
748 /* Build accesses at offset zero with a ref-all character type. */
749 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
750 ptr_mode, true), 0);
751
752 /* If we can perform the copy efficiently with first doing all loads
753 and then all stores inline it that way. Currently efficiently
754 means that we can load all the memory into a single integer
755 register which is what MOVE_MAX gives us. */
756 src_align = get_pointer_alignment (src);
757 dest_align = get_pointer_alignment (dest);
758 if (tree_fits_uhwi_p (len)
759 && compare_tree_int (len, MOVE_MAX) <= 0
760 /* ??? Don't transform copies from strings with known length this
761 confuses the tree-ssa-strlen.c. This doesn't handle
762 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
763 reason. */
d01b568a
BE
764 && !c_strlen (src, 2)
765 && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
766 && memchr (tmp_str, 0, tmp_len) == NULL))
fef5a0d9
RB
767 {
768 unsigned ilen = tree_to_uhwi (len);
146ec50f 769 if (pow2p_hwi (ilen))
fef5a0d9 770 {
cc8bea0a
MS
771 /* Detect invalid bounds and overlapping copies and issue
772 either -Warray-bounds or -Wrestrict. */
773 if (!nowarn
774 && check_bounds_or_overlap (as_a <gcall *>(stmt),
775 dest, src, len, len))
776 gimple_set_no_warning (stmt, true);
777
64ab8765 778 scalar_int_mode mode;
fef5a0d9
RB
779 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
780 if (type
64ab8765
RS
781 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
782 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
fef5a0d9
RB
783 /* If the destination pointer is not aligned we must be able
784 to emit an unaligned store. */
64ab8765 785 && (dest_align >= GET_MODE_ALIGNMENT (mode)
e0bd6c9f 786 || !targetm.slow_unaligned_access (mode, dest_align)
64ab8765 787 || (optab_handler (movmisalign_optab, mode)
f869c12f 788 != CODE_FOR_nothing)))
fef5a0d9
RB
789 {
790 tree srctype = type;
791 tree desttype = type;
64ab8765 792 if (src_align < GET_MODE_ALIGNMENT (mode))
fef5a0d9
RB
793 srctype = build_aligned_type (type, src_align);
794 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
795 tree tem = fold_const_aggregate_ref (srcmem);
796 if (tem)
797 srcmem = tem;
64ab8765 798 else if (src_align < GET_MODE_ALIGNMENT (mode)
e0bd6c9f 799 && targetm.slow_unaligned_access (mode, src_align)
64ab8765 800 && (optab_handler (movmisalign_optab, mode)
f869c12f 801 == CODE_FOR_nothing))
fef5a0d9
RB
802 srcmem = NULL_TREE;
803 if (srcmem)
804 {
355fe088 805 gimple *new_stmt;
fef5a0d9
RB
806 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
807 {
808 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
a15ebbcd
ML
809 srcmem
810 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
811 new_stmt);
fef5a0d9
RB
812 gimple_assign_set_lhs (new_stmt, srcmem);
813 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
814 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
815 }
64ab8765 816 if (dest_align < GET_MODE_ALIGNMENT (mode))
fef5a0d9
RB
817 desttype = build_aligned_type (type, dest_align);
818 new_stmt
819 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
820 dest, off0),
821 srcmem);
822 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
823 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
824 if (gimple_vdef (new_stmt)
825 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
826 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
827 if (!lhs)
828 {
f6b4dc28 829 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
830 return true;
831 }
832 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
833 goto done;
834 }
835 }
836 }
837 }
838
839 if (endp == 3)
840 {
841 /* Both DEST and SRC must be pointer types.
842 ??? This is what old code did. Is the testing for pointer types
843 really mandatory?
844
845 If either SRC is readonly or length is 1, we can use memcpy. */
846 if (!dest_align || !src_align)
847 return false;
848 if (readonly_data_expr (src)
849 || (tree_fits_uhwi_p (len)
850 && (MIN (src_align, dest_align) / BITS_PER_UNIT
851 >= tree_to_uhwi (len))))
852 {
853 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
854 if (!fn)
855 return false;
856 gimple_call_set_fndecl (stmt, fn);
857 gimple_call_set_arg (stmt, 0, dest);
858 gimple_call_set_arg (stmt, 1, src);
859 fold_stmt (gsi);
860 return true;
861 }
862
863 /* If *src and *dest can't overlap, optimize into memcpy as well. */
864 if (TREE_CODE (src) == ADDR_EXPR
865 && TREE_CODE (dest) == ADDR_EXPR)
866 {
867 tree src_base, dest_base, fn;
a90c8804
RS
868 poly_int64 src_offset = 0, dest_offset = 0;
869 poly_uint64 maxsize;
fef5a0d9
RB
870
871 srcvar = TREE_OPERAND (src, 0);
4fda19ef
JJ
872 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
873 if (src_base == NULL)
874 src_base = srcvar;
fef5a0d9 875 destvar = TREE_OPERAND (dest, 0);
4fda19ef
JJ
876 dest_base = get_addr_base_and_unit_offset (destvar,
877 &dest_offset);
878 if (dest_base == NULL)
879 dest_base = destvar;
a90c8804 880 if (!poly_int_tree_p (len, &maxsize))
fef5a0d9 881 maxsize = -1;
fef5a0d9
RB
882 if (SSA_VAR_P (src_base)
883 && SSA_VAR_P (dest_base))
884 {
885 if (operand_equal_p (src_base, dest_base, 0)
a90c8804
RS
886 && ranges_maybe_overlap_p (src_offset, maxsize,
887 dest_offset, maxsize))
fef5a0d9
RB
888 return false;
889 }
890 else if (TREE_CODE (src_base) == MEM_REF
891 && TREE_CODE (dest_base) == MEM_REF)
892 {
893 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
894 TREE_OPERAND (dest_base, 0), 0))
895 return false;
a90c8804
RS
896 poly_offset_int full_src_offset
897 = mem_ref_offset (src_base) + src_offset;
898 poly_offset_int full_dest_offset
899 = mem_ref_offset (dest_base) + dest_offset;
900 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
901 full_dest_offset, maxsize))
fef5a0d9
RB
902 return false;
903 }
904 else
905 return false;
906
907 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
908 if (!fn)
909 return false;
910 gimple_call_set_fndecl (stmt, fn);
911 gimple_call_set_arg (stmt, 0, dest);
912 gimple_call_set_arg (stmt, 1, src);
913 fold_stmt (gsi);
914 return true;
915 }
916
917 /* If the destination and source do not alias optimize into
918 memcpy as well. */
919 if ((is_gimple_min_invariant (dest)
920 || TREE_CODE (dest) == SSA_NAME)
921 && (is_gimple_min_invariant (src)
922 || TREE_CODE (src) == SSA_NAME))
923 {
924 ao_ref destr, srcr;
925 ao_ref_init_from_ptr_and_size (&destr, dest, len);
926 ao_ref_init_from_ptr_and_size (&srcr, src, len);
927 if (!refs_may_alias_p_1 (&destr, &srcr, false))
928 {
929 tree fn;
930 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
931 if (!fn)
932 return false;
933 gimple_call_set_fndecl (stmt, fn);
934 gimple_call_set_arg (stmt, 0, dest);
935 gimple_call_set_arg (stmt, 1, src);
936 fold_stmt (gsi);
937 return true;
938 }
939 }
940
941 return false;
942 }
943
944 if (!tree_fits_shwi_p (len))
945 return false;
fef5a0d9
RB
946 if (!POINTER_TYPE_P (TREE_TYPE (src))
947 || !POINTER_TYPE_P (TREE_TYPE (dest)))
948 return false;
949 /* In the following try to find a type that is most natural to be
950 used for the memcpy source and destination and that allows
951 the most optimization when memcpy is turned into a plain assignment
952 using that type. In theory we could always use a char[len] type
953 but that only gains us that the destination and source possibly
954 no longer will have their address taken. */
fef5a0d9
RB
955 srctype = TREE_TYPE (TREE_TYPE (src));
956 if (TREE_CODE (srctype) == ARRAY_TYPE
957 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
42f74245 958 srctype = TREE_TYPE (srctype);
fef5a0d9
RB
959 desttype = TREE_TYPE (TREE_TYPE (dest));
960 if (TREE_CODE (desttype) == ARRAY_TYPE
961 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
42f74245 962 desttype = TREE_TYPE (desttype);
fef5a0d9
RB
963 if (TREE_ADDRESSABLE (srctype)
964 || TREE_ADDRESSABLE (desttype))
965 return false;
966
967 /* Make sure we are not copying using a floating-point mode or
968 a type whose size possibly does not match its precision. */
969 if (FLOAT_MODE_P (TYPE_MODE (desttype))
970 || TREE_CODE (desttype) == BOOLEAN_TYPE
971 || TREE_CODE (desttype) == ENUMERAL_TYPE)
972 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
973 if (FLOAT_MODE_P (TYPE_MODE (srctype))
974 || TREE_CODE (srctype) == BOOLEAN_TYPE
975 || TREE_CODE (srctype) == ENUMERAL_TYPE)
976 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
977 if (!srctype)
978 srctype = desttype;
979 if (!desttype)
980 desttype = srctype;
981 if (!srctype)
982 return false;
983
984 src_align = get_pointer_alignment (src);
985 dest_align = get_pointer_alignment (dest);
986 if (dest_align < TYPE_ALIGN (desttype)
987 || src_align < TYPE_ALIGN (srctype))
988 return false;
989
42f74245
RB
990 destvar = NULL_TREE;
991 if (TREE_CODE (dest) == ADDR_EXPR
992 && var_decl_component_p (TREE_OPERAND (dest, 0))
fef5a0d9 993 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
42f74245 994 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
fef5a0d9 995
42f74245
RB
996 srcvar = NULL_TREE;
997 if (TREE_CODE (src) == ADDR_EXPR
998 && var_decl_component_p (TREE_OPERAND (src, 0))
fef5a0d9
RB
999 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1000 {
1001 if (!destvar
1002 || src_align >= TYPE_ALIGN (desttype))
1003 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
42f74245 1004 src, off0);
fef5a0d9
RB
1005 else if (!STRICT_ALIGNMENT)
1006 {
1007 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1008 src_align);
42f74245 1009 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
fef5a0d9 1010 }
fef5a0d9 1011 }
fef5a0d9
RB
1012
1013 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1014 return false;
1015
1016 if (srcvar == NULL_TREE)
1017 {
fef5a0d9
RB
1018 if (src_align >= TYPE_ALIGN (desttype))
1019 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1020 else
1021 {
1022 if (STRICT_ALIGNMENT)
1023 return false;
1024 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1025 src_align);
1026 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1027 }
1028 }
1029 else if (destvar == NULL_TREE)
1030 {
fef5a0d9
RB
1031 if (dest_align >= TYPE_ALIGN (srctype))
1032 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1033 else
1034 {
1035 if (STRICT_ALIGNMENT)
1036 return false;
1037 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1038 dest_align);
1039 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1040 }
1041 }
1042
cc8bea0a
MS
1043 /* Detect invalid bounds and overlapping copies and issue either
1044 -Warray-bounds or -Wrestrict. */
1045 if (!nowarn)
1046 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1047
355fe088 1048 gimple *new_stmt;
fef5a0d9
RB
1049 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1050 {
921b13d0
RB
1051 tree tem = fold_const_aggregate_ref (srcvar);
1052 if (tem)
1053 srcvar = tem;
1054 if (! is_gimple_min_invariant (srcvar))
1055 {
1056 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
a15ebbcd
ML
1057 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1058 new_stmt);
921b13d0
RB
1059 gimple_assign_set_lhs (new_stmt, srcvar);
1060 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1061 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1062 }
d7257171
RB
1063 new_stmt = gimple_build_assign (destvar, srcvar);
1064 goto set_vop_and_replace;
fef5a0d9 1065 }
d7257171
RB
1066
1067 /* We get an aggregate copy. Use an unsigned char[] type to
1068 perform the copying to preserve padding and to avoid any issues
1069 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1070 desttype = build_array_type_nelts (unsigned_char_type_node,
1071 tree_to_uhwi (len));
1072 srctype = desttype;
1073 if (src_align > TYPE_ALIGN (srctype))
1074 srctype = build_aligned_type (srctype, src_align);
1075 if (dest_align > TYPE_ALIGN (desttype))
1076 desttype = build_aligned_type (desttype, dest_align);
1077 new_stmt
1078 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1079 fold_build2 (MEM_REF, srctype, src, off0));
1080set_vop_and_replace:
fef5a0d9
RB
1081 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1082 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1083 if (gimple_vdef (new_stmt)
1084 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1085 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1086 if (!lhs)
1087 {
f6b4dc28 1088 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
1089 return true;
1090 }
1091 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1092 }
1093
1094done:
74e3c262 1095 gimple_seq stmts = NULL;
fef5a0d9
RB
1096 if (endp == 0 || endp == 3)
1097 len = NULL_TREE;
1098 else if (endp == 2)
74e3c262
RB
1099 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1100 ssize_int (1));
fef5a0d9 1101 if (endp == 2 || endp == 1)
74e3c262
RB
1102 {
1103 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1104 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1105 TREE_TYPE (dest), dest, len);
1106 }
fef5a0d9 1107
74e3c262 1108 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 1109 gimple *repl = gimple_build_assign (lhs, dest);
f6b4dc28 1110 gsi_replace (gsi, repl, false);
fef5a0d9
RB
1111 return true;
1112}
1113
b3d8d88e
MS
1114/* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1115 to built-in memcmp (a, b, len). */
1116
1117static bool
1118gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1119{
1120 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1121
1122 if (!fn)
1123 return false;
1124
1125 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1126
1127 gimple *stmt = gsi_stmt (*gsi);
1128 tree a = gimple_call_arg (stmt, 0);
1129 tree b = gimple_call_arg (stmt, 1);
1130 tree len = gimple_call_arg (stmt, 2);
1131
1132 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1133 replace_call_with_call_and_fold (gsi, repl);
1134
1135 return true;
1136}
1137
1138/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1139 to built-in memmove (dest, src, len). */
1140
1141static bool
1142gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1143{
1144 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1145
1146 if (!fn)
1147 return false;
1148
  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */
1152
1153 gimple *stmt = gsi_stmt (*gsi);
1154 tree src = gimple_call_arg (stmt, 0);
1155 tree dest = gimple_call_arg (stmt, 1);
1156 tree len = gimple_call_arg (stmt, 2);
1157
1158 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1159 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1160 replace_call_with_call_and_fold (gsi, repl);
1161
1162 return true;
1163}
1164
1165/* Transform a call to built-in bzero (dest, len) at *GSI into one
1166 to built-in memset (dest, 0, len). */
1167
1168static bool
1169gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1170{
1171 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1172
1173 if (!fn)
1174 return false;
1175
1176 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1177
1178 gimple *stmt = gsi_stmt (*gsi);
1179 tree dest = gimple_call_arg (stmt, 0);
1180 tree len = gimple_call_arg (stmt, 1);
1181
1182 gimple_seq seq = NULL;
1183 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1184 gimple_seq_add_stmt_without_update (&seq, repl);
1185 gsi_replace_with_seq_vops (gsi, seq);
1186 fold_stmt (gsi);
1187
1188 return true;
1189}
1190
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */

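/* For example (a sketch of the transform below): for a 4-byte integer
   variable i, a call memset (&i, 0xab, 4) replicates the character
   value into every byte and becomes the single store i = 0xabababab,
   with the virtual operands of the call carried over to the store.  */
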
1194static bool
1195gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1196{
355fe088 1197 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1198 tree etype;
1199 unsigned HOST_WIDE_INT length, cval;
1200
1201 /* If the LEN parameter is zero, return DEST. */
1202 if (integer_zerop (len))
1203 {
1204 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1205 return true;
1206 }
1207
1208 if (! tree_fits_uhwi_p (len))
1209 return false;
1210
1211 if (TREE_CODE (c) != INTEGER_CST)
1212 return false;
1213
1214 tree dest = gimple_call_arg (stmt, 0);
1215 tree var = dest;
1216 if (TREE_CODE (var) != ADDR_EXPR)
1217 return false;
1218
1219 var = TREE_OPERAND (var, 0);
1220 if (TREE_THIS_VOLATILE (var))
1221 return false;
1222
1223 etype = TREE_TYPE (var);
1224 if (TREE_CODE (etype) == ARRAY_TYPE)
1225 etype = TREE_TYPE (etype);
1226
1227 if (!INTEGRAL_TYPE_P (etype)
1228 && !POINTER_TYPE_P (etype))
1229 return NULL_TREE;
1230
1231 if (! var_decl_component_p (var))
1232 return NULL_TREE;
1233
1234 length = tree_to_uhwi (len);
7a504f33 1235 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
fef5a0d9
RB
1236 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1237 return NULL_TREE;
1238
1239 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1240 return NULL_TREE;
1241
1242 if (integer_zerop (c))
1243 cval = 0;
1244 else
1245 {
1246 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1247 return NULL_TREE;
1248
1249 cval = TREE_INT_CST_LOW (c);
1250 cval &= 0xff;
1251 cval |= cval << 8;
1252 cval |= cval << 16;
1253 cval |= (cval << 31) << 1;
1254 }
1255
1256 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1257 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
fef5a0d9
RB
1258 gimple_set_vuse (store, gimple_vuse (stmt));
1259 tree vdef = gimple_vdef (stmt);
1260 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1261 {
1262 gimple_set_vdef (store, gimple_vdef (stmt));
1263 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1264 }
1265 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1266 if (gimple_call_lhs (stmt))
1267 {
355fe088 1268 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1269 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1270 }
1271 else
1272 {
1273 gimple_stmt_iterator gsi2 = *gsi;
1274 gsi_prev (gsi);
1275 gsi_remove (&gsi2, true);
1276 }
1277
1278 return true;
1279}
1280
fb471a13 1281/* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
fef5a0d9
RB
1282
1283static bool
730832cd 1284get_range_strlen_tree (tree arg, bitmap *visited,
598f7235 1285 strlen_range_kind rkind,
730832cd
MS
1286 c_strlen_data *pdata,
1287 bool *flexp, unsigned eltsize)
fef5a0d9 1288{
fb471a13
MS
1289 gcc_assert (TREE_CODE (arg) != SSA_NAME);
1290
fb471a13
MS
1291 /* The length computed by this invocation of the function. */
1292 tree val = NULL_TREE;
1293
eef2da67
MS
1294 /* True if VAL is an optimistic (tight) bound determined from
1295 the size of the character array in which the string may be
1296 stored. In that case, the computed VAL is used to set
1297 PDATA->MAXBOUND. */
1298 bool tight_bound = false;
1299
fb471a13
MS
1300 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1301 if (TREE_CODE (arg) == ADDR_EXPR
1302 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
fef5a0d9 1303 {
fb471a13
MS
1304 tree op = TREE_OPERAND (arg, 0);
1305 if (integer_zerop (TREE_OPERAND (op, 1)))
fef5a0d9 1306 {
fb471a13
MS
1307 tree aop0 = TREE_OPERAND (op, 0);
1308 if (TREE_CODE (aop0) == INDIRECT_REF
1309 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
730832cd
MS
1310 return get_range_strlen (TREE_OPERAND (aop0, 0), visited,
1311 rkind, pdata, flexp, eltsize);
fef5a0d9 1312 }
598f7235
MS
1313 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
1314 && (rkind == SRK_LENRANGE || rkind == SRK_LENRANGE_2))
fef5a0d9 1315 {
fb471a13
MS
1316 /* Fail if an array is the last member of a struct object
1317 since it could be treated as a (fake) flexible array
1318 member. */
1319 tree idx = TREE_OPERAND (op, 1);
1320
1321 arg = TREE_OPERAND (op, 0);
1322 tree optype = TREE_TYPE (arg);
1323 if (tree dom = TYPE_DOMAIN (optype))
1324 if (tree bound = TYPE_MAX_VALUE (dom))
1325 if (TREE_CODE (bound) == INTEGER_CST
1326 && TREE_CODE (idx) == INTEGER_CST
1327 && tree_int_cst_lt (bound, idx))
1328 return false;
fef5a0d9 1329 }
fb471a13 1330 }
7d583f42 1331
598f7235 1332 if (rkind == SRK_INT_VALUE)
fb471a13
MS
1333 {
1334 /* We are computing the maximum value (not string length). */
1335 val = arg;
1336 if (TREE_CODE (val) != INTEGER_CST
1337 || tree_int_cst_sgn (val) < 0)
1338 return false;
1339 }
1340 else
1341 {
1342 c_strlen_data lendata = { };
1343 val = c_strlen (arg, 1, &lendata, eltsize);
1344
1345 /* If we potentially had a non-terminated string, then
1346 bubble that information up to the caller. */
1347 if (!val && lendata.decl)
1348 {
730832cd
MS
1349 pdata->decl = lendata.decl;
1350 pdata->minlen = lendata.minlen;
1351 pdata->maxlen = lendata.minlen;
598f7235 1352 return rkind == SRK_STRLEN ? false : true;
7d583f42 1353 }
fb471a13
MS
1354 }
1355
598f7235 1356 if (!val && (rkind == SRK_LENRANGE || rkind == SRK_LENRANGE_2))
fb471a13
MS
1357 {
1358 if (TREE_CODE (arg) == ADDR_EXPR)
730832cd
MS
1359 return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
1360 pdata, flexp, eltsize);
88d0c3f0 1361
fb471a13 1362 if (TREE_CODE (arg) == ARRAY_REF)
88d0c3f0 1363 {
fb471a13 1364 tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));
88d0c3f0 1365
fb471a13
MS
1366 /* Determine the "innermost" array type. */
1367 while (TREE_CODE (optype) == ARRAY_TYPE
1368 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1369 optype = TREE_TYPE (optype);
c42d0aa0 1370
fb471a13
MS
1371 /* Avoid arrays of pointers. */
1372 tree eltype = TREE_TYPE (optype);
1373 if (TREE_CODE (optype) != ARRAY_TYPE
1374 || !INTEGRAL_TYPE_P (eltype))
1375 return false;
c42d0aa0 1376
fb471a13
MS
1377 /* Fail when the array bound is unknown or zero. */
1378 val = TYPE_SIZE_UNIT (optype);
1379 if (!val || integer_zerop (val))
1380 return false;
1bfd6a00 1381
fb471a13
MS
1382 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1383 integer_one_node);
c42d0aa0 1384
fb471a13
MS
1385 /* Set the minimum size to zero since the string in
1386 the array could have zero length. */
730832cd 1387 pdata->minlen = ssize_int (0);
204a7ecb 1388
fb471a13
MS
1389 if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
1390 && optype == TREE_TYPE (TREE_OPERAND (arg, 0))
1391 && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
1392 *flexp = true;
eef2da67 1393 tight_bound = true;
fb471a13
MS
1394 }
1395 else if (TREE_CODE (arg) == COMPONENT_REF
1396 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1397 == ARRAY_TYPE))
1398 {
1399 /* Use the type of the member array to determine the upper
1400 bound on the length of the array. This may be overly
1401 optimistic if the array itself isn't NUL-terminated and
1402 the caller relies on the subsequent member to contain
1403 the NUL but that would only be considered valid if
1404 the array were the last member of a struct.
1405 Set *FLEXP to true if the array whose bound is being
1406 used is at the end of a struct. */
1407 if (array_at_struct_end_p (arg))
1408 *flexp = true;
1409
1410 tree fld = TREE_OPERAND (arg, 1);
1411
1412 tree optype = TREE_TYPE (fld);
1413
1414 /* Determine the "innermost" array type. */
1415 while (TREE_CODE (optype) == ARRAY_TYPE
1416 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1417 optype = TREE_TYPE (optype);
1418
1419 /* Fail when the array bound is unknown or zero. */
1420 val = TYPE_SIZE_UNIT (optype);
1421 if (!val || integer_zerop (val))
1422 return false;
1423 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1424 integer_one_node);
1425
1426 /* Set the minimum size to zero since the string in
1427 the array could have zero length. */
730832cd 1428 pdata->minlen = ssize_int (0);
fb471a13 1429
eef2da67
MS
1430 /* The array size determined above is an optimistic bound
1431 on the length. If the array isn't nul-terminated the
1432 length computed by the library function would be greater.
1433 Even though using strlen to cross the subobject boundary
1434 is undefined, avoid drawing conclusions from the member
1435 type about the length here. */
1436 tight_bound = true;
1437 }
1438 else if (VAR_P (arg))
fb471a13 1439 {
eef2da67
MS
1440 /* Avoid handling pointers to arrays. GCC might misuse
1441 a pointer to an array of one bound to point to an array
1442 object of a greater bound. */
1443 tree argtype = TREE_TYPE (arg);
1444 if (TREE_CODE (argtype) == ARRAY_TYPE)
88d0c3f0 1445 {
eef2da67 1446 val = TYPE_SIZE_UNIT (argtype);
fb471a13
MS
1447 if (!val
1448 || TREE_CODE (val) != INTEGER_CST
1449 || integer_zerop (val))
88d0c3f0 1450 return false;
fb471a13
MS
1451 val = wide_int_to_tree (TREE_TYPE (val),
1452 wi::sub (wi::to_wide (val), 1));
1453
e495e31a
MS
1454 /* Set the minimum size to zero since the string in
1455 the array could have zero length. */
730832cd 1456 pdata->minlen = ssize_int (0);
88d0c3f0
MS
1457 }
1458 }
fb471a13 1459 }
88d0c3f0 1460
fb471a13
MS
1461 if (!val)
1462 return false;
fef5a0d9 1463
fb471a13 1464 /* Adjust the lower bound on the string length as necessary. */
730832cd 1465 if (!pdata->minlen
598f7235 1466 || (rkind != SRK_STRLEN
730832cd 1467 && TREE_CODE (pdata->minlen) == INTEGER_CST
fb471a13 1468 && TREE_CODE (val) == INTEGER_CST
730832cd
MS
1469 && tree_int_cst_lt (val, pdata->minlen)))
1470 pdata->minlen = val;
88d0c3f0 1471
730832cd
MS
1472 if (pdata->maxbound)
1473 {
1474 /* Adjust the tighter (more optimistic) string length bound
1475 if necessary and proceed to adjust the more conservative
1476 bound. */
1477 if (TREE_CODE (val) == INTEGER_CST)
1478 {
1479 if (TREE_CODE (pdata->maxbound) == INTEGER_CST)
1480 {
1481 if (tree_int_cst_lt (pdata->maxbound, val))
1482 pdata->maxbound = val;
1483 }
1484 else
1485 pdata->maxbound = build_all_ones_cst (size_type_node);
1486 }
1487 else
1488 pdata->maxbound = val;
1489 }
1490 else
1491 pdata->maxbound = val;
1492
eef2da67
MS
1493 if (tight_bound)
1494 {
1495 /* VAL computed above represents an optimistically tight bound
1496 on the length of the string based on the referenced object's
1497 or subobject's type. Determine the conservative upper bound
1498 based on the enclosing object's size if possible. */
1499 if (rkind == SRK_LENRANGE || rkind == SRK_LENRANGE_2)
1500 {
1501 poly_int64 offset;
1502 tree base = get_addr_base_and_unit_offset (arg, &offset);
1503 if (!base)
1504 {
1505 /* When the call above fails due to a non-constant offset
1506 assume the offset is zero and use the size of the whole
1507 enclosing object instead. */
1508 base = get_base_address (arg);
1509 offset = 0;
1510 }
1511 /* If the base object is a pointer no upper bound on the length
1512 can be determined. Otherwise the maximum length is equal to
1513 the size of the enclosing object minus the offset of
1514 the referenced subobject minus 1 (for the terminating nul). */
1515 tree type = TREE_TYPE (base);
1516 if (TREE_CODE (type) == POINTER_TYPE
1517 || !VAR_P (base) || !(val = DECL_SIZE_UNIT (base)))
1518 val = build_all_ones_cst (size_type_node);
1519 else
1520 {
1521 val = DECL_SIZE_UNIT (base);
1522 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1523 size_int (offset + 1));
1524 }
1525 }
1526 else
1527 return false;
1528 }
1529
730832cd 1530 if (pdata->maxlen)
fb471a13
MS
1531 {
1532 /* Adjust the more conservative bound if possible/necessary
1533 and fail otherwise. */
598f7235 1534 if (rkind != SRK_STRLEN)
fef5a0d9 1535 {
730832cd 1536 if (TREE_CODE (pdata->maxlen) != INTEGER_CST
fb471a13 1537 || TREE_CODE (val) != INTEGER_CST)
fef5a0d9 1538 return false;
fef5a0d9 1539
730832cd
MS
1540 if (tree_int_cst_lt (pdata->maxlen, val))
1541 pdata->maxlen = val;
fb471a13
MS
1542 return true;
1543 }
730832cd 1544 else if (simple_cst_equal (val, pdata->maxlen) != 1)
fb471a13
MS
1545 {
1546 /* Fail if the length of this ARG is different from that
1547 previously determined from another ARG. */
1548 return false;
1549 }
fef5a0d9
RB
1550 }
1551
730832cd 1552 pdata->maxlen = val;
fb471a13
MS
1553 return true;
1554}
1555
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

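/* For example, with RKIND == SRK_LENRANGE and ARG defined by a
   PHI <"ab", "wxyz">, the two constant lengths 2 and 4 are merged so
   that PDATA->MINLEN is 2 and PDATA->MAXLEN is 4, whereas with
   RKIND == SRK_STRLEN the differing lengths make the function fail.  */
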
1572static bool
5d6655eb 1573get_range_strlen (tree arg, bitmap *visited, strlen_range_kind rkind,
730832cd 1574 c_strlen_data *pdata, bool *flexp, unsigned eltsize)
fb471a13
MS
1575{
1576
1577 if (TREE_CODE (arg) != SSA_NAME)
730832cd 1578 return get_range_strlen_tree (arg, visited, rkind, pdata, flexp, eltsize);
fb471a13 1579
fef5a0d9
RB
1580 /* If ARG is registered for SSA update we cannot look at its defining
1581 statement. */
1582 if (name_registered_for_update_p (arg))
1583 return false;
1584
1585 /* If we were already here, break the infinite cycle. */
dcb7fae2
RB
1586 if (!*visited)
1587 *visited = BITMAP_ALLOC (NULL);
1588 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
fef5a0d9
RB
1589 return true;
1590
fb471a13
MS
1591 tree var = arg;
1592 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1593
fef5a0d9
RB
1594 switch (gimple_code (def_stmt))
1595 {
1596 case GIMPLE_ASSIGN:
598f7235
MS
1597 /* The RHS of the statement defining VAR must either have a
1598 constant length or come from another SSA_NAME with a constant
1599 length. */
fef5a0d9
RB
1600 if (gimple_assign_single_p (def_stmt)
1601 || gimple_assign_unary_nop_p (def_stmt))
1602 {
598f7235 1603 tree rhs = gimple_assign_rhs1 (def_stmt);
730832cd 1604 return get_range_strlen (rhs, visited, rkind, pdata, flexp, eltsize);
fef5a0d9
RB
1605 }
1606 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1607 {
c8602fe6
JJ
1608 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1609 gimple_assign_rhs3 (def_stmt) };
1610
1611 for (unsigned int i = 0; i < 2; i++)
730832cd
MS
1612 if (!get_range_strlen (ops[i], visited, rkind, pdata,
1613 flexp, eltsize))
c8602fe6 1614 {
80c2bad6 1615 if (rkind != SRK_LENRANGE_2)
c8602fe6 1616 return false;
80c2bad6
MS
1617 /* Set the upper bound to the maximum to prevent
1618 it from being adjusted in the next iteration but
1619 leave MINLEN and the more conservative MAXBOUND
1620 determined so far alone (or leave them null if
1621 they haven't been set yet). That the MINLEN is
1622 in fact zero can be determined from MAXLEN being
1623 unbounded but the discovered minimum is used for
1624 diagnostics. */
730832cd 1625 pdata->maxlen = build_all_ones_cst (size_type_node);
c8602fe6
JJ
1626 }
1627 return true;
cc8bea0a 1628 }
fef5a0d9
RB
1629 return false;
1630
1631 case GIMPLE_PHI:
598f7235
MS
1632 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1633 must have a constant length. */
c8602fe6 1634 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
fef5a0d9
RB
1635 {
1636 tree arg = gimple_phi_arg (def_stmt, i)->def;
1637
1638 /* If this PHI has itself as an argument, we cannot
1639 determine the string length of this argument. However,
1640 if we can find a constant string length for the other
1641 PHI args then we can still be sure that this is a
1642 constant string length. So be optimistic and just
1643 continue with the next argument. */
1644 if (arg == gimple_phi_result (def_stmt))
1645 continue;
1646
730832cd 1647 if (!get_range_strlen (arg, visited, rkind, pdata, flexp, eltsize))
88d0c3f0 1648 {
80c2bad6 1649 if (rkind != SRK_LENRANGE_2)
88d0c3f0 1650 return false;
80c2bad6
MS
1651 /* Set the upper bound to the maximum to prevent
1652 it from being adjusted in the next iteration but
1653 leave MINLEN and the more conservative MAXBOUND
1654 determined so far alone (or leave them null if
1655 they haven't been set yet). That the MINLEN is
1656 in fact zero can be determined from MAXLEN being
1657 unbounded but the discovered minimum is used for
1658 diagnostics. */
730832cd 1659 pdata->maxlen = build_all_ones_cst (size_type_node);
88d0c3f0 1660 }
fef5a0d9 1661 }
fef5a0d9
RB
1662 return true;
1663
1664 default:
1665 return false;
1666 }
1667}
5d6655eb 1668
88d0c3f0
MS
1669/* Determine the minimum and maximum value or string length that ARG
1670 refers to and store each in PDATA->MINLEN and PDATA->MAXLEN.
1671 For expressions that point to strings of unknown lengths that are
1672 character arrays, use the upper bound of the array as the maximum
1673 length. For example, given an expression like 'x ? array : "xyz"'
1674 and array declared as 'char array[8]', PDATA->MINLEN will be set
c8602fe6 1675 to 0 and PDATA->MAXLEN to 7, the longest string that could be
88d0c3f0 1676 stored in array.
3f343040
MS
1677 Return true if the range of the string lengths has been obtained
1678 from the upper bound of an array at the end of a struct. Such
1679 an array may hold a string that's longer than its upper bound
c8602fe6
JJ
1680 due to it being used as a poor-man's flexible array member.
1681
1682 STRICT is true if PHIs and COND_EXPRs are to be handled conservatively
1683 and false if they are to be handled optimistically: when the string
1684 length cannot be determined for some arguments, use the minimum
1685 determined from the arguments for which it can be.
4148b00d 1686 STRICT false should only be used for warning code.
e08341bb
MS
1687 When non-null, clear *NONSTR if ARG refers to a constant array
1688 that is known not to be nul-terminated. Otherwise set it to
1689 the declaration of the constant non-terminated array.
4148b00d
BE
1690
1691 ELTSIZE is 1 for normal single byte character strings, and 2 or
1692 4 for wide character strings. ELTSIZE is by default 1. */
88d0c3f0 1693
3f343040 1694bool
5d6655eb 1695get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize, bool strict)
88d0c3f0
MS
1696{
1697 bitmap visited = NULL;
1698
3f343040 1699 bool flexarray = false;
5d6655eb 1700 if (!get_range_strlen (arg, &visited, strict ? SRK_LENRANGE : SRK_LENRANGE_2, pdata, &flexarray, eltsize))
730832cd 1701 {
5d6655eb
MS
1702 /* On failure extend the length range to an impossible maximum
1703 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1704 members can stay unchanged regardless. */
1705 pdata->minlen = ssize_int (0);
1706 pdata->maxlen = build_all_ones_cst (size_type_node);
730832cd 1707 }
5d6655eb
MS
1708 else if (!pdata->minlen)
1709 pdata->minlen = ssize_int (0);
1710
1711 /* Unless it's null, leave the more conservative MAXBOUND unchanged. */
1712 if (!pdata->maxbound)
1713 pdata->maxbound = pdata->maxlen;
88d0c3f0
MS
1714
1715 if (visited)
1716 BITMAP_FREE (visited);
3f343040
MS
1717
1718 return flexarray;
88d0c3f0
MS
1719}
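
/* [Editor's note, not part of gimple-fold.c] A minimal sketch of the kind
   of source the range computation above describes; all names below are
   illustrative only.  For the conditional expression returned here,
   get_range_strlen is expected to report a minimum length of 0 (the
   contents of 'buf' are unknown) and a maximum of 7, the longest string
   that fits in 'char buf[8]'.  */

static char buf[8];

const char *
example_range_strlen (int select)
{
  return select ? buf : "xyz";
}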
1720
5d6655eb
MS
1721/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1722 For ARG of pointer types, NONSTR indicates if the caller is prepared
1723 to handle unterminated strings. For integer ARG and when RKIND ==
1724 SRK_INT_VALUE, NONSTR must be null.
e08341bb 1725
5d6655eb
MS
1726 If an unterminated array is discovered and our caller handles
1727 unterminated arrays, then bubble up the offending DECL and
e08341bb
MS
1728 return the maximum size. Otherwise return NULL. */
1729
598f7235
MS
1730static tree
1731get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
dcb7fae2 1732{
598f7235
MS
1733 /* A non-null NONSTR is meaningless when determining the maximum
1734 value of an integer ARG. */
1735 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1736 /* ARG must have an integral type when RKIND says so. */
1737 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1738
dcb7fae2 1739 bitmap visited = NULL;
3f343040 1740
5d6655eb
MS
1741 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
1742 is unbounded. */
730832cd 1743 c_strlen_data lendata = { };
5d6655eb 1744 bool dummy;
730832cd
MS
1745 if (!get_range_strlen (arg, &visited, rkind, &lendata, &dummy, 1))
1746 lendata.maxlen = NULL_TREE;
5d6655eb
MS
1747 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
1748 lendata.maxlen = NULL_TREE;
1749
dcb7fae2
RB
1750 if (visited)
1751 BITMAP_FREE (visited);
1752
e08341bb
MS
1753 if (nonstr)
1754 {
1755 /* For callers prepared to handle unterminated arrays set
1756 *NONSTR to point to the declaration of the array and return
1757 the maximum length/size. */
730832cd
MS
1758 *nonstr = lendata.decl;
1759 return lendata.maxlen;
e08341bb
MS
1760 }
1761
1762 /* Fail if the constant array isn't nul-terminated. */
730832cd 1763 return lendata.decl ? NULL_TREE : lendata.maxlen;
dcb7fae2
RB
1764}
1765
fef5a0d9
RB
1766
1767/* Fold function call to builtin strcpy with arguments DEST and SRC.
1768 Return true if a simplification was made and the call replaced,
1769 and false otherwise. */
1770
1771static bool
1772gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 1773 tree dest, tree src)
fef5a0d9 1774{
cc8bea0a
MS
1775 gimple *stmt = gsi_stmt (*gsi);
1776 location_t loc = gimple_location (stmt);
fef5a0d9
RB
1777 tree fn;
1778
1779 /* If SRC and DEST are the same (and not volatile), return DEST. */
1780 if (operand_equal_p (src, dest, 0))
1781 {
8cd95cec
MS
1782 /* Issue -Wrestrict unless the pointers are null (those do
1783 not point to objects and so do not indicate an overlap;
1784 such calls could be the result of sanitization and jump
1785 threading). */
1786 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
e9b9fa4c
MS
1787 {
1788 tree func = gimple_call_fndecl (stmt);
cc8bea0a 1789
e9b9fa4c
MS
1790 warning_at (loc, OPT_Wrestrict,
1791 "%qD source argument is the same as destination",
1792 func);
1793 }
cc8bea0a 1794
fef5a0d9
RB
1795 replace_call_with_value (gsi, dest);
1796 return true;
1797 }
1798
1799 if (optimize_function_for_size_p (cfun))
1800 return false;
1801
1802 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1803 if (!fn)
1804 return false;
1805
e08341bb
MS
1806 /* Set to non-null if ARG refers to an unterminated array. */
1807 tree nonstr = NULL;
598f7235 1808 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
e08341bb
MS
1809
1810 if (nonstr)
1811 {
1812 /* Avoid folding calls with unterminated arrays. */
1813 if (!gimple_no_warning_p (stmt))
1814 warn_string_no_nul (loc, "strcpy", src, nonstr);
1815 gimple_set_no_warning (stmt, true);
1816 return false;
1817 }
1818
fef5a0d9 1819 if (!len)
dcb7fae2 1820 return false;
fef5a0d9
RB
1821
1822 len = fold_convert_loc (loc, size_type_node, len);
1823 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1824 len = force_gimple_operand_gsi (gsi, len, true,
1825 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1826 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1827 replace_call_with_call_and_fold (gsi, repl);
1828 return true;
1829}
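
/* [Editor's note, not part of gimple-fold.c] A minimal sketch of the
   strcpy folding above, assuming the usual <string.h> declarations; the
   names are illustrative only.  When the source has the known constant
   length 3, the call is rewritten to copy strlen + 1 bytes.  */

void
example_fold_strcpy (char *dest)
{
  strcpy (dest, "abc");   /* folded to: memcpy (dest, "abc", 4) */
}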
1830
1831/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1832 Return true if a simplification was made and the call replaced,
1833 and false otherwise. */
1834
1835static bool
dcb7fae2
RB
1836gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1837 tree dest, tree src, tree len)
fef5a0d9 1838{
025d57f0
MS
1839 gimple *stmt = gsi_stmt (*gsi);
1840 location_t loc = gimple_location (stmt);
6a33d0ff 1841 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
fef5a0d9
RB
1842
1843 /* If the LEN parameter is zero, return DEST. */
1844 if (integer_zerop (len))
1845 {
6a33d0ff
MS
1846 /* Avoid warning if the destination refers to an array/pointer
1847 decorated with attribute nonstring. */
1848 if (!nonstring)
1849 {
1850 tree fndecl = gimple_call_fndecl (stmt);
6a33d0ff
MS
1851
1852 /* Warn about the lack of nul termination: the result is not
1853 a (nul-terminated) string. */
598f7235 1854 tree slen = get_maxval_strlen (src, SRK_STRLEN);
6a33d0ff
MS
1855 if (slen && !integer_zerop (slen))
1856 warning_at (loc, OPT_Wstringop_truncation,
1857 "%G%qD destination unchanged after copying no bytes "
1858 "from a string of length %E",
8a45b051 1859 stmt, fndecl, slen);
6a33d0ff
MS
1860 else
1861 warning_at (loc, OPT_Wstringop_truncation,
1862 "%G%qD destination unchanged after copying no bytes",
8a45b051 1863 stmt, fndecl);
6a33d0ff 1864 }
025d57f0 1865
fef5a0d9
RB
1866 replace_call_with_value (gsi, dest);
1867 return true;
1868 }
1869
1870 /* We can't compare slen with len as constants below if len is not a
1871 constant. */
dcb7fae2 1872 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
1873 return false;
1874
fef5a0d9 1875 /* Now, we must be passed a constant src ptr parameter. */
598f7235 1876 tree slen = get_maxval_strlen (src, SRK_STRLEN);
dcb7fae2 1877 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
1878 return false;
1879
025d57f0
MS
1880 /* The size of the source string including the terminating nul. */
1881 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
fef5a0d9
RB
1882
1883 /* We do not support simplification of this case, though we do
1884 support it when expanding trees into RTL. */
1885 /* FIXME: generate a call to __builtin_memset. */
025d57f0 1886 if (tree_int_cst_lt (ssize, len))
fef5a0d9
RB
1887 return false;
1888
5d0d5d68
MS
1889 /* Diagnose truncation that leaves the copy unterminated. */
1890 maybe_diag_stxncpy_trunc (*gsi, src, len);
025d57f0 1891
fef5a0d9 1892 /* OK transform into builtin memcpy. */
025d57f0 1893 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
fef5a0d9
RB
1894 if (!fn)
1895 return false;
1896
1897 len = fold_convert_loc (loc, size_type_node, len);
1898 len = force_gimple_operand_gsi (gsi, len, true,
1899 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1900 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9 1901 replace_call_with_call_and_fold (gsi, repl);
025d57f0 1902
fef5a0d9
RB
1903 return true;
1904}
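
/* [Editor's note, not part of gimple-fold.c] A minimal sketch of the
   strncpy folding above, assuming <string.h>; names are illustrative only.
   The rewrite to memcpy is done only when the bound does not exceed the
   size of the source including its terminating nul (here the bound 4
   equals strlen ("abc") + 1), since a larger bound would require zero
   padding that memcpy does not provide.  */

void
example_fold_strncpy (char *dest)
{
  strncpy (dest, "abc", 4);   /* folded to: memcpy (dest, "abc", 4) */
}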
1905
71dea1dd
WD
1906/* Fold function call to builtin strchr or strrchr.
1907 If both arguments are constant, evaluate and fold the result,
1908 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
912d9ec3
WD
1909 In general strlen is significantly faster than strchr
1910 due to being a simpler operation. */
1911static bool
71dea1dd 1912gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
912d9ec3
WD
1913{
1914 gimple *stmt = gsi_stmt (*gsi);
1915 tree str = gimple_call_arg (stmt, 0);
1916 tree c = gimple_call_arg (stmt, 1);
1917 location_t loc = gimple_location (stmt);
71dea1dd
WD
1918 const char *p;
1919 char ch;
912d9ec3 1920
71dea1dd 1921 if (!gimple_call_lhs (stmt))
912d9ec3
WD
1922 return false;
1923
71dea1dd
WD
1924 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1925 {
1926 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1927
1928 if (p1 == NULL)
1929 {
1930 replace_call_with_value (gsi, integer_zero_node);
1931 return true;
1932 }
1933
1934 tree len = build_int_cst (size_type_node, p1 - p);
1935 gimple_seq stmts = NULL;
1936 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1937 POINTER_PLUS_EXPR, str, len);
1938 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1939 gsi_replace_with_seq_vops (gsi, stmts);
1940 return true;
1941 }
1942
1943 if (!integer_zerop (c))
912d9ec3
WD
1944 return false;
1945
71dea1dd 1946 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
c8952930 1947 if (is_strrchr && optimize_function_for_size_p (cfun))
71dea1dd
WD
1948 {
1949 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1950
c8952930 1951 if (strchr_fn)
71dea1dd
WD
1952 {
1953 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1954 replace_call_with_call_and_fold (gsi, repl);
1955 return true;
1956 }
1957
1958 return false;
1959 }
1960
912d9ec3
WD
1961 tree len;
1962 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1963
1964 if (!strlen_fn)
1965 return false;
1966
1967 /* Create newstr = strlen (str). */
1968 gimple_seq stmts = NULL;
1969 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1970 gimple_set_location (new_stmt, loc);
a15ebbcd 1971 len = create_tmp_reg_or_ssa_name (size_type_node);
912d9ec3
WD
1972 gimple_call_set_lhs (new_stmt, len);
1973 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1974
1975 /* Create (str p+ strlen (str)). */
1976 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1977 POINTER_PLUS_EXPR, str, len);
1978 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1979 gsi_replace_with_seq_vops (gsi, stmts);
1980 /* gsi now points at the assignment to the lhs, get a
1981 stmt iterator to the strlen.
1982 ??? We can't use gsi_for_stmt as that doesn't work when the
1983 CFG isn't built yet. */
1984 gimple_stmt_iterator gsi2 = *gsi;
1985 gsi_prev (&gsi2);
1986 fold_stmt (&gsi2);
1987 return true;
1988}
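
/* [Editor's note, not part of gimple-fold.c] A minimal sketch of the
   non-constant strchr simplification above, assuming <string.h>; names
   are illustrative only.  Searching for the nul terminator itself is
   rewritten as a pointer to the end of the string.  */

char *
example_fold_strchr (char *s)
{
  return strchr (s, 0);   /* folded to: s + strlen (s) */
}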
1989
c8952930
JJ
1990/* Fold function call to builtin strstr.
1991 If both arguments are constant, evaluate and fold the result,
1992 additionally fold strstr (x, "") into x and strstr (x, "c")
1993 into strchr (x, 'c'). */
1994static bool
1995gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1996{
1997 gimple *stmt = gsi_stmt (*gsi);
1998 tree haystack = gimple_call_arg (stmt, 0);
1999 tree needle = gimple_call_arg (stmt, 1);
2000 const char *p, *q;
2001
2002 if (!gimple_call_lhs (stmt))
2003 return false;
2004
2005 q = c_getstr (needle);
2006 if (q == NULL)
2007 return false;
2008
2009 if ((p = c_getstr (haystack)))
2010 {
2011 const char *r = strstr (p, q);
2012
2013 if (r == NULL)
2014 {
2015 replace_call_with_value (gsi, integer_zero_node);
2016 return true;
2017 }
2018
2019 tree len = build_int_cst (size_type_node, r - p);
2020 gimple_seq stmts = NULL;
2021 gimple *new_stmt
2022 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2023 haystack, len);
2024 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2025 gsi_replace_with_seq_vops (gsi, stmts);
2026 return true;
2027 }
2028
2029 /* For strstr (x, "") return x. */
2030 if (q[0] == '\0')
2031 {
2032 replace_call_with_value (gsi, haystack);
2033 return true;
2034 }
2035
2036 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2037 if (q[1] == '\0')
2038 {
2039 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2040 if (strchr_fn)
2041 {
2042 tree c = build_int_cst (integer_type_node, q[0]);
2043 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2044 replace_call_with_call_and_fold (gsi, repl);
2045 return true;
2046 }
2047 }
2048
2049 return false;
2050}
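
/* [Editor's note, not part of gimple-fold.c] A minimal sketch of the
   strstr simplifications above for non-constant haystacks, assuming
   <string.h>; names are illustrative only.  An empty needle yields the
   haystack itself and a single-character needle becomes strchr.  */

const char *
example_fold_strstr (const char *x)
{
  const char *a = strstr (x, "");    /* folded to: x */
  const char *b = strstr (x, "c");   /* folded to: strchr (x, 'c') */
  return b ? b : a;
}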
2051
fef5a0d9
RB
2052/* Simplify a call to the strcat builtin. DST and SRC are the arguments
2053 to the call.
2054
2055 Return true if the call was simplified into a more efficient form
2056 (a strlen of the destination followed by a memcpy of the source
2057 including its terminating nul) and replaced in the statement stream,
2058 and false otherwise. */
2069
2070static bool
dcb7fae2 2071gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
fef5a0d9 2072{
355fe088 2073 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2074 location_t loc = gimple_location (stmt);
fef5a0d9
RB
2075
2076 const char *p = c_getstr (src);
2077
2078 /* If the string length is zero, return the dst parameter. */
2079 if (p && *p == '\0')
2080 {
2081 replace_call_with_value (gsi, dst);
2082 return true;
2083 }
2084
2085 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2086 return false;
2087
2088 /* See if we can store by pieces into (dst + strlen(dst)). */
2089 tree newdst;
2090 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2091 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2092
2093 if (!strlen_fn || !memcpy_fn)
2094 return false;
2095
2096 /* If the length of the source string isn't computable don't
2097 split strcat into strlen and memcpy. */
598f7235 2098 tree len = get_maxval_strlen (src, SRK_STRLEN);
fef5a0d9 2099 if (! len)
fef5a0d9
RB
2100 return false;
2101
2102 /* Create strlen (dst). */
2103 gimple_seq stmts = NULL, stmts2;
355fe088 2104 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
fef5a0d9 2105 gimple_set_location (repl, loc);
a15ebbcd 2106 newdst = create_tmp_reg_or_ssa_name (size_type_node);
fef5a0d9
RB
2107 gimple_call_set_lhs (repl, newdst);
2108 gimple_seq_add_stmt_without_update (&stmts, repl);
2109
2110 /* Create (dst p+ strlen (dst)). */
2111 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2112 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2113 gimple_seq_add_seq_without_update (&stmts, stmts2);
2114
2115 len = fold_convert_loc (loc, size_type_node, len);
2116 len = size_binop_loc (loc, PLUS_EXPR, len,
2117 build_int_cst (size_type_node, 1));
2118 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2119 gimple_seq_add_seq_without_update (&stmts, stmts2);
2120
2121 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2122 gimple_seq_add_stmt_without_update (&stmts, repl);
2123 if (gimple_call_lhs (stmt))
2124 {
2125 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2126 gimple_seq_add_stmt_without_update (&stmts, repl);
2127 gsi_replace_with_seq_vops (gsi, stmts);
2128 /* gsi now points at the assignment to the lhs, get a
2129 stmt iterator to the memcpy call.
2130 ??? We can't use gsi_for_stmt as that doesn't work when the
2131 CFG isn't built yet. */
2132 gimple_stmt_iterator gsi2 = *gsi;
2133 gsi_prev (&gsi2);
2134 fold_stmt (&gsi2);
2135 }
2136 else
2137 {
2138 gsi_replace_with_seq_vops (gsi, stmts);
2139 fold_stmt (gsi);
2140 }
2141 return true;
2142}
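
/* [Editor's note, not part of gimple-fold.c] A minimal sketch of the
   strcat folding above, assuming <string.h>; names are illustrative only.
   With a source of known constant length the call is split into a strlen
   of the destination followed by a memcpy of the source and its nul.  */

void
example_fold_strcat (char *dst)
{
  strcat (dst, "ab");   /* folded to: memcpy (dst + strlen (dst), "ab", 3) */
}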
2143
07f1cf56
RB
2144/* Fold a call to the __strcat_chk builtin. DEST, SRC, and SIZE
2145 are the arguments to the call. */
2146
2147static bool
2148gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2149{
355fe088 2150 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
2151 tree dest = gimple_call_arg (stmt, 0);
2152 tree src = gimple_call_arg (stmt, 1);
2153 tree size = gimple_call_arg (stmt, 2);
2154 tree fn;
2155 const char *p;
2156
2157
2158 p = c_getstr (src);
2159 /* If the SRC parameter is "", return DEST. */
2160 if (p && *p == '\0')
2161 {
2162 replace_call_with_value (gsi, dest);
2163 return true;
2164 }
2165
2166 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2167 return false;
2168
2169 /* If __builtin_strcat_chk is used, assume strcat is available. */
2170 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2171 if (!fn)
2172 return false;
2173
355fe088 2174 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
2175 replace_call_with_call_and_fold (gsi, repl);
2176 return true;
2177}
2178
ad03a744
RB
2179/* Simplify a call to the strncat builtin. */
2180
2181static bool
2182gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2183{
8a45b051 2184 gimple *stmt = gsi_stmt (*gsi);
ad03a744
RB
2185 tree dst = gimple_call_arg (stmt, 0);
2186 tree src = gimple_call_arg (stmt, 1);
2187 tree len = gimple_call_arg (stmt, 2);
2188
2189 const char *p = c_getstr (src);
2190
2191 /* If the requested length is zero, or the src parameter string
2192 length is zero, return the dst parameter. */
2193 if (integer_zerop (len) || (p && *p == '\0'))
2194 {
2195 replace_call_with_value (gsi, dst);
2196 return true;
2197 }
2198
025d57f0
MS
2199 if (TREE_CODE (len) != INTEGER_CST || !p)
2200 return false;
2201
2202 unsigned srclen = strlen (p);
2203
2204 int cmpsrc = compare_tree_int (len, srclen);
2205
2206 /* Return early if the requested len is less than the string length.
2207 Warnings will be issued elsewhere later. */
2208 if (cmpsrc < 0)
2209 return false;
2210
2211 unsigned HOST_WIDE_INT dstsize;
2212
2213 bool nowarn = gimple_no_warning_p (stmt);
2214
2215 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
ad03a744 2216 {
025d57f0 2217 int cmpdst = compare_tree_int (len, dstsize);
ad03a744 2218
025d57f0
MS
2219 if (cmpdst >= 0)
2220 {
2221 tree fndecl = gimple_call_fndecl (stmt);
2222
2223 /* Strncat copies (at most) LEN bytes and always appends
2224 the terminating NUL so the specified bound should never
2225 be equal to (or greater than) the size of the destination.
2226 If it is, the copy could overflow. */
2227 location_t loc = gimple_location (stmt);
2228 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2229 cmpdst == 0
2230 ? G_("%G%qD specified bound %E equals "
2231 "destination size")
2232 : G_("%G%qD specified bound %E exceeds "
2233 "destination size %wu"),
2234 stmt, fndecl, len, dstsize);
2235 if (nowarn)
2236 gimple_set_no_warning (stmt, true);
2237 }
2238 }
ad03a744 2239
025d57f0
MS
2240 if (!nowarn && cmpsrc == 0)
2241 {
2242 tree fndecl = gimple_call_fndecl (stmt);
025d57f0 2243 location_t loc = gimple_location (stmt);
eec5f615
MS
2244
2245 /* To avoid possible overflow the specified bound should also
2246 not be equal to the length of the source, even when the size
2247 of the destination is unknown (it's not an uncommon mistake
2248 to specify as the bound to strncpy the length of the source). */
025d57f0
MS
2249 if (warning_at (loc, OPT_Wstringop_overflow_,
2250 "%G%qD specified bound %E equals source length",
2251 stmt, fndecl, len))
2252 gimple_set_no_warning (stmt, true);
ad03a744
RB
2253 }
2254
025d57f0
MS
2255 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2256
2257 /* If the replacement _DECL isn't initialized, don't do the
2258 transformation. */
2259 if (!fn)
2260 return false;
2261
2262 /* Otherwise, emit a call to strcat. */
2263 gcall *repl = gimple_build_call (fn, 2, dst, src);
2264 replace_call_with_call_and_fold (gsi, repl);
2265 return true;
ad03a744
RB
2266}
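
/* [Editor's note, not part of gimple-fold.c] A minimal sketch of the
   strncat folding above, assuming <string.h>; names are illustrative only.
   When the constant bound is greater than the constant source length,
   the bound is irrelevant and the call becomes plain strcat.  */

void
example_fold_strncat (char *dst)
{
  strncat (dst, "ab", 5);   /* folded to: strcat (dst, "ab") */
}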
2267
745583f9
RB
2268/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2269 LEN, and SIZE. */
2270
2271static bool
2272gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2273{
355fe088 2274 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
2275 tree dest = gimple_call_arg (stmt, 0);
2276 tree src = gimple_call_arg (stmt, 1);
2277 tree len = gimple_call_arg (stmt, 2);
2278 tree size = gimple_call_arg (stmt, 3);
2279 tree fn;
2280 const char *p;
2281
2282 p = c_getstr (src);
2283 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2284 if ((p && *p == '\0')
2285 || integer_zerop (len))
2286 {
2287 replace_call_with_value (gsi, dest);
2288 return true;
2289 }
2290
2291 if (! tree_fits_uhwi_p (size))
2292 return false;
2293
2294 if (! integer_all_onesp (size))
2295 {
2296 tree src_len = c_strlen (src, 1);
2297 if (src_len
2298 && tree_fits_uhwi_p (src_len)
2299 && tree_fits_uhwi_p (len)
2300 && ! tree_int_cst_lt (len, src_len))
2301 {
2302 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2303 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2304 if (!fn)
2305 return false;
2306
355fe088 2307 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
2308 replace_call_with_call_and_fold (gsi, repl);
2309 return true;
2310 }
2311 return false;
2312 }
2313
2314 /* If __builtin_strncat_chk is used, assume strncat is available. */
2315 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2316 if (!fn)
2317 return false;
2318
355fe088 2319 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
2320 replace_call_with_call_and_fold (gsi, repl);
2321 return true;
2322}
2323
a918bfbf
ML
2324/* Build and append gimple statements to STMTS that would load the first
2325 character of the memory location identified by STR. LOC is the location
2326 of the statement. */
2327
2328static tree
2329gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2330{
2331 tree var;
2332
2333 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2334 tree cst_uchar_ptr_node
2335 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2336 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2337
2338 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2339 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2340 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2341
2342 gimple_assign_set_lhs (stmt, var);
2343 gimple_seq_add_stmt_without_update (stmts, stmt);
2344
2345 return var;
2346}
2347
2348/* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI
2349 iterator. FCODE is the BUILT_IN_* code of the builtin. */
2350
2351static bool
2352gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2353{
2354 gimple *stmt = gsi_stmt (*gsi);
2355 tree callee = gimple_call_fndecl (stmt);
2356 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2357
2358 tree type = integer_type_node;
2359 tree str1 = gimple_call_arg (stmt, 0);
2360 tree str2 = gimple_call_arg (stmt, 1);
2361 tree lhs = gimple_call_lhs (stmt);
2362 HOST_WIDE_INT length = -1;
2363
2364 /* Handle strncmp and strncasecmp functions. */
2365 if (gimple_call_num_args (stmt) == 3)
2366 {
2367 tree len = gimple_call_arg (stmt, 2);
2368 if (tree_fits_uhwi_p (len))
2369 length = tree_to_uhwi (len);
2370 }
2371
2372 /* If the LEN parameter is zero, return zero. */
2373 if (length == 0)
2374 {
2375 replace_call_with_value (gsi, integer_zero_node);
2376 return true;
2377 }
2378
2379 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2380 if (operand_equal_p (str1, str2, 0))
2381 {
2382 replace_call_with_value (gsi, integer_zero_node);
2383 return true;
2384 }
2385
2386 const char *p1 = c_getstr (str1);
2387 const char *p2 = c_getstr (str2);
2388
2389 /* For known strings, return an immediate value. */
2390 if (p1 && p2)
2391 {
2392 int r = 0;
2393 bool known_result = false;
2394
2395 switch (fcode)
2396 {
2397 case BUILT_IN_STRCMP:
8b0b334a 2398 case BUILT_IN_STRCMP_EQ:
a918bfbf
ML
2399 {
2400 r = strcmp (p1, p2);
2401 known_result = true;
2402 break;
2403 }
2404 case BUILT_IN_STRNCMP:
8b0b334a 2405 case BUILT_IN_STRNCMP_EQ:
a918bfbf
ML
2406 {
2407 if (length == -1)
2408 break;
2409 r = strncmp (p1, p2, length);
2410 known_result = true;
2411 break;
2412 }
2413 /* The only handleable situation is where the strings are equal (result 0),
2414 which is already handled by the operand_equal_p case above. */
2415 case BUILT_IN_STRCASECMP:
2416 break;
2417 case BUILT_IN_STRNCASECMP:
2418 {
2419 if (length == -1)
2420 break;
2421 r = strncmp (p1, p2, length);
2422 if (r == 0)
2423 known_result = true;
5de73c05 2424 break;
a918bfbf
ML
2425 }
2426 default:
2427 gcc_unreachable ();
2428 }
2429
2430 if (known_result)
2431 {
2432 replace_call_with_value (gsi, build_cmp_result (type, r));
2433 return true;
2434 }
2435 }
2436
2437 bool nonzero_length = length >= 1
2438 || fcode == BUILT_IN_STRCMP
8b0b334a 2439 || fcode == BUILT_IN_STRCMP_EQ
a918bfbf
ML
2440 || fcode == BUILT_IN_STRCASECMP;
2441
2442 location_t loc = gimple_location (stmt);
2443
2444 /* If the second arg is "", return *(const unsigned char*)arg1. */
2445 if (p2 && *p2 == '\0' && nonzero_length)
2446 {
2447 gimple_seq stmts = NULL;
2448 tree var = gimple_load_first_char (loc, str1, &stmts);
2449 if (lhs)
2450 {
2451 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2452 gimple_seq_add_stmt_without_update (&stmts, stmt);
2453 }
2454
2455 gsi_replace_with_seq_vops (gsi, stmts);
2456 return true;
2457 }
2458
2459 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2460 if (p1 && *p1 == '\0' && nonzero_length)
2461 {
2462 gimple_seq stmts = NULL;
2463 tree var = gimple_load_first_char (loc, str2, &stmts);
2464
2465 if (lhs)
2466 {
2467 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2468 stmt = gimple_build_assign (c, NOP_EXPR, var);
2469 gimple_seq_add_stmt_without_update (&stmts, stmt);
2470
2471 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2472 gimple_seq_add_stmt_without_update (&stmts, stmt);
2473 }
2474
2475 gsi_replace_with_seq_vops (gsi, stmts);
2476 return true;
2477 }
2478
2479 /* If len parameter is one, return an expression corresponding to
2480 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2481 if (fcode == BUILT_IN_STRNCMP && length == 1)
2482 {
2483 gimple_seq stmts = NULL;
2484 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2485 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2486
2487 if (lhs)
2488 {
2489 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2490 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2491 gimple_seq_add_stmt_without_update (&stmts, convert1);
2492
2493 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2494 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2495 gimple_seq_add_stmt_without_update (&stmts, convert2);
2496
2497 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2498 gimple_seq_add_stmt_without_update (&stmts, stmt);
2499 }
2500
2501 gsi_replace_with_seq_vops (gsi, stmts);
2502 return true;
2503 }
2504
caed5c92
QZ
2505 /* If length is larger than the length of one constant string,
2506 replace strncmp with the corresponding strcmp. */
2507 if (fcode == BUILT_IN_STRNCMP
2508 && length > 0
2509 && ((p2 && (size_t) length > strlen (p2))
2510 || (p1 && (size_t) length > strlen (p1))))
2511 {
2512 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2513 if (!fn)
2514 return false;
2515 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2516 replace_call_with_call_and_fold (gsi, repl);
2517 return true;
2518 }
2519
a918bfbf
ML
2520 return false;
2521}
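
/* [Editor's note, not part of gimple-fold.c] A minimal sketch of two of
   the string-comparison simplifications above, assuming <string.h>; names
   are illustrative only.  Comparing against "" reduces to loading the
   first character, and a length-one strncmp becomes the difference of the
   first characters.  */

int
example_fold_strcmp (const char *a, const char *b)
{
  int r1 = strcmp (a, "");      /* folded to: *(const unsigned char *) a */
  int r2 = strncmp (a, b, 1);   /* folded to: (unsigned char) *a - (unsigned char) *b */
  return r1 + r2;
}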
2522
488c6247
ML
2523/* Fold a call to the memchr builtin pointed to by the GSI iterator. */
2524
2525static bool
2526gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2527{
2528 gimple *stmt = gsi_stmt (*gsi);
2529 tree lhs = gimple_call_lhs (stmt);
2530 tree arg1 = gimple_call_arg (stmt, 0);
2531 tree arg2 = gimple_call_arg (stmt, 1);
2532 tree len = gimple_call_arg (stmt, 2);
2533
2534 /* If the LEN parameter is zero, return zero. */
2535 if (integer_zerop (len))
2536 {
2537 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2538 return true;
2539 }
2540
2541 char c;
2542 if (TREE_CODE (arg2) != INTEGER_CST
2543 || !tree_fits_uhwi_p (len)
2544 || !target_char_cst_p (arg2, &c))
2545 return false;
2546
2547 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2548 unsigned HOST_WIDE_INT string_length;
2549 const char *p1 = c_getstr (arg1, &string_length);
2550
2551 if (p1)
2552 {
2553 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2554 if (r == NULL)
2555 {
2556 if (length <= string_length)
2557 {
2558 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2559 return true;
2560 }
2561 }
2562 else
2563 {
2564 unsigned HOST_WIDE_INT offset = r - p1;
2565 gimple_seq stmts = NULL;
2566 if (lhs != NULL_TREE)
2567 {
2568 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2569 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2570 arg1, offset_cst);
2571 gimple_seq_add_stmt_without_update (&stmts, stmt);
2572 }
2573 else
2574 gimple_seq_add_stmt_without_update (&stmts,
2575 gimple_build_nop ());
2576
2577 gsi_replace_with_seq_vops (gsi, stmts);
2578 return true;
2579 }
2580 }
2581
2582 return false;
2583}
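
/* [Editor's note, not part of gimple-fold.c] A minimal sketch of the
   memchr folding above, assuming <string.h>; names are illustrative only.
   With a constant haystack the match at offset 2 is folded into a
   POINTER_PLUS of the haystack and the offset.  */

const void *
example_fold_memchr (void)
{
  return memchr ("abcd", 'c', 4);   /* folded to: "abcd" + 2 */
}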
a918bfbf 2584
fef5a0d9
RB
2585/* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2586 to the call. IGNORE is true if the value returned
2587 by the builtin will be ignored. UNLOCKED is true if this is
2588 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
2589 the known length of the string. Return false if no simplification
2590 was possible. */
2591
2592static bool
2593gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
fef5a0d9 2594 tree arg0, tree arg1,
dcb7fae2 2595 bool unlocked)
fef5a0d9 2596{
355fe088 2597 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2598
fef5a0d9
RB
2599 /* If we're using an unlocked function, assume the other unlocked
2600 functions exist explicitly. */
2601 tree const fn_fputc = (unlocked
2602 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2603 : builtin_decl_implicit (BUILT_IN_FPUTC));
2604 tree const fn_fwrite = (unlocked
2605 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2606 : builtin_decl_implicit (BUILT_IN_FWRITE));
2607
2608 /* If the return value is used, don't do the transformation. */
dcb7fae2 2609 if (gimple_call_lhs (stmt))
fef5a0d9
RB
2610 return false;
2611
fef5a0d9
RB
2612 /* Get the length of the string passed to fputs. If the length
2613 can't be determined, punt. */
598f7235 2614 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
fef5a0d9
RB
2615 if (!len
2616 || TREE_CODE (len) != INTEGER_CST)
2617 return false;
2618
2619 switch (compare_tree_int (len, 1))
2620 {
2621 case -1: /* length is 0, delete the call entirely. */
2622 replace_call_with_value (gsi, integer_zero_node);
2623 return true;
2624
2625 case 0: /* length is 1, call fputc. */
2626 {
2627 const char *p = c_getstr (arg0);
2628 if (p != NULL)
2629 {
2630 if (!fn_fputc)
2631 return false;
2632
355fe088 2633 gimple *repl = gimple_build_call (fn_fputc, 2,
fef5a0d9
RB
2634 build_int_cst
2635 (integer_type_node, p[0]), arg1);
2636 replace_call_with_call_and_fold (gsi, repl);
2637 return true;
2638 }
2639 }
2640 /* FALLTHROUGH */
2641 case 1: /* length is greater than 1, call fwrite. */
2642 {
2643 /* If optimizing for size keep fputs. */
2644 if (optimize_function_for_size_p (cfun))
2645 return false;
2646 /* New argument list transforming fputs(string, stream) to
2647 fwrite(string, 1, len, stream). */
2648 if (!fn_fwrite)
2649 return false;
2650
355fe088 2651 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
fef5a0d9
RB
2652 size_one_node, len, arg1);
2653 replace_call_with_call_and_fold (gsi, repl);
2654 return true;
2655 }
2656 default:
2657 gcc_unreachable ();
2658 }
2659 return false;
2660}
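
/* [Editor's note, not part of gimple-fold.c] A minimal sketch of the
   fputs folding above, assuming <stdio.h>; names are illustrative only.
   A one-character constant string becomes fputc, and a longer constant
   string becomes fwrite with the known length (only when the return
   value is unused and, for fwrite, not optimizing for size).  */

void
example_fold_fputs (FILE *fp)
{
  fputs ("x", fp);     /* folded to: fputc ('x', fp) */
  fputs ("xyz", fp);   /* folded to: fwrite ("xyz", 1, 3, fp) */
}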
2661
2662/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2663 DEST, SRC, LEN, and SIZE are the arguments to the call.
2664 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
2665 code of the builtin. If MAXLEN is not NULL, it is the maximum length
2666 passed as the third argument. */
2667
2668static bool
2669gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
fef5a0d9 2670 tree dest, tree src, tree len, tree size,
fef5a0d9
RB
2671 enum built_in_function fcode)
2672{
355fe088 2673 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
2674 location_t loc = gimple_location (stmt);
2675 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2676 tree fn;
2677
2678 /* If SRC and DEST are the same (and not volatile), return DEST
2679 (resp. DEST+LEN for __mempcpy_chk). */
2680 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2681 {
2682 if (fcode != BUILT_IN_MEMPCPY_CHK)
2683 {
2684 replace_call_with_value (gsi, dest);
2685 return true;
2686 }
2687 else
2688 {
74e3c262
RB
2689 gimple_seq stmts = NULL;
2690 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
22518428
JJ
2691 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2692 TREE_TYPE (dest), dest, len);
74e3c262 2693 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
fef5a0d9
RB
2694 replace_call_with_value (gsi, temp);
2695 return true;
2696 }
2697 }
2698
2699 if (! tree_fits_uhwi_p (size))
2700 return false;
2701
598f7235 2702 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9
RB
2703 if (! integer_all_onesp (size))
2704 {
2705 if (! tree_fits_uhwi_p (len))
2706 {
2707 /* If LEN is not constant, try MAXLEN too.
2708 For MAXLEN only allow optimizing into non-_ocs function
2709 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2710 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2711 {
2712 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2713 {
2714 /* (void) __mempcpy_chk () can be optimized into
2715 (void) __memcpy_chk (). */
2716 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2717 if (!fn)
2718 return false;
2719
355fe088 2720 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2721 replace_call_with_call_and_fold (gsi, repl);
2722 return true;
2723 }
2724 return false;
2725 }
2726 }
2727 else
2728 maxlen = len;
2729
2730 if (tree_int_cst_lt (size, maxlen))
2731 return false;
2732 }
2733
2734 fn = NULL_TREE;
2735 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2736 mem{cpy,pcpy,move,set} is available. */
2737 switch (fcode)
2738 {
2739 case BUILT_IN_MEMCPY_CHK:
2740 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2741 break;
2742 case BUILT_IN_MEMPCPY_CHK:
2743 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2744 break;
2745 case BUILT_IN_MEMMOVE_CHK:
2746 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2747 break;
2748 case BUILT_IN_MEMSET_CHK:
2749 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2750 break;
2751 default:
2752 break;
2753 }
2754
2755 if (!fn)
2756 return false;
2757
355fe088 2758 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
2759 replace_call_with_call_and_fold (gsi, repl);
2760 return true;
2761}
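
/* [Editor's note, not part of gimple-fold.c] A minimal sketch of the
   __memcpy_chk folding above; names are illustrative only and the
   __builtin___memcpy_chk form is the GCC builtin the folder operates on.
   When the object-size argument is unknown (-1) or no smaller than the
   copy length, the check is dropped and a plain memcpy remains.  */

void
example_fold_memcpy_chk (char *d, const char *s)
{
  __builtin___memcpy_chk (d, s, 8, (__SIZE_TYPE__) -1);
  /* folded to: memcpy (d, s, 8) */
}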
2762
2763/* Fold a call to the __st[rp]cpy_chk builtin.
2764 DEST, SRC, and SIZE are the arguments to the call.
2765 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
2766 code of the builtin. If MAXLEN is not NULL, it is the maximum length of
2767 the strings passed as the second argument. */
2768
2769static bool
2770gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
dcb7fae2 2771 tree dest,
fef5a0d9 2772 tree src, tree size,
fef5a0d9
RB
2773 enum built_in_function fcode)
2774{
355fe088 2775 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
2776 location_t loc = gimple_location (stmt);
2777 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2778 tree len, fn;
2779
2780 /* If SRC and DEST are the same (and not volatile), return DEST. */
2781 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2782 {
8cd95cec
MS
2783 /* Issue -Wrestrict unless the pointers are null (those do
2784 not point to objects and so do not indicate an overlap;
2785 such calls could be the result of sanitization and jump
2786 threading). */
2787 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
e9b9fa4c
MS
2788 {
2789 tree func = gimple_call_fndecl (stmt);
cc8bea0a 2790
e9b9fa4c
MS
2791 warning_at (loc, OPT_Wrestrict,
2792 "%qD source argument is the same as destination",
2793 func);
2794 }
cc8bea0a 2795
fef5a0d9
RB
2796 replace_call_with_value (gsi, dest);
2797 return true;
2798 }
2799
2800 if (! tree_fits_uhwi_p (size))
2801 return false;
2802
598f7235 2803 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
fef5a0d9
RB
2804 if (! integer_all_onesp (size))
2805 {
2806 len = c_strlen (src, 1);
2807 if (! len || ! tree_fits_uhwi_p (len))
2808 {
2809 /* If LEN is not constant, try MAXLEN too.
2810 For MAXLEN only allow optimizing into non-_ocs function
2811 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2812 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2813 {
2814 if (fcode == BUILT_IN_STPCPY_CHK)
2815 {
2816 if (! ignore)
2817 return false;
2818
2819 /* If return value of __stpcpy_chk is ignored,
2820 optimize into __strcpy_chk. */
2821 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2822 if (!fn)
2823 return false;
2824
355fe088 2825 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
fef5a0d9
RB
2826 replace_call_with_call_and_fold (gsi, repl);
2827 return true;
2828 }
2829
2830 if (! len || TREE_SIDE_EFFECTS (len))
2831 return false;
2832
2833 /* If c_strlen returned something, but not a constant,
2834 transform __strcpy_chk into __memcpy_chk. */
2835 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2836 if (!fn)
2837 return false;
2838
74e3c262 2839 gimple_seq stmts = NULL;
770fe3a3 2840 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
74e3c262
RB
2841 len = gimple_convert (&stmts, loc, size_type_node, len);
2842 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2843 build_int_cst (size_type_node, 1));
2844 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 2845 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2846 replace_call_with_call_and_fold (gsi, repl);
2847 return true;
2848 }
e256dfce 2849 }
fef5a0d9
RB
2850 else
2851 maxlen = len;
2852
2853 if (! tree_int_cst_lt (maxlen, size))
2854 return false;
e256dfce
RG
2855 }
2856
fef5a0d9
RB
2857 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2858 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2859 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2860 if (!fn)
2861 return false;
2862
355fe088 2863 gimple *repl = gimple_build_call (fn, 2, dest, src);
fef5a0d9
RB
2864 replace_call_with_call_and_fold (gsi, repl);
2865 return true;
2866}
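
/* [Editor's note, not part of gimple-fold.c] A minimal sketch of the
   __strcpy_chk folding above; names are illustrative only and the
   __builtin___strcpy_chk form is the GCC builtin the folder operates on.
   With an unknown object size (-1) the checked call degenerates to a
   plain strcpy.  */

void
example_fold_strcpy_chk (char *d, const char *s)
{
  __builtin___strcpy_chk (d, s, (__SIZE_TYPE__) -1);
  /* folded to: strcpy (d, s) */
}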
2867
2868/* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2869 are the arguments to the call. If MAXLEN is not NULL, it is the maximum
2870 length passed as the third argument. IGNORE is true if the return value
2871 can be ignored. FCODE is the BUILT_IN_* code of the builtin. */
2872
2873static bool
2874gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2875 tree dest, tree src,
dcb7fae2 2876 tree len, tree size,
fef5a0d9
RB
2877 enum built_in_function fcode)
2878{
355fe088 2879 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2880 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2881 tree fn;
2882
2883 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
cbdd87d4 2884 {
fef5a0d9
RB
2885 /* If return value of __stpncpy_chk is ignored,
2886 optimize into __strncpy_chk. */
2887 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2888 if (fn)
2889 {
355fe088 2890 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2891 replace_call_with_call_and_fold (gsi, repl);
2892 return true;
2893 }
cbdd87d4
RG
2894 }
2895
fef5a0d9
RB
2896 if (! tree_fits_uhwi_p (size))
2897 return false;
2898
598f7235 2899 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9 2900 if (! integer_all_onesp (size))
cbdd87d4 2901 {
fef5a0d9 2902 if (! tree_fits_uhwi_p (len))
fe2ef088 2903 {
fef5a0d9
RB
2904 /* If LEN is not constant, try MAXLEN too.
2905 For MAXLEN only allow optimizing into non-_ocs function
2906 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2907 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2908 return false;
8a1561bc 2909 }
fef5a0d9
RB
2910 else
2911 maxlen = len;
2912
2913 if (tree_int_cst_lt (size, maxlen))
2914 return false;
cbdd87d4
RG
2915 }
2916
fef5a0d9
RB
2917 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2918 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2919 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2920 if (!fn)
2921 return false;
2922
355fe088 2923 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
2924 replace_call_with_call_and_fold (gsi, repl);
2925 return true;
cbdd87d4
RG
2926}
2927
2625bb5d
RB
2928/* Fold function call to builtin stpcpy with arguments DEST and SRC.
2929 Return true if a simplification was made and false otherwise. */
2930
2931static bool
2932gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2933{
2934 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2935 location_t loc = gimple_location (stmt);
2936 tree dest = gimple_call_arg (stmt, 0);
2937 tree src = gimple_call_arg (stmt, 1);
01b0acb7 2938 tree fn, lenp1;
2625bb5d
RB
2939
2940 /* If the result is unused, replace stpcpy with strcpy. */
2941 if (gimple_call_lhs (stmt) == NULL_TREE)
2942 {
2943 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2944 if (!fn)
2945 return false;
2946 gimple_call_set_fndecl (stmt, fn);
2947 fold_stmt (gsi);
2948 return true;
2949 }
2950
01b0acb7 2951 /* Set to non-null if ARG refers to an unterminated array. */
3f46ef1f 2952 c_strlen_data data = { };
7d583f42 2953 tree len = c_strlen (src, 1, &data, 1);
2625bb5d
RB
2954 if (!len
2955 || TREE_CODE (len) != INTEGER_CST)
01b0acb7 2956 {
7d583f42
JL
2957 data.decl = unterminated_array (src);
2958 if (!data.decl)
01b0acb7
MS
2959 return false;
2960 }
2961
7d583f42 2962 if (data.decl)
01b0acb7
MS
2963 {
2964 /* Avoid folding calls with unterminated arrays. */
2965 if (!gimple_no_warning_p (stmt))
7d583f42 2966 warn_string_no_nul (loc, "stpcpy", src, data.decl);
01b0acb7
MS
2967 gimple_set_no_warning (stmt, true);
2968 return false;
2969 }
2625bb5d
RB
2970
2971 if (optimize_function_for_size_p (cfun)
2972 /* If length is zero it's small enough. */
2973 && !integer_zerop (len))
2974 return false;
2975
2976 /* If the source has a known length replace stpcpy with memcpy. */
2977 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2978 if (!fn)
2979 return false;
2980
2981 gimple_seq stmts = NULL;
2982 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2983 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2984 tem, build_int_cst (size_type_node, 1));
2985 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2986 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2987 gimple_set_vuse (repl, gimple_vuse (stmt));
2988 gimple_set_vdef (repl, gimple_vdef (stmt));
2989 if (gimple_vdef (repl)
2990 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2991 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2992 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2993 /* Replace the result with dest + len. */
2994 stmts = NULL;
2995 tem = gimple_convert (&stmts, loc, sizetype, len);
2996 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2997 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2998 POINTER_PLUS_EXPR, dest, tem);
f6b4dc28 2999 gsi_replace (gsi, ret, false);
2625bb5d
RB
3000 /* Finally fold the memcpy call. */
3001 gimple_stmt_iterator gsi2 = *gsi;
3002 gsi_prev (&gsi2);
3003 fold_stmt (&gsi2);
3004 return true;
3005}
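
/* [Editor's note, not part of gimple-fold.c] A minimal sketch of the
   stpcpy folding above, assuming a POSIX <string.h> that declares stpcpy;
   names are illustrative only.  With a constant-length source the copy
   becomes a memcpy and the returned end pointer is computed directly.  */

char *
example_fold_stpcpy (char *dest)
{
  return stpcpy (dest, "abc");
  /* folded to: memcpy (dest, "abc", 4); result is dest + 3 */
}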
3006
fef5a0d9
RB
3007/* Fold a call to __{,v}snprintf_chk. Return false if a normal call
3008 should be emitted rather than expanding the function inline. FCODE
3009 is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK. If MAXLEN
3010 is not NULL, it is the maximum length passed as the second argument. */
cbdd87d4
RG
3012
3013static bool
fef5a0d9 3014gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
dcb7fae2 3015 enum built_in_function fcode)
cbdd87d4 3016{
538dd0b7 3017 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
3018 tree dest, size, len, fn, fmt, flag;
3019 const char *fmt_str;
cbdd87d4 3020
fef5a0d9
RB
3021 /* Verify the required arguments in the original call. */
3022 if (gimple_call_num_args (stmt) < 5)
3023 return false;
cbdd87d4 3024
fef5a0d9
RB
3025 dest = gimple_call_arg (stmt, 0);
3026 len = gimple_call_arg (stmt, 1);
3027 flag = gimple_call_arg (stmt, 2);
3028 size = gimple_call_arg (stmt, 3);
3029 fmt = gimple_call_arg (stmt, 4);
3030
3031 if (! tree_fits_uhwi_p (size))
3032 return false;
3033
3034 if (! integer_all_onesp (size))
3035 {
598f7235 3036 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9 3037 if (! tree_fits_uhwi_p (len))
cbdd87d4 3038 {
fef5a0d9
RB
3039 /* If LEN is not constant, try MAXLEN too.
3040 For MAXLEN only allow optimizing into non-_ocs function
3041 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3042 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
cbdd87d4
RG
3043 return false;
3044 }
3045 else
fef5a0d9 3046 maxlen = len;
cbdd87d4 3047
fef5a0d9
RB
3048 if (tree_int_cst_lt (size, maxlen))
3049 return false;
3050 }
cbdd87d4 3051
fef5a0d9
RB
3052 if (!init_target_chars ())
3053 return false;
cbdd87d4 3054
fef5a0d9
RB
3055 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3056 or if format doesn't contain % chars or is "%s". */
3057 if (! integer_zerop (flag))
3058 {
3059 fmt_str = c_getstr (fmt);
3060 if (fmt_str == NULL)
3061 return false;
3062 if (strchr (fmt_str, target_percent) != NULL
3063 && strcmp (fmt_str, target_percent_s))
3064 return false;
cbdd87d4
RG
3065 }
3066
fef5a0d9
RB
3067 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3068 available. */
3069 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3070 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3071 if (!fn)
491e0b9b
RG
3072 return false;
3073
fef5a0d9
RB
3074 /* Replace the called function and the first 5 arguments by 3, retaining
3075 the trailing varargs. */
3076 gimple_call_set_fndecl (stmt, fn);
3077 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3078 gimple_call_set_arg (stmt, 0, dest);
3079 gimple_call_set_arg (stmt, 1, len);
3080 gimple_call_set_arg (stmt, 2, fmt);
3081 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3082 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3083 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3084 fold_stmt (gsi);
3085 return true;
3086}
cbdd87d4 3087
fef5a0d9
RB
3088/* Fold a call to __{,v}sprintf_chk. Return false if a normal call
3089 should be emitted rather than expanding the function inline.
3090 FCODE is either BUILT_IN_SPRINTF_CHK
3091 or BUILT_IN_VSPRINTF_CHK. */
cbdd87d4 3092
fef5a0d9
RB
3093static bool
3094gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3095 enum built_in_function fcode)
3096{
538dd0b7 3097 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
3098 tree dest, size, len, fn, fmt, flag;
3099 const char *fmt_str;
3100 unsigned nargs = gimple_call_num_args (stmt);
cbdd87d4 3101
fef5a0d9
RB
3102 /* Verify the required arguments in the original call. */
3103 if (nargs < 4)
3104 return false;
3105 dest = gimple_call_arg (stmt, 0);
3106 flag = gimple_call_arg (stmt, 1);
3107 size = gimple_call_arg (stmt, 2);
3108 fmt = gimple_call_arg (stmt, 3);
3109
3110 if (! tree_fits_uhwi_p (size))
3111 return false;
3112
3113 len = NULL_TREE;
3114
3115 if (!init_target_chars ())
3116 return false;
3117
3118 /* Check whether the format is a literal string constant. */
3119 fmt_str = c_getstr (fmt);
3120 if (fmt_str != NULL)
3121 {
3122 /* If the format doesn't contain % args or %%, we know the size. */
3123 if (strchr (fmt_str, target_percent) == 0)
cbdd87d4 3124 {
fef5a0d9
RB
3125 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3126 len = build_int_cstu (size_type_node, strlen (fmt_str));
3127 }
3128 /* If the format is "%s" and first ... argument is a string literal,
3129 we know the size too. */
3130 else if (fcode == BUILT_IN_SPRINTF_CHK
3131 && strcmp (fmt_str, target_percent_s) == 0)
3132 {
3133 tree arg;
cbdd87d4 3134
fef5a0d9
RB
3135 if (nargs == 5)
3136 {
3137 arg = gimple_call_arg (stmt, 4);
3138 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3139 {
3140 len = c_strlen (arg, 1);
3141 if (! len || ! tree_fits_uhwi_p (len))
3142 len = NULL_TREE;
3143 }
3144 }
3145 }
3146 }
cbdd87d4 3147
fef5a0d9
RB
3148 if (! integer_all_onesp (size))
3149 {
3150 if (! len || ! tree_int_cst_lt (len, size))
3151 return false;
3152 }
cbdd87d4 3153
fef5a0d9
RB
3154 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3155 or if format doesn't contain % chars or is "%s". */
3156 if (! integer_zerop (flag))
3157 {
3158 if (fmt_str == NULL)
3159 return false;
3160 if (strchr (fmt_str, target_percent) != NULL
3161 && strcmp (fmt_str, target_percent_s))
3162 return false;
3163 }
cbdd87d4 3164
fef5a0d9
RB
3165 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3166 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3167 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3168 if (!fn)
3169 return false;
3170
3171 /* Replace the called function and the first 4 arguments by 2, retaining
3172 the trailing varargs. */
3173 gimple_call_set_fndecl (stmt, fn);
3174 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3175 gimple_call_set_arg (stmt, 0, dest);
3176 gimple_call_set_arg (stmt, 1, fmt);
3177 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3178 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3179 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3180 fold_stmt (gsi);
3181 return true;
3182}
3183
35770bb2
RB
3184/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3185 ORIG may be null if this is a 2-argument call. We don't attempt to
3186 simplify calls with more than 3 arguments.
3187
a104bd88 3188 Return true if simplification was possible, otherwise false. */
35770bb2 3189
a104bd88 3190bool
dcb7fae2 3191gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
35770bb2 3192{
355fe088 3193 gimple *stmt = gsi_stmt (*gsi);
35770bb2
RB
3194 tree dest = gimple_call_arg (stmt, 0);
3195 tree fmt = gimple_call_arg (stmt, 1);
3196 tree orig = NULL_TREE;
3197 const char *fmt_str = NULL;
3198
3199 /* Verify the required arguments in the original call. We deal with two
3200 types of sprintf() calls: 'sprintf (str, fmt)' and
3201 'sprintf (dest, "%s", orig)'. */
3202 if (gimple_call_num_args (stmt) > 3)
3203 return false;
3204
3205 if (gimple_call_num_args (stmt) == 3)
3206 orig = gimple_call_arg (stmt, 2);
3207
3208 /* Check whether the format is a literal string constant. */
3209 fmt_str = c_getstr (fmt);
3210 if (fmt_str == NULL)
3211 return false;
3212
3213 if (!init_target_chars ())
3214 return false;
3215
3216 /* If the format doesn't contain % args or %%, use strcpy. */
3217 if (strchr (fmt_str, target_percent) == NULL)
3218 {
3219 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3220
3221 if (!fn)
3222 return false;
3223
3224 /* Don't optimize sprintf (buf, "abc", ptr++). */
3225 if (orig)
3226 return false;
3227
3228 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3229 'format' is known to contain no % formats. */
3230 gimple_seq stmts = NULL;
355fe088 3231 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
01b0acb7
MS
3232
3233 /* Propagate the NO_WARNING bit to avoid issuing the same
3234 warning more than once. */
3235 if (gimple_no_warning_p (stmt))
3236 gimple_set_no_warning (repl, true);
3237
35770bb2
RB
3238 gimple_seq_add_stmt_without_update (&stmts, repl);
3239 if (gimple_call_lhs (stmt))
3240 {
3241 repl = gimple_build_assign (gimple_call_lhs (stmt),
3242 build_int_cst (integer_type_node,
3243 strlen (fmt_str)));
3244 gimple_seq_add_stmt_without_update (&stmts, repl);
3245 gsi_replace_with_seq_vops (gsi, stmts);
3246 /* gsi now points at the assignment to the lhs, get a
3248 stmt iterator to the strcpy call.
3248 ??? We can't use gsi_for_stmt as that doesn't work when the
3249 CFG isn't built yet. */
3250 gimple_stmt_iterator gsi2 = *gsi;
3251 gsi_prev (&gsi2);
3252 fold_stmt (&gsi2);
3253 }
3254 else
3255 {
3256 gsi_replace_with_seq_vops (gsi, stmts);
3257 fold_stmt (gsi);
3258 }
3259 return true;
3260 }
3261
3262 /* If the format is "%s", use strcpy if the result isn't used. */
3263 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3264 {
3265 tree fn;
3266 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3267
3268 if (!fn)
3269 return false;
3270
3271 /* Don't crash on sprintf (str1, "%s"). */
3272 if (!orig)
3273 return false;
3274
dcb7fae2
RB
3275 tree orig_len = NULL_TREE;
3276 if (gimple_call_lhs (stmt))
35770bb2 3277 {
598f7235 3278 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
d7e78447 3279 if (!orig_len)
35770bb2
RB
3280 return false;
3281 }
3282
3283 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3284 gimple_seq stmts = NULL;
355fe088 3285 gimple *repl = gimple_build_call (fn, 2, dest, orig);
01b0acb7
MS
3286
3287 /* Propagate the NO_WARNING bit to avoid issuing the same
3288 warning more than once. */
3289 if (gimple_no_warning_p (stmt))
3290 gimple_set_no_warning (repl, true);
3291
35770bb2
RB
3292 gimple_seq_add_stmt_without_update (&stmts, repl);
3293 if (gimple_call_lhs (stmt))
3294 {
d7e78447
RB
3295 if (!useless_type_conversion_p (integer_type_node,
3296 TREE_TYPE (orig_len)))
3297 orig_len = fold_convert (integer_type_node, orig_len);
3298 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
35770bb2
RB
3299 gimple_seq_add_stmt_without_update (&stmts, repl);
3300 gsi_replace_with_seq_vops (gsi, stmts);
3301 /* gsi now points at the assignment to the lhs, get a
3302 stmt iterator to the strcpy call.
3303 ??? We can't use gsi_for_stmt as that doesn't work when the
3304 CFG isn't built yet. */
3305 gimple_stmt_iterator gsi2 = *gsi;
3306 gsi_prev (&gsi2);
3307 fold_stmt (&gsi2);
3308 }
3309 else
3310 {
3311 gsi_replace_with_seq_vops (gsi, stmts);
3312 fold_stmt (gsi);
3313 }
3314 return true;
3315 }
3316 return false;
3317}
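
/* Illustrative examples of the two cases handled above (hypothetical user
   code, not from the original sources):

     n = sprintf (buf, "hello");     ->  strcpy (buf, "hello");  n = 5;
     n = sprintf (buf, "%s", src);   ->  strcpy (buf, src);  n = strlen (src);

   The second form is only folded when the length of SRC is a known constant
   (or when the result of the call is unused).  */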
3318
d7e78447
RB
3319/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3320 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3321 attempt to simplify calls with more than 4 arguments.
35770bb2 3322
a104bd88 3323 Return true if simplification was possible, otherwise false. */
d7e78447 3324
a104bd88 3325bool
dcb7fae2 3326gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
d7e78447 3327{
538dd0b7 3328 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
d7e78447
RB
3329 tree dest = gimple_call_arg (stmt, 0);
3330 tree destsize = gimple_call_arg (stmt, 1);
3331 tree fmt = gimple_call_arg (stmt, 2);
3332 tree orig = NULL_TREE;
3333 const char *fmt_str = NULL;
3334
3335 if (gimple_call_num_args (stmt) > 4)
3336 return false;
3337
3338 if (gimple_call_num_args (stmt) == 4)
3339 orig = gimple_call_arg (stmt, 3);
3340
3341 if (!tree_fits_uhwi_p (destsize))
3342 return false;
3343 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3344
3345 /* Check whether the format is a literal string constant. */
3346 fmt_str = c_getstr (fmt);
3347 if (fmt_str == NULL)
3348 return false;
3349
3350 if (!init_target_chars ())
3351 return false;
3352
3353 /* If the format doesn't contain % args or %%, use strcpy. */
3354 if (strchr (fmt_str, target_percent) == NULL)
3355 {
3356 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3357 if (!fn)
3358 return false;
3359
3360 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3361 if (orig)
3362 return false;
3363
3364 /* We could expand this as
3365 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3366 or to
3367 memcpy (str, fmt_with_nul_at_cstm1, cst);
3368 but in the former case that might increase code size
3369 and in the latter case grow .rodata section too much.
3370 So punt for now. */
3371 size_t len = strlen (fmt_str);
3372 if (len >= destlen)
3373 return false;
3374
3375 gimple_seq stmts = NULL;
355fe088 3376 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
d7e78447
RB
3377 gimple_seq_add_stmt_without_update (&stmts, repl);
3378 if (gimple_call_lhs (stmt))
3379 {
3380 repl = gimple_build_assign (gimple_call_lhs (stmt),
3381 build_int_cst (integer_type_node, len));
3382 gimple_seq_add_stmt_without_update (&stmts, repl);
3383 gsi_replace_with_seq_vops (gsi, stmts);
3384 /* gsi now points at the assignment to the lhs, get a
3385 stmt iterator to the strcpy call.
3386 ??? We can't use gsi_for_stmt as that doesn't work when the
3387 CFG isn't built yet. */
3388 gimple_stmt_iterator gsi2 = *gsi;
3389 gsi_prev (&gsi2);
3390 fold_stmt (&gsi2);
3391 }
3392 else
3393 {
3394 gsi_replace_with_seq_vops (gsi, stmts);
3395 fold_stmt (gsi);
3396 }
3397 return true;
3398 }
3399
3400 /* If the format is "%s", use strcpy when the length of ORIG is a known constant smaller than the destination size. */
3401 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3402 {
3403 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3404 if (!fn)
3405 return false;
3406
3407 /* Don't crash on snprintf (str1, cst, "%s"). */
3408 if (!orig)
3409 return false;
3410
598f7235 3411 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
af9db3a7 3412 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
dcb7fae2 3413 return false;
d7e78447
RB
3414
3415 /* We could expand this as
3416 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3417 or to
3418 memcpy (str1, str2_with_nul_at_cstm1, cst);
3419 but in the former case that might increase code size
3420 and in the latter case grow .rodata section too much.
3421 So punt for now. */
3422 if (compare_tree_int (orig_len, destlen) >= 0)
3423 return false;
3424
3425 /* Convert snprintf (str1, cst, "%s", str2) into
3426 strcpy (str1, str2) if strlen (str2) < cst. */
3427 gimple_seq stmts = NULL;
355fe088 3428 gimple *repl = gimple_build_call (fn, 2, dest, orig);
d7e78447
RB
3429 gimple_seq_add_stmt_without_update (&stmts, repl);
3430 if (gimple_call_lhs (stmt))
3431 {
3432 if (!useless_type_conversion_p (integer_type_node,
3433 TREE_TYPE (orig_len)))
3434 orig_len = fold_convert (integer_type_node, orig_len);
3435 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3436 gimple_seq_add_stmt_without_update (&stmts, repl);
3437 gsi_replace_with_seq_vops (gsi, stmts);
3438 /* gsi now points at the assignment to the lhs, get a
3439 stmt iterator to the strcpy call.
3440 ??? We can't use gsi_for_stmt as that doesn't work when the
3441 CFG isn't built yet. */
3442 gimple_stmt_iterator gsi2 = *gsi;
3443 gsi_prev (&gsi2);
3444 fold_stmt (&gsi2);
3445 }
3446 else
3447 {
3448 gsi_replace_with_seq_vops (gsi, stmts);
3449 fold_stmt (gsi);
3450 }
3451 return true;
3452 }
3453 return false;
3454}
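
/* Illustrative examples (hypothetical user code, not from the original
   sources), assuming the destination size argument is a known constant:

     snprintf (buf, 32, "hello");    ->  strcpy (buf, "hello")   (since 5 < 32)
     snprintf (buf, 32, "%s", src);  ->  strcpy (buf, src)

   The second form is only used when strlen (src) is a known constant
   smaller than the destination size.  */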
35770bb2 3455
edd7ae68
RB
3456/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
3457 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3458 more than 3 arguments, and ARG may be null in the 2-argument case.
3459
3460 Return false if no simplification was possible, otherwise simplify the
3461 call in place and return true. FCODE is the BUILT_IN_*
3462 code of the function to be simplified. */
3463
3464static bool
3465gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3466 tree fp, tree fmt, tree arg,
3467 enum built_in_function fcode)
3468{
3469 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3470 tree fn_fputc, fn_fputs;
3471 const char *fmt_str = NULL;
3472
3473 /* If the return value is used, don't do the transformation. */
3474 if (gimple_call_lhs (stmt) != NULL_TREE)
3475 return false;
3476
3477 /* Check whether the format is a literal string constant. */
3478 fmt_str = c_getstr (fmt);
3479 if (fmt_str == NULL)
3480 return false;
3481
3482 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3483 {
3484 /* If we're using an unlocked function, assume the other
3485 unlocked functions exist explicitly. */
3486 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3487 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3488 }
3489 else
3490 {
3491 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3492 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3493 }
3494
3495 if (!init_target_chars ())
3496 return false;
3497
3498 /* If the format doesn't contain % args or %%, use fputs (or drop the call if the format is ""). */
3499 if (strchr (fmt_str, target_percent) == NULL)
3500 {
3501 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3502 && arg)
3503 return false;
3504
3505 /* If the format specifier was "", fprintf does nothing. */
3506 if (fmt_str[0] == '\0')
3507 {
3508 replace_call_with_value (gsi, NULL_TREE);
3509 return true;
3510 }
3511
3512 /* When "string" doesn't contain %, replace all cases of
3513 fprintf (fp, string) with fputs (string, fp). The fputs
3514 builtin will take care of special cases like length == 1. */
3515 if (fn_fputs)
3516 {
3517 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3518 replace_call_with_call_and_fold (gsi, repl);
3519 return true;
3520 }
3521 }
3522
3523 /* The other optimizations can be done only on the non-va_list variants. */
3524 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3525 return false;
3526
3527 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3528 else if (strcmp (fmt_str, target_percent_s) == 0)
3529 {
3530 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3531 return false;
3532 if (fn_fputs)
3533 {
3534 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3535 replace_call_with_call_and_fold (gsi, repl);
3536 return true;
3537 }
3538 }
3539
3540 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3541 else if (strcmp (fmt_str, target_percent_c) == 0)
3542 {
3543 if (!arg
3544 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3545 return false;
3546 if (fn_fputc)
3547 {
3548 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3549 replace_call_with_call_and_fold (gsi, repl);
3550 return true;
3551 }
3552 }
3553
3554 return false;
3555}
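
/* Illustrative examples (hypothetical user code, not from the original
   sources); the folding is only done when the call's result is unused:

     fprintf (fp, "hello");    ->  fputs ("hello", fp);
     fprintf (fp, "%s", str);  ->  fputs (str, fp);
     fprintf (fp, "%c", ch);   ->  fputc (ch, fp);
     fprintf (fp, "");         ->  removed entirely  */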
3556
ad03a744
RB
3557/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3558 FMT and ARG are the arguments to the call; we don't fold cases with
3559 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3560
3561 Return false if no simplification was possible, otherwise simplify the
3562 call in place and return true. FCODE is the BUILT_IN_*
3563 code of the function to be simplified. */
3564
3565static bool
3566gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3567 tree arg, enum built_in_function fcode)
3568{
3569 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3570 tree fn_putchar, fn_puts, newarg;
3571 const char *fmt_str = NULL;
3572
3573 /* If the return value is used, don't do the transformation. */
3574 if (gimple_call_lhs (stmt) != NULL_TREE)
3575 return false;
3576
3577 /* Check whether the format is a literal string constant. */
3578 fmt_str = c_getstr (fmt);
3579 if (fmt_str == NULL)
3580 return false;
3581
3582 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3583 {
3584 /* If we're using an unlocked function, assume the other
3585 unlocked functions exist explicitly. */
3586 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3587 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3588 }
3589 else
3590 {
3591 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3592 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3593 }
3594
3595 if (!init_target_chars ())
3596 return false;
3597
3598 if (strcmp (fmt_str, target_percent_s) == 0
3599 || strchr (fmt_str, target_percent) == NULL)
3600 {
3601 const char *str;
3602
3603 if (strcmp (fmt_str, target_percent_s) == 0)
3604 {
3605 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3606 return false;
3607
3608 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3609 return false;
3610
3611 str = c_getstr (arg);
3612 if (str == NULL)
3613 return false;
3614 }
3615 else
3616 {
3617 /* The format specifier doesn't contain any '%' characters. */
3618 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3619 && arg)
3620 return false;
3621 str = fmt_str;
3622 }
3623
3624 /* If the string was "", printf does nothing. */
3625 if (str[0] == '\0')
3626 {
3627 replace_call_with_value (gsi, NULL_TREE);
3628 return true;
3629 }
3630
3631 /* If the string has length of 1, call putchar. */
3632 if (str[1] == '\0')
3633 {
3634 /* Given printf ("c") (where c is any one character),
3635 convert "c"[0] to an int and pass that to the replacement
3636 function. */
3637 newarg = build_int_cst (integer_type_node, str[0]);
3638 if (fn_putchar)
3639 {
3640 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3641 replace_call_with_call_and_fold (gsi, repl);
3642 return true;
3643 }
3644 }
3645 else
3646 {
3647 /* If the string was "string\n", call puts("string"). */
3648 size_t len = strlen (str);
3649 if ((unsigned char)str[len - 1] == target_newline
3650 && (size_t) (int) len == len
3651 && (int) len > 0)
3652 {
3653 char *newstr;
ad03a744
RB
3654
3655 /* Create a NUL-terminated string that's one char shorter
3656 than the original, stripping off the trailing '\n'. */
a353fec4 3657 newstr = xstrdup (str);
ad03a744 3658 newstr[len - 1] = '\0';
a353fec4
BE
3659 newarg = build_string_literal (len, newstr);
3660 free (newstr);
ad03a744
RB
3661 if (fn_puts)
3662 {
3663 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3664 replace_call_with_call_and_fold (gsi, repl);
3665 return true;
3666 }
3667 }
3668 else
3669 /* We'd like to arrange to call fputs(string,stdout) here,
3670 but we need stdout and don't have a way to get it yet. */
3671 return false;
3672 }
3673 }
3674
3675 /* The other optimizations can be done only on the non-va_list variants. */
3676 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3677 return false;
3678
3679 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3680 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3681 {
3682 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3683 return false;
3684 if (fn_puts)
3685 {
3686 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3687 replace_call_with_call_and_fold (gsi, repl);
3688 return true;
3689 }
3690 }
3691
3692 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3693 else if (strcmp (fmt_str, target_percent_c) == 0)
3694 {
3695 if (!arg || ! useless_type_conversion_p (integer_type_node,
3696 TREE_TYPE (arg)))
3697 return false;
3698 if (fn_putchar)
3699 {
3700 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3701 replace_call_with_call_and_fold (gsi, repl);
3702 return true;
3703 }
3704 }
3705
3706 return false;
3707}
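
/* Illustrative examples (hypothetical user code, not from the original
   sources); the folding is only done when the call's result is unused:

     printf ("x");          ->  putchar ('x');
     printf ("hello\n");    ->  puts ("hello");
     printf ("%s\n", str);  ->  puts (str);
     printf ("%c", ch);     ->  putchar (ch);
     printf ("");           ->  removed entirely  */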
3708
edd7ae68 3709
fef5a0d9
RB
3710
3711/* Fold a call to __builtin_strlen at *GSI; fold to a constant if the string length is known, otherwise record the length range on the lhs. */
3712
3713static bool
dcb7fae2 3714gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 3715{
355fe088 3716 gimple *stmt = gsi_stmt (*gsi);
e08341bb 3717 tree arg = gimple_call_arg (stmt, 0);
c42d0aa0
MS
3718
3719 wide_int minlen;
3720 wide_int maxlen;
3721
5d6655eb
MS
3722 c_strlen_data lendata = { };
3723 if (!get_range_strlen (arg, &lendata, /* eltsize = */ 1)
3724 && !lendata.decl
3725 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
3726 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
c42d0aa0
MS
3727 {
3728 /* The range of lengths refers to either a single constant
3729 string or to the longest and shortest constant string
3730 referenced by the argument of the strlen() call, or to
3731 the strings that can possibly be stored in the arrays
3732 the argument refers to. */
5d6655eb
MS
3733 minlen = wi::to_wide (lendata.minlen);
3734 maxlen = wi::to_wide (lendata.maxlen);
c42d0aa0
MS
3735 }
3736 else
3737 {
3738 unsigned prec = TYPE_PRECISION (sizetype);
3739
3740 minlen = wi::shwi (0, prec);
3741 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3742 }
3743
3744 if (minlen == maxlen)
3745 {
5d6655eb
MS
3746 /* Fold the strlen call to a constant. */
3747 tree type = TREE_TYPE (lendata.minlen);
3748 tree len = force_gimple_operand_gsi (gsi,
3749 wide_int_to_tree (type, minlen),
3750 true, NULL, true, GSI_SAME_STMT);
3751 replace_call_with_value (gsi, len);
c42d0aa0
MS
3752 return true;
3753 }
3754
a7bf6c08
MS
3755 if (tree lhs = gimple_call_lhs (stmt))
3756 if (TREE_CODE (lhs) == SSA_NAME
3757 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
3758 set_range_info (lhs, VR_RANGE, minlen, maxlen);
c42d0aa0
MS
3759
3760 return false;
cbdd87d4
RG
3761}
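
/* Illustrative example (hypothetical user code, not from the original
   sources):

     n = strlen ("abcd");   ->  n = 4;

   When only a range [MIN, MAX] of possible lengths can be determined, the
   call is kept but the range is recorded on the SSA name of the result.  */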
3762
48126138
NS
3763/* Fold a call to __builtin_acc_on_device. */
3764
3765static bool
3766gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3767{
3768 /* Defer folding until we know which compiler we're in. */
3769 if (symtab->state != EXPANSION)
3770 return false;
3771
3772 unsigned val_host = GOMP_DEVICE_HOST;
3773 unsigned val_dev = GOMP_DEVICE_NONE;
3774
3775#ifdef ACCEL_COMPILER
3776 val_host = GOMP_DEVICE_NOT_HOST;
3777 val_dev = ACCEL_COMPILER_acc_device;
3778#endif
3779
3780 location_t loc = gimple_location (gsi_stmt (*gsi));
3781
3782 tree host_eq = make_ssa_name (boolean_type_node);
3783 gimple *host_ass = gimple_build_assign
3784 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3785 gimple_set_location (host_ass, loc);
3786 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3787
3788 tree dev_eq = make_ssa_name (boolean_type_node);
3789 gimple *dev_ass = gimple_build_assign
3790 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3791 gimple_set_location (dev_ass, loc);
3792 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3793
3794 tree result = make_ssa_name (boolean_type_node);
3795 gimple *result_ass = gimple_build_assign
3796 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3797 gimple_set_location (result_ass, loc);
3798 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3799
3800 replace_call_with_value (gsi, result);
3801
3802 return true;
3803}
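
/* Sketch of the expansion above (illustrative, not from the original
   sources): a call acc_on_device (dev) becomes the boolean expression

     (dev == VAL_HOST) | (dev == VAL_DEV)

   where VAL_HOST/VAL_DEV are GOMP_DEVICE_HOST/GOMP_DEVICE_NONE in the host
   compiler and GOMP_DEVICE_NOT_HOST/ACCEL_COMPILER_acc_device in an offload
   (ACCEL_COMPILER) build.  */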
cbdd87d4 3804
fe75f732
PK
3805/* Fold realloc (0, n) -> malloc (n). */
3806
3807static bool
3808gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3809{
3810 gimple *stmt = gsi_stmt (*gsi);
3811 tree arg = gimple_call_arg (stmt, 0);
3812 tree size = gimple_call_arg (stmt, 1);
3813
3814 if (operand_equal_p (arg, null_pointer_node, 0))
3815 {
3816 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3817 if (fn_malloc)
3818 {
3819 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3820 replace_call_with_call_and_fold (gsi, repl);
3821 return true;
3822 }
3823 }
3824 return false;
3825}
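
/* Illustrative example (hypothetical user code, not from the original
   sources):

     p = realloc (NULL, n);   ->  p = malloc (n);

   Any other first argument leaves the call untouched.  */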
3826
dcb7fae2
RB
3827/* Fold the non-target builtin at *GSI and return whether any simplification
3828 was made. */
cbdd87d4 3829
fef5a0d9 3830static bool
dcb7fae2 3831gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 3832{
538dd0b7 3833 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 3834 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 3835
dcb7fae2
RB
3836 /* Give up for always_inline inline builtins until they are
3837 inlined. */
3838 if (avoid_folding_inline_builtin (callee))
3839 return false;
cbdd87d4 3840
edd7ae68
RB
3841 unsigned n = gimple_call_num_args (stmt);
3842 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3843 switch (fcode)
cbdd87d4 3844 {
b3d8d88e
MS
3845 case BUILT_IN_BCMP:
3846 return gimple_fold_builtin_bcmp (gsi);
3847 case BUILT_IN_BCOPY:
3848 return gimple_fold_builtin_bcopy (gsi);
dcb7fae2 3849 case BUILT_IN_BZERO:
b3d8d88e
MS
3850 return gimple_fold_builtin_bzero (gsi);
3851
dcb7fae2
RB
3852 case BUILT_IN_MEMSET:
3853 return gimple_fold_builtin_memset (gsi,
3854 gimple_call_arg (stmt, 1),
3855 gimple_call_arg (stmt, 2));
dcb7fae2
RB
3856 case BUILT_IN_MEMCPY:
3857 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3858 gimple_call_arg (stmt, 1), 0);
3859 case BUILT_IN_MEMPCPY:
3860 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3861 gimple_call_arg (stmt, 1), 1);
3862 case BUILT_IN_MEMMOVE:
3863 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3864 gimple_call_arg (stmt, 1), 3);
3865 case BUILT_IN_SPRINTF_CHK:
3866 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 3867 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
3868 case BUILT_IN_STRCAT_CHK:
3869 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
3870 case BUILT_IN_STRNCAT_CHK:
3871 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 3872 case BUILT_IN_STRLEN:
dcb7fae2 3873 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 3874 case BUILT_IN_STRCPY:
dcb7fae2 3875 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 3876 gimple_call_arg (stmt, 0),
dcb7fae2 3877 gimple_call_arg (stmt, 1));
cbdd87d4 3878 case BUILT_IN_STRNCPY:
dcb7fae2 3879 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
3880 gimple_call_arg (stmt, 0),
3881 gimple_call_arg (stmt, 1),
dcb7fae2 3882 gimple_call_arg (stmt, 2));
9a7eefec 3883 case BUILT_IN_STRCAT:
dcb7fae2
RB
3884 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3885 gimple_call_arg (stmt, 1));
ad03a744
RB
3886 case BUILT_IN_STRNCAT:
3887 return gimple_fold_builtin_strncat (gsi);
71dea1dd 3888 case BUILT_IN_INDEX:
912d9ec3 3889 case BUILT_IN_STRCHR:
71dea1dd
WD
3890 return gimple_fold_builtin_strchr (gsi, false);
3891 case BUILT_IN_RINDEX:
3892 case BUILT_IN_STRRCHR:
3893 return gimple_fold_builtin_strchr (gsi, true);
c8952930
JJ
3894 case BUILT_IN_STRSTR:
3895 return gimple_fold_builtin_strstr (gsi);
a918bfbf 3896 case BUILT_IN_STRCMP:
8b0b334a 3897 case BUILT_IN_STRCMP_EQ:
a918bfbf
ML
3898 case BUILT_IN_STRCASECMP:
3899 case BUILT_IN_STRNCMP:
8b0b334a 3900 case BUILT_IN_STRNCMP_EQ:
a918bfbf
ML
3901 case BUILT_IN_STRNCASECMP:
3902 return gimple_fold_builtin_string_compare (gsi);
488c6247
ML
3903 case BUILT_IN_MEMCHR:
3904 return gimple_fold_builtin_memchr (gsi);
cbdd87d4 3905 case BUILT_IN_FPUTS:
dcb7fae2
RB
3906 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3907 gimple_call_arg (stmt, 1), false);
cbdd87d4 3908 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
3909 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3910 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
3911 case BUILT_IN_MEMCPY_CHK:
3912 case BUILT_IN_MEMPCPY_CHK:
3913 case BUILT_IN_MEMMOVE_CHK:
3914 case BUILT_IN_MEMSET_CHK:
dcb7fae2 3915 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
3916 gimple_call_arg (stmt, 0),
3917 gimple_call_arg (stmt, 1),
3918 gimple_call_arg (stmt, 2),
3919 gimple_call_arg (stmt, 3),
edd7ae68 3920 fcode);
2625bb5d
RB
3921 case BUILT_IN_STPCPY:
3922 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
3923 case BUILT_IN_STRCPY_CHK:
3924 case BUILT_IN_STPCPY_CHK:
dcb7fae2 3925 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
3926 gimple_call_arg (stmt, 0),
3927 gimple_call_arg (stmt, 1),
3928 gimple_call_arg (stmt, 2),
edd7ae68 3929 fcode);
cbdd87d4 3930 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 3931 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
3932 return gimple_fold_builtin_stxncpy_chk (gsi,
3933 gimple_call_arg (stmt, 0),
3934 gimple_call_arg (stmt, 1),
3935 gimple_call_arg (stmt, 2),
3936 gimple_call_arg (stmt, 3),
edd7ae68 3937 fcode);
cbdd87d4
RG
3938 case BUILT_IN_SNPRINTF_CHK:
3939 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 3940 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
a104bd88 3941
edd7ae68
RB
3942 case BUILT_IN_FPRINTF:
3943 case BUILT_IN_FPRINTF_UNLOCKED:
3944 case BUILT_IN_VFPRINTF:
3945 if (n == 2 || n == 3)
3946 return gimple_fold_builtin_fprintf (gsi,
3947 gimple_call_arg (stmt, 0),
3948 gimple_call_arg (stmt, 1),
3949 n == 3
3950 ? gimple_call_arg (stmt, 2)
3951 : NULL_TREE,
3952 fcode);
3953 break;
3954 case BUILT_IN_FPRINTF_CHK:
3955 case BUILT_IN_VFPRINTF_CHK:
3956 if (n == 3 || n == 4)
3957 return gimple_fold_builtin_fprintf (gsi,
3958 gimple_call_arg (stmt, 0),
3959 gimple_call_arg (stmt, 2),
3960 n == 4
3961 ? gimple_call_arg (stmt, 3)
3962 : NULL_TREE,
3963 fcode);
3964 break;
ad03a744
RB
3965 case BUILT_IN_PRINTF:
3966 case BUILT_IN_PRINTF_UNLOCKED:
3967 case BUILT_IN_VPRINTF:
3968 if (n == 1 || n == 2)
3969 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3970 n == 2
3971 ? gimple_call_arg (stmt, 1)
3972 : NULL_TREE, fcode);
3973 break;
3974 case BUILT_IN_PRINTF_CHK:
3975 case BUILT_IN_VPRINTF_CHK:
3976 if (n == 2 || n == 3)
3977 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3978 n == 3
3979 ? gimple_call_arg (stmt, 2)
3980 : NULL_TREE, fcode);
242a37f1 3981 break;
48126138
NS
3982 case BUILT_IN_ACC_ON_DEVICE:
3983 return gimple_fold_builtin_acc_on_device (gsi,
3984 gimple_call_arg (stmt, 0));
fe75f732
PK
3985 case BUILT_IN_REALLOC:
3986 return gimple_fold_builtin_realloc (gsi);
3987
fef5a0d9
RB
3988 default:;
3989 }
3990
3991 /* Try the generic builtin folder. */
3992 bool ignore = (gimple_call_lhs (stmt) == NULL);
3993 tree result = fold_call_stmt (stmt, ignore);
3994 if (result)
3995 {
3996 if (ignore)
3997 STRIP_NOPS (result);
3998 else
3999 result = fold_convert (gimple_call_return_type (stmt), result);
4000 if (!update_call_from_tree (gsi, result))
4001 gimplify_and_update_call_from_tree (gsi, result);
4002 return true;
4003 }
4004
4005 return false;
4006}
4007
451e8dae
NS
4008/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
4009 function calls to constants, where possible. */
4010
4011static tree
4012fold_internal_goacc_dim (const gimple *call)
4013{
629b3d75
MJ
4014 int axis = oacc_get_ifn_dim_arg (call);
4015 int size = oacc_get_fn_dim_size (current_function_decl, axis);
451e8dae 4016 tree result = NULL_TREE;
67d2229e 4017 tree type = TREE_TYPE (gimple_call_lhs (call));
451e8dae 4018
67d2229e 4019 switch (gimple_call_internal_fn (call))
451e8dae 4020 {
67d2229e
TV
4021 case IFN_GOACC_DIM_POS:
4022 /* If the size is 1, we know the answer. */
4023 if (size == 1)
4024 result = build_int_cst (type, 0);
4025 break;
4026 case IFN_GOACC_DIM_SIZE:
4027 /* If the size is not dynamic, we know the answer. */
4028 if (size)
4029 result = build_int_cst (type, size);
4030 break;
4031 default:
4032 break;
451e8dae
NS
4033 }
4034
4035 return result;
4036}
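
/* Illustrative examples (not from the original sources): if the relevant
   OpenACC dimension is known at compile time,

     .GOACC_DIM_SIZE (axis)  ->  32   when the dimension size is 32
     .GOACC_DIM_POS (axis)   ->  0    when the dimension size is 1

   otherwise the internal calls are left in place.  */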
4037
849a76a5
JJ
4038/* Return true if STMT is an __atomic_compare_exchange_N call that is
4039 suitable for conversion into ATOMIC_COMPARE_EXCHANGE, i.e. when its second
4040 argument is &var and var is only addressable because of such calls. */
4041
4042bool
4043optimize_atomic_compare_exchange_p (gimple *stmt)
4044{
4045 if (gimple_call_num_args (stmt) != 6
4046 || !flag_inline_atomics
4047 || !optimize
45b2222a 4048 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
849a76a5
JJ
4049 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
4050 || !gimple_vdef (stmt)
4051 || !gimple_vuse (stmt))
4052 return false;
4053
4054 tree fndecl = gimple_call_fndecl (stmt);
4055 switch (DECL_FUNCTION_CODE (fndecl))
4056 {
4057 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
4058 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
4059 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
4060 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
4061 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
4062 break;
4063 default:
4064 return false;
4065 }
4066
4067 tree expected = gimple_call_arg (stmt, 1);
4068 if (TREE_CODE (expected) != ADDR_EXPR
1de3c940
JJ
4069 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
4070 return false;
4071
4072 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
4073 if (!is_gimple_reg_type (etype)
849a76a5 4074 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
1de3c940
JJ
4075 || TREE_THIS_VOLATILE (etype)
4076 || VECTOR_TYPE_P (etype)
4077 || TREE_CODE (etype) == COMPLEX_TYPE
4078 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
4079 might not preserve all the bits. See PR71716. */
4080 || SCALAR_FLOAT_TYPE_P (etype)
73a699ae
RS
4081 || maybe_ne (TYPE_PRECISION (etype),
4082 GET_MODE_BITSIZE (TYPE_MODE (etype))))
849a76a5
JJ
4083 return false;
4084
4085 tree weak = gimple_call_arg (stmt, 3);
4086 if (!integer_zerop (weak) && !integer_onep (weak))
4087 return false;
4088
4089 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4090 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4091 machine_mode mode = TYPE_MODE (itype);
4092
4093 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
4094 == CODE_FOR_nothing
4095 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
4096 return false;
4097
cf098191 4098 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
849a76a5
JJ
4099 return false;
4100
4101 return true;
4102}
4103
4104/* Fold
4105 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
4106 into
4107 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
4108 i = IMAGPART_EXPR <t>;
4109 r = (_Bool) i;
4110 e = REALPART_EXPR <t>; */
4111
4112void
4113fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
4114{
4115 gimple *stmt = gsi_stmt (*gsi);
4116 tree fndecl = gimple_call_fndecl (stmt);
4117 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4118 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4119 tree ctype = build_complex_type (itype);
4120 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
cc195d46
JJ
4121 bool throws = false;
4122 edge e = NULL;
849a76a5
JJ
4123 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4124 expected);
4125 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4126 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4127 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
4128 {
4129 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4130 build1 (VIEW_CONVERT_EXPR, itype,
4131 gimple_assign_lhs (g)));
4132 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4133 }
4134 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4135 + int_size_in_bytes (itype);
4136 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4137 gimple_call_arg (stmt, 0),
4138 gimple_assign_lhs (g),
4139 gimple_call_arg (stmt, 2),
4140 build_int_cst (integer_type_node, flag),
4141 gimple_call_arg (stmt, 4),
4142 gimple_call_arg (stmt, 5));
4143 tree lhs = make_ssa_name (ctype);
4144 gimple_call_set_lhs (g, lhs);
4145 gimple_set_vdef (g, gimple_vdef (stmt));
4146 gimple_set_vuse (g, gimple_vuse (stmt));
4147 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
cc195d46 4148 tree oldlhs = gimple_call_lhs (stmt);
36bbc05d 4149 if (stmt_can_throw_internal (cfun, stmt))
cc195d46
JJ
4150 {
4151 throws = true;
4152 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4153 }
4154 gimple_call_set_nothrow (as_a <gcall *> (g),
4155 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4156 gimple_call_set_lhs (stmt, NULL_TREE);
4157 gsi_replace (gsi, g, true);
4158 if (oldlhs)
849a76a5 4159 {
849a76a5
JJ
4160 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4161 build1 (IMAGPART_EXPR, itype, lhs));
cc195d46
JJ
4162 if (throws)
4163 {
4164 gsi_insert_on_edge_immediate (e, g);
4165 *gsi = gsi_for_stmt (g);
4166 }
4167 else
4168 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4169 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4170 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5 4171 }
849a76a5
JJ
4172 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4173 build1 (REALPART_EXPR, itype, lhs));
cc195d46
JJ
4174 if (throws && oldlhs == NULL_TREE)
4175 {
4176 gsi_insert_on_edge_immediate (e, g);
4177 *gsi = gsi_for_stmt (g);
4178 }
4179 else
4180 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5
JJ
4181 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4182 {
4183 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4184 VIEW_CONVERT_EXPR,
4185 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4186 gimple_assign_lhs (g)));
4187 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4188 }
4189 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4190 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4191 *gsi = gsiret;
4192}
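
/* Illustrative example of a call the transformation above targets
   (hypothetical user code, not from the original sources):

     bool ok = __atomic_compare_exchange_n (p, &expected, desired, false,
                                            __ATOMIC_SEQ_CST,
                                            __ATOMIC_SEQ_CST);

   After the rewrite EXPECTED is no longer passed by address, so it may stop
   being addressable and can then be promoted to a register.  */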
4193
1304953e
JJ
4194/* Return true if the result of ARG0 CODE ARG1, computed in infinite
4195 signed precision, doesn't fit into TYPE. The overflow test is done
4196 regardless of -fwrapv, and even for unsigned types. */
4197
4198bool
4199arith_overflowed_p (enum tree_code code, const_tree type,
4200 const_tree arg0, const_tree arg1)
4201{
1304953e
JJ
4202 widest2_int warg0 = widest2_int_cst (arg0);
4203 widest2_int warg1 = widest2_int_cst (arg1);
4204 widest2_int wres;
4205 switch (code)
4206 {
4207 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4208 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4209 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4210 default: gcc_unreachable ();
4211 }
4212 signop sign = TYPE_SIGN (type);
4213 if (sign == UNSIGNED && wi::neg_p (wres))
4214 return true;
4215 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4216}
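
/* Illustrative examples (not from the original sources): for an 8-bit
   unsigned TYPE,

     arith_overflowed_p (PLUS_EXPR, type, 200, 100)   returns true
     arith_overflowed_p (MULT_EXPR, type, 15, 17)     returns false

   since 300 does not fit in 8 bits while 255 does.  */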
4217
cbdd87d4
RG
4218/* Attempt to fold a call statement referenced by the statement iterator GSI.
4219 The statement may be replaced by another statement, e.g., if the call
4220 simplifies to a constant value. Return true if any changes were made.
4221 It is assumed that the operands have been previously folded. */
4222
e021c122 4223static bool
ceeffab0 4224gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 4225{
538dd0b7 4226 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 4227 tree callee;
e021c122
RG
4228 bool changed = false;
4229 unsigned i;
cbdd87d4 4230
e021c122
RG
4231 /* Fold *& in call arguments. */
4232 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4233 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4234 {
4235 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4236 if (tmp)
4237 {
4238 gimple_call_set_arg (stmt, i, tmp);
4239 changed = true;
4240 }
4241 }
3b45a007
RG
4242
4243 /* Check for virtual calls that became direct calls. */
4244 callee = gimple_call_fn (stmt);
25583c4f 4245 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 4246 {
49c471e3
MJ
4247 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4248 {
450ad0cd
JH
4249 if (dump_file && virtual_method_call_p (callee)
4250 && !possible_polymorphic_call_target_p
6f8091fc
JH
4251 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4252 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
4253 {
4254 fprintf (dump_file,
a70e9985 4255 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
4256 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4257 fprintf (dump_file, " to ");
4258 print_generic_expr (dump_file, callee, TDF_SLIM);
4259 fprintf (dump_file, "\n");
4260 }
4261
49c471e3 4262 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
4263 changed = true;
4264 }
a70e9985 4265 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 4266 {
61dd6a2e
JH
4267 bool final;
4268 vec <cgraph_node *>targets
058d0a90 4269 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 4270 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 4271 {
a70e9985 4272 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
4273 if (dump_enabled_p ())
4274 {
4f5b9c80 4275 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
2b5f0895
XDL
4276 "folding virtual function call to %s\n",
4277 targets.length () == 1
4278 ? targets[0]->name ()
4279 : "__builtin_unreachable");
4280 }
61dd6a2e 4281 if (targets.length () == 1)
cf3e5a89 4282 {
18954840
JJ
4283 tree fndecl = targets[0]->decl;
4284 gimple_call_set_fndecl (stmt, fndecl);
cf3e5a89 4285 changed = true;
18954840
JJ
4286 /* If changing the call to __cxa_pure_virtual
4287 or similar noreturn function, adjust gimple_call_fntype
4288 too. */
865f7046 4289 if (gimple_call_noreturn_p (stmt)
18954840
JJ
4290 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4291 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4292 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4293 == void_type_node))
4294 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
a70e9985 4295 /* If the call becomes noreturn, remove the lhs. */
3cee7e4e
MP
4296 if (lhs
4297 && gimple_call_noreturn_p (stmt)
18954840 4298 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
abd3a68c 4299 || should_remove_lhs_p (lhs)))
a70e9985
JJ
4300 {
4301 if (TREE_CODE (lhs) == SSA_NAME)
4302 {
b731b390 4303 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 4304 tree def = get_or_create_ssa_default_def (cfun, var);
355fe088 4305 gimple *new_stmt = gimple_build_assign (lhs, def);
a70e9985
JJ
4306 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4307 }
4308 gimple_call_set_lhs (stmt, NULL_TREE);
4309 }
0b986c6a 4310 maybe_remove_unused_call_args (cfun, stmt);
cf3e5a89 4311 }
a70e9985 4312 else
cf3e5a89
JJ
4313 {
4314 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
355fe088 4315 gimple *new_stmt = gimple_build_call (fndecl, 0);
cf3e5a89 4316 gimple_set_location (new_stmt, gimple_location (stmt));
2da6996c
RB
4317 /* If the call had a SSA name as lhs morph that into
4318 an uninitialized value. */
a70e9985
JJ
4319 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4320 {
b731b390 4321 tree var = create_tmp_var (TREE_TYPE (lhs));
2da6996c
RB
4322 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4323 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4324 set_ssa_default_def (cfun, var, lhs);
42e52a51 4325 }
2da6996c
RB
4326 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4327 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4328 gsi_replace (gsi, new_stmt, false);
cf3e5a89
JJ
4329 return true;
4330 }
e021c122 4331 }
49c471e3 4332 }
e021c122 4333 }
49c471e3 4334
f2d3d07e
RH
4335 /* Check for indirect calls that became direct calls, and then
4336 no longer require a static chain. */
4337 if (gimple_call_chain (stmt))
4338 {
4339 tree fn = gimple_call_fndecl (stmt);
4340 if (fn && !DECL_STATIC_CHAIN (fn))
4341 {
4342 gimple_call_set_chain (stmt, NULL);
4343 changed = true;
4344 }
4345 else
4346 {
4347 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4348 if (tmp)
4349 {
4350 gimple_call_set_chain (stmt, tmp);
4351 changed = true;
4352 }
4353 }
4354 }
4355
e021c122
RG
4356 if (inplace)
4357 return changed;
4358
4359 /* Check for builtins that CCP can handle using information not
4360 available in the generic fold routines. */
fef5a0d9
RB
4361 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4362 {
4363 if (gimple_fold_builtin (gsi))
4364 changed = true;
4365 }
4366 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 4367 {
ea679d55 4368 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 4369 }
368b454d 4370 else if (gimple_call_internal_p (stmt))
ed9c79e1 4371 {
368b454d
JJ
4372 enum tree_code subcode = ERROR_MARK;
4373 tree result = NULL_TREE;
1304953e
JJ
4374 bool cplx_result = false;
4375 tree overflow = NULL_TREE;
368b454d
JJ
4376 switch (gimple_call_internal_fn (stmt))
4377 {
4378 case IFN_BUILTIN_EXPECT:
4379 result = fold_builtin_expect (gimple_location (stmt),
4380 gimple_call_arg (stmt, 0),
4381 gimple_call_arg (stmt, 1),
1e9168b2
ML
4382 gimple_call_arg (stmt, 2),
4383 NULL_TREE);
368b454d 4384 break;
0e82f089 4385 case IFN_UBSAN_OBJECT_SIZE:
ca1150f0
JJ
4386 {
4387 tree offset = gimple_call_arg (stmt, 1);
4388 tree objsize = gimple_call_arg (stmt, 2);
4389 if (integer_all_onesp (objsize)
4390 || (TREE_CODE (offset) == INTEGER_CST
4391 && TREE_CODE (objsize) == INTEGER_CST
4392 && tree_int_cst_le (offset, objsize)))
4393 {
4394 replace_call_with_value (gsi, NULL_TREE);
4395 return true;
4396 }
4397 }
4398 break;
4399 case IFN_UBSAN_PTR:
4400 if (integer_zerop (gimple_call_arg (stmt, 1)))
0e82f089 4401 {
ca1150f0 4402 replace_call_with_value (gsi, NULL_TREE);
0e82f089
MP
4403 return true;
4404 }
4405 break;
ca1150f0
JJ
4406 case IFN_UBSAN_BOUNDS:
4407 {
4408 tree index = gimple_call_arg (stmt, 1);
4409 tree bound = gimple_call_arg (stmt, 2);
4410 if (TREE_CODE (index) == INTEGER_CST
4411 && TREE_CODE (bound) == INTEGER_CST)
4412 {
4413 index = fold_convert (TREE_TYPE (bound), index);
4414 if (TREE_CODE (index) == INTEGER_CST
4415 && tree_int_cst_le (index, bound))
4416 {
4417 replace_call_with_value (gsi, NULL_TREE);
4418 return true;
4419 }
4420 }
4421 }
4422 break;
451e8dae
NS
4423 case IFN_GOACC_DIM_SIZE:
4424 case IFN_GOACC_DIM_POS:
4425 result = fold_internal_goacc_dim (stmt);
4426 break;
368b454d
JJ
4427 case IFN_UBSAN_CHECK_ADD:
4428 subcode = PLUS_EXPR;
4429 break;
4430 case IFN_UBSAN_CHECK_SUB:
4431 subcode = MINUS_EXPR;
4432 break;
4433 case IFN_UBSAN_CHECK_MUL:
4434 subcode = MULT_EXPR;
4435 break;
1304953e
JJ
4436 case IFN_ADD_OVERFLOW:
4437 subcode = PLUS_EXPR;
4438 cplx_result = true;
4439 break;
4440 case IFN_SUB_OVERFLOW:
4441 subcode = MINUS_EXPR;
4442 cplx_result = true;
4443 break;
4444 case IFN_MUL_OVERFLOW:
4445 subcode = MULT_EXPR;
4446 cplx_result = true;
4447 break;
368b454d
JJ
4448 default:
4449 break;
4450 }
4451 if (subcode != ERROR_MARK)
4452 {
4453 tree arg0 = gimple_call_arg (stmt, 0);
4454 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
4455 tree type = TREE_TYPE (arg0);
4456 if (cplx_result)
4457 {
4458 tree lhs = gimple_call_lhs (stmt);
4459 if (lhs == NULL_TREE)
4460 type = NULL_TREE;
4461 else
4462 type = TREE_TYPE (TREE_TYPE (lhs));
4463 }
4464 if (type == NULL_TREE)
4465 ;
368b454d 4466 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
4467 else if (integer_zerop (arg1))
4468 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
4469 /* x = 0 + y; x = 0 * y; */
4470 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 4471 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
4472 /* x = y - y; */
4473 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 4474 result = integer_zero_node;
368b454d 4475 /* x = y * 1; x = 1 * y; */
1304953e
JJ
4476 else if (subcode == MULT_EXPR && integer_onep (arg1))
4477 result = arg0;
4478 else if (subcode == MULT_EXPR && integer_onep (arg0))
4479 result = arg1;
4480 else if (TREE_CODE (arg0) == INTEGER_CST
4481 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 4482 {
1304953e
JJ
4483 if (cplx_result)
4484 result = int_const_binop (subcode, fold_convert (type, arg0),
4485 fold_convert (type, arg1));
4486 else
4487 result = int_const_binop (subcode, arg0, arg1);
4488 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4489 {
4490 if (cplx_result)
4491 overflow = build_one_cst (type);
4492 else
4493 result = NULL_TREE;
4494 }
4495 }
4496 if (result)
4497 {
4498 if (result == integer_zero_node)
4499 result = build_zero_cst (type);
4500 else if (cplx_result && TREE_TYPE (result) != type)
4501 {
4502 if (TREE_CODE (result) == INTEGER_CST)
4503 {
4504 if (arith_overflowed_p (PLUS_EXPR, type, result,
4505 integer_zero_node))
4506 overflow = build_one_cst (type);
4507 }
4508 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4509 && TYPE_UNSIGNED (type))
4510 || (TYPE_PRECISION (type)
4511 < (TYPE_PRECISION (TREE_TYPE (result))
4512 + (TYPE_UNSIGNED (TREE_TYPE (result))
4513 && !TYPE_UNSIGNED (type)))))
4514 result = NULL_TREE;
4515 if (result)
4516 result = fold_convert (type, result);
4517 }
368b454d
JJ
4518 }
4519 }
1304953e 4520
ed9c79e1
JJ
4521 if (result)
4522 {
1304953e
JJ
4523 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4524 result = drop_tree_overflow (result);
4525 if (cplx_result)
4526 {
4527 if (overflow == NULL_TREE)
4528 overflow = build_zero_cst (TREE_TYPE (result));
4529 tree ctype = build_complex_type (TREE_TYPE (result));
4530 if (TREE_CODE (result) == INTEGER_CST
4531 && TREE_CODE (overflow) == INTEGER_CST)
4532 result = build_complex (ctype, result, overflow);
4533 else
4534 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4535 ctype, result, overflow);
4536 }
ed9c79e1
JJ
4537 if (!update_call_from_tree (gsi, result))
4538 gimplify_and_update_call_from_tree (gsi, result);
4539 changed = true;
4540 }
4541 }
3b45a007 4542
e021c122 4543 return changed;
cbdd87d4
RG
4544}
4545
e0ee10ed 4546
89a79e96
RB
4547/* Return true if NAME has a use on STMT. */
4548
4549static bool
355fe088 4550has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
4551{
4552 imm_use_iterator iter;
4553 use_operand_p use_p;
4554 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4555 if (USE_STMT (use_p) == stmt)
4556 return true;
4557 return false;
4558}
4559
e0ee10ed
RB
4560/* Worker for fold_stmt_1 dispatch to pattern based folding with
4561 gimple_simplify.
4562
4563 Replaces *GSI with the simplification result in *RES_OP
4564 and the associated statements in *SEQ. Does the replacement
4565 according to INPLACE and returns true if the operation succeeded. */
4566
4567static bool
4568replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5d75ad95 4569 gimple_match_op *res_op,
e0ee10ed
RB
4570 gimple_seq *seq, bool inplace)
4571{
355fe088 4572 gimple *stmt = gsi_stmt (*gsi);
5d75ad95
RS
4573 tree *ops = res_op->ops;
4574 unsigned int num_ops = res_op->num_ops;
e0ee10ed
RB
4575
4576 /* Play safe and do not allow abnormals to be mentioned in
89a79e96
RB
4577 newly created statements. See also maybe_push_res_to_seq.
4578 As an exception allow such uses if there was a use of the
4579 same SSA name on the old stmt. */
5d75ad95
RS
4580 for (unsigned int i = 0; i < num_ops; ++i)
4581 if (TREE_CODE (ops[i]) == SSA_NAME
4582 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4583 && !has_use_on_stmt (ops[i], stmt))
4584 return false;
4585
4586 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4587 for (unsigned int i = 0; i < 2; ++i)
4588 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4589 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4590 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4591 return false;
e0ee10ed 4592
fec40d06
RS
4593 /* Don't insert new statements when INPLACE is true, even if we could
4594 reuse STMT for the final statement. */
4595 if (inplace && !gimple_seq_empty_p (*seq))
4596 return false;
4597
538dd0b7 4598 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
e0ee10ed 4599 {
5d75ad95
RS
4600 gcc_assert (res_op->code.is_tree_code ());
4601 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
e0ee10ed
RB
4602 /* GIMPLE_CONDs condition may not throw. */
4603 && (!flag_exceptions
4604 || !cfun->can_throw_non_call_exceptions
5d75ad95 4605 || !operation_could_trap_p (res_op->code,
e0ee10ed
RB
4606 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4607 false, NULL_TREE)))
5d75ad95
RS
4608 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4609 else if (res_op->code == SSA_NAME)
538dd0b7 4610 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
e0ee10ed 4611 build_zero_cst (TREE_TYPE (ops[0])));
5d75ad95 4612 else if (res_op->code == INTEGER_CST)
e0ee10ed
RB
4613 {
4614 if (integer_zerop (ops[0]))
538dd0b7 4615 gimple_cond_make_false (cond_stmt);
e0ee10ed 4616 else
538dd0b7 4617 gimple_cond_make_true (cond_stmt);
e0ee10ed
RB
4618 }
4619 else if (!inplace)
4620 {
5d75ad95 4621 tree res = maybe_push_res_to_seq (res_op, seq);
e0ee10ed
RB
4622 if (!res)
4623 return false;
538dd0b7 4624 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
e0ee10ed
RB
4625 build_zero_cst (TREE_TYPE (res)));
4626 }
4627 else
4628 return false;
4629 if (dump_file && (dump_flags & TDF_DETAILS))
4630 {
4631 fprintf (dump_file, "gimple_simplified to ");
4632 if (!gimple_seq_empty_p (*seq))
4633 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4634 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4635 0, TDF_SLIM);
4636 }
4637 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4638 return true;
4639 }
4640 else if (is_gimple_assign (stmt)
5d75ad95 4641 && res_op->code.is_tree_code ())
e0ee10ed
RB
4642 {
4643 if (!inplace
5d75ad95 4644 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
e0ee10ed 4645 {
5d75ad95
RS
4646 maybe_build_generic_op (res_op);
4647 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4648 res_op->op_or_null (0),
4649 res_op->op_or_null (1),
4650 res_op->op_or_null (2));
e0ee10ed
RB
4651 if (dump_file && (dump_flags & TDF_DETAILS))
4652 {
4653 fprintf (dump_file, "gimple_simplified to ");
4654 if (!gimple_seq_empty_p (*seq))
4655 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4656 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4657 0, TDF_SLIM);
4658 }
4659 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4660 return true;
4661 }
4662 }
5d75ad95
RS
4663 else if (res_op->code.is_fn_code ()
4664 && gimple_call_combined_fn (stmt) == res_op->code)
37d486ab 4665 {
5d75ad95
RS
4666 gcc_assert (num_ops == gimple_call_num_args (stmt));
4667 for (unsigned int i = 0; i < num_ops; ++i)
4668 gimple_call_set_arg (stmt, i, ops[i]);
fec40d06
RS
4669 if (dump_file && (dump_flags & TDF_DETAILS))
4670 {
4671 fprintf (dump_file, "gimple_simplified to ");
4672 if (!gimple_seq_empty_p (*seq))
4673 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4674 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4675 }
4676 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
37d486ab
RB
4677 return true;
4678 }
e0ee10ed
RB
4679 else if (!inplace)
4680 {
4681 if (gimple_has_lhs (stmt))
4682 {
4683 tree lhs = gimple_get_lhs (stmt);
5d75ad95 4684 if (!maybe_push_res_to_seq (res_op, seq, lhs))
de665bbd 4685 return false;
e0ee10ed
RB
4686 if (dump_file && (dump_flags & TDF_DETAILS))
4687 {
4688 fprintf (dump_file, "gimple_simplified to ");
4689 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4690 }
4691 gsi_replace_with_seq_vops (gsi, *seq);
4692 return true;
4693 }
4694 else
4695 gcc_unreachable ();
4696 }
4697
4698 return false;
4699}
4700
040292e7
RB
4701/* Canonicalize MEM_REFs invariant address operand after propagation. */
4702
4703static bool
4704maybe_canonicalize_mem_ref_addr (tree *t)
4705{
4706 bool res = false;
4707
4708 if (TREE_CODE (*t) == ADDR_EXPR)
4709 t = &TREE_OPERAND (*t, 0);
4710
f17a223d
RB
4711 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4712 generic vector extension. The actual vector referenced is
4713 view-converted to an array type for this purpose. If the index
4714 is constant the canonical representation in the middle-end is a
4715 BIT_FIELD_REF so re-write the former to the latter here. */
4716 if (TREE_CODE (*t) == ARRAY_REF
4717 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4718 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4719 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4720 {
4721 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4722 if (VECTOR_TYPE_P (vtype))
4723 {
4724 tree low = array_ref_low_bound (*t);
4725 if (TREE_CODE (low) == INTEGER_CST)
4726 {
4727 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4728 {
4729 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4730 wi::to_widest (low));
4731 idx = wi::mul (idx, wi::to_widest
4732 (TYPE_SIZE (TREE_TYPE (*t))));
4733 widest_int ext
4734 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4735 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4736 {
4737 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4738 TREE_TYPE (*t),
4739 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4740 TYPE_SIZE (TREE_TYPE (*t)),
92e29a5e 4741 wide_int_to_tree (bitsizetype, idx));
f17a223d
RB
4742 res = true;
4743 }
4744 }
4745 }
4746 }
4747 }
4748
040292e7
RB
4749 while (handled_component_p (*t))
4750 t = &TREE_OPERAND (*t, 0);
4751
4752 /* Canonicalize MEM [&foo.bar, 0], which appears after propagation
4753 of invariant addresses into an SSA name MEM_REF address. */
4754 if (TREE_CODE (*t) == MEM_REF
4755 || TREE_CODE (*t) == TARGET_MEM_REF)
4756 {
4757 tree addr = TREE_OPERAND (*t, 0);
4758 if (TREE_CODE (addr) == ADDR_EXPR
4759 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4760 || handled_component_p (TREE_OPERAND (addr, 0))))
4761 {
4762 tree base;
a90c8804 4763 poly_int64 coffset;
040292e7
RB
4764 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4765 &coffset);
4766 if (!base)
4767 gcc_unreachable ();
4768
4769 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4770 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4771 TREE_OPERAND (*t, 1),
4772 size_int (coffset));
4773 res = true;
4774 }
4775 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4776 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4777 }
4778
4779 /* Canonicalize back MEM_REFs to plain reference trees if the object
4780 accessed is a decl that has the same access semantics as the MEM_REF. */
4781 if (TREE_CODE (*t) == MEM_REF
4782 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
f3dccf50
RB
4783 && integer_zerop (TREE_OPERAND (*t, 1))
4784 && MR_DEPENDENCE_CLIQUE (*t) == 0)
040292e7
RB
4785 {
4786 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4787 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4788 if (/* Same volatile qualification. */
4789 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4790 /* Same TBAA behavior with -fstrict-aliasing. */
4791 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4792 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4793 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4794 /* Same alignment. */
4795 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4796 /* We have to look out here to not drop a required conversion
4797 from the rhs to the lhs if *t appears on the lhs or vice-versa
4798 if it appears on the rhs. Thus require strict type
4799 compatibility. */
4800 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4801 {
4802 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4803 res = true;
4804 }
4805 }
4806
4807 /* Canonicalize TARGET_MEM_REF in particular with respect to
4808 the indexes becoming constant. */
4809 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4810 {
4811 tree tem = maybe_fold_tmr (*t);
4812 if (tem)
4813 {
4814 *t = tem;
4815 res = true;
4816 }
4817 }
4818
4819 return res;
4820}
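
/* Illustrative examples of the canonicalizations above (not from the
   original sources; names are hypothetical):

     MEM[(int *)&a, 0]  ->  a      when A is an int variable with matching
                                   volatility, TBAA type and alignment
     v[2], written as an ARRAY_REF of a VIEW_CONVERT_EXPR of a vector V of
     four ints          ->  BIT_FIELD_REF <v, 32, 64>  */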
4821
cbdd87d4
RG
4822/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4823 distinguishes both cases. */
4824
4825static bool
e0ee10ed 4826fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
cbdd87d4
RG
4827{
4828 bool changed = false;
355fe088 4829 gimple *stmt = gsi_stmt (*gsi);
a8b85ce9 4830 bool nowarning = gimple_no_warning_p (stmt);
cbdd87d4 4831 unsigned i;
a8b85ce9 4832 fold_defer_overflow_warnings ();
cbdd87d4 4833
040292e7
RB
4834 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4835 after propagation.
4836 ??? This shouldn't be done in generic folding but in the
4837 propagation helpers which also know whether an address was
89a79e96
RB
4838 propagated.
4839 Also canonicalize operand order. */
040292e7
RB
4840 switch (gimple_code (stmt))
4841 {
4842 case GIMPLE_ASSIGN:
4843 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4844 {
4845 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4846 if ((REFERENCE_CLASS_P (*rhs)
4847 || TREE_CODE (*rhs) == ADDR_EXPR)
4848 && maybe_canonicalize_mem_ref_addr (rhs))
4849 changed = true;
4850 tree *lhs = gimple_assign_lhs_ptr (stmt);
4851 if (REFERENCE_CLASS_P (*lhs)
4852 && maybe_canonicalize_mem_ref_addr (lhs))
4853 changed = true;
4854 }
89a79e96
RB
4855 else
4856 {
4857 /* Canonicalize operand order. */
4858 enum tree_code code = gimple_assign_rhs_code (stmt);
4859 if (TREE_CODE_CLASS (code) == tcc_comparison
4860 || commutative_tree_code (code)
4861 || commutative_ternary_tree_code (code))
4862 {
4863 tree rhs1 = gimple_assign_rhs1 (stmt);
4864 tree rhs2 = gimple_assign_rhs2 (stmt);
14e72812 4865 if (tree_swap_operands_p (rhs1, rhs2))
89a79e96
RB
4866 {
4867 gimple_assign_set_rhs1 (stmt, rhs2);
4868 gimple_assign_set_rhs2 (stmt, rhs1);
4869 if (TREE_CODE_CLASS (code) == tcc_comparison)
4870 gimple_assign_set_rhs_code (stmt,
4871 swap_tree_comparison (code));
4872 changed = true;
4873 }
4874 }
4875 }
040292e7
RB
4876 break;
4877 case GIMPLE_CALL:
4878 {
4879 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4880 {
4881 tree *arg = gimple_call_arg_ptr (stmt, i);
4882 if (REFERENCE_CLASS_P (*arg)
4883 && maybe_canonicalize_mem_ref_addr (arg))
4884 changed = true;
4885 }
4886 tree *lhs = gimple_call_lhs_ptr (stmt);
4887 if (*lhs
4888 && REFERENCE_CLASS_P (*lhs)
4889 && maybe_canonicalize_mem_ref_addr (lhs))
4890 changed = true;
4891 break;
4892 }
4893 case GIMPLE_ASM:
4894 {
538dd0b7
DM
4895 gasm *asm_stmt = as_a <gasm *> (stmt);
4896 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
040292e7 4897 {
538dd0b7 4898 tree link = gimple_asm_output_op (asm_stmt, i);
040292e7
RB
4899 tree op = TREE_VALUE (link);
4900 if (REFERENCE_CLASS_P (op)
4901 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4902 changed = true;
4903 }
538dd0b7 4904 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
040292e7 4905 {
538dd0b7 4906 tree link = gimple_asm_input_op (asm_stmt, i);
040292e7
RB
4907 tree op = TREE_VALUE (link);
4908 if ((REFERENCE_CLASS_P (op)
4909 || TREE_CODE (op) == ADDR_EXPR)
4910 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4911 changed = true;
4912 }
4913 }
4914 break;
4915 case GIMPLE_DEBUG:
4916 if (gimple_debug_bind_p (stmt))
4917 {
4918 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4919 if (*val
4920 && (REFERENCE_CLASS_P (*val)
4921 || TREE_CODE (*val) == ADDR_EXPR)
4922 && maybe_canonicalize_mem_ref_addr (val))
4923 changed = true;
4924 }
4925 break;
89a79e96
RB
4926 case GIMPLE_COND:
4927 {
4928 /* Canonicalize operand order. */
4929 tree lhs = gimple_cond_lhs (stmt);
4930 tree rhs = gimple_cond_rhs (stmt);
14e72812 4931 if (tree_swap_operands_p (lhs, rhs))
89a79e96
RB
4932 {
4933 gcond *gc = as_a <gcond *> (stmt);
4934 gimple_cond_set_lhs (gc, rhs);
4935 gimple_cond_set_rhs (gc, lhs);
4936 gimple_cond_set_code (gc,
4937 swap_tree_comparison (gimple_cond_code (gc)));
4938 changed = true;
4939 }
4940 }
040292e7
RB
4941 default:;
4942 }
4943
e0ee10ed
RB
4944 /* Dispatch to pattern-based folding. */
4945 if (!inplace
4946 || is_gimple_assign (stmt)
4947 || gimple_code (stmt) == GIMPLE_COND)
4948 {
4949 gimple_seq seq = NULL;
5d75ad95
RS
4950 gimple_match_op res_op;
4951 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
0ff093d8 4952 valueize, valueize))
e0ee10ed 4953 {
5d75ad95 4954 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
e0ee10ed
RB
4955 changed = true;
4956 else
4957 gimple_seq_discard (seq);
4958 }
4959 }
4960
4961 stmt = gsi_stmt (*gsi);
4962
cbdd87d4
RG
4963 /* Fold the main computation performed by the statement. */
4964 switch (gimple_code (stmt))
4965 {
4966 case GIMPLE_ASSIGN:
4967 {
819ec64c
RB
 4968 /* For boolean-typed X, try to canonicalize the comparisons
 4969 X == 0, X == 1, X != 0, and X != 1. */
4970 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4971 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5fbcc0ed 4972 {
819ec64c
RB
4973 tree lhs = gimple_assign_lhs (stmt);
4974 tree op1 = gimple_assign_rhs1 (stmt);
4975 tree op2 = gimple_assign_rhs2 (stmt);
4976 tree type = TREE_TYPE (op1);
4977
4978 /* Check whether the comparison operands are of the same boolean
 4979 type as the result type.
 4980 Check that the second operand is an integer constant with value
 4981 one or zero. */
4982 if (TREE_CODE (op2) == INTEGER_CST
4983 && (integer_zerop (op2) || integer_onep (op2))
4984 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4985 {
4986 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4987 bool is_logical_not = false;
4988
 4989 /* X == 0 and X != 1 is a logical-not of X;
 4990 X == 1 and X != 0 is X itself. */
4991 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4992 || (cmp_code == NE_EXPR && integer_onep (op2)))
4993 is_logical_not = true;
4994
4995 if (is_logical_not == false)
4996 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
 4997 /* Only for X of one-bit precision is the transformation
 4998 !X -> ~X valid. */
4999 else if (TYPE_PRECISION (type) == 1)
5000 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
5001 /* Otherwise we use !X -> X ^ 1. */
5002 else
5003 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
5004 build_int_cst (type, 1));
5005 changed = true;
5006 break;
5007 }
5fbcc0ed 5008 }
819ec64c
RB
5009
5010 unsigned old_num_ops = gimple_num_ops (stmt);
5011 tree lhs = gimple_assign_lhs (stmt);
5012 tree new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
5013 if (new_rhs
5014 && !useless_type_conversion_p (TREE_TYPE (lhs),
5015 TREE_TYPE (new_rhs)))
5016 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
5017 if (new_rhs
5018 && (!inplace
5019 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
5020 {
5021 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
5022 changed = true;
5023 }
5024 break;
5025 }
5026
cbdd87d4 5027 case GIMPLE_CALL:
ceeffab0 5028 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
5029 break;
5030
5031 case GIMPLE_ASM:
5032 /* Fold *& in asm operands. */
38384150 5033 {
538dd0b7 5034 gasm *asm_stmt = as_a <gasm *> (stmt);
38384150
JJ
5035 size_t noutputs;
5036 const char **oconstraints;
5037 const char *constraint;
5038 bool allows_mem, allows_reg;
5039
538dd0b7 5040 noutputs = gimple_asm_noutputs (asm_stmt);
38384150
JJ
5041 oconstraints = XALLOCAVEC (const char *, noutputs);
5042
538dd0b7 5043 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
38384150 5044 {
538dd0b7 5045 tree link = gimple_asm_output_op (asm_stmt, i);
38384150
JJ
5046 tree op = TREE_VALUE (link);
5047 oconstraints[i]
5048 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5049 if (REFERENCE_CLASS_P (op)
5050 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
5051 {
5052 TREE_VALUE (link) = op;
5053 changed = true;
5054 }
5055 }
538dd0b7 5056 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
38384150 5057 {
538dd0b7 5058 tree link = gimple_asm_input_op (asm_stmt, i);
38384150
JJ
5059 tree op = TREE_VALUE (link);
5060 constraint
5061 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5062 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5063 oconstraints, &allows_mem, &allows_reg);
5064 if (REFERENCE_CLASS_P (op)
5065 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
5066 != NULL_TREE)
5067 {
5068 TREE_VALUE (link) = op;
5069 changed = true;
5070 }
5071 }
5072 }
cbdd87d4
RG
5073 break;
5074
bd422c4a
RG
5075 case GIMPLE_DEBUG:
5076 if (gimple_debug_bind_p (stmt))
5077 {
5078 tree val = gimple_debug_bind_get_value (stmt);
5079 if (val
5080 && REFERENCE_CLASS_P (val))
5081 {
5082 tree tem = maybe_fold_reference (val, false);
5083 if (tem)
5084 {
5085 gimple_debug_bind_set_value (stmt, tem);
5086 changed = true;
5087 }
5088 }
3e888a5e
RG
5089 else if (val
5090 && TREE_CODE (val) == ADDR_EXPR)
5091 {
5092 tree ref = TREE_OPERAND (val, 0);
5093 tree tem = maybe_fold_reference (ref, false);
5094 if (tem)
5095 {
5096 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
5097 gimple_debug_bind_set_value (stmt, tem);
5098 changed = true;
5099 }
5100 }
bd422c4a
RG
5101 }
5102 break;
5103
cfe3d653
PK
5104 case GIMPLE_RETURN:
5105 {
5106 greturn *ret_stmt = as_a<greturn *> (stmt);
5107 tree ret = gimple_return_retval(ret_stmt);
5108
5109 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
5110 {
5111 tree val = valueize (ret);
1af928db
RB
5112 if (val && val != ret
5113 && may_propagate_copy (ret, val))
cfe3d653
PK
5114 {
5115 gimple_return_set_retval (ret_stmt, val);
5116 changed = true;
5117 }
5118 }
5119 }
5120 break;
5121
cbdd87d4
RG
5122 default:;
5123 }
5124
5125 stmt = gsi_stmt (*gsi);
5126
37376165
RB
5127 /* Fold *& on the lhs. */
5128 if (gimple_has_lhs (stmt))
cbdd87d4
RG
5129 {
5130 tree lhs = gimple_get_lhs (stmt);
5131 if (lhs && REFERENCE_CLASS_P (lhs))
5132 {
5133 tree new_lhs = maybe_fold_reference (lhs, true);
5134 if (new_lhs)
5135 {
5136 gimple_set_lhs (stmt, new_lhs);
5137 changed = true;
5138 }
5139 }
5140 }
5141
a8b85ce9 5142 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
cbdd87d4
RG
5143 return changed;
5144}
5145
e0ee10ed
RB
 5146/* Valueization callback that ends up not following SSA edges. */
5147
5148tree
5149no_follow_ssa_edges (tree)
5150{
5151 return NULL_TREE;
5152}
5153
45cc9f96
RB
5154/* Valueization callback that ends up following single-use SSA edges only. */
5155
5156tree
5157follow_single_use_edges (tree val)
5158{
5159 if (TREE_CODE (val) == SSA_NAME
5160 && !has_single_use (val))
5161 return NULL_TREE;
5162 return val;
5163}
5164
c566cc9f
RS
5165/* Valueization callback that follows all SSA edges. */
5166
5167tree
5168follow_all_ssa_edges (tree val)
5169{
5170 return val;
5171}
5172
cbdd87d4
RG
5173/* Fold the statement pointed to by GSI. In some cases, this function may
5174 replace the whole statement with a new one. Returns true iff folding
5175 makes any changes.
5176 The statement pointed to by GSI should be in valid gimple form but may
 5177 be in an unfolded state resulting from, for example, constant propagation
5178 which can produce *&x = 0. */
5179
5180bool
5181fold_stmt (gimple_stmt_iterator *gsi)
5182{
e0ee10ed
RB
5183 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5184}
5185
5186bool
5187fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5188{
5189 return fold_stmt_1 (gsi, false, valueize);
cbdd87d4
RG
5190}
5191
59401b92 5192/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
5193 *&x created by constant propagation are handled. The statement cannot
5194 be replaced with a new one. Return true if the statement was
5195 changed, false otherwise.
59401b92 5196 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
 5197 be in an unfolded state resulting from, for example, constant propagation
5198 which can produce *&x = 0. */
5199
5200bool
59401b92 5201fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 5202{
355fe088 5203 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 5204 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 5205 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
5206 return changed;
5207}
5208
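
/* A minimal usage sketch, not part of this file: fold every statement of a
   basic block the way a pass might, assuming BB is a basic_block of the
   current function.  fold_stmt may replace a statement outright; the
   valueization callback decides which SSA use-def edges gimple_simplify is
   allowed to look through.  */

static void
example_fold_bb (basic_block bb)
{
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    /* Restrict simplification to single-use SSA definitions.  */
    fold_stmt (&gsi, follow_single_use_edges);
}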
e89065a1
SL
5209/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5210 if EXPR is null or we don't know how.
5211 If non-null, the result always has boolean type. */
5212
5213static tree
5214canonicalize_bool (tree expr, bool invert)
5215{
5216 if (!expr)
5217 return NULL_TREE;
5218 else if (invert)
5219 {
5220 if (integer_nonzerop (expr))
5221 return boolean_false_node;
5222 else if (integer_zerop (expr))
5223 return boolean_true_node;
5224 else if (TREE_CODE (expr) == SSA_NAME)
5225 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5226 build_int_cst (TREE_TYPE (expr), 0));
98209db3 5227 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
5228 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5229 boolean_type_node,
5230 TREE_OPERAND (expr, 0),
5231 TREE_OPERAND (expr, 1));
5232 else
5233 return NULL_TREE;
5234 }
5235 else
5236 {
5237 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5238 return expr;
5239 if (integer_nonzerop (expr))
5240 return boolean_true_node;
5241 else if (integer_zerop (expr))
5242 return boolean_false_node;
5243 else if (TREE_CODE (expr) == SSA_NAME)
5244 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5245 build_int_cst (TREE_TYPE (expr), 0));
98209db3 5246 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
5247 return fold_build2 (TREE_CODE (expr),
5248 boolean_type_node,
5249 TREE_OPERAND (expr, 0),
5250 TREE_OPERAND (expr, 1));
5251 else
5252 return NULL_TREE;
5253 }
5254}
5255
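
/* For illustration only (a sketch, not from the original sources): with
   INVERT false, canonicalize_bool returns a boolean-typed expression
   unchanged, turns a nonzero integer constant of non-boolean type into
   boolean_true_node, and wraps an SSA name x_1 of non-boolean integral type
   as x_1 != 0.  With INVERT true, a nonzero constant becomes
   boolean_false_node, an SSA name x_1 becomes x_1 == 0, and an integer
   comparison such as a_2 < b_3 becomes a_2 >= b_3.  */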
5256/* Check to see if a boolean expression EXPR is logically equivalent to the
5257 comparison (OP1 CODE OP2). Check for various identities involving
5258 SSA_NAMEs. */
5259
5260static bool
5261same_bool_comparison_p (const_tree expr, enum tree_code code,
5262 const_tree op1, const_tree op2)
5263{
355fe088 5264 gimple *s;
e89065a1
SL
5265
5266 /* The obvious case. */
5267 if (TREE_CODE (expr) == code
5268 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5269 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5270 return true;
5271
5272 /* Check for comparing (name, name != 0) and the case where expr
5273 is an SSA_NAME with a definition matching the comparison. */
5274 if (TREE_CODE (expr) == SSA_NAME
5275 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5276 {
5277 if (operand_equal_p (expr, op1, 0))
5278 return ((code == NE_EXPR && integer_zerop (op2))
5279 || (code == EQ_EXPR && integer_nonzerop (op2)));
5280 s = SSA_NAME_DEF_STMT (expr);
5281 if (is_gimple_assign (s)
5282 && gimple_assign_rhs_code (s) == code
5283 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5284 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5285 return true;
5286 }
5287
5288 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5289 of name is a comparison, recurse. */
5290 if (TREE_CODE (op1) == SSA_NAME
5291 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5292 {
5293 s = SSA_NAME_DEF_STMT (op1);
5294 if (is_gimple_assign (s)
5295 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5296 {
5297 enum tree_code c = gimple_assign_rhs_code (s);
5298 if ((c == NE_EXPR && integer_zerop (op2))
5299 || (c == EQ_EXPR && integer_nonzerop (op2)))
5300 return same_bool_comparison_p (expr, c,
5301 gimple_assign_rhs1 (s),
5302 gimple_assign_rhs2 (s));
5303 if ((c == EQ_EXPR && integer_zerop (op2))
5304 || (c == NE_EXPR && integer_nonzerop (op2)))
5305 return same_bool_comparison_p (expr,
5306 invert_tree_comparison (c, false),
5307 gimple_assign_rhs1 (s),
5308 gimple_assign_rhs2 (s));
5309 }
5310 }
5311 return false;
5312}
5313
5314/* Check to see if two boolean expressions OP1 and OP2 are logically
5315 equivalent. */
5316
5317static bool
5318same_bool_result_p (const_tree op1, const_tree op2)
5319{
5320 /* Simple cases first. */
5321 if (operand_equal_p (op1, op2, 0))
5322 return true;
5323
5324 /* Check the cases where at least one of the operands is a comparison.
5325 These are a bit smarter than operand_equal_p in that they apply some
 5326 identities on SSA_NAMEs. */
98209db3 5327 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
5328 && same_bool_comparison_p (op1, TREE_CODE (op2),
5329 TREE_OPERAND (op2, 0),
5330 TREE_OPERAND (op2, 1)))
5331 return true;
98209db3 5332 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
5333 && same_bool_comparison_p (op2, TREE_CODE (op1),
5334 TREE_OPERAND (op1, 0),
5335 TREE_OPERAND (op1, 1)))
5336 return true;
5337
5338 /* Default case. */
5339 return false;
5340}
5341
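
/* A sketch of the identity this catches (the names are hypothetical):
   given a boolean SSA name a_1 defined by a_1 = (x_2 == 0),
   same_bool_result_p treats a_1 and the tree x_2 == 0 as logically
   equivalent, because same_bool_comparison_p follows a_1 to its defining
   assignment.  */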
5342/* Forward declarations for some mutually recursive functions. */
5343
5344static tree
5345and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5346 enum tree_code code2, tree op2a, tree op2b);
5347static tree
5348and_var_with_comparison (tree var, bool invert,
5349 enum tree_code code2, tree op2a, tree op2b);
5350static tree
355fe088 5351and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5352 enum tree_code code2, tree op2a, tree op2b);
5353static tree
5354or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5355 enum tree_code code2, tree op2a, tree op2b);
5356static tree
5357or_var_with_comparison (tree var, bool invert,
5358 enum tree_code code2, tree op2a, tree op2b);
5359static tree
355fe088 5360or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5361 enum tree_code code2, tree op2a, tree op2b);
5362
5363/* Helper function for and_comparisons_1: try to simplify the AND of the
5364 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5365 If INVERT is true, invert the value of the VAR before doing the AND.
 5366 Return NULL_TREE if we can't simplify this to a single expression. */
5367
5368static tree
5369and_var_with_comparison (tree var, bool invert,
5370 enum tree_code code2, tree op2a, tree op2b)
5371{
5372 tree t;
355fe088 5373 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5374
5375 /* We can only deal with variables whose definitions are assignments. */
5376 if (!is_gimple_assign (stmt))
5377 return NULL_TREE;
5378
5379 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5380 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5381 Then we only have to consider the simpler non-inverted cases. */
5382 if (invert)
5383 t = or_var_with_comparison_1 (stmt,
5384 invert_tree_comparison (code2, false),
5385 op2a, op2b);
5386 else
5387 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5388 return canonicalize_bool (t, invert);
5389}
5390
5391/* Try to simplify the AND of the ssa variable defined by the assignment
5392 STMT with the comparison specified by (OP2A CODE2 OP2B).
 5393 Return NULL_TREE if we can't simplify this to a single expression. */
5394
5395static tree
355fe088 5396and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5397 enum tree_code code2, tree op2a, tree op2b)
5398{
5399 tree var = gimple_assign_lhs (stmt);
5400 tree true_test_var = NULL_TREE;
5401 tree false_test_var = NULL_TREE;
5402 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5403
5404 /* Check for identities like (var AND (var == 0)) => false. */
5405 if (TREE_CODE (op2a) == SSA_NAME
5406 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5407 {
5408 if ((code2 == NE_EXPR && integer_zerop (op2b))
5409 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5410 {
5411 true_test_var = op2a;
5412 if (var == true_test_var)
5413 return var;
5414 }
5415 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5416 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5417 {
5418 false_test_var = op2a;
5419 if (var == false_test_var)
5420 return boolean_false_node;
5421 }
5422 }
5423
5424 /* If the definition is a comparison, recurse on it. */
5425 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5426 {
5427 tree t = and_comparisons_1 (innercode,
5428 gimple_assign_rhs1 (stmt),
5429 gimple_assign_rhs2 (stmt),
5430 code2,
5431 op2a,
5432 op2b);
5433 if (t)
5434 return t;
5435 }
5436
5437 /* If the definition is an AND or OR expression, we may be able to
5438 simplify by reassociating. */
eb9820c0
KT
5439 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5440 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
5441 {
5442 tree inner1 = gimple_assign_rhs1 (stmt);
5443 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 5444 gimple *s;
e89065a1
SL
5445 tree t;
5446 tree partial = NULL_TREE;
eb9820c0 5447 bool is_and = (innercode == BIT_AND_EXPR);
e89065a1
SL
5448
5449 /* Check for boolean identities that don't require recursive examination
5450 of inner1/inner2:
5451 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5452 inner1 AND (inner1 OR inner2) => inner1
5453 !inner1 AND (inner1 AND inner2) => false
5454 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5455 Likewise for similar cases involving inner2. */
5456 if (inner1 == true_test_var)
5457 return (is_and ? var : inner1);
5458 else if (inner2 == true_test_var)
5459 return (is_and ? var : inner2);
5460 else if (inner1 == false_test_var)
5461 return (is_and
5462 ? boolean_false_node
5463 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5464 else if (inner2 == false_test_var)
5465 return (is_and
5466 ? boolean_false_node
5467 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5468
5469 /* Next, redistribute/reassociate the AND across the inner tests.
5470 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5471 if (TREE_CODE (inner1) == SSA_NAME
5472 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5473 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5474 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5475 gimple_assign_rhs1 (s),
5476 gimple_assign_rhs2 (s),
5477 code2, op2a, op2b)))
5478 {
5479 /* Handle the AND case, where we are reassociating:
5480 (inner1 AND inner2) AND (op2a code2 op2b)
5481 => (t AND inner2)
5482 If the partial result t is a constant, we win. Otherwise
5483 continue on to try reassociating with the other inner test. */
5484 if (is_and)
5485 {
5486 if (integer_onep (t))
5487 return inner2;
5488 else if (integer_zerop (t))
5489 return boolean_false_node;
5490 }
5491
5492 /* Handle the OR case, where we are redistributing:
5493 (inner1 OR inner2) AND (op2a code2 op2b)
5494 => (t OR (inner2 AND (op2a code2 op2b))) */
8236c8eb
JJ
5495 else if (integer_onep (t))
5496 return boolean_true_node;
5497
5498 /* Save partial result for later. */
5499 partial = t;
e89065a1
SL
5500 }
5501
5502 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5503 if (TREE_CODE (inner2) == SSA_NAME
5504 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5505 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5506 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5507 gimple_assign_rhs1 (s),
5508 gimple_assign_rhs2 (s),
5509 code2, op2a, op2b)))
5510 {
5511 /* Handle the AND case, where we are reassociating:
5512 (inner1 AND inner2) AND (op2a code2 op2b)
5513 => (inner1 AND t) */
5514 if (is_and)
5515 {
5516 if (integer_onep (t))
5517 return inner1;
5518 else if (integer_zerop (t))
5519 return boolean_false_node;
8236c8eb
JJ
5520 /* If both are the same, we can apply the identity
5521 (x AND x) == x. */
5522 else if (partial && same_bool_result_p (t, partial))
5523 return t;
e89065a1
SL
5524 }
5525
 5526 /* Handle the OR case, where we are redistributing:
5527 (inner1 OR inner2) AND (op2a code2 op2b)
5528 => (t OR (inner1 AND (op2a code2 op2b)))
5529 => (t OR partial) */
5530 else
5531 {
5532 if (integer_onep (t))
5533 return boolean_true_node;
5534 else if (partial)
5535 {
5536 /* We already got a simplification for the other
5537 operand to the redistributed OR expression. The
5538 interesting case is when at least one is false.
5539 Or, if both are the same, we can apply the identity
5540 (x OR x) == x. */
5541 if (integer_zerop (partial))
5542 return t;
5543 else if (integer_zerop (t))
5544 return partial;
5545 else if (same_bool_result_p (t, partial))
5546 return t;
5547 }
5548 }
5549 }
5550 }
5551 return NULL_TREE;
5552}
5553
5554/* Try to simplify the AND of two comparisons defined by
5555 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5556 If this can be done without constructing an intermediate value,
5557 return the resulting tree; otherwise NULL_TREE is returned.
5558 This function is deliberately asymmetric as it recurses on SSA_DEFs
5559 in the first comparison but not the second. */
5560
5561static tree
5562and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5563 enum tree_code code2, tree op2a, tree op2b)
5564{
ae22ac3c 5565 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 5566
e89065a1
SL
5567 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5568 if (operand_equal_p (op1a, op2a, 0)
5569 && operand_equal_p (op1b, op2b, 0))
5570 {
eb9820c0 5571 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5572 tree t = combine_comparisons (UNKNOWN_LOCATION,
5573 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 5574 truth_type, op1a, op1b);
e89065a1
SL
5575 if (t)
5576 return t;
5577 }
5578
5579 /* Likewise the swapped case of the above. */
5580 if (operand_equal_p (op1a, op2b, 0)
5581 && operand_equal_p (op1b, op2a, 0))
5582 {
eb9820c0 5583 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5584 tree t = combine_comparisons (UNKNOWN_LOCATION,
5585 TRUTH_ANDIF_EXPR, code1,
5586 swap_tree_comparison (code2),
31ed6226 5587 truth_type, op1a, op1b);
e89065a1
SL
5588 if (t)
5589 return t;
5590 }
5591
5592 /* If both comparisons are of the same value against constants, we might
5593 be able to merge them. */
5594 if (operand_equal_p (op1a, op2a, 0)
5595 && TREE_CODE (op1b) == INTEGER_CST
5596 && TREE_CODE (op2b) == INTEGER_CST)
5597 {
5598 int cmp = tree_int_cst_compare (op1b, op2b);
5599
5600 /* If we have (op1a == op1b), we should either be able to
5601 return that or FALSE, depending on whether the constant op1b
5602 also satisfies the other comparison against op2b. */
5603 if (code1 == EQ_EXPR)
5604 {
5605 bool done = true;
5606 bool val;
5607 switch (code2)
5608 {
5609 case EQ_EXPR: val = (cmp == 0); break;
5610 case NE_EXPR: val = (cmp != 0); break;
5611 case LT_EXPR: val = (cmp < 0); break;
5612 case GT_EXPR: val = (cmp > 0); break;
5613 case LE_EXPR: val = (cmp <= 0); break;
5614 case GE_EXPR: val = (cmp >= 0); break;
5615 default: done = false;
5616 }
5617 if (done)
5618 {
5619 if (val)
5620 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5621 else
5622 return boolean_false_node;
5623 }
5624 }
5625 /* Likewise if the second comparison is an == comparison. */
5626 else if (code2 == EQ_EXPR)
5627 {
5628 bool done = true;
5629 bool val;
5630 switch (code1)
5631 {
5632 case EQ_EXPR: val = (cmp == 0); break;
5633 case NE_EXPR: val = (cmp != 0); break;
5634 case LT_EXPR: val = (cmp > 0); break;
5635 case GT_EXPR: val = (cmp < 0); break;
5636 case LE_EXPR: val = (cmp >= 0); break;
5637 case GE_EXPR: val = (cmp <= 0); break;
5638 default: done = false;
5639 }
5640 if (done)
5641 {
5642 if (val)
5643 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5644 else
5645 return boolean_false_node;
5646 }
5647 }
5648
5649 /* Same business with inequality tests. */
5650 else if (code1 == NE_EXPR)
5651 {
5652 bool val;
5653 switch (code2)
5654 {
5655 case EQ_EXPR: val = (cmp != 0); break;
5656 case NE_EXPR: val = (cmp == 0); break;
5657 case LT_EXPR: val = (cmp >= 0); break;
5658 case GT_EXPR: val = (cmp <= 0); break;
5659 case LE_EXPR: val = (cmp > 0); break;
5660 case GE_EXPR: val = (cmp < 0); break;
5661 default:
5662 val = false;
5663 }
5664 if (val)
5665 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5666 }
5667 else if (code2 == NE_EXPR)
5668 {
5669 bool val;
5670 switch (code1)
5671 {
5672 case EQ_EXPR: val = (cmp == 0); break;
5673 case NE_EXPR: val = (cmp != 0); break;
5674 case LT_EXPR: val = (cmp <= 0); break;
5675 case GT_EXPR: val = (cmp >= 0); break;
5676 case LE_EXPR: val = (cmp < 0); break;
5677 case GE_EXPR: val = (cmp > 0); break;
5678 default:
5679 val = false;
5680 }
5681 if (val)
5682 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5683 }
5684
 5685 /* Choose the more restrictive of two < or <= comparisons. */
5686 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5687 && (code2 == LT_EXPR || code2 == LE_EXPR))
5688 {
5689 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5690 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5691 else
5692 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5693 }
5694
 5695 /* Likewise choose the more restrictive of two > or >= comparisons. */
5696 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5697 && (code2 == GT_EXPR || code2 == GE_EXPR))
5698 {
5699 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5700 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5701 else
5702 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5703 }
5704
5705 /* Check for singleton ranges. */
5706 else if (cmp == 0
5707 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5708 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5709 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5710
5711 /* Check for disjoint ranges. */
5712 else if (cmp <= 0
5713 && (code1 == LT_EXPR || code1 == LE_EXPR)
5714 && (code2 == GT_EXPR || code2 == GE_EXPR))
5715 return boolean_false_node;
5716 else if (cmp >= 0
5717 && (code1 == GT_EXPR || code1 == GE_EXPR)
5718 && (code2 == LT_EXPR || code2 == LE_EXPR))
5719 return boolean_false_node;
5720 }
5721
5722 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5723 NAME's definition is a truth value. See if there are any simplifications
5724 that can be done against the NAME's definition. */
5725 if (TREE_CODE (op1a) == SSA_NAME
5726 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5727 && (integer_zerop (op1b) || integer_onep (op1b)))
5728 {
5729 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5730 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 5731 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
5732 switch (gimple_code (stmt))
5733 {
5734 case GIMPLE_ASSIGN:
5735 /* Try to simplify by copy-propagating the definition. */
5736 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5737
5738 case GIMPLE_PHI:
5739 /* If every argument to the PHI produces the same result when
5740 ANDed with the second comparison, we win.
5741 Do not do this unless the type is bool since we need a bool
5742 result here anyway. */
5743 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5744 {
5745 tree result = NULL_TREE;
5746 unsigned i;
5747 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5748 {
5749 tree arg = gimple_phi_arg_def (stmt, i);
5750
5751 /* If this PHI has itself as an argument, ignore it.
5752 If all the other args produce the same result,
5753 we're still OK. */
5754 if (arg == gimple_phi_result (stmt))
5755 continue;
5756 else if (TREE_CODE (arg) == INTEGER_CST)
5757 {
5758 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5759 {
5760 if (!result)
5761 result = boolean_false_node;
5762 else if (!integer_zerop (result))
5763 return NULL_TREE;
5764 }
5765 else if (!result)
5766 result = fold_build2 (code2, boolean_type_node,
5767 op2a, op2b);
5768 else if (!same_bool_comparison_p (result,
5769 code2, op2a, op2b))
5770 return NULL_TREE;
5771 }
0e8b84ec
JJ
5772 else if (TREE_CODE (arg) == SSA_NAME
5773 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 5774 {
6c66f733 5775 tree temp;
355fe088 5776 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
5777 /* In simple cases we can look through PHI nodes,
5778 but we have to be careful with loops.
5779 See PR49073. */
5780 if (! dom_info_available_p (CDI_DOMINATORS)
5781 || gimple_bb (def_stmt) == gimple_bb (stmt)
5782 || dominated_by_p (CDI_DOMINATORS,
5783 gimple_bb (def_stmt),
5784 gimple_bb (stmt)))
5785 return NULL_TREE;
5786 temp = and_var_with_comparison (arg, invert, code2,
5787 op2a, op2b);
e89065a1
SL
5788 if (!temp)
5789 return NULL_TREE;
5790 else if (!result)
5791 result = temp;
5792 else if (!same_bool_result_p (result, temp))
5793 return NULL_TREE;
5794 }
5795 else
5796 return NULL_TREE;
5797 }
5798 return result;
5799 }
5800
5801 default:
5802 break;
5803 }
5804 }
5805 return NULL_TREE;
5806}
5807
5808/* Try to simplify the AND of two comparisons, specified by
 5809 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
 5810 If this can be simplified to a single expression (without having to
 5811 introduce more SSA variables to hold intermediate values),
5812 return the resulting tree. Otherwise return NULL_TREE.
5813 If the result expression is non-null, it has boolean type. */
5814
5815tree
5816maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5817 enum tree_code code2, tree op2a, tree op2b)
5818{
5819 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5820 if (t)
5821 return t;
5822 else
5823 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5824}
5825
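
/* A minimal sketch, not part of this file: ANDing two comparisons of the
   same SSA name against constants keeps the more restrictive bound.  VAR is
   assumed to be an integral SSA_NAME.  */

static tree
example_fold_and (tree var)
{
  tree five = build_int_cst (TREE_TYPE (var), 5);
  tree ten = build_int_cst (TREE_TYPE (var), 10);
  /* Folds (VAR > 5) && (VAR > 10); the expected result is the tree for
     VAR > 10, or NULL_TREE if nothing could be simplified.  */
  return maybe_fold_and_comparisons (GT_EXPR, var, five,
				     GT_EXPR, var, ten);
}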
5826/* Helper function for or_comparisons_1: try to simplify the OR of the
5827 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5828 If INVERT is true, invert the value of VAR before doing the OR.
 5829 Return NULL_TREE if we can't simplify this to a single expression. */
5830
5831static tree
5832or_var_with_comparison (tree var, bool invert,
5833 enum tree_code code2, tree op2a, tree op2b)
5834{
5835 tree t;
355fe088 5836 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5837
5838 /* We can only deal with variables whose definitions are assignments. */
5839 if (!is_gimple_assign (stmt))
5840 return NULL_TREE;
5841
5842 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5843 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5844 Then we only have to consider the simpler non-inverted cases. */
5845 if (invert)
5846 t = and_var_with_comparison_1 (stmt,
5847 invert_tree_comparison (code2, false),
5848 op2a, op2b);
5849 else
5850 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5851 return canonicalize_bool (t, invert);
5852}
5853
5854/* Try to simplify the OR of the ssa variable defined by the assignment
5855 STMT with the comparison specified by (OP2A CODE2 OP2B).
 5856 Return NULL_TREE if we can't simplify this to a single expression. */
5857
5858static tree
355fe088 5859or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5860 enum tree_code code2, tree op2a, tree op2b)
5861{
5862 tree var = gimple_assign_lhs (stmt);
5863 tree true_test_var = NULL_TREE;
5864 tree false_test_var = NULL_TREE;
5865 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5866
5867 /* Check for identities like (var OR (var != 0)) => true . */
5868 if (TREE_CODE (op2a) == SSA_NAME
5869 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5870 {
5871 if ((code2 == NE_EXPR && integer_zerop (op2b))
5872 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5873 {
5874 true_test_var = op2a;
5875 if (var == true_test_var)
5876 return var;
5877 }
5878 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5879 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5880 {
5881 false_test_var = op2a;
5882 if (var == false_test_var)
5883 return boolean_true_node;
5884 }
5885 }
5886
5887 /* If the definition is a comparison, recurse on it. */
5888 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5889 {
5890 tree t = or_comparisons_1 (innercode,
5891 gimple_assign_rhs1 (stmt),
5892 gimple_assign_rhs2 (stmt),
5893 code2,
5894 op2a,
5895 op2b);
5896 if (t)
5897 return t;
5898 }
5899
5900 /* If the definition is an AND or OR expression, we may be able to
5901 simplify by reassociating. */
eb9820c0
KT
5902 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5903 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
5904 {
5905 tree inner1 = gimple_assign_rhs1 (stmt);
5906 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 5907 gimple *s;
e89065a1
SL
5908 tree t;
5909 tree partial = NULL_TREE;
eb9820c0 5910 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
5911
5912 /* Check for boolean identities that don't require recursive examination
5913 of inner1/inner2:
5914 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5915 inner1 OR (inner1 AND inner2) => inner1
5916 !inner1 OR (inner1 OR inner2) => true
5917 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5918 */
5919 if (inner1 == true_test_var)
5920 return (is_or ? var : inner1);
5921 else if (inner2 == true_test_var)
5922 return (is_or ? var : inner2);
5923 else if (inner1 == false_test_var)
5924 return (is_or
5925 ? boolean_true_node
5926 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5927 else if (inner2 == false_test_var)
5928 return (is_or
5929 ? boolean_true_node
5930 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5931
5932 /* Next, redistribute/reassociate the OR across the inner tests.
5933 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5934 if (TREE_CODE (inner1) == SSA_NAME
5935 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5936 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5937 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5938 gimple_assign_rhs1 (s),
5939 gimple_assign_rhs2 (s),
5940 code2, op2a, op2b)))
5941 {
5942 /* Handle the OR case, where we are reassociating:
5943 (inner1 OR inner2) OR (op2a code2 op2b)
5944 => (t OR inner2)
5945 If the partial result t is a constant, we win. Otherwise
5946 continue on to try reassociating with the other inner test. */
8236c8eb 5947 if (is_or)
e89065a1
SL
5948 {
5949 if (integer_onep (t))
5950 return boolean_true_node;
5951 else if (integer_zerop (t))
5952 return inner2;
5953 }
5954
5955 /* Handle the AND case, where we are redistributing:
5956 (inner1 AND inner2) OR (op2a code2 op2b)
5957 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
5958 else if (integer_zerop (t))
5959 return boolean_false_node;
5960
5961 /* Save partial result for later. */
5962 partial = t;
e89065a1
SL
5963 }
5964
5965 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5966 if (TREE_CODE (inner2) == SSA_NAME
5967 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5968 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5969 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5970 gimple_assign_rhs1 (s),
5971 gimple_assign_rhs2 (s),
5972 code2, op2a, op2b)))
5973 {
5974 /* Handle the OR case, where we are reassociating:
5975 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
5976 => (inner1 OR t)
5977 => (t OR partial) */
5978 if (is_or)
e89065a1
SL
5979 {
5980 if (integer_zerop (t))
5981 return inner1;
5982 else if (integer_onep (t))
5983 return boolean_true_node;
8236c8eb
JJ
5984 /* If both are the same, we can apply the identity
5985 (x OR x) == x. */
5986 else if (partial && same_bool_result_p (t, partial))
5987 return t;
e89065a1
SL
5988 }
5989
5990 /* Handle the AND case, where we are redistributing:
5991 (inner1 AND inner2) OR (op2a code2 op2b)
5992 => (t AND (inner1 OR (op2a code2 op2b)))
5993 => (t AND partial) */
5994 else
5995 {
5996 if (integer_zerop (t))
5997 return boolean_false_node;
5998 else if (partial)
5999 {
6000 /* We already got a simplification for the other
6001 operand to the redistributed AND expression. The
6002 interesting case is when at least one is true.
6003 Or, if both are the same, we can apply the identity
8236c8eb 6004 (x AND x) == x. */
e89065a1
SL
6005 if (integer_onep (partial))
6006 return t;
6007 else if (integer_onep (t))
6008 return partial;
6009 else if (same_bool_result_p (t, partial))
8236c8eb 6010 return t;
e89065a1
SL
6011 }
6012 }
6013 }
6014 }
6015 return NULL_TREE;
6016}
6017
6018/* Try to simplify the OR of two comparisons defined by
6019 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6020 If this can be done without constructing an intermediate value,
6021 return the resulting tree; otherwise NULL_TREE is returned.
6022 This function is deliberately asymmetric as it recurses on SSA_DEFs
6023 in the first comparison but not the second. */
6024
6025static tree
6026or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
6027 enum tree_code code2, tree op2a, tree op2b)
6028{
ae22ac3c 6029 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 6030
e89065a1
SL
6031 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
6032 if (operand_equal_p (op1a, op2a, 0)
6033 && operand_equal_p (op1b, op2b, 0))
6034 {
eb9820c0 6035 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
6036 tree t = combine_comparisons (UNKNOWN_LOCATION,
6037 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 6038 truth_type, op1a, op1b);
e89065a1
SL
6039 if (t)
6040 return t;
6041 }
6042
6043 /* Likewise the swapped case of the above. */
6044 if (operand_equal_p (op1a, op2b, 0)
6045 && operand_equal_p (op1b, op2a, 0))
6046 {
eb9820c0 6047 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
6048 tree t = combine_comparisons (UNKNOWN_LOCATION,
6049 TRUTH_ORIF_EXPR, code1,
6050 swap_tree_comparison (code2),
31ed6226 6051 truth_type, op1a, op1b);
e89065a1
SL
6052 if (t)
6053 return t;
6054 }
6055
6056 /* If both comparisons are of the same value against constants, we might
6057 be able to merge them. */
6058 if (operand_equal_p (op1a, op2a, 0)
6059 && TREE_CODE (op1b) == INTEGER_CST
6060 && TREE_CODE (op2b) == INTEGER_CST)
6061 {
6062 int cmp = tree_int_cst_compare (op1b, op2b);
6063
6064 /* If we have (op1a != op1b), we should either be able to
6065 return that or TRUE, depending on whether the constant op1b
6066 also satisfies the other comparison against op2b. */
6067 if (code1 == NE_EXPR)
6068 {
6069 bool done = true;
6070 bool val;
6071 switch (code2)
6072 {
6073 case EQ_EXPR: val = (cmp == 0); break;
6074 case NE_EXPR: val = (cmp != 0); break;
6075 case LT_EXPR: val = (cmp < 0); break;
6076 case GT_EXPR: val = (cmp > 0); break;
6077 case LE_EXPR: val = (cmp <= 0); break;
6078 case GE_EXPR: val = (cmp >= 0); break;
6079 default: done = false;
6080 }
6081 if (done)
6082 {
6083 if (val)
6084 return boolean_true_node;
6085 else
6086 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6087 }
6088 }
6089 /* Likewise if the second comparison is a != comparison. */
6090 else if (code2 == NE_EXPR)
6091 {
6092 bool done = true;
6093 bool val;
6094 switch (code1)
6095 {
6096 case EQ_EXPR: val = (cmp == 0); break;
6097 case NE_EXPR: val = (cmp != 0); break;
6098 case LT_EXPR: val = (cmp > 0); break;
6099 case GT_EXPR: val = (cmp < 0); break;
6100 case LE_EXPR: val = (cmp >= 0); break;
6101 case GE_EXPR: val = (cmp <= 0); break;
6102 default: done = false;
6103 }
6104 if (done)
6105 {
6106 if (val)
6107 return boolean_true_node;
6108 else
6109 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6110 }
6111 }
6112
6113 /* See if an equality test is redundant with the other comparison. */
6114 else if (code1 == EQ_EXPR)
6115 {
6116 bool val;
6117 switch (code2)
6118 {
6119 case EQ_EXPR: val = (cmp == 0); break;
6120 case NE_EXPR: val = (cmp != 0); break;
6121 case LT_EXPR: val = (cmp < 0); break;
6122 case GT_EXPR: val = (cmp > 0); break;
6123 case LE_EXPR: val = (cmp <= 0); break;
6124 case GE_EXPR: val = (cmp >= 0); break;
6125 default:
6126 val = false;
6127 }
6128 if (val)
6129 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6130 }
6131 else if (code2 == EQ_EXPR)
6132 {
6133 bool val;
6134 switch (code1)
6135 {
6136 case EQ_EXPR: val = (cmp == 0); break;
6137 case NE_EXPR: val = (cmp != 0); break;
6138 case LT_EXPR: val = (cmp > 0); break;
6139 case GT_EXPR: val = (cmp < 0); break;
6140 case LE_EXPR: val = (cmp >= 0); break;
6141 case GE_EXPR: val = (cmp <= 0); break;
6142 default:
6143 val = false;
6144 }
6145 if (val)
6146 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6147 }
6148
 6149 /* Choose the less restrictive of two < or <= comparisons. */
6150 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
6151 && (code2 == LT_EXPR || code2 == LE_EXPR))
6152 {
6153 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
6154 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6155 else
6156 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6157 }
6158
 6159 /* Likewise choose the less restrictive of two > or >= comparisons. */
6160 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
6161 && (code2 == GT_EXPR || code2 == GE_EXPR))
6162 {
6163 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
6164 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6165 else
6166 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6167 }
6168
6169 /* Check for singleton ranges. */
6170 else if (cmp == 0
6171 && ((code1 == LT_EXPR && code2 == GT_EXPR)
6172 || (code1 == GT_EXPR && code2 == LT_EXPR)))
6173 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
6174
6175 /* Check for less/greater pairs that don't restrict the range at all. */
6176 else if (cmp >= 0
6177 && (code1 == LT_EXPR || code1 == LE_EXPR)
6178 && (code2 == GT_EXPR || code2 == GE_EXPR))
6179 return boolean_true_node;
6180 else if (cmp <= 0
6181 && (code1 == GT_EXPR || code1 == GE_EXPR)
6182 && (code2 == LT_EXPR || code2 == LE_EXPR))
6183 return boolean_true_node;
6184 }
6185
6186 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6187 NAME's definition is a truth value. See if there are any simplifications
6188 that can be done against the NAME's definition. */
6189 if (TREE_CODE (op1a) == SSA_NAME
6190 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6191 && (integer_zerop (op1b) || integer_onep (op1b)))
6192 {
6193 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6194 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 6195 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
6196 switch (gimple_code (stmt))
6197 {
6198 case GIMPLE_ASSIGN:
6199 /* Try to simplify by copy-propagating the definition. */
6200 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
6201
6202 case GIMPLE_PHI:
6203 /* If every argument to the PHI produces the same result when
6204 ORed with the second comparison, we win.
6205 Do not do this unless the type is bool since we need a bool
6206 result here anyway. */
6207 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6208 {
6209 tree result = NULL_TREE;
6210 unsigned i;
6211 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6212 {
6213 tree arg = gimple_phi_arg_def (stmt, i);
6214
6215 /* If this PHI has itself as an argument, ignore it.
6216 If all the other args produce the same result,
6217 we're still OK. */
6218 if (arg == gimple_phi_result (stmt))
6219 continue;
6220 else if (TREE_CODE (arg) == INTEGER_CST)
6221 {
6222 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6223 {
6224 if (!result)
6225 result = boolean_true_node;
6226 else if (!integer_onep (result))
6227 return NULL_TREE;
6228 }
6229 else if (!result)
6230 result = fold_build2 (code2, boolean_type_node,
6231 op2a, op2b);
6232 else if (!same_bool_comparison_p (result,
6233 code2, op2a, op2b))
6234 return NULL_TREE;
6235 }
0e8b84ec
JJ
6236 else if (TREE_CODE (arg) == SSA_NAME
6237 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 6238 {
6c66f733 6239 tree temp;
355fe088 6240 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
6241 /* In simple cases we can look through PHI nodes,
6242 but we have to be careful with loops.
6243 See PR49073. */
6244 if (! dom_info_available_p (CDI_DOMINATORS)
6245 || gimple_bb (def_stmt) == gimple_bb (stmt)
6246 || dominated_by_p (CDI_DOMINATORS,
6247 gimple_bb (def_stmt),
6248 gimple_bb (stmt)))
6249 return NULL_TREE;
6250 temp = or_var_with_comparison (arg, invert, code2,
6251 op2a, op2b);
e89065a1
SL
6252 if (!temp)
6253 return NULL_TREE;
6254 else if (!result)
6255 result = temp;
6256 else if (!same_bool_result_p (result, temp))
6257 return NULL_TREE;
6258 }
6259 else
6260 return NULL_TREE;
6261 }
6262 return result;
6263 }
6264
6265 default:
6266 break;
6267 }
6268 }
6269 return NULL_TREE;
6270}
6271
6272/* Try to simplify the OR of two comparisons, specified by
 6273 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
 6274 If this can be simplified to a single expression (without having to
 6275 introduce more SSA variables to hold intermediate values),
6276 return the resulting tree. Otherwise return NULL_TREE.
6277 If the result expression is non-null, it has boolean type. */
6278
6279tree
6280maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6281 enum tree_code code2, tree op2a, tree op2b)
6282{
6283 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6284 if (t)
6285 return t;
6286 else
6287 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6288}
cfef45c8
RG
6289
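
/* A minimal sketch, not part of this file: ORing VAR < 0 with VAR > 0 for a
   signed integral SSA_NAME VAR.  Both comparisons test against the same
   bound, so the expected result is the tree for VAR != 0 (or NULL_TREE if
   no simplification applies).  */

static tree
example_fold_or (tree var)
{
  tree zero = build_int_cst (TREE_TYPE (var), 0);
  return maybe_fold_or_comparisons (LT_EXPR, var, zero,
				    GT_EXPR, var, zero);
}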
6290
6291/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6292
 6293 Either NULL_TREE, a simplified but non-constant tree, or a constant
6294 is returned.
6295
6296 ??? This should go into a gimple-fold-inline.h file to be eventually
6297 privatized with the single valueize function used in the various TUs
6298 to avoid the indirect function call overhead. */
6299
6300tree
355fe088 6301gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
d2a85801 6302 tree (*gvalueize) (tree))
cfef45c8 6303{
5d75ad95 6304 gimple_match_op res_op;
45cc9f96
RB
6305 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6306 edges if there are intermediate VARYING defs. For this reason
6307 do not follow SSA edges here even though SCCVN can technically
6308 just deal fine with that. */
5d75ad95 6309 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
45cc9f96 6310 {
34050b6b 6311 tree res = NULL_TREE;
5d75ad95
RS
6312 if (gimple_simplified_result_is_gimple_val (&res_op))
6313 res = res_op.ops[0];
34050b6b 6314 else if (mprts_hook)
5d75ad95 6315 res = mprts_hook (&res_op);
34050b6b 6316 if (res)
45cc9f96 6317 {
34050b6b
RB
6318 if (dump_file && dump_flags & TDF_DETAILS)
6319 {
6320 fprintf (dump_file, "Match-and-simplified ");
6321 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6322 fprintf (dump_file, " to ");
ef6cb4c7 6323 print_generic_expr (dump_file, res);
34050b6b
RB
6324 fprintf (dump_file, "\n");
6325 }
6326 return res;
45cc9f96 6327 }
45cc9f96
RB
6328 }
6329
cfef45c8
RG
6330 location_t loc = gimple_location (stmt);
6331 switch (gimple_code (stmt))
6332 {
6333 case GIMPLE_ASSIGN:
6334 {
6335 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6336
6337 switch (get_gimple_rhs_class (subcode))
6338 {
6339 case GIMPLE_SINGLE_RHS:
6340 {
6341 tree rhs = gimple_assign_rhs1 (stmt);
6342 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6343
6344 if (TREE_CODE (rhs) == SSA_NAME)
6345 {
6346 /* If the RHS is an SSA_NAME, return its known constant value,
6347 if any. */
6348 return (*valueize) (rhs);
6349 }
6350 /* Handle propagating invariant addresses into address
6351 operations. */
6352 else if (TREE_CODE (rhs) == ADDR_EXPR
6353 && !is_gimple_min_invariant (rhs))
6354 {
a90c8804 6355 poly_int64 offset = 0;
cfef45c8
RG
6356 tree base;
6357 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6358 &offset,
6359 valueize);
6360 if (base
6361 && (CONSTANT_CLASS_P (base)
6362 || decl_address_invariant_p (base)))
6363 return build_invariant_address (TREE_TYPE (rhs),
6364 base, offset);
6365 }
6366 else if (TREE_CODE (rhs) == CONSTRUCTOR
6367 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
928686b1
RS
6368 && known_eq (CONSTRUCTOR_NELTS (rhs),
6369 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
cfef45c8 6370 {
794e3180
RS
6371 unsigned i, nelts;
6372 tree val;
cfef45c8 6373
928686b1 6374 nelts = CONSTRUCTOR_NELTS (rhs);
5ebaa477 6375 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
cfef45c8
RG
6376 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6377 {
6378 val = (*valueize) (val);
6379 if (TREE_CODE (val) == INTEGER_CST
6380 || TREE_CODE (val) == REAL_CST
6381 || TREE_CODE (val) == FIXED_CST)
794e3180 6382 vec.quick_push (val);
cfef45c8
RG
6383 else
6384 return NULL_TREE;
6385 }
6386
5ebaa477 6387 return vec.build ();
cfef45c8 6388 }
bdf37f7a
JH
6389 if (subcode == OBJ_TYPE_REF)
6390 {
6391 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6392 /* If callee is constant, we can fold away the wrapper. */
6393 if (is_gimple_min_invariant (val))
6394 return val;
6395 }
cfef45c8
RG
6396
6397 if (kind == tcc_reference)
6398 {
6399 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6400 || TREE_CODE (rhs) == REALPART_EXPR
6401 || TREE_CODE (rhs) == IMAGPART_EXPR)
6402 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6403 {
6404 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6405 return fold_unary_loc (EXPR_LOCATION (rhs),
6406 TREE_CODE (rhs),
6407 TREE_TYPE (rhs), val);
6408 }
6409 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6410 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6411 {
6412 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6413 return fold_ternary_loc (EXPR_LOCATION (rhs),
6414 TREE_CODE (rhs),
6415 TREE_TYPE (rhs), val,
6416 TREE_OPERAND (rhs, 1),
6417 TREE_OPERAND (rhs, 2));
6418 }
6419 else if (TREE_CODE (rhs) == MEM_REF
6420 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6421 {
6422 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6423 if (TREE_CODE (val) == ADDR_EXPR
6424 && is_gimple_min_invariant (val))
6425 {
6426 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6427 unshare_expr (val),
6428 TREE_OPERAND (rhs, 1));
6429 if (tem)
6430 rhs = tem;
6431 }
6432 }
6433 return fold_const_aggregate_ref_1 (rhs, valueize);
6434 }
6435 else if (kind == tcc_declaration)
6436 return get_symbol_constant_value (rhs);
6437 return rhs;
6438 }
6439
6440 case GIMPLE_UNARY_RHS:
f3582e54 6441 return NULL_TREE;
cfef45c8
RG
6442
6443 case GIMPLE_BINARY_RHS:
4b1b9e64
RB
6444 /* Translate &x + CST into an invariant form suitable for
6445 further propagation. */
6446 if (subcode == POINTER_PLUS_EXPR)
6447 {
4b1b9e64
RB
6448 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6449 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4b1b9e64
RB
6450 if (TREE_CODE (op0) == ADDR_EXPR
6451 && TREE_CODE (op1) == INTEGER_CST)
6452 {
6453 tree off = fold_convert (ptr_type_node, op1);
6454 return build_fold_addr_expr_loc
6455 (loc,
6456 fold_build2 (MEM_REF,
6457 TREE_TYPE (TREE_TYPE (op0)),
6458 unshare_expr (op0), off));
6459 }
6460 }
59c20dc7
RB
6461 /* Canonicalize bool != 0 and bool == 0 appearing after
6462 valueization. While gimple_simplify handles this
6463 it can get confused by the ~X == 1 -> X == 0 transform
 6464 which we can't reduce to an SSA name or a constant
6465 (and we have no way to tell gimple_simplify to not
6466 consider those transforms in the first place). */
6467 else if (subcode == EQ_EXPR
6468 || subcode == NE_EXPR)
6469 {
6470 tree lhs = gimple_assign_lhs (stmt);
6471 tree op0 = gimple_assign_rhs1 (stmt);
6472 if (useless_type_conversion_p (TREE_TYPE (lhs),
6473 TREE_TYPE (op0)))
6474 {
6475 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6476 op0 = (*valueize) (op0);
8861704d
RB
6477 if (TREE_CODE (op0) == INTEGER_CST)
6478 std::swap (op0, op1);
6479 if (TREE_CODE (op1) == INTEGER_CST
6480 && ((subcode == NE_EXPR && integer_zerop (op1))
6481 || (subcode == EQ_EXPR && integer_onep (op1))))
6482 return op0;
59c20dc7
RB
6483 }
6484 }
4b1b9e64 6485 return NULL_TREE;
cfef45c8
RG
6486
6487 case GIMPLE_TERNARY_RHS:
6488 {
6489 /* Handle ternary operators that can appear in GIMPLE form. */
6490 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6491 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6492 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
cfef45c8
RG
6493 return fold_ternary_loc (loc, subcode,
6494 gimple_expr_type (stmt), op0, op1, op2);
6495 }
6496
6497 default:
6498 gcc_unreachable ();
6499 }
6500 }
6501
6502 case GIMPLE_CALL:
6503 {
25583c4f 6504 tree fn;
538dd0b7 6505 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
6506
6507 if (gimple_call_internal_p (stmt))
31e071ae
MP
6508 {
6509 enum tree_code subcode = ERROR_MARK;
6510 switch (gimple_call_internal_fn (stmt))
6511 {
6512 case IFN_UBSAN_CHECK_ADD:
6513 subcode = PLUS_EXPR;
6514 break;
6515 case IFN_UBSAN_CHECK_SUB:
6516 subcode = MINUS_EXPR;
6517 break;
6518 case IFN_UBSAN_CHECK_MUL:
6519 subcode = MULT_EXPR;
6520 break;
68fa96d6
ML
6521 case IFN_BUILTIN_EXPECT:
6522 {
6523 tree arg0 = gimple_call_arg (stmt, 0);
6524 tree op0 = (*valueize) (arg0);
6525 if (TREE_CODE (op0) == INTEGER_CST)
6526 return op0;
6527 return NULL_TREE;
6528 }
31e071ae
MP
6529 default:
6530 return NULL_TREE;
6531 }
368b454d
JJ
6532 tree arg0 = gimple_call_arg (stmt, 0);
6533 tree arg1 = gimple_call_arg (stmt, 1);
6534 tree op0 = (*valueize) (arg0);
6535 tree op1 = (*valueize) (arg1);
31e071ae
MP
6536
6537 if (TREE_CODE (op0) != INTEGER_CST
6538 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
6539 {
6540 switch (subcode)
6541 {
6542 case MULT_EXPR:
6543 /* x * 0 = 0 * x = 0 without overflow. */
6544 if (integer_zerop (op0) || integer_zerop (op1))
6545 return build_zero_cst (TREE_TYPE (arg0));
6546 break;
6547 case MINUS_EXPR:
6548 /* y - y = 0 without overflow. */
6549 if (operand_equal_p (op0, op1, 0))
6550 return build_zero_cst (TREE_TYPE (arg0));
6551 break;
6552 default:
6553 break;
6554 }
6555 }
6556 tree res
6557 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
6558 if (res
6559 && TREE_CODE (res) == INTEGER_CST
6560 && !TREE_OVERFLOW (res))
6561 return res;
6562 return NULL_TREE;
6563 }
25583c4f
RS
6564
6565 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8 6566 if (TREE_CODE (fn) == ADDR_EXPR
3d78e008 6567 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
5c944c6c
RB
6568 && gimple_builtin_call_types_compatible_p (stmt,
6569 TREE_OPERAND (fn, 0)))
cfef45c8
RG
6570 {
6571 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 6572 tree retval;
cfef45c8
RG
6573 unsigned i;
6574 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6575 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 6576 retval = fold_builtin_call_array (loc,
538dd0b7 6577 gimple_call_return_type (call_stmt),
cfef45c8 6578 fn, gimple_call_num_args (stmt), args);
cfef45c8 6579 if (retval)
5c944c6c
RB
6580 {
6581 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6582 STRIP_NOPS (retval);
538dd0b7
DM
6583 retval = fold_convert (gimple_call_return_type (call_stmt),
6584 retval);
5c944c6c 6585 }
cfef45c8
RG
6586 return retval;
6587 }
6588 return NULL_TREE;
6589 }
6590
6591 default:
6592 return NULL_TREE;
6593 }
6594}
6595
6596/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6597 Returns NULL_TREE if folding to a constant is not possible, otherwise
6598 returns a constant according to is_gimple_min_invariant. */
6599
6600tree
355fe088 6601gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
6602{
6603 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6604 if (res && is_gimple_min_invariant (res))
6605 return res;
6606 return NULL_TREE;
6607}
6608
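
/* A minimal usage sketch, not part of this file.  Real callers (the CCP and
   value-numbering propagators, for example) pass a valueization callback
   that maps each SSA name to its currently known constant value and returns
   the name itself when no value is known; with the identity callback below
   only statements whose operands are already invariant can fold.  */

static tree
example_fold_to_constant (gimple *stmt)
{
  /* follow_all_ssa_edges simply returns its argument unchanged.  */
  return gimple_fold_stmt_to_constant (stmt, follow_all_ssa_edges);
}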
6609
6610/* The following set of functions are supposed to fold references using
6611 their constant initializers. */
6612
cfef45c8
RG
 6613/* See if we can find a constructor defining the value of BASE.
 6614 When the constructor is known with a constant offset (such as when
 6615 BASE is array[40] and the constructor of 'array' is known),
 6616 BIT_OFFSET is adjusted accordingly.
6617
6618 As a special case, return error_mark_node when constructor
6619 is not explicitly available, but it is known to be zero
6620 such as 'static const int a;'. */
6621static tree
588db50c 6622get_base_constructor (tree base, poly_int64_pod *bit_offset,
cfef45c8
RG
6623 tree (*valueize)(tree))
6624{
588db50c 6625 poly_int64 bit_offset2, size, max_size;
ee45a32d
EB
6626 bool reverse;
6627
cfef45c8
RG
6628 if (TREE_CODE (base) == MEM_REF)
6629 {
6a5aca53
ML
6630 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6631 if (!boff.to_shwi (bit_offset))
6632 return NULL_TREE;
cfef45c8
RG
6633
6634 if (valueize
6635 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6636 base = valueize (TREE_OPERAND (base, 0));
6637 if (!base || TREE_CODE (base) != ADDR_EXPR)
6638 return NULL_TREE;
6639 base = TREE_OPERAND (base, 0);
6640 }
13e88953
RB
6641 else if (valueize
6642 && TREE_CODE (base) == SSA_NAME)
6643 base = valueize (base);
cfef45c8
RG
6644
6645 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6646 DECL_INITIAL. If BASE is a nested reference into another
6647 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6648 the inner reference. */
6649 switch (TREE_CODE (base))
6650 {
6651 case VAR_DECL:
cfef45c8 6652 case CONST_DECL:
6a6dac52
JH
6653 {
6654 tree init = ctor_for_folding (base);
6655
688010ba 6656 /* Our semantics are the exact opposite of ctor_for_folding's;
6a6dac52
JH
6657 NULL means unknown, while error_mark_node is 0. */
6658 if (init == error_mark_node)
6659 return NULL_TREE;
6660 if (!init)
6661 return error_mark_node;
6662 return init;
6663 }
cfef45c8 6664
13e88953
RB
6665 case VIEW_CONVERT_EXPR:
6666 return get_base_constructor (TREE_OPERAND (base, 0),
6667 bit_offset, valueize);
6668
cfef45c8
RG
6669 case ARRAY_REF:
6670 case COMPONENT_REF:
ee45a32d
EB
6671 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6672 &reverse);
588db50c 6673 if (!known_size_p (max_size) || maybe_ne (size, max_size))
cfef45c8
RG
6674 return NULL_TREE;
6675 *bit_offset += bit_offset2;
6676 return get_base_constructor (base, bit_offset, valueize);
6677
cfef45c8
RG
6678 case CONSTRUCTOR:
6679 return base;
6680
6681 default:
13e88953
RB
6682 if (CONSTANT_CLASS_P (base))
6683 return base;
6684
cfef45c8
RG
6685 return NULL_TREE;
6686 }
6687}
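/* For example (an illustrative sketch, not verbatim GCC behavior): given

     static const int a;              // no explicit initializer, known zero
     static const int b[2] = {1, 2};

   get_base_constructor on 'a' returns error_mark_node (callers then fold
   reads to zero), while on 'b' it returns b's CONSTRUCTOR {1, 2}; for
   nested references such as MEM_REF bases, the constant part of the
   offset is folded into BIT_OFFSET before recursing.  */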
6688
35b4d3a6
MS
6689/* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6690 to the memory at bit OFFSET. When non-null, TYPE is the expected
6691 type of the reference; otherwise the type of the referenced element
6692 is used instead. When SIZE is zero, attempt to fold a reference to
6693 the entire element which OFFSET refers to. Increment *SUBOFF by
6694 the bit offset of the accessed element. */
cfef45c8
RG
6695
6696static tree
6697fold_array_ctor_reference (tree type, tree ctor,
6698 unsigned HOST_WIDE_INT offset,
c44c2088 6699 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
6700 tree from_decl,
6701 unsigned HOST_WIDE_INT *suboff)
cfef45c8 6702{
807e902e
KZ
6703 offset_int low_bound;
6704 offset_int elt_size;
807e902e 6705 offset_int access_index;
6a636014 6706 tree domain_type = NULL_TREE;
cfef45c8
RG
6707 HOST_WIDE_INT inner_offset;
6708
6709 /* Compute low bound and elt size. */
eb8f1123
RG
6710 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6711 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
6712 if (domain_type && TYPE_MIN_VALUE (domain_type))
6713 {
6714	  /* Static constructors for variably sized objects make no sense.  */
9ef2eff0
RB
6715 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6716 return NULL_TREE;
807e902e 6717 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
6718 }
6719 else
807e902e 6720 low_bound = 0;
cfef45c8 6721  /* Static constructors for variably sized objects make no sense.  */
9ef2eff0
RB
6722 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6723 return NULL_TREE;
807e902e 6724 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8 6725
35b4d3a6
MS
6726  /* When TYPE is non-null, verify that it specifies a constant-sized
6727     access not larger than the size of the array element.  */
6728 if (type
6729 && (!TYPE_SIZE_UNIT (type)
6730 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6731 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6732 || elt_size == 0))
cfef45c8
RG
6733 return NULL_TREE;
6734
6735 /* Compute the array index we look for. */
807e902e
KZ
6736 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6737 elt_size);
27bcd47c 6738 access_index += low_bound;
cfef45c8
RG
6739
6740 /* And offset within the access. */
27bcd47c 6741 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8
RG
6742
6743  /* See if the array field is large enough to span the whole access.  We do not
6744 care to fold accesses spanning multiple array indexes. */
27bcd47c 6745 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
cfef45c8 6746 return NULL_TREE;
6a636014 6747 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
35b4d3a6
MS
6748 {
6749 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6750 {
6751 /* For the final reference to the entire accessed element
6752	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
6753 may be null) in favor of the type of the element, and set
6754 SIZE to the size of the accessed element. */
6755 inner_offset = 0;
6756 type = TREE_TYPE (val);
6757 size = elt_size.to_uhwi () * BITS_PER_UNIT;
6758 }
6759
6760 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6761 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6762 suboff);
6763 }
cfef45c8 6764
35b4d3a6
MS
6765 /* Memory not explicitly mentioned in constructor is 0 (or
6766 the reference is out of range). */
6767 return type ? build_zero_cst (type) : NULL_TREE;
cfef45c8
RG
6768}
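/* A worked example of the index computation above (illustrative only):
   for a constructor of type int[4] with 32-bit elements, a read of
   SIZE = 32 bits at OFFSET = 96 bits gives

     elt_size     = 4 bytes
     access_index = (96 / 8) / 4 + low_bound = 3 + low_bound
     inner_offset = 96 % 32 = 0

   so the access maps to element 3 and lies entirely within it.  An OFFSET
   of 80 would instead leave inner_offset = 16 and, with SIZE = 32, span
   two elements, so it would not be folded.  */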
6769
35b4d3a6
MS
6770/* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6771 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6772 is the expected type of the reference; otherwise the type of
6773 the referenced member is used instead. When SIZE is zero,
6774 attempt to fold a reference to the entire member which OFFSET
6775   refers to.  Increment *SUBOFF by the bit offset
6776 of the accessed member. */
cfef45c8
RG
6777
6778static tree
6779fold_nonarray_ctor_reference (tree type, tree ctor,
6780 unsigned HOST_WIDE_INT offset,
c44c2088 6781 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
6782 tree from_decl,
6783 unsigned HOST_WIDE_INT *suboff)
cfef45c8
RG
6784{
6785 unsigned HOST_WIDE_INT cnt;
6786 tree cfield, cval;
6787
6788 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6789 cval)
6790 {
6791 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6792 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6793 tree field_size = DECL_SIZE (cfield);
35b4d3a6
MS
6794
6795 if (!field_size)
6796 {
6797 /* Determine the size of the flexible array member from
6798 the size of the initializer provided for it. */
6799 field_size = TYPE_SIZE (TREE_TYPE (cval));
6800 }
cfef45c8
RG
6801
6802      /* Variable sized objects in static constructors make no sense,
6803 but field_size can be NULL for flexible array members. */
6804 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6805 && TREE_CODE (byte_offset) == INTEGER_CST
6806 && (field_size != NULL_TREE
6807 ? TREE_CODE (field_size) == INTEGER_CST
6808 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6809
6810 /* Compute bit offset of the field. */
35b4d3a6
MS
6811 offset_int bitoffset
6812 = (wi::to_offset (field_offset)
6813 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
cfef45c8 6814 /* Compute bit offset where the field ends. */
35b4d3a6 6815 offset_int bitoffset_end;
cfef45c8 6816 if (field_size != NULL_TREE)
807e902e 6817 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 6818 else
807e902e 6819 bitoffset_end = 0;
cfef45c8 6820
35b4d3a6
MS
6821 /* Compute the bit offset of the end of the desired access.
6822 As a special case, if the size of the desired access is
6823 zero, assume the access is to the entire field (and let
6824 the caller make any necessary adjustments by storing
6825 the actual bounds of the field in FIELDBOUNDS). */
6826 offset_int access_end = offset_int (offset);
6827 if (size)
6828 access_end += size;
6829 else
6830 access_end = bitoffset_end;
b8b2b009 6831
35b4d3a6
MS
6832 /* Is there any overlap between the desired access at
6833 [OFFSET, OFFSET+SIZE) and the offset of the field within
6834 the object at [BITOFFSET, BITOFFSET_END)? */
807e902e 6835 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 6836 && (field_size == NULL_TREE
807e902e 6837 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 6838 {
35b4d3a6
MS
6839 *suboff += bitoffset.to_uhwi ();
6840
6841 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6842 {
6843 /* For the final reference to the entire accessed member
6844	     (SIZE is zero), reset OFFSET, disregard TYPE (which may
6845 be null) in favor of the type of the member, and set
6846 SIZE to the size of the accessed member. */
6847 offset = bitoffset.to_uhwi ();
6848 type = TREE_TYPE (cval);
6849 size = (bitoffset_end - bitoffset).to_uhwi ();
6850 }
6851
6852 /* We do have overlap. Now see if the field is large enough
6853 to cover the access. Give up for accesses that extend
6854 beyond the end of the object or that span multiple fields. */
807e902e 6855 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 6856 return NULL_TREE;
032c80e9 6857 if (offset < bitoffset)
b8b2b009 6858 return NULL_TREE;
35b4d3a6
MS
6859
6860 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8 6861 return fold_ctor_reference (type, cval,
27bcd47c 6862 inner_offset.to_uhwi (), size,
35b4d3a6 6863 from_decl, suboff);
cfef45c8
RG
6864 }
6865 }
35b4d3a6
MS
6866 /* Memory not explicitly mentioned in constructor is 0. */
6867 return type ? build_zero_cst (type) : NULL_TREE;
cfef45c8
RG
6868}
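/* A worked example of the field-offset computation above (illustrative
   only): for

     struct S { char c; int i; };

   with an 8-bit char, a 32-bit int and natural alignment, the field 'i'
   typically has DECL_FIELD_OFFSET = 4 and DECL_FIELD_BIT_OFFSET = 0, so

     bitoffset     = 0 + (4 << LOG2_BITS_PER_UNIT) = 32
     bitoffset_end = 32 + 32 = 64

   and a 32-bit read at OFFSET = 32 overlaps exactly that field and is
   folded from the corresponding CONSTRUCTOR element.  */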
6869
35b4d3a6
MS
6870/* CTOR is value initializing memory. Fold a reference of TYPE and
6871 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When SIZE
6872 is zero, attempt to fold a reference to the entire subobject
6873 which OFFSET refers to. This is used when folding accesses to
6874 string members of aggregates. When non-null, set *SUBOFF to
6875 the bit offset of the accessed subobject. */
cfef45c8 6876
8403c2cf 6877tree
35b4d3a6
MS
6878fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6879 const poly_uint64 &poly_size, tree from_decl,
6880 unsigned HOST_WIDE_INT *suboff /* = NULL */)
cfef45c8
RG
6881{
6882 tree ret;
6883
6884 /* We found the field with exact match. */
35b4d3a6
MS
6885 if (type
6886 && useless_type_conversion_p (type, TREE_TYPE (ctor))
30acf282 6887 && known_eq (poly_offset, 0U))
9d60be38 6888 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 6889
30acf282
RS
6890 /* The remaining optimizations need a constant size and offset. */
6891 unsigned HOST_WIDE_INT size, offset;
6892 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6893 return NULL_TREE;
6894
cfef45c8
RG
6895  /* We are at the end of the walk; see if we can view-convert the
6896 result. */
6897 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6898 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3d8208ce
TP
6899 && !compare_tree_int (TYPE_SIZE (type), size)
6900 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
cfef45c8 6901 {
9d60be38 6902 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 6903 if (ret)
672d9f8e
RB
6904 {
6905 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6906 if (ret)
6907 STRIP_USELESS_TYPE_CONVERSION (ret);
6908 }
cfef45c8
RG
6909 return ret;
6910 }
b2505143
RB
6911 /* For constants and byte-aligned/sized reads try to go through
6912 native_encode/interpret. */
6913 if (CONSTANT_CLASS_P (ctor)
6914 && BITS_PER_UNIT == 8
6915 && offset % BITS_PER_UNIT == 0
6916 && size % BITS_PER_UNIT == 0
6917 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6918 {
6919 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1ff0a84c
JJ
6920 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6921 offset / BITS_PER_UNIT);
6922 if (len > 0)
6923 return native_interpret_expr (type, buf, len);
b2505143 6924 }
cfef45c8
RG
6925 if (TREE_CODE (ctor) == CONSTRUCTOR)
6926 {
35b4d3a6
MS
6927 unsigned HOST_WIDE_INT dummy = 0;
6928 if (!suboff)
6929 suboff = &dummy;
cfef45c8 6930
eb8f1123
RG
6931 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6932 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
c44c2088 6933 return fold_array_ctor_reference (type, ctor, offset, size,
35b4d3a6
MS
6934 from_decl, suboff);
6935
6936 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6937 from_decl, suboff);
cfef45c8
RG
6938 }
6939
6940 return NULL_TREE;
6941}
6942
6943/* Return the tree representing the element referenced by T if T is an
6944   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
6945 names using VALUEIZE. Return NULL_TREE otherwise. */
6946
6947tree
6948fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6949{
6950 tree ctor, idx, base;
588db50c 6951 poly_int64 offset, size, max_size;
cfef45c8 6952 tree tem;
ee45a32d 6953 bool reverse;
cfef45c8 6954
f8a7df45
RG
6955 if (TREE_THIS_VOLATILE (t))
6956 return NULL_TREE;
6957
3a65ee74 6958 if (DECL_P (t))
cfef45c8
RG
6959 return get_symbol_constant_value (t);
6960
6961 tem = fold_read_from_constant_string (t);
6962 if (tem)
6963 return tem;
6964
6965 switch (TREE_CODE (t))
6966 {
6967 case ARRAY_REF:
6968 case ARRAY_RANGE_REF:
6969 /* Constant indexes are handled well by get_base_constructor.
6970 Only special case variable offsets.
6971 FIXME: This code can't handle nested references with variable indexes
6972 (they will be handled only by iteration of ccp). Perhaps we can bring
6973 get_ref_base_and_extent here and make it use a valueize callback. */
6974 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6975 && valueize
6976 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
588db50c 6977 && poly_int_tree_p (idx))
cfef45c8
RG
6978 {
6979 tree low_bound, unit_size;
6980
6981 /* If the resulting bit-offset is constant, track it. */
6982 if ((low_bound = array_ref_low_bound (t),
588db50c 6983 poly_int_tree_p (low_bound))
cfef45c8 6984 && (unit_size = array_ref_element_size (t),
807e902e 6985 tree_fits_uhwi_p (unit_size)))
cfef45c8 6986 {
588db50c
RS
6987 poly_offset_int woffset
6988 = wi::sext (wi::to_poly_offset (idx)
6989 - wi::to_poly_offset (low_bound),
807e902e
KZ
6990 TYPE_PRECISION (TREE_TYPE (idx)));
6991
588db50c 6992 if (woffset.to_shwi (&offset))
807e902e 6993 {
807e902e
KZ
6994	      /* TODO: This code seems wrong; it should multiply first and then
6995		 check whether the result fits.  */
6996 offset *= tree_to_uhwi (unit_size);
6997 offset *= BITS_PER_UNIT;
6998
6999 base = TREE_OPERAND (t, 0);
7000 ctor = get_base_constructor (base, &offset, valueize);
7001 /* Empty constructor. Always fold to 0. */
7002 if (ctor == error_mark_node)
7003 return build_zero_cst (TREE_TYPE (t));
7004 /* Out of bound array access. Value is undefined,
7005 but don't fold. */
588db50c 7006 if (maybe_lt (offset, 0))
807e902e
KZ
7007 return NULL_TREE;
7008	      /* We cannot determine the ctor.  */
7009 if (!ctor)
7010 return NULL_TREE;
7011 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
7012 tree_to_uhwi (unit_size)
7013 * BITS_PER_UNIT,
7014 base);
7015 }
cfef45c8
RG
7016 }
7017 }
7018 /* Fallthru. */
7019
7020 case COMPONENT_REF:
7021 case BIT_FIELD_REF:
7022 case TARGET_MEM_REF:
7023 case MEM_REF:
ee45a32d 7024 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
cfef45c8
RG
7025 ctor = get_base_constructor (base, &offset, valueize);
7026
7027 /* Empty constructor. Always fold to 0. */
7028 if (ctor == error_mark_node)
7029 return build_zero_cst (TREE_TYPE (t));
7030 /* We do not know precise address. */
588db50c 7031 if (!known_size_p (max_size) || maybe_ne (max_size, size))
cfef45c8
RG
7032 return NULL_TREE;
7033      /* We cannot determine the ctor.  */
7034 if (!ctor)
7035 return NULL_TREE;
7036
7037 /* Out of bound array access. Value is undefined, but don't fold. */
588db50c 7038 if (maybe_lt (offset, 0))
cfef45c8
RG
7039 return NULL_TREE;
7040
c44c2088
JH
7041 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
7042 base);
cfef45c8
RG
7043
7044 case REALPART_EXPR:
7045 case IMAGPART_EXPR:
7046 {
7047 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
7048 if (c && TREE_CODE (c) == COMPLEX_CST)
7049 return fold_build1_loc (EXPR_LOCATION (t),
7050 TREE_CODE (t), TREE_TYPE (t), c);
7051 break;
7052 }
7053
7054 default:
7055 break;
7056 }
7057
7058 return NULL_TREE;
7059}
7060
7061tree
7062fold_const_aggregate_ref (tree t)
7063{
7064 return fold_const_aggregate_ref_1 (t, NULL);
7065}
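/* An illustrative sketch (not part of GCC): folding a load from a constant
   aggregate without any SSA valueization.  For

     static const int tbl[3] = {10, 20, 30};
     ... = tbl[2];

   a pass holding the ARRAY_REF tree 'ref' for tbl[2] can do

     tree val = fold_const_aggregate_ref (ref);

   and, when the initializer is visible, VAL is the INTEGER_CST 30;
   otherwise NULL_TREE is returned and the reference is left alone.  */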
06bc3ec7 7066
85942f45 7067/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
7068 at OFFSET.
7069   Set CAN_REFER, if non-NULL, to false if the method
7070   is not referable or if the virtual table is ill-formed (such as rewritten
7071   by a non-C++-produced symbol); otherwise just return NULL in that case.  */
81fa35bd
MJ
7072
7073tree
85942f45
JH
7074gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
7075 tree v,
ec77d61f
JH
7076 unsigned HOST_WIDE_INT offset,
7077 bool *can_refer)
81fa35bd 7078{
85942f45
JH
7079 tree vtable = v, init, fn;
7080 unsigned HOST_WIDE_INT size;
8c311b50
JH
7081 unsigned HOST_WIDE_INT elt_size, access_index;
7082 tree domain_type;
81fa35bd 7083
ec77d61f
JH
7084 if (can_refer)
7085 *can_refer = true;
7086
9de2f554 7087  /* First of all, double-check that we have a virtual table.  */
8813a647 7088 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
ec77d61f 7089 {
ec77d61f
JH
7090 /* Pass down that we lost track of the target. */
7091 if (can_refer)
7092 *can_refer = false;
7093 return NULL_TREE;
7094 }
9de2f554 7095
2aa3da06
JH
7096 init = ctor_for_folding (v);
7097
9de2f554 7098 /* The virtual tables should always be born with constructors
2aa3da06
JH
7099     and we should always assume that they are available for
7100     folding.  At the moment we do not stream them in all cases,
7101     but it should never happen that the ctor seems unreachable.  */
7102 gcc_assert (init);
7103 if (init == error_mark_node)
7104 {
ec77d61f
JH
7105 /* Pass down that we lost track of the target. */
7106 if (can_refer)
7107 *can_refer = false;
2aa3da06
JH
7108 return NULL_TREE;
7109 }
81fa35bd 7110 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 7111 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 7112 offset *= BITS_PER_UNIT;
81fa35bd 7113 offset += token * size;
9de2f554 7114
8c311b50
JH
7115  /* Look up the value in the constructor, which is assumed to be an array.
7116 This is equivalent to
7117 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
7118 offset, size, NULL);
7119     but in constant time.  We expect that the frontend produced a simple
7120 array without indexed initializers. */
7121
7122 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
7123 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
7124 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
7125 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
7126
7127 access_index = offset / BITS_PER_UNIT / elt_size;
7128 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
7129
bf8d8309
MP
7130 /* The C++ FE can now produce indexed fields, and we check if the indexes
7131 match. */
8c311b50
JH
7132 if (access_index < CONSTRUCTOR_NELTS (init))
7133 {
7134 fn = CONSTRUCTOR_ELT (init, access_index)->value;
bf8d8309
MP
7135 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7136 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8c311b50
JH
7137 STRIP_NOPS (fn);
7138 }
7139 else
7140 fn = NULL;
9de2f554
JH
7141
7142  /* For a type-inconsistent program we may end up looking up a virtual method
7143     in a virtual table that does not contain TOKEN entries.  We may overrun
7144 the virtual table and pick up a constant or RTTI info pointer.
7145 In any case the call is undefined. */
7146 if (!fn
7147 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7148 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7149 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7150 else
7151 {
7152 fn = TREE_OPERAND (fn, 0);
7153
7154      /* When the cgraph node is missing and the function is not public, we cannot
7155	 devirtualize.  This can happen in WHOPR when the actual method
7156	 ends up in another partition, because we found the devirtualization
7157	 possibility too late.  */
7158 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
7159 {
7160 if (can_refer)
7161 {
7162 *can_refer = false;
7163 return fn;
7164 }
7165 return NULL_TREE;
7166 }
9de2f554 7167 }
81fa35bd 7168
7501ca28
RG
7169 /* Make sure we create a cgraph node for functions we'll reference.
7170 They can be non-existent if the reference comes from an entry
7171 of an external vtable for example. */
d52f5295 7172 cgraph_node::get_create (fn);
7501ca28 7173
81fa35bd
MJ
7174 return fn;
7175}
7176
85942f45
JH
7177/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7178 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7179 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
7180 OBJ_TYPE_REF_OBJECT(REF).
7181   Set CAN_REFER, if non-NULL, to false if the method
7182   is not referable or if the virtual table is ill-formed (such as rewritten
7183   by a non-C++-produced symbol); otherwise just return NULL in that case.  */
85942f45
JH
7184
7185tree
ec77d61f
JH
7186gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7187 bool *can_refer)
85942f45
JH
7188{
7189 unsigned HOST_WIDE_INT offset;
7190 tree v;
7191
7192 v = BINFO_VTABLE (known_binfo);
7193 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
7194 if (!v)
7195 return NULL_TREE;
7196
7197 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
7198 {
7199 if (can_refer)
7200 *can_refer = false;
7201 return NULL_TREE;
7202 }
7203 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
7204}
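/* An illustrative sketch (not part of GCC): a devirtualization site with a
   known dynamic type could use the routine above roughly like this, where
   'ref' is the OBJ_TYPE_REF expression and 'binfo' is assumed to describe
   the true type of OBJ_TYPE_REF_OBJECT (ref).

     bool can_refer;
     HOST_WIDE_INT token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
     tree fndecl = gimple_get_virt_method_for_binfo (token, binfo, &can_refer);
     if (fndecl && can_refer)
       {
	 /+ The indirect call may be replaced by a direct call to FNDECL
	    (or to __builtin_unreachable for a type-inconsistent program).  +/
       }

   (The inner comment markers are written as +/ here only to keep this
   example inside a single block comment.)  */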
7205
737f500a
RB
7206/* Given a pointer value T, return a simplified version of an
7207 indirection through T, or NULL_TREE if no simplification is
b184c8f1
AM
7208 possible. Note that the resulting type may be different from
7209 the type pointed to in the sense that it is still compatible
7210 from the langhooks point of view. */
7211
7212tree
7213gimple_fold_indirect_ref (tree t)
7214{
7215 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
7216 tree sub = t;
7217 tree subtype;
7218
7219 STRIP_NOPS (sub);
7220 subtype = TREE_TYPE (sub);
737f500a
RB
7221 if (!POINTER_TYPE_P (subtype)
7222 || TYPE_REF_CAN_ALIAS_ALL (ptype))
b184c8f1
AM
7223 return NULL_TREE;
7224
7225 if (TREE_CODE (sub) == ADDR_EXPR)
7226 {
7227 tree op = TREE_OPERAND (sub, 0);
7228 tree optype = TREE_TYPE (op);
7229 /* *&p => p */
7230 if (useless_type_conversion_p (type, optype))
7231 return op;
7232
7233 /* *(foo *)&fooarray => fooarray[0] */
7234 if (TREE_CODE (optype) == ARRAY_TYPE
7235 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7236 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7237 {
7238 tree type_domain = TYPE_DOMAIN (optype);
7239 tree min_val = size_zero_node;
7240 if (type_domain && TYPE_MIN_VALUE (type_domain))
7241 min_val = TYPE_MIN_VALUE (type_domain);
7242 if (TREE_CODE (min_val) == INTEGER_CST)
7243 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7244 }
7245 /* *(foo *)&complexfoo => __real__ complexfoo */
7246 else if (TREE_CODE (optype) == COMPLEX_TYPE
7247 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7248 return fold_build1 (REALPART_EXPR, type, op);
7249 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7250 else if (TREE_CODE (optype) == VECTOR_TYPE
7251 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7252 {
7253 tree part_width = TYPE_SIZE (type);
7254 tree index = bitsize_int (0);
7255 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7256 }
7257 }
7258
7259 /* *(p + CST) -> ... */
7260 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7261 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7262 {
7263 tree addr = TREE_OPERAND (sub, 0);
7264 tree off = TREE_OPERAND (sub, 1);
7265 tree addrtype;
7266
7267 STRIP_NOPS (addr);
7268 addrtype = TREE_TYPE (addr);
7269
7270 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7271 if (TREE_CODE (addr) == ADDR_EXPR
7272 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7273 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
cc269bb6 7274 && tree_fits_uhwi_p (off))
b184c8f1 7275 {
ae7e9ddd 7276 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
b184c8f1
AM
7277 tree part_width = TYPE_SIZE (type);
7278 unsigned HOST_WIDE_INT part_widthi
9439e9a1 7279 = tree_to_shwi (part_width) / BITS_PER_UNIT;
b184c8f1
AM
7280 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7281 tree index = bitsize_int (indexi);
928686b1
RS
7282 if (known_lt (offset / part_widthi,
7283 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
b184c8f1
AM
7284 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7285 part_width, index);
7286 }
7287
7288 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7289 if (TREE_CODE (addr) == ADDR_EXPR
7290 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7291 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7292 {
7293 tree size = TYPE_SIZE_UNIT (type);
7294 if (tree_int_cst_equal (size, off))
7295 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7296 }
7297
7298 /* *(p + CST) -> MEM_REF <p, CST>. */
7299 if (TREE_CODE (addr) != ADDR_EXPR
7300 || DECL_P (TREE_OPERAND (addr, 0)))
7301 return fold_build2 (MEM_REF, type,
7302 addr,
8e6cdc90 7303 wide_int_to_tree (ptype, wi::to_wide (off)));
b184c8f1
AM
7304 }
7305
7306 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7307 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7308 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7309 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7310 {
7311 tree type_domain;
7312 tree min_val = size_zero_node;
7313 tree osub = sub;
7314 sub = gimple_fold_indirect_ref (sub);
7315 if (! sub)
7316 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7317 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7318 if (type_domain && TYPE_MIN_VALUE (type_domain))
7319 min_val = TYPE_MIN_VALUE (type_domain);
7320 if (TREE_CODE (min_val) == INTEGER_CST)
7321 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7322 }
7323
7324 return NULL_TREE;
7325}
19e51b40
JJ
7326
7327/* Return true if CODE is an operation that, when operating on signed
7328   integer types, involves undefined behavior on overflow and that
7329   can be expressed with unsigned arithmetic.  */
7330
7331bool
7332arith_code_with_undefined_signed_overflow (tree_code code)
7333{
7334 switch (code)
7335 {
7336 case PLUS_EXPR:
7337 case MINUS_EXPR:
7338 case MULT_EXPR:
7339 case NEGATE_EXPR:
7340 case POINTER_PLUS_EXPR:
7341 return true;
7342 default:
7343 return false;
7344 }
7345}
7346
7347/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7348 operation that can be transformed to unsigned arithmetic by converting
7349 its operand, carrying out the operation in the corresponding unsigned
7350 type and converting the result back to the original type.
7351
7352 Returns a sequence of statements that replace STMT and also contain
7353 a modified form of STMT itself. */
7354
7355gimple_seq
355fe088 7356rewrite_to_defined_overflow (gimple *stmt)
19e51b40
JJ
7357{
7358 if (dump_file && (dump_flags & TDF_DETAILS))
7359 {
7360 fprintf (dump_file, "rewriting stmt with undefined signed "
7361 "overflow ");
7362 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7363 }
7364
7365 tree lhs = gimple_assign_lhs (stmt);
7366 tree type = unsigned_type_for (TREE_TYPE (lhs));
7367 gimple_seq stmts = NULL;
7368 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7369 {
74e3c262
RB
7370 tree op = gimple_op (stmt, i);
7371 op = gimple_convert (&stmts, type, op);
7372 gimple_set_op (stmt, i, op);
19e51b40
JJ
7373 }
7374 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7375 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7376 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7377 gimple_seq_add_stmt (&stmts, stmt);
355fe088 7378 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
19e51b40
JJ
7379 gimple_seq_add_stmt (&stmts, cvt);
7380
7381 return stmts;
7382}
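/* For example (an illustrative sketch of the rewrite above): a signed
   addition

     x_1 = a_2 + b_3;        // signed int, overflow is undefined

   is returned as the sequence

     _4 = (unsigned int) a_2;
     _5 = (unsigned int) b_3;
     _6 = _4 + _5;           // unsigned wrap-around is well defined
     x_1 = (int) _6;

   Callers typically guard the transform with
   arith_code_with_undefined_signed_overflow and then splice the returned
   sequence in place of the original statement.  */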
d4f5cd5e 7383
3d2cf79f 7384
c26de36d
RB
7385/* The valueization hook we use for the gimple_build API simplification.
7386 This makes us match fold_buildN behavior by only combining with
7387 statements in the sequence(s) we are currently building. */
7388
7389static tree
7390gimple_build_valueize (tree op)
7391{
7392 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7393 return op;
7394 return NULL_TREE;
7395}
7396
3d2cf79f 7397/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 7398 simplifying it first if possible. Returns the built
3d2cf79f
RB
7399 expression value and appends statements possibly defining it
7400 to SEQ. */
7401
7402tree
7403gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7404 enum tree_code code, tree type, tree op0)
3d2cf79f 7405{
c26de36d 7406 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
7407 if (!res)
7408 {
a15ebbcd 7409 res = create_tmp_reg_or_ssa_name (type);
355fe088 7410 gimple *stmt;
3d2cf79f
RB
7411 if (code == REALPART_EXPR
7412 || code == IMAGPART_EXPR
7413 || code == VIEW_CONVERT_EXPR)
0d0e4a03 7414 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 7415 else
0d0e4a03 7416 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
7417 gimple_set_location (stmt, loc);
7418 gimple_seq_add_stmt_without_update (seq, stmt);
7419 }
7420 return res;
7421}
7422
7423/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 7424 simplifying it first if possible. Returns the built
3d2cf79f
RB
7425 expression value and appends statements possibly defining it
7426 to SEQ. */
7427
7428tree
7429gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7430 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 7431{
c26de36d 7432 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
7433 if (!res)
7434 {
a15ebbcd 7435 res = create_tmp_reg_or_ssa_name (type);
355fe088 7436 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
7437 gimple_set_location (stmt, loc);
7438 gimple_seq_add_stmt_without_update (seq, stmt);
7439 }
7440 return res;
7441}
7442
7443/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 7444 simplifying it first if possible. Returns the built
3d2cf79f
RB
7445 expression value and appends statements possibly defining it
7446 to SEQ. */
7447
7448tree
7449gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7450 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
7451{
7452 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 7453 seq, gimple_build_valueize);
3d2cf79f
RB
7454 if (!res)
7455 {
a15ebbcd 7456 res = create_tmp_reg_or_ssa_name (type);
355fe088 7457 gimple *stmt;
3d2cf79f 7458 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
7459 stmt = gimple_build_assign (res, code,
7460 build3 (code, type, op0, op1, op2));
3d2cf79f 7461 else
0d0e4a03 7462 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
7463 gimple_set_location (stmt, loc);
7464 gimple_seq_add_stmt_without_update (seq, stmt);
7465 }
7466 return res;
7467}
7468
7469/* Build the call FN (ARG0) with a result of type TYPE
7470 (or no result if TYPE is void) with location LOC,
c26de36d 7471 simplifying it first if possible. Returns the built
3d2cf79f
RB
7472 expression value (or NULL_TREE if TYPE is void) and appends
7473 statements possibly defining it to SEQ. */
7474
7475tree
eb69361d
RS
7476gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7477 tree type, tree arg0)
3d2cf79f 7478{
c26de36d 7479 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
7480 if (!res)
7481 {
eb69361d
RS
7482 gcall *stmt;
7483 if (internal_fn_p (fn))
7484 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7485 else
7486 {
7487 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7488 stmt = gimple_build_call (decl, 1, arg0);
7489 }
3d2cf79f
RB
7490 if (!VOID_TYPE_P (type))
7491 {
a15ebbcd 7492 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7493 gimple_call_set_lhs (stmt, res);
7494 }
7495 gimple_set_location (stmt, loc);
7496 gimple_seq_add_stmt_without_update (seq, stmt);
7497 }
7498 return res;
7499}
7500
7501/* Build the call FN (ARG0, ARG1) with a result of type TYPE
7502 (or no result if TYPE is void) with location LOC,
c26de36d 7503 simplifying it first if possible. Returns the built
3d2cf79f
RB
7504 expression value (or NULL_TREE if TYPE is void) and appends
7505 statements possibly defining it to SEQ. */
7506
7507tree
eb69361d
RS
7508gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7509 tree type, tree arg0, tree arg1)
3d2cf79f 7510{
c26de36d 7511 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
7512 if (!res)
7513 {
eb69361d
RS
7514 gcall *stmt;
7515 if (internal_fn_p (fn))
7516 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7517 else
7518 {
7519 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7520 stmt = gimple_build_call (decl, 2, arg0, arg1);
7521 }
3d2cf79f
RB
7522 if (!VOID_TYPE_P (type))
7523 {
a15ebbcd 7524 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7525 gimple_call_set_lhs (stmt, res);
7526 }
7527 gimple_set_location (stmt, loc);
7528 gimple_seq_add_stmt_without_update (seq, stmt);
7529 }
7530 return res;
7531}
7532
7533/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7534 (or no result if TYPE is void) with location LOC,
c26de36d 7535 simplifying it first if possible. Returns the built
3d2cf79f
RB
7536 expression value (or NULL_TREE if TYPE is void) and appends
7537 statements possibly defining it to SEQ. */
7538
7539tree
eb69361d
RS
7540gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7541 tree type, tree arg0, tree arg1, tree arg2)
3d2cf79f 7542{
c26de36d
RB
7543 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7544 seq, gimple_build_valueize);
3d2cf79f
RB
7545 if (!res)
7546 {
eb69361d
RS
7547 gcall *stmt;
7548 if (internal_fn_p (fn))
7549 stmt = gimple_build_call_internal (as_internal_fn (fn),
7550 3, arg0, arg1, arg2);
7551 else
7552 {
7553 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7554 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7555 }
3d2cf79f
RB
7556 if (!VOID_TYPE_P (type))
7557 {
a15ebbcd 7558 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7559 gimple_call_set_lhs (stmt, res);
7560 }
7561 gimple_set_location (stmt, loc);
7562 gimple_seq_add_stmt_without_update (seq, stmt);
7563 }
7564 return res;
7565}
7566
7567/* Build the conversion (TYPE) OP with a result of type TYPE
7568   with location LOC if such conversion is necessary in GIMPLE,
7569 simplifying it first.
7570 Returns the built expression value and appends
7571 statements possibly defining it to SEQ. */
d4f5cd5e
RB
7572
7573tree
7574gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7575{
7576 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7577 return op;
3d2cf79f 7578 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 7579}
68e57f04 7580
74e3c262
RB
7581/* Build the conversion (ptrofftype) OP with a result of a type
7582 compatible with ptrofftype with location LOC if such conversion
7583   is necessary in GIMPLE, simplifying it first.
7584 Returns the built expression value and appends
7585 statements possibly defining it to SEQ. */
7586
7587tree
7588gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7589{
7590 if (ptrofftype_p (TREE_TYPE (op)))
7591 return op;
7592 return gimple_convert (seq, loc, sizetype, op);
7593}
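/* An illustrative sketch (not part of GCC): using the gimple_build API to
   materialize '(sizetype) n + 1' and emit the statements before GSI.  The
   variables n, loc and gsi are assumed to exist in the caller.

     gimple_seq seq = NULL;
     tree n_off = gimple_convert (&seq, loc, sizetype, n);
     tree sum = gimple_build (&seq, loc, PLUS_EXPR, sizetype,
			      n_off, size_one_node);
     gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

   Each helper first tries gimple_simplify, so if N is already a constant
   the sequence may end up empty and SUM is simply the folded constant.  */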
7594
e7c45b66
RS
7595/* Build a vector of type TYPE in which each element has the value OP.
7596 Return a gimple value for the result, appending any new statements
7597 to SEQ. */
7598
7599tree
7600gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7601 tree op)
7602{
928686b1
RS
7603 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7604 && !CONSTANT_CLASS_P (op))
7605 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7606
e7c45b66
RS
7607 tree res, vec = build_vector_from_val (type, op);
7608 if (is_gimple_val (vec))
7609 return vec;
7610 if (gimple_in_ssa_p (cfun))
7611 res = make_ssa_name (type);
7612 else
7613 res = create_tmp_reg (type);
7614 gimple *stmt = gimple_build_assign (res, vec);
7615 gimple_set_location (stmt, loc);
7616 gimple_seq_add_stmt_without_update (seq, stmt);
7617 return res;
7618}
7619
abe73c3d
RS
7620/* Build a vector from BUILDER, handling the case in which some elements
7621 are non-constant. Return a gimple value for the result, appending any
7622 new instructions to SEQ.
7623
7624 BUILDER must not have a stepped encoding on entry. This is because
7625 the function is not geared up to handle the arithmetic that would
7626 be needed in the variable case, and any code building a vector that
7627 is known to be constant should use BUILDER->build () directly. */
e7c45b66
RS
7628
7629tree
abe73c3d
RS
7630gimple_build_vector (gimple_seq *seq, location_t loc,
7631 tree_vector_builder *builder)
e7c45b66 7632{
abe73c3d
RS
7633 gcc_assert (builder->nelts_per_pattern () <= 2);
7634 unsigned int encoded_nelts = builder->encoded_nelts ();
7635 for (unsigned int i = 0; i < encoded_nelts; ++i)
7636 if (!TREE_CONSTANT ((*builder)[i]))
e7c45b66 7637 {
abe73c3d 7638 tree type = builder->type ();
928686b1 7639 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
e7c45b66
RS
7640 vec<constructor_elt, va_gc> *v;
7641 vec_alloc (v, nelts);
7642 for (i = 0; i < nelts; ++i)
abe73c3d 7643 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
e7c45b66
RS
7644
7645 tree res;
7646 if (gimple_in_ssa_p (cfun))
7647 res = make_ssa_name (type);
7648 else
7649 res = create_tmp_reg (type);
7650 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7651 gimple_set_location (stmt, loc);
7652 gimple_seq_add_stmt_without_update (seq, stmt);
7653 return res;
7654 }
abe73c3d 7655 return builder->build ();
e7c45b66
RS
7656}
7657
68e57f04
RS
7658/* Return true if the result of assignment STMT is known to be non-negative.
7659 If the return value is based on the assumption that signed overflow is
7660 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7661 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7662
7663static bool
7664gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7665 int depth)
7666{
7667 enum tree_code code = gimple_assign_rhs_code (stmt);
7668 switch (get_gimple_rhs_class (code))
7669 {
7670 case GIMPLE_UNARY_RHS:
7671 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7672 gimple_expr_type (stmt),
7673 gimple_assign_rhs1 (stmt),
7674 strict_overflow_p, depth);
7675 case GIMPLE_BINARY_RHS:
7676 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7677 gimple_expr_type (stmt),
7678 gimple_assign_rhs1 (stmt),
7679 gimple_assign_rhs2 (stmt),
7680 strict_overflow_p, depth);
7681 case GIMPLE_TERNARY_RHS:
7682 return false;
7683 case GIMPLE_SINGLE_RHS:
7684 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7685 strict_overflow_p, depth);
7686 case GIMPLE_INVALID_RHS:
7687 break;
7688 }
7689 gcc_unreachable ();
7690}
7691
7692/* Return true if return value of call STMT is known to be non-negative.
7693 If the return value is based on the assumption that signed overflow is
7694 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7695 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7696
7697static bool
7698gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7699 int depth)
7700{
7701 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7702 gimple_call_arg (stmt, 0) : NULL_TREE;
7703 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7704 gimple_call_arg (stmt, 1) : NULL_TREE;
7705
7706 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
1d9da71f 7707 gimple_call_combined_fn (stmt),
68e57f04
RS
7708 arg0,
7709 arg1,
7710 strict_overflow_p, depth);
7711}
7712
4534c203
RB
7713/* Return true if return value of call STMT is known to be non-negative.
7714 If the return value is based on the assumption that signed overflow is
7715 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7716 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7717
7718static bool
7719gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7720 int depth)
7721{
7722 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7723 {
7724 tree arg = gimple_phi_arg_def (stmt, i);
7725 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7726 return false;
7727 }
7728 return true;
7729}
7730
68e57f04
RS
7731/* Return true if STMT is known to compute a non-negative value.
7732 If the return value is based on the assumption that signed overflow is
7733 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7734 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7735
7736bool
7737gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7738 int depth)
7739{
7740 switch (gimple_code (stmt))
7741 {
7742 case GIMPLE_ASSIGN:
7743 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7744 depth);
7745 case GIMPLE_CALL:
7746 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7747 depth);
4534c203
RB
7748 case GIMPLE_PHI:
7749 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7750 depth);
68e57f04
RS
7751 default:
7752 return false;
7753 }
7754}
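/* An illustrative sketch (not part of GCC): querying the above for the
   defining statement of an SSA name.

     bool strict_overflow_p = false;
     gimple *def = SSA_NAME_DEF_STMT (name);
     if (gimple_stmt_nonnegative_warnv_p (def, &strict_overflow_p, 0))
       {
	 /+ NAME is known to be non-negative; if strict_overflow_p was set,
	    the result relies on signed overflow being undefined.  +/
       }

   (The inner comment markers are written as +/ here only to keep this
   example inside a single block comment.)  */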
67dbe582
RS
7755
7756/* Return true if the floating-point value computed by assignment STMT
7757 is known to have an integer value. We also allow +Inf, -Inf and NaN
5a00b0aa 7758 to be considered integer values. Return false for signaling NaN.
67dbe582
RS
7759
7760 DEPTH is the current nesting depth of the query. */
7761
7762static bool
7763gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7764{
7765 enum tree_code code = gimple_assign_rhs_code (stmt);
7766 switch (get_gimple_rhs_class (code))
7767 {
7768 case GIMPLE_UNARY_RHS:
7769 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7770 gimple_assign_rhs1 (stmt), depth);
7771 case GIMPLE_BINARY_RHS:
7772 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7773 gimple_assign_rhs1 (stmt),
7774 gimple_assign_rhs2 (stmt), depth);
7775 case GIMPLE_TERNARY_RHS:
7776 return false;
7777 case GIMPLE_SINGLE_RHS:
7778 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7779 case GIMPLE_INVALID_RHS:
7780 break;
7781 }
7782 gcc_unreachable ();
7783}
7784
7785/* Return true if the floating-point value computed by call STMT is known
7786 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7787 considered integer values. Return false for signaling NaN.
67dbe582
RS
7788
7789 DEPTH is the current nesting depth of the query. */
7790
7791static bool
7792gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7793{
7794 tree arg0 = (gimple_call_num_args (stmt) > 0
7795 ? gimple_call_arg (stmt, 0)
7796 : NULL_TREE);
7797 tree arg1 = (gimple_call_num_args (stmt) > 1
7798 ? gimple_call_arg (stmt, 1)
7799 : NULL_TREE);
1d9da71f 7800 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
7801 arg0, arg1, depth);
7802}
7803
7804/* Return true if the floating-point result of phi STMT is known to have
7805 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 7806 integer values. Return false for signaling NaN.
67dbe582
RS
7807
7808 DEPTH is the current nesting depth of the query. */
7809
7810static bool
7811gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7812{
7813 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7814 {
7815 tree arg = gimple_phi_arg_def (stmt, i);
7816 if (!integer_valued_real_single_p (arg, depth + 1))
7817 return false;
7818 }
7819 return true;
7820}
7821
7822/* Return true if the floating-point value computed by STMT is known
7823 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7824 considered integer values. Return false for signaling NaN.
67dbe582
RS
7825
7826 DEPTH is the current nesting depth of the query. */
7827
7828bool
7829gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7830{
7831 switch (gimple_code (stmt))
7832 {
7833 case GIMPLE_ASSIGN:
7834 return gimple_assign_integer_valued_real_p (stmt, depth);
7835 case GIMPLE_CALL:
7836 return gimple_call_integer_valued_real_p (stmt, depth);
7837 case GIMPLE_PHI:
7838 return gimple_phi_integer_valued_real_p (stmt, depth);
7839 default:
7840 return false;
7841 }
7842}