]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
re PR c/83117 (FAIL: gcc.target/x86_64/abi/ms-sysv/ms-sysv.c (test for excess errors))
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
cbe34bb5 2 Copyright (C) 2010-2017 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5
AM
25#include "target.h"
26#include "rtl.h"
cbdd87d4 27#include "tree.h"
c7131fb2 28#include "gimple.h"
957060b5 29#include "predict.h"
c7131fb2 30#include "ssa.h"
957060b5
AM
31#include "cgraph.h"
32#include "gimple-pretty-print.h"
c7131fb2 33#include "fold-const.h"
36566b39
PK
34#include "stmt.h"
35#include "expr.h"
36#include "stor-layout.h"
7ee2468b 37#include "dumpfile.h"
2fb9a547 38#include "gimple-fold.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
442b4905
AM
41#include "tree-into-ssa.h"
42#include "tree-dfa.h"
025d57f0 43#include "tree-object-size.h"
7a300452 44#include "tree-ssa.h"
cbdd87d4 45#include "tree-ssa-propagate.h"
450ad0cd 46#include "ipa-utils.h"
4484a35a 47#include "tree-ssa-address.h"
862d0b35 48#include "langhooks.h"
19e51b40 49#include "gimplify-me.h"
2b5f0895 50#include "dbgcnt.h"
9b2b7279 51#include "builtins.h"
e0ee10ed
RB
52#include "tree-eh.h"
53#include "gimple-match.h"
48126138 54#include "gomp-constants.h"
f869c12f 55#include "optabs-query.h"
629b3d75 56#include "omp-general.h"
3de2a40e 57#include "ipa-chkp.h"
abd3a68c 58#include "tree-cfg.h"
a918bfbf 59#include "fold-const-call.h"
314e6352
ML
60#include "stringpool.h"
61#include "attribs.h"
45b2222a 62#include "asan.h"
025d57f0
MS
63#include "diagnostic-core.h"
64#include "intl.h"
6a33d0ff 65#include "calls.h"
cbdd87d4 66
b3b9f3d0 67/* Return true when DECL can be referenced from current unit.
c44c2088
JH
68 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
69 We can get declarations that are not possible to reference for various
70 reasons:
1389294c 71
1389294c
JH
72 1) When analyzing C++ virtual tables.
73 C++ virtual tables do have known constructors even
74 when they are keyed to other compilation unit.
75 Those tables can contain pointers to methods and vars
76 in other units. Those methods have both STATIC and EXTERNAL
77 set.
78 2) In WHOPR mode devirtualization might lead to reference
79 to method that was partitioned elsehwere.
80 In this case we have static VAR_DECL or FUNCTION_DECL
81 that has no corresponding callgraph/varpool node
b3b9f3d0
JH
82 declaring the body.
83 3) COMDAT functions referred by external vtables that
3e89949e 84 we devirtualize only during final compilation stage.
b3b9f3d0
JH
85 At this time we already decided that we will not output
86 the function body and thus we can't reference the symbol
87 directly. */
88
1389294c 89static bool
c44c2088 90can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
1389294c 91{
2c8326a5 92 varpool_node *vnode;
1389294c 93 struct cgraph_node *node;
5e20cdc9 94 symtab_node *snode;
c44c2088 95
00de328a 96 if (DECL_ABSTRACT_P (decl))
1632a686
JH
97 return false;
98
99 /* We are concerned only about static/external vars and functions. */
100 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
8813a647 101 || !VAR_OR_FUNCTION_DECL_P (decl))
1632a686
JH
102 return true;
103
104 /* Static objects can be referred only if they was not optimized out yet. */
105 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
106 {
3aaf0529
JH
107 /* Before we start optimizing unreachable code we can be sure all
108 static objects are defined. */
3dafb85c 109 if (symtab->function_flags_ready)
3aaf0529 110 return true;
d52f5295 111 snode = symtab_node::get (decl);
3aaf0529 112 if (!snode || !snode->definition)
1632a686 113 return false;
7de90a6c 114 node = dyn_cast <cgraph_node *> (snode);
1632a686
JH
115 return !node || !node->global.inlined_to;
116 }
117
6da8be89 118 /* We will later output the initializer, so we can refer to it.
c44c2088 119 So we are concerned only when DECL comes from initializer of
3aaf0529 120 external var or var that has been optimized out. */
c44c2088 121 if (!from_decl
8813a647 122 || !VAR_P (from_decl)
3aaf0529 123 || (!DECL_EXTERNAL (from_decl)
9041d2e6 124 && (vnode = varpool_node::get (from_decl)) != NULL
3aaf0529 125 && vnode->definition)
6da8be89 126 || (flag_ltrans
9041d2e6 127 && (vnode = varpool_node::get (from_decl)) != NULL
6adda80b 128 && vnode->in_other_partition))
c44c2088 129 return true;
c44c2088
JH
130 /* We are folding reference from external vtable. The vtable may reffer
131 to a symbol keyed to other compilation unit. The other compilation
132 unit may be in separate DSO and the symbol may be hidden. */
133 if (DECL_VISIBILITY_SPECIFIED (decl)
134 && DECL_EXTERNAL (decl)
a33a931b 135 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
d52f5295 136 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
c44c2088 137 return false;
b3b9f3d0
JH
138 /* When function is public, we always can introduce new reference.
139 Exception are the COMDAT functions where introducing a direct
140 reference imply need to include function body in the curren tunit. */
141 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
142 return true;
3aaf0529
JH
143 /* We have COMDAT. We are going to check if we still have definition
144 or if the definition is going to be output in other partition.
145 Bypass this when gimplifying; all needed functions will be produced.
c44c2088
JH
146
147 As observed in PR20991 for already optimized out comdat virtual functions
073a8998 148 it may be tempting to not necessarily give up because the copy will be
c44c2088
JH
149 output elsewhere when corresponding vtable is output.
150 This is however not possible - ABI specify that COMDATs are output in
151 units where they are used and when the other unit was compiled with LTO
152 it is possible that vtable was kept public while the function itself
153 was privatized. */
3dafb85c 154 if (!symtab->function_flags_ready)
b3b9f3d0 155 return true;
c44c2088 156
d52f5295 157 snode = symtab_node::get (decl);
3aaf0529
JH
158 if (!snode
159 || ((!snode->definition || DECL_EXTERNAL (decl))
160 && (!snode->in_other_partition
161 || (!snode->forced_by_abi && !snode->force_output))))
162 return false;
163 node = dyn_cast <cgraph_node *> (snode);
164 return !node || !node->global.inlined_to;
1389294c
JH
165}
166
a15ebbcd
ML
167/* Create a temporary for TYPE for a statement STMT. If the current function
168 is in SSA form, a SSA name is created. Otherwise a temporary register
169 is made. */
170
edc19e03
WS
171tree
172create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
a15ebbcd
ML
173{
174 if (gimple_in_ssa_p (cfun))
175 return make_ssa_name (type, stmt);
176 else
177 return create_tmp_reg (type);
178}
179
0038d4e0 180/* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
c44c2088
JH
181 acceptable form for is_gimple_min_invariant.
182 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
17f39a39
JH
183
184tree
c44c2088 185canonicalize_constructor_val (tree cval, tree from_decl)
17f39a39 186{
50619002
EB
187 tree orig_cval = cval;
188 STRIP_NOPS (cval);
315f5f1b
RG
189 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
190 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
17f39a39 191 {
315f5f1b
RG
192 tree ptr = TREE_OPERAND (cval, 0);
193 if (is_gimple_min_invariant (ptr))
194 cval = build1_loc (EXPR_LOCATION (cval),
195 ADDR_EXPR, TREE_TYPE (ptr),
196 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
197 ptr,
198 fold_convert (ptr_type_node,
199 TREE_OPERAND (cval, 1))));
17f39a39
JH
200 }
201 if (TREE_CODE (cval) == ADDR_EXPR)
202 {
5a27a197
RG
203 tree base = NULL_TREE;
204 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
ca5f4331
MM
205 {
206 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
207 if (base)
208 TREE_OPERAND (cval, 0) = base;
209 }
5a27a197
RG
210 else
211 base = get_base_address (TREE_OPERAND (cval, 0));
7501ca28
RG
212 if (!base)
213 return NULL_TREE;
b3b9f3d0 214
8813a647 215 if (VAR_OR_FUNCTION_DECL_P (base)
c44c2088 216 && !can_refer_decl_in_current_unit_p (base, from_decl))
1389294c 217 return NULL_TREE;
13f92e8d
JJ
218 if (TREE_TYPE (base) == error_mark_node)
219 return NULL_TREE;
8813a647 220 if (VAR_P (base))
46eb666a 221 TREE_ADDRESSABLE (base) = 1;
7501ca28
RG
222 else if (TREE_CODE (base) == FUNCTION_DECL)
223 {
224 /* Make sure we create a cgraph node for functions we'll reference.
225 They can be non-existent if the reference comes from an entry
226 of an external vtable for example. */
d52f5295 227 cgraph_node::get_create (base);
7501ca28 228 }
0038d4e0 229 /* Fixup types in global initializers. */
73aef89e
RG
230 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
231 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
50619002
EB
232
233 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
234 cval = fold_convert (TREE_TYPE (orig_cval), cval);
235 return cval;
17f39a39 236 }
846abd0d
RB
237 if (TREE_OVERFLOW_P (cval))
238 return drop_tree_overflow (cval);
50619002 239 return orig_cval;
17f39a39 240}
cbdd87d4
RG
241
242/* If SYM is a constant variable with known value, return the value.
243 NULL_TREE is returned otherwise. */
244
245tree
246get_symbol_constant_value (tree sym)
247{
6a6dac52
JH
248 tree val = ctor_for_folding (sym);
249 if (val != error_mark_node)
cbdd87d4 250 {
cbdd87d4
RG
251 if (val)
252 {
9d60be38 253 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 254 if (val && is_gimple_min_invariant (val))
17f39a39 255 return val;
1389294c
JH
256 else
257 return NULL_TREE;
cbdd87d4
RG
258 }
259 /* Variables declared 'const' without an initializer
260 have zero as the initializer if they may not be
261 overridden at link or run time. */
262 if (!val
b8a8c472 263 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 264 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
265 }
266
267 return NULL_TREE;
268}
269
270
cbdd87d4
RG
271
272/* Subroutine of fold_stmt. We perform several simplifications of the
273 memory reference tree EXPR and make sure to re-gimplify them properly
274 after propagation of constant addresses. IS_LHS is true if the
275 reference is supposed to be an lvalue. */
276
277static tree
278maybe_fold_reference (tree expr, bool is_lhs)
279{
17f39a39 280 tree result;
cbdd87d4 281
f0eddb90
RG
282 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
283 || TREE_CODE (expr) == REALPART_EXPR
284 || TREE_CODE (expr) == IMAGPART_EXPR)
285 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
286 return fold_unary_loc (EXPR_LOCATION (expr),
287 TREE_CODE (expr),
288 TREE_TYPE (expr),
289 TREE_OPERAND (expr, 0));
290 else if (TREE_CODE (expr) == BIT_FIELD_REF
291 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
292 return fold_ternary_loc (EXPR_LOCATION (expr),
293 TREE_CODE (expr),
294 TREE_TYPE (expr),
295 TREE_OPERAND (expr, 0),
296 TREE_OPERAND (expr, 1),
297 TREE_OPERAND (expr, 2));
298
f0eddb90
RG
299 if (!is_lhs
300 && (result = fold_const_aggregate_ref (expr))
301 && is_gimple_min_invariant (result))
302 return result;
cbdd87d4 303
cbdd87d4
RG
304 return NULL_TREE;
305}
306
307
308/* Attempt to fold an assignment statement pointed-to by SI. Returns a
309 replacement rhs for the statement or NULL_TREE if no simplification
310 could be made. It is assumed that the operands have been previously
311 folded. */
312
313static tree
314fold_gimple_assign (gimple_stmt_iterator *si)
315{
355fe088 316 gimple *stmt = gsi_stmt (*si);
cbdd87d4
RG
317 enum tree_code subcode = gimple_assign_rhs_code (stmt);
318 location_t loc = gimple_location (stmt);
319
320 tree result = NULL_TREE;
321
322 switch (get_gimple_rhs_class (subcode))
323 {
324 case GIMPLE_SINGLE_RHS:
325 {
326 tree rhs = gimple_assign_rhs1 (stmt);
327
8c00ba08
JW
328 if (TREE_CLOBBER_P (rhs))
329 return NULL_TREE;
330
4e71066d 331 if (REFERENCE_CLASS_P (rhs))
cbdd87d4
RG
332 return maybe_fold_reference (rhs, false);
333
bdf37f7a
JH
334 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
335 {
336 tree val = OBJ_TYPE_REF_EXPR (rhs);
337 if (is_gimple_min_invariant (val))
338 return val;
f8a39967 339 else if (flag_devirtualize && virtual_method_call_p (rhs))
bdf37f7a
JH
340 {
341 bool final;
342 vec <cgraph_node *>targets
f8a39967 343 = possible_polymorphic_call_targets (rhs, stmt, &final);
2b5f0895 344 if (final && targets.length () <= 1 && dbg_cnt (devirt))
bdf37f7a 345 {
2b5f0895
XDL
346 if (dump_enabled_p ())
347 {
807b7d62 348 location_t loc = gimple_location_safe (stmt);
2b5f0895
XDL
349 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
350 "resolving virtual function address "
351 "reference to function %s\n",
352 targets.length () == 1
353 ? targets[0]->name ()
3ef276e4 354 : "NULL");
2b5f0895 355 }
3ef276e4
RB
356 if (targets.length () == 1)
357 {
358 val = fold_convert (TREE_TYPE (val),
359 build_fold_addr_expr_loc
360 (loc, targets[0]->decl));
361 STRIP_USELESS_TYPE_CONVERSION (val);
362 }
363 else
364 /* We can not use __builtin_unreachable here because it
365 can not have address taken. */
366 val = build_int_cst (TREE_TYPE (val), 0);
bdf37f7a
JH
367 return val;
368 }
369 }
bdf37f7a 370 }
7524f419 371
cbdd87d4
RG
372 else if (TREE_CODE (rhs) == ADDR_EXPR)
373 {
70f34814
RG
374 tree ref = TREE_OPERAND (rhs, 0);
375 tree tem = maybe_fold_reference (ref, true);
376 if (tem
377 && TREE_CODE (tem) == MEM_REF
378 && integer_zerop (TREE_OPERAND (tem, 1)))
379 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
380 else if (tem)
cbdd87d4
RG
381 result = fold_convert (TREE_TYPE (rhs),
382 build_fold_addr_expr_loc (loc, tem));
70f34814
RG
383 else if (TREE_CODE (ref) == MEM_REF
384 && integer_zerop (TREE_OPERAND (ref, 1)))
385 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
7524f419
RB
386
387 if (result)
388 {
389 /* Strip away useless type conversions. Both the
390 NON_LVALUE_EXPR that may have been added by fold, and
391 "useless" type conversions that might now be apparent
392 due to propagation. */
393 STRIP_USELESS_TYPE_CONVERSION (result);
394
395 if (result != rhs && valid_gimple_rhs_p (result))
396 return result;
397 }
cbdd87d4
RG
398 }
399
400 else if (TREE_CODE (rhs) == CONSTRUCTOR
7524f419 401 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
cbdd87d4
RG
402 {
403 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
404 unsigned i;
405 tree val;
406
407 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7524f419 408 if (! CONSTANT_CLASS_P (val))
cbdd87d4
RG
409 return NULL_TREE;
410
411 return build_vector_from_ctor (TREE_TYPE (rhs),
412 CONSTRUCTOR_ELTS (rhs));
413 }
414
415 else if (DECL_P (rhs))
9d60be38 416 return get_symbol_constant_value (rhs);
cbdd87d4
RG
417 }
418 break;
419
420 case GIMPLE_UNARY_RHS:
cbdd87d4
RG
421 break;
422
423 case GIMPLE_BINARY_RHS:
cbdd87d4
RG
424 break;
425
0354c0c7 426 case GIMPLE_TERNARY_RHS:
5c099d40
RB
427 result = fold_ternary_loc (loc, subcode,
428 TREE_TYPE (gimple_assign_lhs (stmt)),
429 gimple_assign_rhs1 (stmt),
430 gimple_assign_rhs2 (stmt),
431 gimple_assign_rhs3 (stmt));
0354c0c7
BS
432
433 if (result)
434 {
435 STRIP_USELESS_TYPE_CONVERSION (result);
436 if (valid_gimple_rhs_p (result))
437 return result;
0354c0c7
BS
438 }
439 break;
440
cbdd87d4
RG
441 case GIMPLE_INVALID_RHS:
442 gcc_unreachable ();
443 }
444
445 return NULL_TREE;
446}
447
fef5a0d9
RB
448
449/* Replace a statement at *SI_P with a sequence of statements in STMTS,
450 adjusting the replacement stmts location and virtual operands.
451 If the statement has a lhs the last stmt in the sequence is expected
452 to assign to that lhs. */
453
454static void
455gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
456{
355fe088 457 gimple *stmt = gsi_stmt (*si_p);
fef5a0d9
RB
458
459 if (gimple_has_location (stmt))
460 annotate_all_with_location (stmts, gimple_location (stmt));
461
462 /* First iterate over the replacement statements backward, assigning
463 virtual operands to their defining statements. */
355fe088 464 gimple *laststore = NULL;
fef5a0d9
RB
465 for (gimple_stmt_iterator i = gsi_last (stmts);
466 !gsi_end_p (i); gsi_prev (&i))
467 {
355fe088 468 gimple *new_stmt = gsi_stmt (i);
fef5a0d9
RB
469 if ((gimple_assign_single_p (new_stmt)
470 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
471 || (is_gimple_call (new_stmt)
472 && (gimple_call_flags (new_stmt)
473 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
474 {
475 tree vdef;
476 if (!laststore)
477 vdef = gimple_vdef (stmt);
478 else
479 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
480 gimple_set_vdef (new_stmt, vdef);
481 if (vdef && TREE_CODE (vdef) == SSA_NAME)
482 SSA_NAME_DEF_STMT (vdef) = new_stmt;
483 laststore = new_stmt;
484 }
485 }
486
487 /* Second iterate over the statements forward, assigning virtual
488 operands to their uses. */
489 tree reaching_vuse = gimple_vuse (stmt);
490 for (gimple_stmt_iterator i = gsi_start (stmts);
491 !gsi_end_p (i); gsi_next (&i))
492 {
355fe088 493 gimple *new_stmt = gsi_stmt (i);
fef5a0d9
RB
494 /* If the new statement possibly has a VUSE, update it with exact SSA
495 name we know will reach this one. */
496 if (gimple_has_mem_ops (new_stmt))
497 gimple_set_vuse (new_stmt, reaching_vuse);
498 gimple_set_modified (new_stmt, true);
499 if (gimple_vdef (new_stmt))
500 reaching_vuse = gimple_vdef (new_stmt);
501 }
502
503 /* If the new sequence does not do a store release the virtual
504 definition of the original statement. */
505 if (reaching_vuse
506 && reaching_vuse == gimple_vuse (stmt))
507 {
508 tree vdef = gimple_vdef (stmt);
509 if (vdef
510 && TREE_CODE (vdef) == SSA_NAME)
511 {
512 unlink_stmt_vdef (stmt);
513 release_ssa_name (vdef);
514 }
515 }
516
517 /* Finally replace the original statement with the sequence. */
518 gsi_replace_with_seq (si_p, stmts, false);
519}
520
cbdd87d4
RG
521/* Convert EXPR into a GIMPLE value suitable for substitution on the
522 RHS of an assignment. Insert the necessary statements before
523 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
524 is replaced. If the call is expected to produces a result, then it
525 is replaced by an assignment of the new RHS to the result variable.
526 If the result is to be ignored, then the call is replaced by a
fe2ef088
MM
527 GIMPLE_NOP. A proper VDEF chain is retained by making the first
528 VUSE and the last VDEF of the whole sequence be the same as the replaced
529 statement and using new SSA names for stores in between. */
cbdd87d4
RG
530
531void
532gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
533{
534 tree lhs;
355fe088 535 gimple *stmt, *new_stmt;
cbdd87d4 536 gimple_stmt_iterator i;
355a7673 537 gimple_seq stmts = NULL;
cbdd87d4
RG
538
539 stmt = gsi_stmt (*si_p);
540
541 gcc_assert (is_gimple_call (stmt));
542
45852dcc 543 push_gimplify_context (gimple_in_ssa_p (cfun));
cbdd87d4 544
e256dfce 545 lhs = gimple_call_lhs (stmt);
cbdd87d4 546 if (lhs == NULL_TREE)
6e572326
RG
547 {
548 gimplify_and_add (expr, &stmts);
549 /* We can end up with folding a memcpy of an empty class assignment
550 which gets optimized away by C++ gimplification. */
551 if (gimple_seq_empty_p (stmts))
552 {
9fdc58de 553 pop_gimplify_context (NULL);
6e572326
RG
554 if (gimple_in_ssa_p (cfun))
555 {
556 unlink_stmt_vdef (stmt);
557 release_defs (stmt);
558 }
f6b4dc28 559 gsi_replace (si_p, gimple_build_nop (), false);
6e572326
RG
560 return;
561 }
562 }
cbdd87d4 563 else
e256dfce 564 {
381cdae4 565 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
e256dfce
RG
566 new_stmt = gimple_build_assign (lhs, tmp);
567 i = gsi_last (stmts);
568 gsi_insert_after_without_update (&i, new_stmt,
569 GSI_CONTINUE_LINKING);
570 }
cbdd87d4
RG
571
572 pop_gimplify_context (NULL);
573
fef5a0d9
RB
574 gsi_replace_with_seq_vops (si_p, stmts);
575}
cbdd87d4 576
fef5a0d9
RB
577
578/* Replace the call at *GSI with the gimple value VAL. */
579
e3174bdf 580void
fef5a0d9
RB
581replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
582{
355fe088 583 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 584 tree lhs = gimple_call_lhs (stmt);
355fe088 585 gimple *repl;
fef5a0d9 586 if (lhs)
e256dfce 587 {
fef5a0d9
RB
588 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
589 val = fold_convert (TREE_TYPE (lhs), val);
590 repl = gimple_build_assign (lhs, val);
591 }
592 else
593 repl = gimple_build_nop ();
594 tree vdef = gimple_vdef (stmt);
595 if (vdef && TREE_CODE (vdef) == SSA_NAME)
596 {
597 unlink_stmt_vdef (stmt);
598 release_ssa_name (vdef);
599 }
f6b4dc28 600 gsi_replace (gsi, repl, false);
fef5a0d9
RB
601}
602
603/* Replace the call at *GSI with the new call REPL and fold that
604 again. */
605
606static void
355fe088 607replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
fef5a0d9 608{
355fe088 609 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
610 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
611 gimple_set_location (repl, gimple_location (stmt));
612 if (gimple_vdef (stmt)
613 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
614 {
615 gimple_set_vdef (repl, gimple_vdef (stmt));
fef5a0d9
RB
616 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
617 }
00296d7f
JJ
618 if (gimple_vuse (stmt))
619 gimple_set_vuse (repl, gimple_vuse (stmt));
f6b4dc28 620 gsi_replace (gsi, repl, false);
fef5a0d9
RB
621 fold_stmt (gsi);
622}
623
624/* Return true if VAR is a VAR_DECL or a component thereof. */
625
626static bool
627var_decl_component_p (tree var)
628{
629 tree inner = var;
630 while (handled_component_p (inner))
631 inner = TREE_OPERAND (inner, 0);
632 return SSA_VAR_P (inner);
633}
634
6512c0f1
MS
635/* If the SIZE argument representing the size of an object is in a range
636 of values of which exactly one is valid (and that is zero), return
637 true, otherwise false. */
638
639static bool
640size_must_be_zero_p (tree size)
641{
642 if (integer_zerop (size))
643 return true;
644
645 if (TREE_CODE (size) != SSA_NAME)
646 return false;
647
648 wide_int min, max;
649 enum value_range_type rtype = get_range_info (size, &min, &max);
650 if (rtype != VR_ANTI_RANGE)
651 return false;
652
653 tree type = TREE_TYPE (size);
654 int prec = TYPE_PRECISION (type);
655
656 wide_int wone = wi::one (prec);
657
658 /* Compute the value of SSIZE_MAX, the largest positive value that
659 can be stored in ssize_t, the signed counterpart of size_t. */
660 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
661
662 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
663}
664
fef5a0d9 665/* Fold function call to builtin mem{{,p}cpy,move}. Return
86c5a5c3 666 false if no simplification can be made.
fef5a0d9
RB
667 If ENDP is 0, return DEST (like memcpy).
668 If ENDP is 1, return DEST+LEN (like mempcpy).
669 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
670 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
671 (memmove). */
672
673static bool
674gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
675 tree dest, tree src, int endp)
676{
355fe088 677 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
678 tree lhs = gimple_call_lhs (stmt);
679 tree len = gimple_call_arg (stmt, 2);
680 tree destvar, srcvar;
681 location_t loc = gimple_location (stmt);
682
6512c0f1
MS
683 /* If the LEN parameter is a constant zero or in range where
684 the only valid value is zero, return DEST. */
685 if (size_must_be_zero_p (len))
fef5a0d9 686 {
355fe088 687 gimple *repl;
fef5a0d9
RB
688 if (gimple_call_lhs (stmt))
689 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
690 else
691 repl = gimple_build_nop ();
692 tree vdef = gimple_vdef (stmt);
693 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 694 {
fef5a0d9
RB
695 unlink_stmt_vdef (stmt);
696 release_ssa_name (vdef);
697 }
f6b4dc28 698 gsi_replace (gsi, repl, false);
fef5a0d9
RB
699 return true;
700 }
701
702 /* If SRC and DEST are the same (and not volatile), return
703 DEST{,+LEN,+LEN-1}. */
704 if (operand_equal_p (src, dest, 0))
705 {
706 unlink_stmt_vdef (stmt);
707 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
708 release_ssa_name (gimple_vdef (stmt));
709 if (!lhs)
710 {
f6b4dc28 711 gsi_replace (gsi, gimple_build_nop (), false);
fef5a0d9
RB
712 return true;
713 }
714 goto done;
715 }
716 else
717 {
718 tree srctype, desttype;
719 unsigned int src_align, dest_align;
720 tree off0;
721
3de2a40e
IE
722 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
723 pointers as wide integer) and also may result in huge function
724 size because of inlined bounds copy. Thus don't inline for
725 functions we want to instrument. */
726 if (flag_check_pointer_bounds
727 && chkp_instrumentable_p (cfun->decl)
728 /* Even if data may contain pointers we can inline if copy
729 less than a pointer size. */
730 && (!tree_fits_uhwi_p (len)
731 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
732 return false;
733
fef5a0d9
RB
734 /* Build accesses at offset zero with a ref-all character type. */
735 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
736 ptr_mode, true), 0);
737
738 /* If we can perform the copy efficiently with first doing all loads
739 and then all stores inline it that way. Currently efficiently
740 means that we can load all the memory into a single integer
741 register which is what MOVE_MAX gives us. */
742 src_align = get_pointer_alignment (src);
743 dest_align = get_pointer_alignment (dest);
744 if (tree_fits_uhwi_p (len)
745 && compare_tree_int (len, MOVE_MAX) <= 0
746 /* ??? Don't transform copies from strings with known length this
747 confuses the tree-ssa-strlen.c. This doesn't handle
748 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
749 reason. */
750 && !c_strlen (src, 2))
751 {
752 unsigned ilen = tree_to_uhwi (len);
146ec50f 753 if (pow2p_hwi (ilen))
fef5a0d9 754 {
64ab8765 755 scalar_int_mode mode;
fef5a0d9
RB
756 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
757 if (type
64ab8765
RS
758 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
759 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
fef5a0d9
RB
760 /* If the destination pointer is not aligned we must be able
761 to emit an unaligned store. */
64ab8765 762 && (dest_align >= GET_MODE_ALIGNMENT (mode)
e0bd6c9f 763 || !targetm.slow_unaligned_access (mode, dest_align)
64ab8765 764 || (optab_handler (movmisalign_optab, mode)
f869c12f 765 != CODE_FOR_nothing)))
fef5a0d9
RB
766 {
767 tree srctype = type;
768 tree desttype = type;
64ab8765 769 if (src_align < GET_MODE_ALIGNMENT (mode))
fef5a0d9
RB
770 srctype = build_aligned_type (type, src_align);
771 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
772 tree tem = fold_const_aggregate_ref (srcmem);
773 if (tem)
774 srcmem = tem;
64ab8765 775 else if (src_align < GET_MODE_ALIGNMENT (mode)
e0bd6c9f 776 && targetm.slow_unaligned_access (mode, src_align)
64ab8765 777 && (optab_handler (movmisalign_optab, mode)
f869c12f 778 == CODE_FOR_nothing))
fef5a0d9
RB
779 srcmem = NULL_TREE;
780 if (srcmem)
781 {
355fe088 782 gimple *new_stmt;
fef5a0d9
RB
783 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
784 {
785 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
a15ebbcd
ML
786 srcmem
787 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
788 new_stmt);
fef5a0d9
RB
789 gimple_assign_set_lhs (new_stmt, srcmem);
790 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
791 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
792 }
64ab8765 793 if (dest_align < GET_MODE_ALIGNMENT (mode))
fef5a0d9
RB
794 desttype = build_aligned_type (type, dest_align);
795 new_stmt
796 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
797 dest, off0),
798 srcmem);
799 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
800 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
801 if (gimple_vdef (new_stmt)
802 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
803 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
804 if (!lhs)
805 {
f6b4dc28 806 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
807 return true;
808 }
809 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
810 goto done;
811 }
812 }
813 }
814 }
815
816 if (endp == 3)
817 {
818 /* Both DEST and SRC must be pointer types.
819 ??? This is what old code did. Is the testing for pointer types
820 really mandatory?
821
822 If either SRC is readonly or length is 1, we can use memcpy. */
823 if (!dest_align || !src_align)
824 return false;
825 if (readonly_data_expr (src)
826 || (tree_fits_uhwi_p (len)
827 && (MIN (src_align, dest_align) / BITS_PER_UNIT
828 >= tree_to_uhwi (len))))
829 {
830 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
831 if (!fn)
832 return false;
833 gimple_call_set_fndecl (stmt, fn);
834 gimple_call_set_arg (stmt, 0, dest);
835 gimple_call_set_arg (stmt, 1, src);
836 fold_stmt (gsi);
837 return true;
838 }
839
840 /* If *src and *dest can't overlap, optimize into memcpy as well. */
841 if (TREE_CODE (src) == ADDR_EXPR
842 && TREE_CODE (dest) == ADDR_EXPR)
843 {
844 tree src_base, dest_base, fn;
845 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
4fda19ef 846 HOST_WIDE_INT maxsize;
fef5a0d9
RB
847
848 srcvar = TREE_OPERAND (src, 0);
4fda19ef
JJ
849 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
850 if (src_base == NULL)
851 src_base = srcvar;
fef5a0d9 852 destvar = TREE_OPERAND (dest, 0);
4fda19ef
JJ
853 dest_base = get_addr_base_and_unit_offset (destvar,
854 &dest_offset);
855 if (dest_base == NULL)
856 dest_base = destvar;
fef5a0d9
RB
857 if (tree_fits_uhwi_p (len))
858 maxsize = tree_to_uhwi (len);
859 else
860 maxsize = -1;
fef5a0d9
RB
861 if (SSA_VAR_P (src_base)
862 && SSA_VAR_P (dest_base))
863 {
864 if (operand_equal_p (src_base, dest_base, 0)
865 && ranges_overlap_p (src_offset, maxsize,
866 dest_offset, maxsize))
867 return false;
868 }
869 else if (TREE_CODE (src_base) == MEM_REF
870 && TREE_CODE (dest_base) == MEM_REF)
871 {
872 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
873 TREE_OPERAND (dest_base, 0), 0))
874 return false;
875 offset_int off = mem_ref_offset (src_base) + src_offset;
876 if (!wi::fits_shwi_p (off))
877 return false;
878 src_offset = off.to_shwi ();
879
880 off = mem_ref_offset (dest_base) + dest_offset;
881 if (!wi::fits_shwi_p (off))
882 return false;
883 dest_offset = off.to_shwi ();
884 if (ranges_overlap_p (src_offset, maxsize,
885 dest_offset, maxsize))
886 return false;
887 }
888 else
889 return false;
890
891 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
892 if (!fn)
893 return false;
894 gimple_call_set_fndecl (stmt, fn);
895 gimple_call_set_arg (stmt, 0, dest);
896 gimple_call_set_arg (stmt, 1, src);
897 fold_stmt (gsi);
898 return true;
899 }
900
901 /* If the destination and source do not alias optimize into
902 memcpy as well. */
903 if ((is_gimple_min_invariant (dest)
904 || TREE_CODE (dest) == SSA_NAME)
905 && (is_gimple_min_invariant (src)
906 || TREE_CODE (src) == SSA_NAME))
907 {
908 ao_ref destr, srcr;
909 ao_ref_init_from_ptr_and_size (&destr, dest, len);
910 ao_ref_init_from_ptr_and_size (&srcr, src, len);
911 if (!refs_may_alias_p_1 (&destr, &srcr, false))
912 {
913 tree fn;
914 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
915 if (!fn)
916 return false;
917 gimple_call_set_fndecl (stmt, fn);
918 gimple_call_set_arg (stmt, 0, dest);
919 gimple_call_set_arg (stmt, 1, src);
920 fold_stmt (gsi);
921 return true;
922 }
923 }
924
925 return false;
926 }
927
928 if (!tree_fits_shwi_p (len))
929 return false;
fef5a0d9
RB
930 if (!POINTER_TYPE_P (TREE_TYPE (src))
931 || !POINTER_TYPE_P (TREE_TYPE (dest)))
932 return false;
933 /* In the following try to find a type that is most natural to be
934 used for the memcpy source and destination and that allows
935 the most optimization when memcpy is turned into a plain assignment
936 using that type. In theory we could always use a char[len] type
937 but that only gains us that the destination and source possibly
938 no longer will have their address taken. */
fef5a0d9
RB
939 srctype = TREE_TYPE (TREE_TYPE (src));
940 if (TREE_CODE (srctype) == ARRAY_TYPE
941 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
42f74245 942 srctype = TREE_TYPE (srctype);
fef5a0d9
RB
943 desttype = TREE_TYPE (TREE_TYPE (dest));
944 if (TREE_CODE (desttype) == ARRAY_TYPE
945 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
42f74245 946 desttype = TREE_TYPE (desttype);
fef5a0d9
RB
947 if (TREE_ADDRESSABLE (srctype)
948 || TREE_ADDRESSABLE (desttype))
949 return false;
950
951 /* Make sure we are not copying using a floating-point mode or
952 a type whose size possibly does not match its precision. */
953 if (FLOAT_MODE_P (TYPE_MODE (desttype))
954 || TREE_CODE (desttype) == BOOLEAN_TYPE
955 || TREE_CODE (desttype) == ENUMERAL_TYPE)
956 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
957 if (FLOAT_MODE_P (TYPE_MODE (srctype))
958 || TREE_CODE (srctype) == BOOLEAN_TYPE
959 || TREE_CODE (srctype) == ENUMERAL_TYPE)
960 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
961 if (!srctype)
962 srctype = desttype;
963 if (!desttype)
964 desttype = srctype;
965 if (!srctype)
966 return false;
967
968 src_align = get_pointer_alignment (src);
969 dest_align = get_pointer_alignment (dest);
970 if (dest_align < TYPE_ALIGN (desttype)
971 || src_align < TYPE_ALIGN (srctype))
972 return false;
973
42f74245
RB
974 destvar = NULL_TREE;
975 if (TREE_CODE (dest) == ADDR_EXPR
976 && var_decl_component_p (TREE_OPERAND (dest, 0))
fef5a0d9 977 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
42f74245 978 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
fef5a0d9 979
42f74245
RB
980 srcvar = NULL_TREE;
981 if (TREE_CODE (src) == ADDR_EXPR
982 && var_decl_component_p (TREE_OPERAND (src, 0))
fef5a0d9
RB
983 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
984 {
985 if (!destvar
986 || src_align >= TYPE_ALIGN (desttype))
987 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
42f74245 988 src, off0);
fef5a0d9
RB
989 else if (!STRICT_ALIGNMENT)
990 {
991 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
992 src_align);
42f74245 993 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
fef5a0d9 994 }
fef5a0d9 995 }
fef5a0d9
RB
996
997 if (srcvar == NULL_TREE && destvar == NULL_TREE)
998 return false;
999
1000 if (srcvar == NULL_TREE)
1001 {
fef5a0d9
RB
1002 if (src_align >= TYPE_ALIGN (desttype))
1003 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1004 else
1005 {
1006 if (STRICT_ALIGNMENT)
1007 return false;
1008 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1009 src_align);
1010 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1011 }
1012 }
1013 else if (destvar == NULL_TREE)
1014 {
fef5a0d9
RB
1015 if (dest_align >= TYPE_ALIGN (srctype))
1016 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1017 else
1018 {
1019 if (STRICT_ALIGNMENT)
1020 return false;
1021 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1022 dest_align);
1023 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1024 }
1025 }
1026
355fe088 1027 gimple *new_stmt;
fef5a0d9
RB
1028 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1029 {
921b13d0
RB
1030 tree tem = fold_const_aggregate_ref (srcvar);
1031 if (tem)
1032 srcvar = tem;
1033 if (! is_gimple_min_invariant (srcvar))
1034 {
1035 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
a15ebbcd
ML
1036 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1037 new_stmt);
921b13d0
RB
1038 gimple_assign_set_lhs (new_stmt, srcvar);
1039 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1040 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1041 }
fef5a0d9
RB
1042 }
1043 new_stmt = gimple_build_assign (destvar, srcvar);
1044 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1045 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1046 if (gimple_vdef (new_stmt)
1047 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1048 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1049 if (!lhs)
1050 {
f6b4dc28 1051 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
1052 return true;
1053 }
1054 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1055 }
1056
1057done:
74e3c262 1058 gimple_seq stmts = NULL;
fef5a0d9
RB
1059 if (endp == 0 || endp == 3)
1060 len = NULL_TREE;
1061 else if (endp == 2)
74e3c262
RB
1062 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1063 ssize_int (1));
fef5a0d9 1064 if (endp == 2 || endp == 1)
74e3c262
RB
1065 {
1066 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1067 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1068 TREE_TYPE (dest), dest, len);
1069 }
fef5a0d9 1070
74e3c262 1071 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 1072 gimple *repl = gimple_build_assign (lhs, dest);
f6b4dc28 1073 gsi_replace (gsi, repl, false);
fef5a0d9
RB
1074 return true;
1075}
1076
b3d8d88e
MS
1077/* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1078 to built-in memcmp (a, b, len). */
1079
1080static bool
1081gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1082{
1083 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1084
1085 if (!fn)
1086 return false;
1087
1088 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1089
1090 gimple *stmt = gsi_stmt (*gsi);
1091 tree a = gimple_call_arg (stmt, 0);
1092 tree b = gimple_call_arg (stmt, 1);
1093 tree len = gimple_call_arg (stmt, 2);
1094
1095 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1096 replace_call_with_call_and_fold (gsi, repl);
1097
1098 return true;
1099}
1100
1101/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1102 to built-in memmove (dest, src, len). */
1103
1104static bool
1105gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1106{
1107 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1108
1109 if (!fn)
1110 return false;
1111
1112 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1113 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1114 len) into memmove (dest, src, len). */
1115
1116 gimple *stmt = gsi_stmt (*gsi);
1117 tree src = gimple_call_arg (stmt, 0);
1118 tree dest = gimple_call_arg (stmt, 1);
1119 tree len = gimple_call_arg (stmt, 2);
1120
1121 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1122 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1123 replace_call_with_call_and_fold (gsi, repl);
1124
1125 return true;
1126}
1127
1128/* Transform a call to built-in bzero (dest, len) at *GSI into one
1129 to built-in memset (dest, 0, len). */
1130
1131static bool
1132gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1133{
1134 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1135
1136 if (!fn)
1137 return false;
1138
1139 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1140
1141 gimple *stmt = gsi_stmt (*gsi);
1142 tree dest = gimple_call_arg (stmt, 0);
1143 tree len = gimple_call_arg (stmt, 1);
1144
1145 gimple_seq seq = NULL;
1146 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1147 gimple_seq_add_stmt_without_update (&seq, repl);
1148 gsi_replace_with_seq_vops (gsi, seq);
1149 fold_stmt (gsi);
1150
1151 return true;
1152}
1153
fef5a0d9
RB
1154/* Fold function call to builtin memset or bzero at *GSI setting the
1155 memory of size LEN to VAL. Return whether a simplification was made. */
1156
1157static bool
1158gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1159{
355fe088 1160 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1161 tree etype;
1162 unsigned HOST_WIDE_INT length, cval;
1163
1164 /* If the LEN parameter is zero, return DEST. */
1165 if (integer_zerop (len))
1166 {
1167 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1168 return true;
1169 }
1170
1171 if (! tree_fits_uhwi_p (len))
1172 return false;
1173
1174 if (TREE_CODE (c) != INTEGER_CST)
1175 return false;
1176
1177 tree dest = gimple_call_arg (stmt, 0);
1178 tree var = dest;
1179 if (TREE_CODE (var) != ADDR_EXPR)
1180 return false;
1181
1182 var = TREE_OPERAND (var, 0);
1183 if (TREE_THIS_VOLATILE (var))
1184 return false;
1185
1186 etype = TREE_TYPE (var);
1187 if (TREE_CODE (etype) == ARRAY_TYPE)
1188 etype = TREE_TYPE (etype);
1189
1190 if (!INTEGRAL_TYPE_P (etype)
1191 && !POINTER_TYPE_P (etype))
1192 return NULL_TREE;
1193
1194 if (! var_decl_component_p (var))
1195 return NULL_TREE;
1196
1197 length = tree_to_uhwi (len);
7a504f33 1198 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
fef5a0d9
RB
1199 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1200 return NULL_TREE;
1201
1202 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1203 return NULL_TREE;
1204
1205 if (integer_zerop (c))
1206 cval = 0;
1207 else
1208 {
1209 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1210 return NULL_TREE;
1211
1212 cval = TREE_INT_CST_LOW (c);
1213 cval &= 0xff;
1214 cval |= cval << 8;
1215 cval |= cval << 16;
1216 cval |= (cval << 31) << 1;
1217 }
1218
1219 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1220 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
fef5a0d9
RB
1221 gimple_set_vuse (store, gimple_vuse (stmt));
1222 tree vdef = gimple_vdef (stmt);
1223 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1224 {
1225 gimple_set_vdef (store, gimple_vdef (stmt));
1226 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1227 }
1228 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1229 if (gimple_call_lhs (stmt))
1230 {
355fe088 1231 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1232 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1233 }
1234 else
1235 {
1236 gimple_stmt_iterator gsi2 = *gsi;
1237 gsi_prev (gsi);
1238 gsi_remove (&gsi2, true);
1239 }
1240
1241 return true;
1242}
1243
1244
88d0c3f0
MS
1245/* Obtain the minimum and maximum string length or minimum and maximum
1246 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1247 If ARG is an SSA name variable, follow its use-def chains. When
1248 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1249 if we are unable to determine the length or value, return False.
1250 VISITED is a bitmap of visited variables.
1251 TYPE is 0 if string length should be obtained, 1 for maximum string
1252 length and 2 for maximum value ARG can have.
1253 When FUZZY is set and the length of a string cannot be determined,
1254 the function instead considers as the maximum possible length the
3f343040
MS
1255 size of a character array it may refer to.
1256 Set *FLEXP to true if the range of the string lengths has been
1257 obtained from the upper bound of an array at the end of a struct.
1258 Such an array may hold a string that's longer than its upper bound
1259 due to it being used as a poor-man's flexible array member. */
fef5a0d9
RB
1260
1261static bool
88d0c3f0 1262get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
3f343040 1263 bool fuzzy, bool *flexp)
fef5a0d9
RB
1264{
1265 tree var, val;
355fe088 1266 gimple *def_stmt;
fef5a0d9 1267
88d0c3f0
MS
1268 /* The minimum and maximum length. The MAXLEN pointer stays unchanged
1269 but MINLEN may be cleared during the execution of the function. */
1270 tree *minlen = length;
1271 tree *const maxlen = length + 1;
1272
fef5a0d9
RB
1273 if (TREE_CODE (arg) != SSA_NAME)
1274 {
1275 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1276 if (TREE_CODE (arg) == ADDR_EXPR
1277 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1278 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1279 {
1280 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1281 if (TREE_CODE (aop0) == INDIRECT_REF
1282 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
88d0c3f0 1283 return get_range_strlen (TREE_OPERAND (aop0, 0),
3f343040 1284 length, visited, type, fuzzy, flexp);
fef5a0d9
RB
1285 }
1286
1287 if (type == 2)
1288 {
1289 val = arg;
1290 if (TREE_CODE (val) != INTEGER_CST
1291 || tree_int_cst_sgn (val) < 0)
1292 return false;
1293 }
1294 else
1295 val = c_strlen (arg, 1);
88d0c3f0
MS
1296
1297 if (!val && fuzzy)
1298 {
1299 if (TREE_CODE (arg) == ADDR_EXPR)
1300 return get_range_strlen (TREE_OPERAND (arg, 0), length,
3f343040 1301 visited, type, fuzzy, flexp);
88d0c3f0
MS
1302
1303 if (TREE_CODE (arg) == COMPONENT_REF
1304 && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
1305 {
1306 /* Use the type of the member array to determine the upper
1307 bound on the length of the array. This may be overly
1308 optimistic if the array itself isn't NUL-terminated and
1309 the caller relies on the subsequent member to contain
3f343040
MS
1310 the NUL.
1311 Set *FLEXP to true if the array whose bound is being
1312 used is at the end of a struct. */
c3e46927 1313 if (array_at_struct_end_p (arg))
3f343040
MS
1314 *flexp = true;
1315
88d0c3f0
MS
1316 arg = TREE_OPERAND (arg, 1);
1317 val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
1318 if (!val || integer_zerop (val))
1319 return false;
1320 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1321 integer_one_node);
e495e31a
MS
1322 /* Set the minimum size to zero since the string in
1323 the array could have zero length. */
1324 *minlen = ssize_int (0);
88d0c3f0
MS
1325 }
1326 }
1327
fef5a0d9
RB
1328 if (!val)
1329 return false;
1330
88d0c3f0
MS
1331 if (minlen
1332 && (!*minlen
1333 || (type > 0
1334 && TREE_CODE (*minlen) == INTEGER_CST
1335 && TREE_CODE (val) == INTEGER_CST
1336 && tree_int_cst_lt (val, *minlen))))
1337 *minlen = val;
1338
1339 if (*maxlen)
fef5a0d9
RB
1340 {
1341 if (type > 0)
1342 {
88d0c3f0 1343 if (TREE_CODE (*maxlen) != INTEGER_CST
fef5a0d9
RB
1344 || TREE_CODE (val) != INTEGER_CST)
1345 return false;
1346
88d0c3f0
MS
1347 if (tree_int_cst_lt (*maxlen, val))
1348 *maxlen = val;
fef5a0d9
RB
1349 return true;
1350 }
88d0c3f0 1351 else if (simple_cst_equal (val, *maxlen) != 1)
fef5a0d9
RB
1352 return false;
1353 }
1354
88d0c3f0 1355 *maxlen = val;
fef5a0d9
RB
1356 return true;
1357 }
1358
1359 /* If ARG is registered for SSA update we cannot look at its defining
1360 statement. */
1361 if (name_registered_for_update_p (arg))
1362 return false;
1363
1364 /* If we were already here, break the infinite cycle. */
dcb7fae2
RB
1365 if (!*visited)
1366 *visited = BITMAP_ALLOC (NULL);
1367 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
fef5a0d9
RB
1368 return true;
1369
1370 var = arg;
1371 def_stmt = SSA_NAME_DEF_STMT (var);
1372
1373 switch (gimple_code (def_stmt))
1374 {
1375 case GIMPLE_ASSIGN:
1376 /* The RHS of the statement defining VAR must either have a
1377 constant length or come from another SSA_NAME with a constant
1378 length. */
1379 if (gimple_assign_single_p (def_stmt)
1380 || gimple_assign_unary_nop_p (def_stmt))
1381 {
1382 tree rhs = gimple_assign_rhs1 (def_stmt);
3f343040 1383 return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
fef5a0d9
RB
1384 }
1385 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1386 {
1387 tree op2 = gimple_assign_rhs2 (def_stmt);
1388 tree op3 = gimple_assign_rhs3 (def_stmt);
3f343040
MS
1389 return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
1390 && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
fef5a0d9
RB
1391 }
1392 return false;
1393
1394 case GIMPLE_PHI:
1395 {
1396 /* All the arguments of the PHI node must have the same constant
1397 length. */
1398 unsigned i;
1399
1400 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1401 {
1402 tree arg = gimple_phi_arg (def_stmt, i)->def;
1403
1404 /* If this PHI has itself as an argument, we cannot
1405 determine the string length of this argument. However,
1406 if we can find a constant string length for the other
1407 PHI args then we can still be sure that this is a
1408 constant string length. So be optimistic and just
1409 continue with the next argument. */
1410 if (arg == gimple_phi_result (def_stmt))
1411 continue;
1412
3f343040 1413 if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
88d0c3f0
MS
1414 {
1415 if (fuzzy)
1416 *maxlen = build_all_ones_cst (size_type_node);
1417 else
1418 return false;
1419 }
fef5a0d9
RB
1420 }
1421 }
1422 return true;
1423
1424 default:
1425 return false;
1426 }
1427}
1428
88d0c3f0
MS
1429/* Determine the minimum and maximum value or string length that ARG
1430 refers to and store each in the first two elements of MINMAXLEN.
1431 For expressions that point to strings of unknown lengths that are
1432 character arrays, use the upper bound of the array as the maximum
1433 length. For example, given an expression like 'x ? array : "xyz"'
1434 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1435 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1436 stored in array.
3f343040
MS
1437 Return true if the range of the string lengths has been obtained
1438 from the upper bound of an array at the end of a struct. Such
1439 an array may hold a string that's longer than its upper bound
1440 due to it being used as a poor-man's flexible array member. */
88d0c3f0 1441
3f343040
MS
1442bool
1443get_range_strlen (tree arg, tree minmaxlen[2])
88d0c3f0
MS
1444{
1445 bitmap visited = NULL;
1446
1447 minmaxlen[0] = NULL_TREE;
1448 minmaxlen[1] = NULL_TREE;
1449
3f343040
MS
1450 bool flexarray = false;
1451 get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);
88d0c3f0
MS
1452
1453 if (visited)
1454 BITMAP_FREE (visited);
3f343040
MS
1455
1456 return flexarray;
88d0c3f0
MS
1457}
1458
dcb7fae2
RB
1459tree
1460get_maxval_strlen (tree arg, int type)
1461{
1462 bitmap visited = NULL;
88d0c3f0 1463 tree len[2] = { NULL_TREE, NULL_TREE };
3f343040
MS
1464
1465 bool dummy;
1466 if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
88d0c3f0 1467 len[1] = NULL_TREE;
dcb7fae2
RB
1468 if (visited)
1469 BITMAP_FREE (visited);
1470
88d0c3f0 1471 return len[1];
dcb7fae2
RB
1472}
1473
fef5a0d9
RB
1474
1475/* Fold function call to builtin strcpy with arguments DEST and SRC.
1476 If LEN is not NULL, it represents the length of the string to be
1477 copied. Return NULL_TREE if no simplification can be made. */
1478
1479static bool
1480gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 1481 tree dest, tree src)
fef5a0d9 1482{
dcb7fae2 1483 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1484 tree fn;
1485
1486 /* If SRC and DEST are the same (and not volatile), return DEST. */
1487 if (operand_equal_p (src, dest, 0))
1488 {
1489 replace_call_with_value (gsi, dest);
1490 return true;
1491 }
1492
1493 if (optimize_function_for_size_p (cfun))
1494 return false;
1495
1496 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1497 if (!fn)
1498 return false;
1499
1579e1f8 1500 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1501 if (!len)
dcb7fae2 1502 return false;
fef5a0d9
RB
1503
1504 len = fold_convert_loc (loc, size_type_node, len);
1505 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1506 len = force_gimple_operand_gsi (gsi, len, true,
1507 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1508 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1509 replace_call_with_call_and_fold (gsi, repl);
1510 return true;
1511}
1512
1513/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1514 If SLEN is not NULL, it represents the length of the source string.
1515 Return NULL_TREE if no simplification can be made. */
1516
1517static bool
dcb7fae2
RB
1518gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1519 tree dest, tree src, tree len)
fef5a0d9 1520{
025d57f0
MS
1521 gimple *stmt = gsi_stmt (*gsi);
1522 location_t loc = gimple_location (stmt);
6a33d0ff 1523 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
fef5a0d9
RB
1524
1525 /* If the LEN parameter is zero, return DEST. */
1526 if (integer_zerop (len))
1527 {
6a33d0ff
MS
1528 /* Avoid warning if the destination refers to a an array/pointer
1529 decorate with attribute nonstring. */
1530 if (!nonstring)
1531 {
1532 tree fndecl = gimple_call_fndecl (stmt);
1533 gcall *call = as_a <gcall *> (stmt);
1534
1535 /* Warn about the lack of nul termination: the result is not
1536 a (nul-terminated) string. */
1537 tree slen = get_maxval_strlen (src, 0);
1538 if (slen && !integer_zerop (slen))
1539 warning_at (loc, OPT_Wstringop_truncation,
1540 "%G%qD destination unchanged after copying no bytes "
1541 "from a string of length %E",
1542 call, fndecl, slen);
1543 else
1544 warning_at (loc, OPT_Wstringop_truncation,
1545 "%G%qD destination unchanged after copying no bytes",
1546 call, fndecl);
1547 }
025d57f0 1548
fef5a0d9
RB
1549 replace_call_with_value (gsi, dest);
1550 return true;
1551 }
1552
1553 /* We can't compare slen with len as constants below if len is not a
1554 constant. */
dcb7fae2 1555 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
1556 return false;
1557
fef5a0d9 1558 /* Now, we must be passed a constant src ptr parameter. */
1579e1f8 1559 tree slen = get_maxval_strlen (src, 0);
dcb7fae2 1560 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
1561 return false;
1562
025d57f0
MS
1563 /* The size of the source string including the terminating nul. */
1564 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
fef5a0d9
RB
1565
1566 /* We do not support simplification of this case, though we do
1567 support it when expanding trees into RTL. */
1568 /* FIXME: generate a call to __builtin_memset. */
025d57f0 1569 if (tree_int_cst_lt (ssize, len))
fef5a0d9
RB
1570 return false;
1571
6a33d0ff 1572 if (!nonstring)
025d57f0 1573 {
6a33d0ff 1574 if (tree_int_cst_lt (len, slen))
025d57f0 1575 {
6a33d0ff
MS
1576 tree fndecl = gimple_call_fndecl (stmt);
1577 gcall *call = as_a <gcall *> (stmt);
1578
1579 warning_at (loc, OPT_Wstringop_truncation,
1580 (tree_int_cst_equal (size_one_node, len)
1581 ? G_("%G%qD output truncated copying %E byte "
1582 "from a string of length %E")
1583 : G_("%G%qD output truncated copying %E bytes "
1584 "from a string of length %E")),
1585 call, fndecl, len, slen);
1586 }
1587 else if (tree_int_cst_equal (len, slen))
1588 {
1589 tree fndecl = gimple_call_fndecl (stmt);
1590 gcall *call = as_a <gcall *> (stmt);
1591
1592 warning_at (loc, OPT_Wstringop_truncation,
1593 (tree_int_cst_equal (size_one_node, len)
1594 ? G_("%G%qD output truncated before terminating nul "
1595 "copying %E byte from a string of the same "
1596 "length")
1597 : G_("%G%qD output truncated before terminating nul "
1598 "copying %E bytes from a string of the same "
1599 "length")),
1600 call, fndecl, len);
025d57f0 1601 }
025d57f0
MS
1602 }
1603
fef5a0d9 1604 /* OK transform into builtin memcpy. */
025d57f0 1605 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
fef5a0d9
RB
1606 if (!fn)
1607 return false;
1608
1609 len = fold_convert_loc (loc, size_type_node, len);
1610 len = force_gimple_operand_gsi (gsi, len, true,
1611 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1612 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9 1613 replace_call_with_call_and_fold (gsi, repl);
025d57f0 1614
fef5a0d9
RB
1615 return true;
1616}
1617
71dea1dd
WD
1618/* Fold function call to builtin strchr or strrchr.
1619 If both arguments are constant, evaluate and fold the result,
1620 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
912d9ec3
WD
1621 In general strlen is significantly faster than strchr
1622 due to being a simpler operation. */
1623static bool
71dea1dd 1624gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
912d9ec3
WD
1625{
1626 gimple *stmt = gsi_stmt (*gsi);
1627 tree str = gimple_call_arg (stmt, 0);
1628 tree c = gimple_call_arg (stmt, 1);
1629 location_t loc = gimple_location (stmt);
71dea1dd
WD
1630 const char *p;
1631 char ch;
912d9ec3 1632
71dea1dd 1633 if (!gimple_call_lhs (stmt))
912d9ec3
WD
1634 return false;
1635
71dea1dd
WD
1636 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1637 {
1638 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1639
1640 if (p1 == NULL)
1641 {
1642 replace_call_with_value (gsi, integer_zero_node);
1643 return true;
1644 }
1645
1646 tree len = build_int_cst (size_type_node, p1 - p);
1647 gimple_seq stmts = NULL;
1648 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1649 POINTER_PLUS_EXPR, str, len);
1650 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1651 gsi_replace_with_seq_vops (gsi, stmts);
1652 return true;
1653 }
1654
1655 if (!integer_zerop (c))
912d9ec3
WD
1656 return false;
1657
71dea1dd 1658 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
c8952930 1659 if (is_strrchr && optimize_function_for_size_p (cfun))
71dea1dd
WD
1660 {
1661 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1662
c8952930 1663 if (strchr_fn)
71dea1dd
WD
1664 {
1665 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1666 replace_call_with_call_and_fold (gsi, repl);
1667 return true;
1668 }
1669
1670 return false;
1671 }
1672
912d9ec3
WD
1673 tree len;
1674 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1675
1676 if (!strlen_fn)
1677 return false;
1678
1679 /* Create newstr = strlen (str). */
1680 gimple_seq stmts = NULL;
1681 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1682 gimple_set_location (new_stmt, loc);
a15ebbcd 1683 len = create_tmp_reg_or_ssa_name (size_type_node);
912d9ec3
WD
1684 gimple_call_set_lhs (new_stmt, len);
1685 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1686
1687 /* Create (str p+ strlen (str)). */
1688 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1689 POINTER_PLUS_EXPR, str, len);
1690 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1691 gsi_replace_with_seq_vops (gsi, stmts);
1692 /* gsi now points at the assignment to the lhs, get a
1693 stmt iterator to the strlen.
1694 ??? We can't use gsi_for_stmt as that doesn't work when the
1695 CFG isn't built yet. */
1696 gimple_stmt_iterator gsi2 = *gsi;
1697 gsi_prev (&gsi2);
1698 fold_stmt (&gsi2);
1699 return true;
1700}
1701
c8952930
JJ
1702/* Fold function call to builtin strstr.
1703 If both arguments are constant, evaluate and fold the result,
1704 additionally fold strstr (x, "") into x and strstr (x, "c")
1705 into strchr (x, 'c'). */
1706static bool
1707gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1708{
1709 gimple *stmt = gsi_stmt (*gsi);
1710 tree haystack = gimple_call_arg (stmt, 0);
1711 tree needle = gimple_call_arg (stmt, 1);
1712 const char *p, *q;
1713
1714 if (!gimple_call_lhs (stmt))
1715 return false;
1716
1717 q = c_getstr (needle);
1718 if (q == NULL)
1719 return false;
1720
1721 if ((p = c_getstr (haystack)))
1722 {
1723 const char *r = strstr (p, q);
1724
1725 if (r == NULL)
1726 {
1727 replace_call_with_value (gsi, integer_zero_node);
1728 return true;
1729 }
1730
1731 tree len = build_int_cst (size_type_node, r - p);
1732 gimple_seq stmts = NULL;
1733 gimple *new_stmt
1734 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1735 haystack, len);
1736 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1737 gsi_replace_with_seq_vops (gsi, stmts);
1738 return true;
1739 }
1740
1741 /* For strstr (x, "") return x. */
1742 if (q[0] == '\0')
1743 {
1744 replace_call_with_value (gsi, haystack);
1745 return true;
1746 }
1747
1748 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1749 if (q[1] == '\0')
1750 {
1751 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1752 if (strchr_fn)
1753 {
1754 tree c = build_int_cst (integer_type_node, q[0]);
1755 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1756 replace_call_with_call_and_fold (gsi, repl);
1757 return true;
1758 }
1759 }
1760
1761 return false;
1762}
1763
fef5a0d9
RB
1764/* Simplify a call to the strcat builtin. DST and SRC are the arguments
1765 to the call.
1766
1767 Return NULL_TREE if no simplification was possible, otherwise return the
1768 simplified form of the call as a tree.
1769
1770 The simplified form may be a constant or other expression which
1771 computes the same value, but in a more efficient manner (including
1772 calls to other builtin functions).
1773
1774 The call may contain arguments which need to be evaluated, but
1775 which are not useful to determine the result of the call. In
1776 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1777 COMPOUND_EXPR will be an argument which must be evaluated.
1778 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1779 COMPOUND_EXPR in the chain will contain the tree for the simplified
1780 form of the builtin function call. */
1781
1782static bool
dcb7fae2 1783gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
fef5a0d9 1784{
355fe088 1785 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 1786 location_t loc = gimple_location (stmt);
fef5a0d9
RB
1787
1788 const char *p = c_getstr (src);
1789
1790 /* If the string length is zero, return the dst parameter. */
1791 if (p && *p == '\0')
1792 {
1793 replace_call_with_value (gsi, dst);
1794 return true;
1795 }
1796
1797 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1798 return false;
1799
1800 /* See if we can store by pieces into (dst + strlen(dst)). */
1801 tree newdst;
1802 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1803 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1804
1805 if (!strlen_fn || !memcpy_fn)
1806 return false;
1807
1808 /* If the length of the source string isn't computable don't
1809 split strcat into strlen and memcpy. */
dcb7fae2 1810 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1811 if (! len)
fef5a0d9
RB
1812 return false;
1813
1814 /* Create strlen (dst). */
1815 gimple_seq stmts = NULL, stmts2;
355fe088 1816 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
fef5a0d9 1817 gimple_set_location (repl, loc);
a15ebbcd 1818 newdst = create_tmp_reg_or_ssa_name (size_type_node);
fef5a0d9
RB
1819 gimple_call_set_lhs (repl, newdst);
1820 gimple_seq_add_stmt_without_update (&stmts, repl);
1821
1822 /* Create (dst p+ strlen (dst)). */
1823 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1824 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1825 gimple_seq_add_seq_without_update (&stmts, stmts2);
1826
1827 len = fold_convert_loc (loc, size_type_node, len);
1828 len = size_binop_loc (loc, PLUS_EXPR, len,
1829 build_int_cst (size_type_node, 1));
1830 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1831 gimple_seq_add_seq_without_update (&stmts, stmts2);
1832
1833 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1834 gimple_seq_add_stmt_without_update (&stmts, repl);
1835 if (gimple_call_lhs (stmt))
1836 {
1837 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1838 gimple_seq_add_stmt_without_update (&stmts, repl);
1839 gsi_replace_with_seq_vops (gsi, stmts);
1840 /* gsi now points at the assignment to the lhs, get a
1841 stmt iterator to the memcpy call.
1842 ??? We can't use gsi_for_stmt as that doesn't work when the
1843 CFG isn't built yet. */
1844 gimple_stmt_iterator gsi2 = *gsi;
1845 gsi_prev (&gsi2);
1846 fold_stmt (&gsi2);
1847 }
1848 else
1849 {
1850 gsi_replace_with_seq_vops (gsi, stmts);
1851 fold_stmt (gsi);
1852 }
1853 return true;
1854}
1855
07f1cf56
RB
1856/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1857 are the arguments to the call. */
1858
1859static bool
1860gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1861{
355fe088 1862 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
1863 tree dest = gimple_call_arg (stmt, 0);
1864 tree src = gimple_call_arg (stmt, 1);
1865 tree size = gimple_call_arg (stmt, 2);
1866 tree fn;
1867 const char *p;
1868
1869
1870 p = c_getstr (src);
1871 /* If the SRC parameter is "", return DEST. */
1872 if (p && *p == '\0')
1873 {
1874 replace_call_with_value (gsi, dest);
1875 return true;
1876 }
1877
1878 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1879 return false;
1880
1881 /* If __builtin_strcat_chk is used, assume strcat is available. */
1882 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1883 if (!fn)
1884 return false;
1885
355fe088 1886 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
1887 replace_call_with_call_and_fold (gsi, repl);
1888 return true;
1889}
1890
ad03a744
RB
1891/* Simplify a call to the strncat builtin. */
1892
1893static bool
1894gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1895{
1896 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1897 tree dst = gimple_call_arg (stmt, 0);
1898 tree src = gimple_call_arg (stmt, 1);
1899 tree len = gimple_call_arg (stmt, 2);
1900
1901 const char *p = c_getstr (src);
1902
1903 /* If the requested length is zero, or the src parameter string
1904 length is zero, return the dst parameter. */
1905 if (integer_zerop (len) || (p && *p == '\0'))
1906 {
1907 replace_call_with_value (gsi, dst);
1908 return true;
1909 }
1910
025d57f0
MS
1911 if (TREE_CODE (len) != INTEGER_CST || !p)
1912 return false;
1913
1914 unsigned srclen = strlen (p);
1915
1916 int cmpsrc = compare_tree_int (len, srclen);
1917
1918 /* Return early if the requested len is less than the string length.
1919 Warnings will be issued elsewhere later. */
1920 if (cmpsrc < 0)
1921 return false;
1922
1923 unsigned HOST_WIDE_INT dstsize;
1924
1925 bool nowarn = gimple_no_warning_p (stmt);
1926
1927 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
ad03a744 1928 {
025d57f0 1929 int cmpdst = compare_tree_int (len, dstsize);
ad03a744 1930
025d57f0
MS
1931 if (cmpdst >= 0)
1932 {
1933 tree fndecl = gimple_call_fndecl (stmt);
1934
1935 /* Strncat copies (at most) LEN bytes and always appends
1936 the terminating NUL so the specified bound should never
1937 be equal to (or greater than) the size of the destination.
1938 If it is, the copy could overflow. */
1939 location_t loc = gimple_location (stmt);
1940 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
1941 cmpdst == 0
1942 ? G_("%G%qD specified bound %E equals "
1943 "destination size")
1944 : G_("%G%qD specified bound %E exceeds "
1945 "destination size %wu"),
1946 stmt, fndecl, len, dstsize);
1947 if (nowarn)
1948 gimple_set_no_warning (stmt, true);
1949 }
1950 }
ad03a744 1951
025d57f0
MS
1952 if (!nowarn && cmpsrc == 0)
1953 {
1954 tree fndecl = gimple_call_fndecl (stmt);
1955
1956 /* To avoid certain truncation the specified bound should also
1957 not be equal to (or less than) the length of the source. */
1958 location_t loc = gimple_location (stmt);
1959 if (warning_at (loc, OPT_Wstringop_overflow_,
1960 "%G%qD specified bound %E equals source length",
1961 stmt, fndecl, len))
1962 gimple_set_no_warning (stmt, true);
ad03a744
RB
1963 }
1964
025d57f0
MS
1965 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1966
1967 /* If the replacement _DECL isn't initialized, don't do the
1968 transformation. */
1969 if (!fn)
1970 return false;
1971
1972 /* Otherwise, emit a call to strcat. */
1973 gcall *repl = gimple_build_call (fn, 2, dst, src);
1974 replace_call_with_call_and_fold (gsi, repl);
1975 return true;
ad03a744
RB
1976}
1977
745583f9
RB
1978/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1979 LEN, and SIZE. */
1980
1981static bool
1982gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1983{
355fe088 1984 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
1985 tree dest = gimple_call_arg (stmt, 0);
1986 tree src = gimple_call_arg (stmt, 1);
1987 tree len = gimple_call_arg (stmt, 2);
1988 tree size = gimple_call_arg (stmt, 3);
1989 tree fn;
1990 const char *p;
1991
1992 p = c_getstr (src);
1993 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1994 if ((p && *p == '\0')
1995 || integer_zerop (len))
1996 {
1997 replace_call_with_value (gsi, dest);
1998 return true;
1999 }
2000
2001 if (! tree_fits_uhwi_p (size))
2002 return false;
2003
2004 if (! integer_all_onesp (size))
2005 {
2006 tree src_len = c_strlen (src, 1);
2007 if (src_len
2008 && tree_fits_uhwi_p (src_len)
2009 && tree_fits_uhwi_p (len)
2010 && ! tree_int_cst_lt (len, src_len))
2011 {
2012 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2013 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2014 if (!fn)
2015 return false;
2016
355fe088 2017 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
2018 replace_call_with_call_and_fold (gsi, repl);
2019 return true;
2020 }
2021 return false;
2022 }
2023
2024 /* If __builtin_strncat_chk is used, assume strncat is available. */
2025 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2026 if (!fn)
2027 return false;
2028
355fe088 2029 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
2030 replace_call_with_call_and_fold (gsi, repl);
2031 return true;
2032}
2033
a918bfbf
ML
2034/* Build and append gimple statements to STMTS that would load a first
2035 character of a memory location identified by STR. LOC is location
2036 of the statement. */
2037
2038static tree
2039gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2040{
2041 tree var;
2042
2043 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2044 tree cst_uchar_ptr_node
2045 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2046 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2047
2048 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2049 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2050 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2051
2052 gimple_assign_set_lhs (stmt, var);
2053 gimple_seq_add_stmt_without_update (stmts, stmt);
2054
2055 return var;
2056}
2057
2058/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2059 FCODE is the name of the builtin. */
2060
2061static bool
2062gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2063{
2064 gimple *stmt = gsi_stmt (*gsi);
2065 tree callee = gimple_call_fndecl (stmt);
2066 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2067
2068 tree type = integer_type_node;
2069 tree str1 = gimple_call_arg (stmt, 0);
2070 tree str2 = gimple_call_arg (stmt, 1);
2071 tree lhs = gimple_call_lhs (stmt);
2072 HOST_WIDE_INT length = -1;
2073
2074 /* Handle strncmp and strncasecmp functions. */
2075 if (gimple_call_num_args (stmt) == 3)
2076 {
2077 tree len = gimple_call_arg (stmt, 2);
2078 if (tree_fits_uhwi_p (len))
2079 length = tree_to_uhwi (len);
2080 }
2081
2082 /* If the LEN parameter is zero, return zero. */
2083 if (length == 0)
2084 {
2085 replace_call_with_value (gsi, integer_zero_node);
2086 return true;
2087 }
2088
2089 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2090 if (operand_equal_p (str1, str2, 0))
2091 {
2092 replace_call_with_value (gsi, integer_zero_node);
2093 return true;
2094 }
2095
2096 const char *p1 = c_getstr (str1);
2097 const char *p2 = c_getstr (str2);
2098
2099 /* For known strings, return an immediate value. */
2100 if (p1 && p2)
2101 {
2102 int r = 0;
2103 bool known_result = false;
2104
2105 switch (fcode)
2106 {
2107 case BUILT_IN_STRCMP:
2108 {
2109 r = strcmp (p1, p2);
2110 known_result = true;
2111 break;
2112 }
2113 case BUILT_IN_STRNCMP:
2114 {
2115 if (length == -1)
2116 break;
2117 r = strncmp (p1, p2, length);
2118 known_result = true;
2119 break;
2120 }
2121 /* Only handleable situation is where the string are equal (result 0),
2122 which is already handled by operand_equal_p case. */
2123 case BUILT_IN_STRCASECMP:
2124 break;
2125 case BUILT_IN_STRNCASECMP:
2126 {
2127 if (length == -1)
2128 break;
2129 r = strncmp (p1, p2, length);
2130 if (r == 0)
2131 known_result = true;
2132 break;;
2133 }
2134 default:
2135 gcc_unreachable ();
2136 }
2137
2138 if (known_result)
2139 {
2140 replace_call_with_value (gsi, build_cmp_result (type, r));
2141 return true;
2142 }
2143 }
2144
2145 bool nonzero_length = length >= 1
2146 || fcode == BUILT_IN_STRCMP
2147 || fcode == BUILT_IN_STRCASECMP;
2148
2149 location_t loc = gimple_location (stmt);
2150
2151 /* If the second arg is "", return *(const unsigned char*)arg1. */
2152 if (p2 && *p2 == '\0' && nonzero_length)
2153 {
2154 gimple_seq stmts = NULL;
2155 tree var = gimple_load_first_char (loc, str1, &stmts);
2156 if (lhs)
2157 {
2158 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2159 gimple_seq_add_stmt_without_update (&stmts, stmt);
2160 }
2161
2162 gsi_replace_with_seq_vops (gsi, stmts);
2163 return true;
2164 }
2165
2166 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2167 if (p1 && *p1 == '\0' && nonzero_length)
2168 {
2169 gimple_seq stmts = NULL;
2170 tree var = gimple_load_first_char (loc, str2, &stmts);
2171
2172 if (lhs)
2173 {
2174 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2175 stmt = gimple_build_assign (c, NOP_EXPR, var);
2176 gimple_seq_add_stmt_without_update (&stmts, stmt);
2177
2178 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2179 gimple_seq_add_stmt_without_update (&stmts, stmt);
2180 }
2181
2182 gsi_replace_with_seq_vops (gsi, stmts);
2183 return true;
2184 }
2185
2186 /* If len parameter is one, return an expression corresponding to
2187 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2188 if (fcode == BUILT_IN_STRNCMP && length == 1)
2189 {
2190 gimple_seq stmts = NULL;
2191 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2192 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2193
2194 if (lhs)
2195 {
2196 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2197 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2198 gimple_seq_add_stmt_without_update (&stmts, convert1);
2199
2200 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2201 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2202 gimple_seq_add_stmt_without_update (&stmts, convert2);
2203
2204 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2205 gimple_seq_add_stmt_without_update (&stmts, stmt);
2206 }
2207
2208 gsi_replace_with_seq_vops (gsi, stmts);
2209 return true;
2210 }
2211
caed5c92
QZ
2212 /* If length is larger than the length of one constant string,
2213 replace strncmp with corresponding strcmp */
2214 if (fcode == BUILT_IN_STRNCMP
2215 && length > 0
2216 && ((p2 && (size_t) length > strlen (p2))
2217 || (p1 && (size_t) length > strlen (p1))))
2218 {
2219 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2220 if (!fn)
2221 return false;
2222 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2223 replace_call_with_call_and_fold (gsi, repl);
2224 return true;
2225 }
2226
a918bfbf
ML
2227 return false;
2228}
2229
488c6247
ML
2230/* Fold a call to the memchr pointed by GSI iterator. */
2231
2232static bool
2233gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2234{
2235 gimple *stmt = gsi_stmt (*gsi);
2236 tree lhs = gimple_call_lhs (stmt);
2237 tree arg1 = gimple_call_arg (stmt, 0);
2238 tree arg2 = gimple_call_arg (stmt, 1);
2239 tree len = gimple_call_arg (stmt, 2);
2240
2241 /* If the LEN parameter is zero, return zero. */
2242 if (integer_zerop (len))
2243 {
2244 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2245 return true;
2246 }
2247
2248 char c;
2249 if (TREE_CODE (arg2) != INTEGER_CST
2250 || !tree_fits_uhwi_p (len)
2251 || !target_char_cst_p (arg2, &c))
2252 return false;
2253
2254 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2255 unsigned HOST_WIDE_INT string_length;
2256 const char *p1 = c_getstr (arg1, &string_length);
2257
2258 if (p1)
2259 {
2260 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2261 if (r == NULL)
2262 {
2263 if (length <= string_length)
2264 {
2265 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2266 return true;
2267 }
2268 }
2269 else
2270 {
2271 unsigned HOST_WIDE_INT offset = r - p1;
2272 gimple_seq stmts = NULL;
2273 if (lhs != NULL_TREE)
2274 {
2275 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2276 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2277 arg1, offset_cst);
2278 gimple_seq_add_stmt_without_update (&stmts, stmt);
2279 }
2280 else
2281 gimple_seq_add_stmt_without_update (&stmts,
2282 gimple_build_nop ());
2283
2284 gsi_replace_with_seq_vops (gsi, stmts);
2285 return true;
2286 }
2287 }
2288
2289 return false;
2290}
a918bfbf 2291
fef5a0d9
RB
2292/* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2293 to the call. IGNORE is true if the value returned
2294 by the builtin will be ignored. UNLOCKED is true is true if this
2295 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2296 the known length of the string. Return NULL_TREE if no simplification
2297 was possible. */
2298
2299static bool
2300gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
fef5a0d9 2301 tree arg0, tree arg1,
dcb7fae2 2302 bool unlocked)
fef5a0d9 2303{
355fe088 2304 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2305
fef5a0d9
RB
2306 /* If we're using an unlocked function, assume the other unlocked
2307 functions exist explicitly. */
2308 tree const fn_fputc = (unlocked
2309 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2310 : builtin_decl_implicit (BUILT_IN_FPUTC));
2311 tree const fn_fwrite = (unlocked
2312 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2313 : builtin_decl_implicit (BUILT_IN_FWRITE));
2314
2315 /* If the return value is used, don't do the transformation. */
dcb7fae2 2316 if (gimple_call_lhs (stmt))
fef5a0d9
RB
2317 return false;
2318
fef5a0d9
RB
2319 /* Get the length of the string passed to fputs. If the length
2320 can't be determined, punt. */
dcb7fae2 2321 tree len = get_maxval_strlen (arg0, 0);
fef5a0d9
RB
2322 if (!len
2323 || TREE_CODE (len) != INTEGER_CST)
2324 return false;
2325
2326 switch (compare_tree_int (len, 1))
2327 {
2328 case -1: /* length is 0, delete the call entirely . */
2329 replace_call_with_value (gsi, integer_zero_node);
2330 return true;
2331
2332 case 0: /* length is 1, call fputc. */
2333 {
2334 const char *p = c_getstr (arg0);
2335 if (p != NULL)
2336 {
2337 if (!fn_fputc)
2338 return false;
2339
355fe088 2340 gimple *repl = gimple_build_call (fn_fputc, 2,
fef5a0d9
RB
2341 build_int_cst
2342 (integer_type_node, p[0]), arg1);
2343 replace_call_with_call_and_fold (gsi, repl);
2344 return true;
2345 }
2346 }
2347 /* FALLTHROUGH */
2348 case 1: /* length is greater than 1, call fwrite. */
2349 {
2350 /* If optimizing for size keep fputs. */
2351 if (optimize_function_for_size_p (cfun))
2352 return false;
2353 /* New argument list transforming fputs(string, stream) to
2354 fwrite(string, 1, len, stream). */
2355 if (!fn_fwrite)
2356 return false;
2357
355fe088 2358 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
fef5a0d9
RB
2359 size_one_node, len, arg1);
2360 replace_call_with_call_and_fold (gsi, repl);
2361 return true;
2362 }
2363 default:
2364 gcc_unreachable ();
2365 }
2366 return false;
2367}
2368
2369/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2370 DEST, SRC, LEN, and SIZE are the arguments to the call.
2371 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2372 code of the builtin. If MAXLEN is not NULL, it is maximum length
2373 passed as third argument. */
2374
2375static bool
2376gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
fef5a0d9 2377 tree dest, tree src, tree len, tree size,
fef5a0d9
RB
2378 enum built_in_function fcode)
2379{
355fe088 2380 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
2381 location_t loc = gimple_location (stmt);
2382 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2383 tree fn;
2384
2385 /* If SRC and DEST are the same (and not volatile), return DEST
2386 (resp. DEST+LEN for __mempcpy_chk). */
2387 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2388 {
2389 if (fcode != BUILT_IN_MEMPCPY_CHK)
2390 {
2391 replace_call_with_value (gsi, dest);
2392 return true;
2393 }
2394 else
2395 {
74e3c262
RB
2396 gimple_seq stmts = NULL;
2397 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
22518428
JJ
2398 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2399 TREE_TYPE (dest), dest, len);
74e3c262 2400 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
fef5a0d9
RB
2401 replace_call_with_value (gsi, temp);
2402 return true;
2403 }
2404 }
2405
2406 if (! tree_fits_uhwi_p (size))
2407 return false;
2408
dcb7fae2 2409 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9
RB
2410 if (! integer_all_onesp (size))
2411 {
2412 if (! tree_fits_uhwi_p (len))
2413 {
2414 /* If LEN is not constant, try MAXLEN too.
2415 For MAXLEN only allow optimizing into non-_ocs function
2416 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2417 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2418 {
2419 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2420 {
2421 /* (void) __mempcpy_chk () can be optimized into
2422 (void) __memcpy_chk (). */
2423 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2424 if (!fn)
2425 return false;
2426
355fe088 2427 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2428 replace_call_with_call_and_fold (gsi, repl);
2429 return true;
2430 }
2431 return false;
2432 }
2433 }
2434 else
2435 maxlen = len;
2436
2437 if (tree_int_cst_lt (size, maxlen))
2438 return false;
2439 }
2440
2441 fn = NULL_TREE;
2442 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2443 mem{cpy,pcpy,move,set} is available. */
2444 switch (fcode)
2445 {
2446 case BUILT_IN_MEMCPY_CHK:
2447 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2448 break;
2449 case BUILT_IN_MEMPCPY_CHK:
2450 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2451 break;
2452 case BUILT_IN_MEMMOVE_CHK:
2453 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2454 break;
2455 case BUILT_IN_MEMSET_CHK:
2456 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2457 break;
2458 default:
2459 break;
2460 }
2461
2462 if (!fn)
2463 return false;
2464
355fe088 2465 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
2466 replace_call_with_call_and_fold (gsi, repl);
2467 return true;
2468}
2469
2470/* Fold a call to the __st[rp]cpy_chk builtin.
2471 DEST, SRC, and SIZE are the arguments to the call.
2472 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2473 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2474 strings passed as second argument. */
2475
2476static bool
2477gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
dcb7fae2 2478 tree dest,
fef5a0d9 2479 tree src, tree size,
fef5a0d9
RB
2480 enum built_in_function fcode)
2481{
355fe088 2482 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
2483 location_t loc = gimple_location (stmt);
2484 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2485 tree len, fn;
2486
2487 /* If SRC and DEST are the same (and not volatile), return DEST. */
2488 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2489 {
2490 replace_call_with_value (gsi, dest);
2491 return true;
2492 }
2493
2494 if (! tree_fits_uhwi_p (size))
2495 return false;
2496
dcb7fae2 2497 tree maxlen = get_maxval_strlen (src, 1);
fef5a0d9
RB
2498 if (! integer_all_onesp (size))
2499 {
2500 len = c_strlen (src, 1);
2501 if (! len || ! tree_fits_uhwi_p (len))
2502 {
2503 /* If LEN is not constant, try MAXLEN too.
2504 For MAXLEN only allow optimizing into non-_ocs function
2505 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2506 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2507 {
2508 if (fcode == BUILT_IN_STPCPY_CHK)
2509 {
2510 if (! ignore)
2511 return false;
2512
2513 /* If return value of __stpcpy_chk is ignored,
2514 optimize into __strcpy_chk. */
2515 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2516 if (!fn)
2517 return false;
2518
355fe088 2519 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
fef5a0d9
RB
2520 replace_call_with_call_and_fold (gsi, repl);
2521 return true;
2522 }
2523
2524 if (! len || TREE_SIDE_EFFECTS (len))
2525 return false;
2526
2527 /* If c_strlen returned something, but not a constant,
2528 transform __strcpy_chk into __memcpy_chk. */
2529 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2530 if (!fn)
2531 return false;
2532
74e3c262
RB
2533 gimple_seq stmts = NULL;
2534 len = gimple_convert (&stmts, loc, size_type_node, len);
2535 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2536 build_int_cst (size_type_node, 1));
2537 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 2538 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2539 replace_call_with_call_and_fold (gsi, repl);
2540 return true;
2541 }
e256dfce 2542 }
fef5a0d9
RB
2543 else
2544 maxlen = len;
2545
2546 if (! tree_int_cst_lt (maxlen, size))
2547 return false;
e256dfce
RG
2548 }
2549
fef5a0d9
RB
2550 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2551 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2552 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2553 if (!fn)
2554 return false;
2555
355fe088 2556 gimple *repl = gimple_build_call (fn, 2, dest, src);
fef5a0d9
RB
2557 replace_call_with_call_and_fold (gsi, repl);
2558 return true;
2559}
2560
2561/* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2562 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2563 length passed as third argument. IGNORE is true if return value can be
2564 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2565
2566static bool
2567gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2568 tree dest, tree src,
dcb7fae2 2569 tree len, tree size,
fef5a0d9
RB
2570 enum built_in_function fcode)
2571{
355fe088 2572 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2573 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2574 tree fn;
2575
2576 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
cbdd87d4 2577 {
fef5a0d9
RB
2578 /* If return value of __stpncpy_chk is ignored,
2579 optimize into __strncpy_chk. */
2580 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2581 if (fn)
2582 {
355fe088 2583 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2584 replace_call_with_call_and_fold (gsi, repl);
2585 return true;
2586 }
cbdd87d4
RG
2587 }
2588
fef5a0d9
RB
2589 if (! tree_fits_uhwi_p (size))
2590 return false;
2591
dcb7fae2 2592 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9 2593 if (! integer_all_onesp (size))
cbdd87d4 2594 {
fef5a0d9 2595 if (! tree_fits_uhwi_p (len))
fe2ef088 2596 {
fef5a0d9
RB
2597 /* If LEN is not constant, try MAXLEN too.
2598 For MAXLEN only allow optimizing into non-_ocs function
2599 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2600 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2601 return false;
8a1561bc 2602 }
fef5a0d9
RB
2603 else
2604 maxlen = len;
2605
2606 if (tree_int_cst_lt (size, maxlen))
2607 return false;
cbdd87d4
RG
2608 }
2609
fef5a0d9
RB
2610 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2611 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2612 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2613 if (!fn)
2614 return false;
2615
355fe088 2616 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
2617 replace_call_with_call_and_fold (gsi, repl);
2618 return true;
cbdd87d4
RG
2619}
2620
2625bb5d
RB
2621/* Fold function call to builtin stpcpy with arguments DEST and SRC.
2622 Return NULL_TREE if no simplification can be made. */
2623
2624static bool
2625gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2626{
2627 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2628 location_t loc = gimple_location (stmt);
2629 tree dest = gimple_call_arg (stmt, 0);
2630 tree src = gimple_call_arg (stmt, 1);
2631 tree fn, len, lenp1;
2632
2633 /* If the result is unused, replace stpcpy with strcpy. */
2634 if (gimple_call_lhs (stmt) == NULL_TREE)
2635 {
2636 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2637 if (!fn)
2638 return false;
2639 gimple_call_set_fndecl (stmt, fn);
2640 fold_stmt (gsi);
2641 return true;
2642 }
2643
2644 len = c_strlen (src, 1);
2645 if (!len
2646 || TREE_CODE (len) != INTEGER_CST)
2647 return false;
2648
2649 if (optimize_function_for_size_p (cfun)
2650 /* If length is zero it's small enough. */
2651 && !integer_zerop (len))
2652 return false;
2653
2654 /* If the source has a known length replace stpcpy with memcpy. */
2655 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2656 if (!fn)
2657 return false;
2658
2659 gimple_seq stmts = NULL;
2660 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2661 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2662 tem, build_int_cst (size_type_node, 1));
2663 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2664 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2665 gimple_set_vuse (repl, gimple_vuse (stmt));
2666 gimple_set_vdef (repl, gimple_vdef (stmt));
2667 if (gimple_vdef (repl)
2668 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2669 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2670 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2671 /* Replace the result with dest + len. */
2672 stmts = NULL;
2673 tem = gimple_convert (&stmts, loc, sizetype, len);
2674 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2675 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2676 POINTER_PLUS_EXPR, dest, tem);
f6b4dc28 2677 gsi_replace (gsi, ret, false);
2625bb5d
RB
2678 /* Finally fold the memcpy call. */
2679 gimple_stmt_iterator gsi2 = *gsi;
2680 gsi_prev (&gsi2);
2681 fold_stmt (&gsi2);
2682 return true;
2683}
2684
fef5a0d9
RB
2685/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2686 NULL_TREE if a normal call should be emitted rather than expanding
2687 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2688 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2689 passed as second argument. */
cbdd87d4
RG
2690
2691static bool
fef5a0d9 2692gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
dcb7fae2 2693 enum built_in_function fcode)
cbdd87d4 2694{
538dd0b7 2695 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2696 tree dest, size, len, fn, fmt, flag;
2697 const char *fmt_str;
cbdd87d4 2698
fef5a0d9
RB
2699 /* Verify the required arguments in the original call. */
2700 if (gimple_call_num_args (stmt) < 5)
2701 return false;
cbdd87d4 2702
fef5a0d9
RB
2703 dest = gimple_call_arg (stmt, 0);
2704 len = gimple_call_arg (stmt, 1);
2705 flag = gimple_call_arg (stmt, 2);
2706 size = gimple_call_arg (stmt, 3);
2707 fmt = gimple_call_arg (stmt, 4);
2708
2709 if (! tree_fits_uhwi_p (size))
2710 return false;
2711
2712 if (! integer_all_onesp (size))
2713 {
dcb7fae2 2714 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9 2715 if (! tree_fits_uhwi_p (len))
cbdd87d4 2716 {
fef5a0d9
RB
2717 /* If LEN is not constant, try MAXLEN too.
2718 For MAXLEN only allow optimizing into non-_ocs function
2719 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2720 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
cbdd87d4
RG
2721 return false;
2722 }
2723 else
fef5a0d9 2724 maxlen = len;
cbdd87d4 2725
fef5a0d9
RB
2726 if (tree_int_cst_lt (size, maxlen))
2727 return false;
2728 }
cbdd87d4 2729
fef5a0d9
RB
2730 if (!init_target_chars ())
2731 return false;
cbdd87d4 2732
fef5a0d9
RB
2733 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2734 or if format doesn't contain % chars or is "%s". */
2735 if (! integer_zerop (flag))
2736 {
2737 fmt_str = c_getstr (fmt);
2738 if (fmt_str == NULL)
2739 return false;
2740 if (strchr (fmt_str, target_percent) != NULL
2741 && strcmp (fmt_str, target_percent_s))
2742 return false;
cbdd87d4
RG
2743 }
2744
fef5a0d9
RB
2745 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2746 available. */
2747 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2748 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2749 if (!fn)
491e0b9b
RG
2750 return false;
2751
fef5a0d9
RB
2752 /* Replace the called function and the first 5 argument by 3 retaining
2753 trailing varargs. */
2754 gimple_call_set_fndecl (stmt, fn);
2755 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2756 gimple_call_set_arg (stmt, 0, dest);
2757 gimple_call_set_arg (stmt, 1, len);
2758 gimple_call_set_arg (stmt, 2, fmt);
2759 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2760 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2761 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2762 fold_stmt (gsi);
2763 return true;
2764}
cbdd87d4 2765
fef5a0d9
RB
2766/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2767 Return NULL_TREE if a normal call should be emitted rather than
2768 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2769 or BUILT_IN_VSPRINTF_CHK. */
cbdd87d4 2770
fef5a0d9
RB
2771static bool
2772gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2773 enum built_in_function fcode)
2774{
538dd0b7 2775 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2776 tree dest, size, len, fn, fmt, flag;
2777 const char *fmt_str;
2778 unsigned nargs = gimple_call_num_args (stmt);
cbdd87d4 2779
fef5a0d9
RB
2780 /* Verify the required arguments in the original call. */
2781 if (nargs < 4)
2782 return false;
2783 dest = gimple_call_arg (stmt, 0);
2784 flag = gimple_call_arg (stmt, 1);
2785 size = gimple_call_arg (stmt, 2);
2786 fmt = gimple_call_arg (stmt, 3);
2787
2788 if (! tree_fits_uhwi_p (size))
2789 return false;
2790
2791 len = NULL_TREE;
2792
2793 if (!init_target_chars ())
2794 return false;
2795
2796 /* Check whether the format is a literal string constant. */
2797 fmt_str = c_getstr (fmt);
2798 if (fmt_str != NULL)
2799 {
2800 /* If the format doesn't contain % args or %%, we know the size. */
2801 if (strchr (fmt_str, target_percent) == 0)
cbdd87d4 2802 {
fef5a0d9
RB
2803 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2804 len = build_int_cstu (size_type_node, strlen (fmt_str));
2805 }
2806 /* If the format is "%s" and first ... argument is a string literal,
2807 we know the size too. */
2808 else if (fcode == BUILT_IN_SPRINTF_CHK
2809 && strcmp (fmt_str, target_percent_s) == 0)
2810 {
2811 tree arg;
cbdd87d4 2812
fef5a0d9
RB
2813 if (nargs == 5)
2814 {
2815 arg = gimple_call_arg (stmt, 4);
2816 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2817 {
2818 len = c_strlen (arg, 1);
2819 if (! len || ! tree_fits_uhwi_p (len))
2820 len = NULL_TREE;
2821 }
2822 }
2823 }
2824 }
cbdd87d4 2825
fef5a0d9
RB
2826 if (! integer_all_onesp (size))
2827 {
2828 if (! len || ! tree_int_cst_lt (len, size))
2829 return false;
2830 }
cbdd87d4 2831
fef5a0d9
RB
2832 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2833 or if format doesn't contain % chars or is "%s". */
2834 if (! integer_zerop (flag))
2835 {
2836 if (fmt_str == NULL)
2837 return false;
2838 if (strchr (fmt_str, target_percent) != NULL
2839 && strcmp (fmt_str, target_percent_s))
2840 return false;
2841 }
cbdd87d4 2842
fef5a0d9
RB
2843 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2844 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2845 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2846 if (!fn)
2847 return false;
2848
2849 /* Replace the called function and the first 4 argument by 2 retaining
2850 trailing varargs. */
2851 gimple_call_set_fndecl (stmt, fn);
2852 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2853 gimple_call_set_arg (stmt, 0, dest);
2854 gimple_call_set_arg (stmt, 1, fmt);
2855 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2856 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2857 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2858 fold_stmt (gsi);
2859 return true;
2860}
2861
35770bb2
RB
2862/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2863 ORIG may be null if this is a 2-argument call. We don't attempt to
2864 simplify calls with more than 3 arguments.
2865
a104bd88 2866 Return true if simplification was possible, otherwise false. */
35770bb2 2867
a104bd88 2868bool
dcb7fae2 2869gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
35770bb2 2870{
355fe088 2871 gimple *stmt = gsi_stmt (*gsi);
35770bb2
RB
2872 tree dest = gimple_call_arg (stmt, 0);
2873 tree fmt = gimple_call_arg (stmt, 1);
2874 tree orig = NULL_TREE;
2875 const char *fmt_str = NULL;
2876
2877 /* Verify the required arguments in the original call. We deal with two
2878 types of sprintf() calls: 'sprintf (str, fmt)' and
2879 'sprintf (dest, "%s", orig)'. */
2880 if (gimple_call_num_args (stmt) > 3)
2881 return false;
2882
2883 if (gimple_call_num_args (stmt) == 3)
2884 orig = gimple_call_arg (stmt, 2);
2885
2886 /* Check whether the format is a literal string constant. */
2887 fmt_str = c_getstr (fmt);
2888 if (fmt_str == NULL)
2889 return false;
2890
2891 if (!init_target_chars ())
2892 return false;
2893
2894 /* If the format doesn't contain % args or %%, use strcpy. */
2895 if (strchr (fmt_str, target_percent) == NULL)
2896 {
2897 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2898
2899 if (!fn)
2900 return false;
2901
2902 /* Don't optimize sprintf (buf, "abc", ptr++). */
2903 if (orig)
2904 return false;
2905
2906 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2907 'format' is known to contain no % formats. */
2908 gimple_seq stmts = NULL;
355fe088 2909 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
35770bb2
RB
2910 gimple_seq_add_stmt_without_update (&stmts, repl);
2911 if (gimple_call_lhs (stmt))
2912 {
2913 repl = gimple_build_assign (gimple_call_lhs (stmt),
2914 build_int_cst (integer_type_node,
2915 strlen (fmt_str)));
2916 gimple_seq_add_stmt_without_update (&stmts, repl);
2917 gsi_replace_with_seq_vops (gsi, stmts);
2918 /* gsi now points at the assignment to the lhs, get a
2919 stmt iterator to the memcpy call.
2920 ??? We can't use gsi_for_stmt as that doesn't work when the
2921 CFG isn't built yet. */
2922 gimple_stmt_iterator gsi2 = *gsi;
2923 gsi_prev (&gsi2);
2924 fold_stmt (&gsi2);
2925 }
2926 else
2927 {
2928 gsi_replace_with_seq_vops (gsi, stmts);
2929 fold_stmt (gsi);
2930 }
2931 return true;
2932 }
2933
2934 /* If the format is "%s", use strcpy if the result isn't used. */
2935 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2936 {
2937 tree fn;
2938 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2939
2940 if (!fn)
2941 return false;
2942
2943 /* Don't crash on sprintf (str1, "%s"). */
2944 if (!orig)
2945 return false;
2946
dcb7fae2
RB
2947 tree orig_len = NULL_TREE;
2948 if (gimple_call_lhs (stmt))
35770bb2 2949 {
dcb7fae2 2950 orig_len = get_maxval_strlen (orig, 0);
d7e78447 2951 if (!orig_len)
35770bb2
RB
2952 return false;
2953 }
2954
2955 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2956 gimple_seq stmts = NULL;
355fe088 2957 gimple *repl = gimple_build_call (fn, 2, dest, orig);
35770bb2
RB
2958 gimple_seq_add_stmt_without_update (&stmts, repl);
2959 if (gimple_call_lhs (stmt))
2960 {
d7e78447
RB
2961 if (!useless_type_conversion_p (integer_type_node,
2962 TREE_TYPE (orig_len)))
2963 orig_len = fold_convert (integer_type_node, orig_len);
2964 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
35770bb2
RB
2965 gimple_seq_add_stmt_without_update (&stmts, repl);
2966 gsi_replace_with_seq_vops (gsi, stmts);
2967 /* gsi now points at the assignment to the lhs, get a
2968 stmt iterator to the memcpy call.
2969 ??? We can't use gsi_for_stmt as that doesn't work when the
2970 CFG isn't built yet. */
2971 gimple_stmt_iterator gsi2 = *gsi;
2972 gsi_prev (&gsi2);
2973 fold_stmt (&gsi2);
2974 }
2975 else
2976 {
2977 gsi_replace_with_seq_vops (gsi, stmts);
2978 fold_stmt (gsi);
2979 }
2980 return true;
2981 }
2982 return false;
2983}
2984
d7e78447
RB
2985/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2986 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2987 attempt to simplify calls with more than 4 arguments.
35770bb2 2988
a104bd88 2989 Return true if simplification was possible, otherwise false. */
d7e78447 2990
a104bd88 2991bool
dcb7fae2 2992gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
d7e78447 2993{
538dd0b7 2994 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
d7e78447
RB
2995 tree dest = gimple_call_arg (stmt, 0);
2996 tree destsize = gimple_call_arg (stmt, 1);
2997 tree fmt = gimple_call_arg (stmt, 2);
2998 tree orig = NULL_TREE;
2999 const char *fmt_str = NULL;
3000
3001 if (gimple_call_num_args (stmt) > 4)
3002 return false;
3003
3004 if (gimple_call_num_args (stmt) == 4)
3005 orig = gimple_call_arg (stmt, 3);
3006
3007 if (!tree_fits_uhwi_p (destsize))
3008 return false;
3009 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3010
3011 /* Check whether the format is a literal string constant. */
3012 fmt_str = c_getstr (fmt);
3013 if (fmt_str == NULL)
3014 return false;
3015
3016 if (!init_target_chars ())
3017 return false;
3018
3019 /* If the format doesn't contain % args or %%, use strcpy. */
3020 if (strchr (fmt_str, target_percent) == NULL)
3021 {
3022 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3023 if (!fn)
3024 return false;
3025
3026 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3027 if (orig)
3028 return false;
3029
3030 /* We could expand this as
3031 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3032 or to
3033 memcpy (str, fmt_with_nul_at_cstm1, cst);
3034 but in the former case that might increase code size
3035 and in the latter case grow .rodata section too much.
3036 So punt for now. */
3037 size_t len = strlen (fmt_str);
3038 if (len >= destlen)
3039 return false;
3040
3041 gimple_seq stmts = NULL;
355fe088 3042 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
d7e78447
RB
3043 gimple_seq_add_stmt_without_update (&stmts, repl);
3044 if (gimple_call_lhs (stmt))
3045 {
3046 repl = gimple_build_assign (gimple_call_lhs (stmt),
3047 build_int_cst (integer_type_node, len));
3048 gimple_seq_add_stmt_without_update (&stmts, repl);
3049 gsi_replace_with_seq_vops (gsi, stmts);
3050 /* gsi now points at the assignment to the lhs, get a
3051 stmt iterator to the memcpy call.
3052 ??? We can't use gsi_for_stmt as that doesn't work when the
3053 CFG isn't built yet. */
3054 gimple_stmt_iterator gsi2 = *gsi;
3055 gsi_prev (&gsi2);
3056 fold_stmt (&gsi2);
3057 }
3058 else
3059 {
3060 gsi_replace_with_seq_vops (gsi, stmts);
3061 fold_stmt (gsi);
3062 }
3063 return true;
3064 }
3065
3066 /* If the format is "%s", use strcpy if the result isn't used. */
3067 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3068 {
3069 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3070 if (!fn)
3071 return false;
3072
3073 /* Don't crash on snprintf (str1, cst, "%s"). */
3074 if (!orig)
3075 return false;
3076
dcb7fae2 3077 tree orig_len = get_maxval_strlen (orig, 0);
af9db3a7 3078 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
dcb7fae2 3079 return false;
d7e78447
RB
3080
3081 /* We could expand this as
3082 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3083 or to
3084 memcpy (str1, str2_with_nul_at_cstm1, cst);
3085 but in the former case that might increase code size
3086 and in the latter case grow .rodata section too much.
3087 So punt for now. */
3088 if (compare_tree_int (orig_len, destlen) >= 0)
3089 return false;
3090
3091 /* Convert snprintf (str1, cst, "%s", str2) into
3092 strcpy (str1, str2) if strlen (str2) < cst. */
3093 gimple_seq stmts = NULL;
355fe088 3094 gimple *repl = gimple_build_call (fn, 2, dest, orig);
d7e78447
RB
3095 gimple_seq_add_stmt_without_update (&stmts, repl);
3096 if (gimple_call_lhs (stmt))
3097 {
3098 if (!useless_type_conversion_p (integer_type_node,
3099 TREE_TYPE (orig_len)))
3100 orig_len = fold_convert (integer_type_node, orig_len);
3101 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3102 gimple_seq_add_stmt_without_update (&stmts, repl);
3103 gsi_replace_with_seq_vops (gsi, stmts);
3104 /* gsi now points at the assignment to the lhs, get a
3105 stmt iterator to the memcpy call.
3106 ??? We can't use gsi_for_stmt as that doesn't work when the
3107 CFG isn't built yet. */
3108 gimple_stmt_iterator gsi2 = *gsi;
3109 gsi_prev (&gsi2);
3110 fold_stmt (&gsi2);
3111 }
3112 else
3113 {
3114 gsi_replace_with_seq_vops (gsi, stmts);
3115 fold_stmt (gsi);
3116 }
3117 return true;
3118 }
3119 return false;
3120}
35770bb2 3121
edd7ae68
RB
3122/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3123 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3124 more than 3 arguments, and ARG may be null in the 2-argument case.
3125
3126 Return NULL_TREE if no simplification was possible, otherwise return the
3127 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3128 code of the function to be simplified. */
3129
3130static bool
3131gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3132 tree fp, tree fmt, tree arg,
3133 enum built_in_function fcode)
3134{
3135 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3136 tree fn_fputc, fn_fputs;
3137 const char *fmt_str = NULL;
3138
3139 /* If the return value is used, don't do the transformation. */
3140 if (gimple_call_lhs (stmt) != NULL_TREE)
3141 return false;
3142
3143 /* Check whether the format is a literal string constant. */
3144 fmt_str = c_getstr (fmt);
3145 if (fmt_str == NULL)
3146 return false;
3147
3148 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3149 {
3150 /* If we're using an unlocked function, assume the other
3151 unlocked functions exist explicitly. */
3152 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3153 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3154 }
3155 else
3156 {
3157 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3158 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3159 }
3160
3161 if (!init_target_chars ())
3162 return false;
3163
3164 /* If the format doesn't contain % args or %%, use strcpy. */
3165 if (strchr (fmt_str, target_percent) == NULL)
3166 {
3167 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3168 && arg)
3169 return false;
3170
3171 /* If the format specifier was "", fprintf does nothing. */
3172 if (fmt_str[0] == '\0')
3173 {
3174 replace_call_with_value (gsi, NULL_TREE);
3175 return true;
3176 }
3177
3178 /* When "string" doesn't contain %, replace all cases of
3179 fprintf (fp, string) with fputs (string, fp). The fputs
3180 builtin will take care of special cases like length == 1. */
3181 if (fn_fputs)
3182 {
3183 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3184 replace_call_with_call_and_fold (gsi, repl);
3185 return true;
3186 }
3187 }
3188
3189 /* The other optimizations can be done only on the non-va_list variants. */
3190 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3191 return false;
3192
3193 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3194 else if (strcmp (fmt_str, target_percent_s) == 0)
3195 {
3196 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3197 return false;
3198 if (fn_fputs)
3199 {
3200 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3201 replace_call_with_call_and_fold (gsi, repl);
3202 return true;
3203 }
3204 }
3205
3206 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3207 else if (strcmp (fmt_str, target_percent_c) == 0)
3208 {
3209 if (!arg
3210 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3211 return false;
3212 if (fn_fputc)
3213 {
3214 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3215 replace_call_with_call_and_fold (gsi, repl);
3216 return true;
3217 }
3218 }
3219
3220 return false;
3221}
3222
ad03a744
RB
3223/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3224 FMT and ARG are the arguments to the call; we don't fold cases with
3225 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3226
3227 Return NULL_TREE if no simplification was possible, otherwise return the
3228 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3229 code of the function to be simplified. */
3230
3231static bool
3232gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3233 tree arg, enum built_in_function fcode)
3234{
3235 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3236 tree fn_putchar, fn_puts, newarg;
3237 const char *fmt_str = NULL;
3238
3239 /* If the return value is used, don't do the transformation. */
3240 if (gimple_call_lhs (stmt) != NULL_TREE)
3241 return false;
3242
3243 /* Check whether the format is a literal string constant. */
3244 fmt_str = c_getstr (fmt);
3245 if (fmt_str == NULL)
3246 return false;
3247
3248 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3249 {
3250 /* If we're using an unlocked function, assume the other
3251 unlocked functions exist explicitly. */
3252 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3253 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3254 }
3255 else
3256 {
3257 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3258 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3259 }
3260
3261 if (!init_target_chars ())
3262 return false;
3263
3264 if (strcmp (fmt_str, target_percent_s) == 0
3265 || strchr (fmt_str, target_percent) == NULL)
3266 {
3267 const char *str;
3268
3269 if (strcmp (fmt_str, target_percent_s) == 0)
3270 {
3271 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3272 return false;
3273
3274 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3275 return false;
3276
3277 str = c_getstr (arg);
3278 if (str == NULL)
3279 return false;
3280 }
3281 else
3282 {
3283 /* The format specifier doesn't contain any '%' characters. */
3284 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3285 && arg)
3286 return false;
3287 str = fmt_str;
3288 }
3289
3290 /* If the string was "", printf does nothing. */
3291 if (str[0] == '\0')
3292 {
3293 replace_call_with_value (gsi, NULL_TREE);
3294 return true;
3295 }
3296
3297 /* If the string has length of 1, call putchar. */
3298 if (str[1] == '\0')
3299 {
3300 /* Given printf("c"), (where c is any one character,)
3301 convert "c"[0] to an int and pass that to the replacement
3302 function. */
3303 newarg = build_int_cst (integer_type_node, str[0]);
3304 if (fn_putchar)
3305 {
3306 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3307 replace_call_with_call_and_fold (gsi, repl);
3308 return true;
3309 }
3310 }
3311 else
3312 {
3313 /* If the string was "string\n", call puts("string"). */
3314 size_t len = strlen (str);
3315 if ((unsigned char)str[len - 1] == target_newline
3316 && (size_t) (int) len == len
3317 && (int) len > 0)
3318 {
3319 char *newstr;
3320 tree offset_node, string_cst;
3321
3322 /* Create a NUL-terminated string that's one char shorter
3323 than the original, stripping off the trailing '\n'. */
3324 newarg = build_string_literal (len, str);
3325 string_cst = string_constant (newarg, &offset_node);
3326 gcc_checking_assert (string_cst
3327 && (TREE_STRING_LENGTH (string_cst)
3328 == (int) len)
3329 && integer_zerop (offset_node)
3330 && (unsigned char)
3331 TREE_STRING_POINTER (string_cst)[len - 1]
3332 == target_newline);
3333 /* build_string_literal creates a new STRING_CST,
3334 modify it in place to avoid double copying. */
3335 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3336 newstr[len - 1] = '\0';
3337 if (fn_puts)
3338 {
3339 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3340 replace_call_with_call_and_fold (gsi, repl);
3341 return true;
3342 }
3343 }
3344 else
3345 /* We'd like to arrange to call fputs(string,stdout) here,
3346 but we need stdout and don't have a way to get it yet. */
3347 return false;
3348 }
3349 }
3350
3351 /* The other optimizations can be done only on the non-va_list variants. */
3352 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3353 return false;
3354
3355 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3356 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3357 {
3358 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3359 return false;
3360 if (fn_puts)
3361 {
3362 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3363 replace_call_with_call_and_fold (gsi, repl);
3364 return true;
3365 }
3366 }
3367
3368 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3369 else if (strcmp (fmt_str, target_percent_c) == 0)
3370 {
3371 if (!arg || ! useless_type_conversion_p (integer_type_node,
3372 TREE_TYPE (arg)))
3373 return false;
3374 if (fn_putchar)
3375 {
3376 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3377 replace_call_with_call_and_fold (gsi, repl);
3378 return true;
3379 }
3380 }
3381
3382 return false;
3383}
3384
edd7ae68 3385
fef5a0d9
RB
3386
3387/* Fold a call to __builtin_strlen with known length LEN. */
3388
3389static bool
dcb7fae2 3390gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 3391{
355fe088 3392 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 3393 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
fef5a0d9
RB
3394 if (!len)
3395 return false;
2813904b 3396 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
fef5a0d9
RB
3397 replace_call_with_value (gsi, len);
3398 return true;
cbdd87d4
RG
3399}
3400
48126138
NS
3401/* Fold a call to __builtin_acc_on_device. */
3402
3403static bool
3404gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3405{
3406 /* Defer folding until we know which compiler we're in. */
3407 if (symtab->state != EXPANSION)
3408 return false;
3409
3410 unsigned val_host = GOMP_DEVICE_HOST;
3411 unsigned val_dev = GOMP_DEVICE_NONE;
3412
3413#ifdef ACCEL_COMPILER
3414 val_host = GOMP_DEVICE_NOT_HOST;
3415 val_dev = ACCEL_COMPILER_acc_device;
3416#endif
3417
3418 location_t loc = gimple_location (gsi_stmt (*gsi));
3419
3420 tree host_eq = make_ssa_name (boolean_type_node);
3421 gimple *host_ass = gimple_build_assign
3422 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3423 gimple_set_location (host_ass, loc);
3424 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3425
3426 tree dev_eq = make_ssa_name (boolean_type_node);
3427 gimple *dev_ass = gimple_build_assign
3428 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3429 gimple_set_location (dev_ass, loc);
3430 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3431
3432 tree result = make_ssa_name (boolean_type_node);
3433 gimple *result_ass = gimple_build_assign
3434 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3435 gimple_set_location (result_ass, loc);
3436 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3437
3438 replace_call_with_value (gsi, result);
3439
3440 return true;
3441}
cbdd87d4 3442
fe75f732
PK
3443/* Fold realloc (0, n) -> malloc (n). */
3444
3445static bool
3446gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3447{
3448 gimple *stmt = gsi_stmt (*gsi);
3449 tree arg = gimple_call_arg (stmt, 0);
3450 tree size = gimple_call_arg (stmt, 1);
3451
3452 if (operand_equal_p (arg, null_pointer_node, 0))
3453 {
3454 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3455 if (fn_malloc)
3456 {
3457 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3458 replace_call_with_call_and_fold (gsi, repl);
3459 return true;
3460 }
3461 }
3462 return false;
3463}
3464
dcb7fae2
RB
3465/* Fold the non-target builtin at *GSI and return whether any simplification
3466 was made. */
cbdd87d4 3467
fef5a0d9 3468static bool
dcb7fae2 3469gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 3470{
538dd0b7 3471 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 3472 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 3473
dcb7fae2
RB
3474 /* Give up for always_inline inline builtins until they are
3475 inlined. */
3476 if (avoid_folding_inline_builtin (callee))
3477 return false;
cbdd87d4 3478
edd7ae68
RB
3479 unsigned n = gimple_call_num_args (stmt);
3480 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3481 switch (fcode)
cbdd87d4 3482 {
b3d8d88e
MS
3483 case BUILT_IN_BCMP:
3484 return gimple_fold_builtin_bcmp (gsi);
3485 case BUILT_IN_BCOPY:
3486 return gimple_fold_builtin_bcopy (gsi);
dcb7fae2 3487 case BUILT_IN_BZERO:
b3d8d88e
MS
3488 return gimple_fold_builtin_bzero (gsi);
3489
dcb7fae2
RB
3490 case BUILT_IN_MEMSET:
3491 return gimple_fold_builtin_memset (gsi,
3492 gimple_call_arg (stmt, 1),
3493 gimple_call_arg (stmt, 2));
dcb7fae2
RB
3494 case BUILT_IN_MEMCPY:
3495 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3496 gimple_call_arg (stmt, 1), 0);
3497 case BUILT_IN_MEMPCPY:
3498 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3499 gimple_call_arg (stmt, 1), 1);
3500 case BUILT_IN_MEMMOVE:
3501 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3502 gimple_call_arg (stmt, 1), 3);
3503 case BUILT_IN_SPRINTF_CHK:
3504 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 3505 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
3506 case BUILT_IN_STRCAT_CHK:
3507 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
3508 case BUILT_IN_STRNCAT_CHK:
3509 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 3510 case BUILT_IN_STRLEN:
dcb7fae2 3511 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 3512 case BUILT_IN_STRCPY:
dcb7fae2 3513 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 3514 gimple_call_arg (stmt, 0),
dcb7fae2 3515 gimple_call_arg (stmt, 1));
cbdd87d4 3516 case BUILT_IN_STRNCPY:
dcb7fae2 3517 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
3518 gimple_call_arg (stmt, 0),
3519 gimple_call_arg (stmt, 1),
dcb7fae2 3520 gimple_call_arg (stmt, 2));
9a7eefec 3521 case BUILT_IN_STRCAT:
dcb7fae2
RB
3522 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3523 gimple_call_arg (stmt, 1));
ad03a744
RB
3524 case BUILT_IN_STRNCAT:
3525 return gimple_fold_builtin_strncat (gsi);
71dea1dd 3526 case BUILT_IN_INDEX:
912d9ec3 3527 case BUILT_IN_STRCHR:
71dea1dd
WD
3528 return gimple_fold_builtin_strchr (gsi, false);
3529 case BUILT_IN_RINDEX:
3530 case BUILT_IN_STRRCHR:
3531 return gimple_fold_builtin_strchr (gsi, true);
c8952930
JJ
3532 case BUILT_IN_STRSTR:
3533 return gimple_fold_builtin_strstr (gsi);
a918bfbf
ML
3534 case BUILT_IN_STRCMP:
3535 case BUILT_IN_STRCASECMP:
3536 case BUILT_IN_STRNCMP:
3537 case BUILT_IN_STRNCASECMP:
3538 return gimple_fold_builtin_string_compare (gsi);
488c6247
ML
3539 case BUILT_IN_MEMCHR:
3540 return gimple_fold_builtin_memchr (gsi);
cbdd87d4 3541 case BUILT_IN_FPUTS:
dcb7fae2
RB
3542 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3543 gimple_call_arg (stmt, 1), false);
cbdd87d4 3544 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
3545 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3546 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
3547 case BUILT_IN_MEMCPY_CHK:
3548 case BUILT_IN_MEMPCPY_CHK:
3549 case BUILT_IN_MEMMOVE_CHK:
3550 case BUILT_IN_MEMSET_CHK:
dcb7fae2 3551 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
3552 gimple_call_arg (stmt, 0),
3553 gimple_call_arg (stmt, 1),
3554 gimple_call_arg (stmt, 2),
3555 gimple_call_arg (stmt, 3),
edd7ae68 3556 fcode);
2625bb5d
RB
3557 case BUILT_IN_STPCPY:
3558 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
3559 case BUILT_IN_STRCPY_CHK:
3560 case BUILT_IN_STPCPY_CHK:
dcb7fae2 3561 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
3562 gimple_call_arg (stmt, 0),
3563 gimple_call_arg (stmt, 1),
3564 gimple_call_arg (stmt, 2),
edd7ae68 3565 fcode);
cbdd87d4 3566 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 3567 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
3568 return gimple_fold_builtin_stxncpy_chk (gsi,
3569 gimple_call_arg (stmt, 0),
3570 gimple_call_arg (stmt, 1),
3571 gimple_call_arg (stmt, 2),
3572 gimple_call_arg (stmt, 3),
edd7ae68 3573 fcode);
cbdd87d4
RG
3574 case BUILT_IN_SNPRINTF_CHK:
3575 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 3576 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
a104bd88 3577
edd7ae68
RB
3578 case BUILT_IN_FPRINTF:
3579 case BUILT_IN_FPRINTF_UNLOCKED:
3580 case BUILT_IN_VFPRINTF:
3581 if (n == 2 || n == 3)
3582 return gimple_fold_builtin_fprintf (gsi,
3583 gimple_call_arg (stmt, 0),
3584 gimple_call_arg (stmt, 1),
3585 n == 3
3586 ? gimple_call_arg (stmt, 2)
3587 : NULL_TREE,
3588 fcode);
3589 break;
3590 case BUILT_IN_FPRINTF_CHK:
3591 case BUILT_IN_VFPRINTF_CHK:
3592 if (n == 3 || n == 4)
3593 return gimple_fold_builtin_fprintf (gsi,
3594 gimple_call_arg (stmt, 0),
3595 gimple_call_arg (stmt, 2),
3596 n == 4
3597 ? gimple_call_arg (stmt, 3)
3598 : NULL_TREE,
3599 fcode);
3600 break;
ad03a744
RB
3601 case BUILT_IN_PRINTF:
3602 case BUILT_IN_PRINTF_UNLOCKED:
3603 case BUILT_IN_VPRINTF:
3604 if (n == 1 || n == 2)
3605 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3606 n == 2
3607 ? gimple_call_arg (stmt, 1)
3608 : NULL_TREE, fcode);
3609 break;
3610 case BUILT_IN_PRINTF_CHK:
3611 case BUILT_IN_VPRINTF_CHK:
3612 if (n == 2 || n == 3)
3613 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3614 n == 3
3615 ? gimple_call_arg (stmt, 2)
3616 : NULL_TREE, fcode);
242a37f1 3617 break;
48126138
NS
3618 case BUILT_IN_ACC_ON_DEVICE:
3619 return gimple_fold_builtin_acc_on_device (gsi,
3620 gimple_call_arg (stmt, 0));
fe75f732
PK
3621 case BUILT_IN_REALLOC:
3622 return gimple_fold_builtin_realloc (gsi);
3623
fef5a0d9
RB
3624 default:;
3625 }
3626
3627 /* Try the generic builtin folder. */
3628 bool ignore = (gimple_call_lhs (stmt) == NULL);
3629 tree result = fold_call_stmt (stmt, ignore);
3630 if (result)
3631 {
3632 if (ignore)
3633 STRIP_NOPS (result);
3634 else
3635 result = fold_convert (gimple_call_return_type (stmt), result);
3636 if (!update_call_from_tree (gsi, result))
3637 gimplify_and_update_call_from_tree (gsi, result);
3638 return true;
3639 }
3640
3641 return false;
3642}
3643
451e8dae
NS
3644/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3645 function calls to constants, where possible. */
3646
3647static tree
3648fold_internal_goacc_dim (const gimple *call)
3649{
629b3d75
MJ
3650 int axis = oacc_get_ifn_dim_arg (call);
3651 int size = oacc_get_fn_dim_size (current_function_decl, axis);
451e8dae
NS
3652 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
3653 tree result = NULL_TREE;
3654
3655 /* If the size is 1, or we only want the size and it is not dynamic,
3656 we know the answer. */
3657 if (size == 1 || (!is_pos && size))
3658 {
3659 tree type = TREE_TYPE (gimple_call_lhs (call));
3660 result = build_int_cst (type, size - is_pos);
3661 }
3662
3663 return result;
3664}
3665
849a76a5
JJ
3666/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3667 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3668 &var where var is only addressable because of such calls. */
3669
3670bool
3671optimize_atomic_compare_exchange_p (gimple *stmt)
3672{
3673 if (gimple_call_num_args (stmt) != 6
3674 || !flag_inline_atomics
3675 || !optimize
45b2222a 3676 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
849a76a5
JJ
3677 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3678 || !gimple_vdef (stmt)
3679 || !gimple_vuse (stmt))
3680 return false;
3681
3682 tree fndecl = gimple_call_fndecl (stmt);
3683 switch (DECL_FUNCTION_CODE (fndecl))
3684 {
3685 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3686 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3687 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3688 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3689 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3690 break;
3691 default:
3692 return false;
3693 }
3694
3695 tree expected = gimple_call_arg (stmt, 1);
3696 if (TREE_CODE (expected) != ADDR_EXPR
1de3c940
JJ
3697 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3698 return false;
3699
3700 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3701 if (!is_gimple_reg_type (etype)
849a76a5 3702 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
1de3c940
JJ
3703 || TREE_THIS_VOLATILE (etype)
3704 || VECTOR_TYPE_P (etype)
3705 || TREE_CODE (etype) == COMPLEX_TYPE
3706 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3707 might not preserve all the bits. See PR71716. */
3708 || SCALAR_FLOAT_TYPE_P (etype)
3709 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
849a76a5
JJ
3710 return false;
3711
3712 tree weak = gimple_call_arg (stmt, 3);
3713 if (!integer_zerop (weak) && !integer_onep (weak))
3714 return false;
3715
3716 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3717 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3718 machine_mode mode = TYPE_MODE (itype);
3719
3720 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3721 == CODE_FOR_nothing
3722 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3723 return false;
3724
1de3c940 3725 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
849a76a5
JJ
3726 return false;
3727
3728 return true;
3729}
3730
3731/* Fold
3732 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3733 into
3734 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3735 i = IMAGPART_EXPR <t>;
3736 r = (_Bool) i;
3737 e = REALPART_EXPR <t>; */
3738
3739void
3740fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3741{
3742 gimple *stmt = gsi_stmt (*gsi);
3743 tree fndecl = gimple_call_fndecl (stmt);
3744 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3745 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3746 tree ctype = build_complex_type (itype);
3747 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
cc195d46
JJ
3748 bool throws = false;
3749 edge e = NULL;
849a76a5
JJ
3750 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3751 expected);
3752 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3753 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3754 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3755 {
3756 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3757 build1 (VIEW_CONVERT_EXPR, itype,
3758 gimple_assign_lhs (g)));
3759 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3760 }
3761 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3762 + int_size_in_bytes (itype);
3763 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3764 gimple_call_arg (stmt, 0),
3765 gimple_assign_lhs (g),
3766 gimple_call_arg (stmt, 2),
3767 build_int_cst (integer_type_node, flag),
3768 gimple_call_arg (stmt, 4),
3769 gimple_call_arg (stmt, 5));
3770 tree lhs = make_ssa_name (ctype);
3771 gimple_call_set_lhs (g, lhs);
3772 gimple_set_vdef (g, gimple_vdef (stmt));
3773 gimple_set_vuse (g, gimple_vuse (stmt));
3774 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
cc195d46
JJ
3775 tree oldlhs = gimple_call_lhs (stmt);
3776 if (stmt_can_throw_internal (stmt))
3777 {
3778 throws = true;
3779 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3780 }
3781 gimple_call_set_nothrow (as_a <gcall *> (g),
3782 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3783 gimple_call_set_lhs (stmt, NULL_TREE);
3784 gsi_replace (gsi, g, true);
3785 if (oldlhs)
849a76a5 3786 {
849a76a5
JJ
3787 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3788 build1 (IMAGPART_EXPR, itype, lhs));
cc195d46
JJ
3789 if (throws)
3790 {
3791 gsi_insert_on_edge_immediate (e, g);
3792 *gsi = gsi_for_stmt (g);
3793 }
3794 else
3795 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3796 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3797 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5 3798 }
849a76a5
JJ
3799 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3800 build1 (REALPART_EXPR, itype, lhs));
cc195d46
JJ
3801 if (throws && oldlhs == NULL_TREE)
3802 {
3803 gsi_insert_on_edge_immediate (e, g);
3804 *gsi = gsi_for_stmt (g);
3805 }
3806 else
3807 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5
JJ
3808 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3809 {
3810 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3811 VIEW_CONVERT_EXPR,
3812 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3813 gimple_assign_lhs (g)));
3814 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3815 }
3816 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3817 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3818 *gsi = gsiret;
3819}
3820
1304953e
JJ
3821/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3822 doesn't fit into TYPE. The test for overflow should be regardless of
3823 -fwrapv, and even for unsigned types. */
3824
3825bool
3826arith_overflowed_p (enum tree_code code, const_tree type,
3827 const_tree arg0, const_tree arg1)
3828{
3829 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3830 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3831 widest2_int_cst;
3832 widest2_int warg0 = widest2_int_cst (arg0);
3833 widest2_int warg1 = widest2_int_cst (arg1);
3834 widest2_int wres;
3835 switch (code)
3836 {
3837 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3838 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3839 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3840 default: gcc_unreachable ();
3841 }
3842 signop sign = TYPE_SIGN (type);
3843 if (sign == UNSIGNED && wi::neg_p (wres))
3844 return true;
3845 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3846}
3847
cbdd87d4
RG
3848/* Attempt to fold a call statement referenced by the statement iterator GSI.
3849 The statement may be replaced by another statement, e.g., if the call
3850 simplifies to a constant value. Return true if any changes were made.
3851 It is assumed that the operands have been previously folded. */
3852
e021c122 3853static bool
ceeffab0 3854gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 3855{
538dd0b7 3856 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 3857 tree callee;
e021c122
RG
3858 bool changed = false;
3859 unsigned i;
cbdd87d4 3860
e021c122
RG
3861 /* Fold *& in call arguments. */
3862 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3863 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3864 {
3865 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3866 if (tmp)
3867 {
3868 gimple_call_set_arg (stmt, i, tmp);
3869 changed = true;
3870 }
3871 }
3b45a007
RG
3872
3873 /* Check for virtual calls that became direct calls. */
3874 callee = gimple_call_fn (stmt);
25583c4f 3875 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 3876 {
49c471e3
MJ
3877 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3878 {
450ad0cd
JH
3879 if (dump_file && virtual_method_call_p (callee)
3880 && !possible_polymorphic_call_target_p
6f8091fc
JH
3881 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3882 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
3883 {
3884 fprintf (dump_file,
a70e9985 3885 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
3886 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3887 fprintf (dump_file, " to ");
3888 print_generic_expr (dump_file, callee, TDF_SLIM);
3889 fprintf (dump_file, "\n");
3890 }
3891
49c471e3 3892 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
3893 changed = true;
3894 }
a70e9985 3895 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 3896 {
61dd6a2e
JH
3897 bool final;
3898 vec <cgraph_node *>targets
058d0a90 3899 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 3900 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 3901 {
a70e9985 3902 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
3903 if (dump_enabled_p ())
3904 {
807b7d62 3905 location_t loc = gimple_location_safe (stmt);
2b5f0895
XDL
3906 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3907 "folding virtual function call to %s\n",
3908 targets.length () == 1
3909 ? targets[0]->name ()
3910 : "__builtin_unreachable");
3911 }
61dd6a2e 3912 if (targets.length () == 1)
cf3e5a89 3913 {
18954840
JJ
3914 tree fndecl = targets[0]->decl;
3915 gimple_call_set_fndecl (stmt, fndecl);
cf3e5a89 3916 changed = true;
18954840
JJ
3917 /* If changing the call to __cxa_pure_virtual
3918 or similar noreturn function, adjust gimple_call_fntype
3919 too. */
865f7046 3920 if (gimple_call_noreturn_p (stmt)
18954840
JJ
3921 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3922 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3923 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3924 == void_type_node))
3925 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
a70e9985 3926 /* If the call becomes noreturn, remove the lhs. */
3cee7e4e
MP
3927 if (lhs
3928 && gimple_call_noreturn_p (stmt)
18954840 3929 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
abd3a68c 3930 || should_remove_lhs_p (lhs)))
a70e9985
JJ
3931 {
3932 if (TREE_CODE (lhs) == SSA_NAME)
3933 {
b731b390 3934 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 3935 tree def = get_or_create_ssa_default_def (cfun, var);
355fe088 3936 gimple *new_stmt = gimple_build_assign (lhs, def);
a70e9985
JJ
3937 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3938 }
3939 gimple_call_set_lhs (stmt, NULL_TREE);
3940 }
0b986c6a 3941 maybe_remove_unused_call_args (cfun, stmt);
cf3e5a89 3942 }
a70e9985 3943 else
cf3e5a89
JJ
3944 {
3945 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
355fe088 3946 gimple *new_stmt = gimple_build_call (fndecl, 0);
cf3e5a89 3947 gimple_set_location (new_stmt, gimple_location (stmt));
2da6996c
RB
3948 /* If the call had a SSA name as lhs morph that into
3949 an uninitialized value. */
a70e9985
JJ
3950 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3951 {
b731b390 3952 tree var = create_tmp_var (TREE_TYPE (lhs));
2da6996c
RB
3953 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
3954 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
3955 set_ssa_default_def (cfun, var, lhs);
42e52a51 3956 }
2da6996c
RB
3957 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3958 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3959 gsi_replace (gsi, new_stmt, false);
cf3e5a89
JJ
3960 return true;
3961 }
e021c122 3962 }
49c471e3 3963 }
e021c122 3964 }
49c471e3 3965
f2d3d07e
RH
3966 /* Check for indirect calls that became direct calls, and then
3967 no longer require a static chain. */
3968 if (gimple_call_chain (stmt))
3969 {
3970 tree fn = gimple_call_fndecl (stmt);
3971 if (fn && !DECL_STATIC_CHAIN (fn))
3972 {
3973 gimple_call_set_chain (stmt, NULL);
3974 changed = true;
3975 }
3976 else
3977 {
3978 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3979 if (tmp)
3980 {
3981 gimple_call_set_chain (stmt, tmp);
3982 changed = true;
3983 }
3984 }
3985 }
3986
e021c122
RG
3987 if (inplace)
3988 return changed;
3989
3990 /* Check for builtins that CCP can handle using information not
3991 available in the generic fold routines. */
fef5a0d9
RB
3992 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3993 {
3994 if (gimple_fold_builtin (gsi))
3995 changed = true;
3996 }
3997 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 3998 {
ea679d55 3999 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 4000 }
368b454d 4001 else if (gimple_call_internal_p (stmt))
ed9c79e1 4002 {
368b454d
JJ
4003 enum tree_code subcode = ERROR_MARK;
4004 tree result = NULL_TREE;
1304953e
JJ
4005 bool cplx_result = false;
4006 tree overflow = NULL_TREE;
368b454d
JJ
4007 switch (gimple_call_internal_fn (stmt))
4008 {
4009 case IFN_BUILTIN_EXPECT:
4010 result = fold_builtin_expect (gimple_location (stmt),
4011 gimple_call_arg (stmt, 0),
4012 gimple_call_arg (stmt, 1),
4013 gimple_call_arg (stmt, 2));
4014 break;
0e82f089 4015 case IFN_UBSAN_OBJECT_SIZE:
ca1150f0
JJ
4016 {
4017 tree offset = gimple_call_arg (stmt, 1);
4018 tree objsize = gimple_call_arg (stmt, 2);
4019 if (integer_all_onesp (objsize)
4020 || (TREE_CODE (offset) == INTEGER_CST
4021 && TREE_CODE (objsize) == INTEGER_CST
4022 && tree_int_cst_le (offset, objsize)))
4023 {
4024 replace_call_with_value (gsi, NULL_TREE);
4025 return true;
4026 }
4027 }
4028 break;
4029 case IFN_UBSAN_PTR:
4030 if (integer_zerop (gimple_call_arg (stmt, 1)))
0e82f089 4031 {
ca1150f0 4032 replace_call_with_value (gsi, NULL_TREE);
0e82f089
MP
4033 return true;
4034 }
4035 break;
ca1150f0
JJ
4036 case IFN_UBSAN_BOUNDS:
4037 {
4038 tree index = gimple_call_arg (stmt, 1);
4039 tree bound = gimple_call_arg (stmt, 2);
4040 if (TREE_CODE (index) == INTEGER_CST
4041 && TREE_CODE (bound) == INTEGER_CST)
4042 {
4043 index = fold_convert (TREE_TYPE (bound), index);
4044 if (TREE_CODE (index) == INTEGER_CST
4045 && tree_int_cst_le (index, bound))
4046 {
4047 replace_call_with_value (gsi, NULL_TREE);
4048 return true;
4049 }
4050 }
4051 }
4052 break;
451e8dae
NS
4053 case IFN_GOACC_DIM_SIZE:
4054 case IFN_GOACC_DIM_POS:
4055 result = fold_internal_goacc_dim (stmt);
4056 break;
368b454d
JJ
4057 case IFN_UBSAN_CHECK_ADD:
4058 subcode = PLUS_EXPR;
4059 break;
4060 case IFN_UBSAN_CHECK_SUB:
4061 subcode = MINUS_EXPR;
4062 break;
4063 case IFN_UBSAN_CHECK_MUL:
4064 subcode = MULT_EXPR;
4065 break;
1304953e
JJ
4066 case IFN_ADD_OVERFLOW:
4067 subcode = PLUS_EXPR;
4068 cplx_result = true;
4069 break;
4070 case IFN_SUB_OVERFLOW:
4071 subcode = MINUS_EXPR;
4072 cplx_result = true;
4073 break;
4074 case IFN_MUL_OVERFLOW:
4075 subcode = MULT_EXPR;
4076 cplx_result = true;
4077 break;
368b454d
JJ
4078 default:
4079 break;
4080 }
4081 if (subcode != ERROR_MARK)
4082 {
4083 tree arg0 = gimple_call_arg (stmt, 0);
4084 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
4085 tree type = TREE_TYPE (arg0);
4086 if (cplx_result)
4087 {
4088 tree lhs = gimple_call_lhs (stmt);
4089 if (lhs == NULL_TREE)
4090 type = NULL_TREE;
4091 else
4092 type = TREE_TYPE (TREE_TYPE (lhs));
4093 }
4094 if (type == NULL_TREE)
4095 ;
368b454d 4096 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
4097 else if (integer_zerop (arg1))
4098 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
4099 /* x = 0 + y; x = 0 * y; */
4100 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 4101 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
4102 /* x = y - y; */
4103 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 4104 result = integer_zero_node;
368b454d 4105 /* x = y * 1; x = 1 * y; */
1304953e
JJ
4106 else if (subcode == MULT_EXPR && integer_onep (arg1))
4107 result = arg0;
4108 else if (subcode == MULT_EXPR && integer_onep (arg0))
4109 result = arg1;
4110 else if (TREE_CODE (arg0) == INTEGER_CST
4111 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 4112 {
1304953e
JJ
4113 if (cplx_result)
4114 result = int_const_binop (subcode, fold_convert (type, arg0),
4115 fold_convert (type, arg1));
4116 else
4117 result = int_const_binop (subcode, arg0, arg1);
4118 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4119 {
4120 if (cplx_result)
4121 overflow = build_one_cst (type);
4122 else
4123 result = NULL_TREE;
4124 }
4125 }
4126 if (result)
4127 {
4128 if (result == integer_zero_node)
4129 result = build_zero_cst (type);
4130 else if (cplx_result && TREE_TYPE (result) != type)
4131 {
4132 if (TREE_CODE (result) == INTEGER_CST)
4133 {
4134 if (arith_overflowed_p (PLUS_EXPR, type, result,
4135 integer_zero_node))
4136 overflow = build_one_cst (type);
4137 }
4138 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4139 && TYPE_UNSIGNED (type))
4140 || (TYPE_PRECISION (type)
4141 < (TYPE_PRECISION (TREE_TYPE (result))
4142 + (TYPE_UNSIGNED (TREE_TYPE (result))
4143 && !TYPE_UNSIGNED (type)))))
4144 result = NULL_TREE;
4145 if (result)
4146 result = fold_convert (type, result);
4147 }
368b454d
JJ
4148 }
4149 }
1304953e 4150
ed9c79e1
JJ
4151 if (result)
4152 {
1304953e
JJ
4153 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4154 result = drop_tree_overflow (result);
4155 if (cplx_result)
4156 {
4157 if (overflow == NULL_TREE)
4158 overflow = build_zero_cst (TREE_TYPE (result));
4159 tree ctype = build_complex_type (TREE_TYPE (result));
4160 if (TREE_CODE (result) == INTEGER_CST
4161 && TREE_CODE (overflow) == INTEGER_CST)
4162 result = build_complex (ctype, result, overflow);
4163 else
4164 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4165 ctype, result, overflow);
4166 }
ed9c79e1
JJ
4167 if (!update_call_from_tree (gsi, result))
4168 gimplify_and_update_call_from_tree (gsi, result);
4169 changed = true;
4170 }
4171 }
3b45a007 4172
e021c122 4173 return changed;
cbdd87d4
RG
4174}
4175
e0ee10ed 4176
89a79e96
RB
4177/* Return true whether NAME has a use on STMT. */
4178
4179static bool
355fe088 4180has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
4181{
4182 imm_use_iterator iter;
4183 use_operand_p use_p;
4184 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4185 if (USE_STMT (use_p) == stmt)
4186 return true;
4187 return false;
4188}
4189
e0ee10ed
RB
4190/* Worker for fold_stmt_1 dispatch to pattern based folding with
4191 gimple_simplify.
4192
4193 Replaces *GSI with the simplification result in RCODE and OPS
4194 and the associated statements in *SEQ. Does the replacement
4195 according to INPLACE and returns true if the operation succeeded. */
4196
4197static bool
4198replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4199 code_helper rcode, tree *ops,
4200 gimple_seq *seq, bool inplace)
4201{
355fe088 4202 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed
RB
4203
4204 /* Play safe and do not allow abnormals to be mentioned in
89a79e96
RB
4205 newly created statements. See also maybe_push_res_to_seq.
4206 As an exception allow such uses if there was a use of the
4207 same SSA name on the old stmt. */
e0ee10ed 4208 if ((TREE_CODE (ops[0]) == SSA_NAME
89a79e96
RB
4209 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
4210 && !has_use_on_stmt (ops[0], stmt))
e0ee10ed
RB
4211 || (ops[1]
4212 && TREE_CODE (ops[1]) == SSA_NAME
89a79e96
RB
4213 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
4214 && !has_use_on_stmt (ops[1], stmt))
e0ee10ed
RB
4215 || (ops[2]
4216 && TREE_CODE (ops[2]) == SSA_NAME
89a79e96 4217 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
e0237993
JJ
4218 && !has_use_on_stmt (ops[2], stmt))
4219 || (COMPARISON_CLASS_P (ops[0])
4220 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
4221 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
4222 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
4223 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
4224 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
4225 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
e0ee10ed
RB
4226 return false;
4227
fec40d06
RS
4228 /* Don't insert new statements when INPLACE is true, even if we could
4229 reuse STMT for the final statement. */
4230 if (inplace && !gimple_seq_empty_p (*seq))
4231 return false;
4232
538dd0b7 4233 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
e0ee10ed
RB
4234 {
4235 gcc_assert (rcode.is_tree_code ());
4236 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
4237 /* GIMPLE_CONDs condition may not throw. */
4238 && (!flag_exceptions
4239 || !cfun->can_throw_non_call_exceptions
4240 || !operation_could_trap_p (rcode,
4241 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4242 false, NULL_TREE)))
538dd0b7 4243 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
e0ee10ed 4244 else if (rcode == SSA_NAME)
538dd0b7 4245 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
e0ee10ed
RB
4246 build_zero_cst (TREE_TYPE (ops[0])));
4247 else if (rcode == INTEGER_CST)
4248 {
4249 if (integer_zerop (ops[0]))
538dd0b7 4250 gimple_cond_make_false (cond_stmt);
e0ee10ed 4251 else
538dd0b7 4252 gimple_cond_make_true (cond_stmt);
e0ee10ed
RB
4253 }
4254 else if (!inplace)
4255 {
4256 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
4257 ops, seq);
4258 if (!res)
4259 return false;
538dd0b7 4260 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
e0ee10ed
RB
4261 build_zero_cst (TREE_TYPE (res)));
4262 }
4263 else
4264 return false;
4265 if (dump_file && (dump_flags & TDF_DETAILS))
4266 {
4267 fprintf (dump_file, "gimple_simplified to ");
4268 if (!gimple_seq_empty_p (*seq))
4269 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4270 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4271 0, TDF_SLIM);
4272 }
4273 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4274 return true;
4275 }
4276 else if (is_gimple_assign (stmt)
4277 && rcode.is_tree_code ())
4278 {
4279 if (!inplace
f3582e54 4280 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
e0ee10ed
RB
4281 {
4282 maybe_build_generic_op (rcode,
545cd7ec 4283 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
00d66391 4284 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
e0ee10ed
RB
4285 if (dump_file && (dump_flags & TDF_DETAILS))
4286 {
4287 fprintf (dump_file, "gimple_simplified to ");
4288 if (!gimple_seq_empty_p (*seq))
4289 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4290 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4291 0, TDF_SLIM);
4292 }
4293 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4294 return true;
4295 }
4296 }
37d486ab 4297 else if (rcode.is_fn_code ()
c9e926ce 4298 && gimple_call_combined_fn (stmt) == rcode)
37d486ab
RB
4299 {
4300 unsigned i;
4301 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4302 {
4303 gcc_assert (ops[i] != NULL_TREE);
4304 gimple_call_set_arg (stmt, i, ops[i]);
4305 }
4306 if (i < 3)
4307 gcc_assert (ops[i] == NULL_TREE);
fec40d06
RS
4308 if (dump_file && (dump_flags & TDF_DETAILS))
4309 {
4310 fprintf (dump_file, "gimple_simplified to ");
4311 if (!gimple_seq_empty_p (*seq))
4312 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4313 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4314 }
4315 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
37d486ab
RB
4316 return true;
4317 }
e0ee10ed
RB
4318 else if (!inplace)
4319 {
4320 if (gimple_has_lhs (stmt))
4321 {
4322 tree lhs = gimple_get_lhs (stmt);
de665bbd
RB
4323 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
4324 ops, seq, lhs))
4325 return false;
e0ee10ed
RB
4326 if (dump_file && (dump_flags & TDF_DETAILS))
4327 {
4328 fprintf (dump_file, "gimple_simplified to ");
4329 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4330 }
4331 gsi_replace_with_seq_vops (gsi, *seq);
4332 return true;
4333 }
4334 else
4335 gcc_unreachable ();
4336 }
4337
4338 return false;
4339}
4340
040292e7
RB
4341/* Canonicalize MEM_REFs invariant address operand after propagation. */
4342
4343static bool
4344maybe_canonicalize_mem_ref_addr (tree *t)
4345{
4346 bool res = false;
4347
4348 if (TREE_CODE (*t) == ADDR_EXPR)
4349 t = &TREE_OPERAND (*t, 0);
4350
f17a223d
RB
4351 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4352 generic vector extension. The actual vector referenced is
4353 view-converted to an array type for this purpose. If the index
4354 is constant the canonical representation in the middle-end is a
4355 BIT_FIELD_REF so re-write the former to the latter here. */
4356 if (TREE_CODE (*t) == ARRAY_REF
4357 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4358 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4359 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4360 {
4361 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4362 if (VECTOR_TYPE_P (vtype))
4363 {
4364 tree low = array_ref_low_bound (*t);
4365 if (TREE_CODE (low) == INTEGER_CST)
4366 {
4367 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4368 {
4369 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4370 wi::to_widest (low));
4371 idx = wi::mul (idx, wi::to_widest
4372 (TYPE_SIZE (TREE_TYPE (*t))));
4373 widest_int ext
4374 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4375 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4376 {
4377 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4378 TREE_TYPE (*t),
4379 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4380 TYPE_SIZE (TREE_TYPE (*t)),
92e29a5e 4381 wide_int_to_tree (bitsizetype, idx));
f17a223d
RB
4382 res = true;
4383 }
4384 }
4385 }
4386 }
4387 }
4388
040292e7
RB
4389 while (handled_component_p (*t))
4390 t = &TREE_OPERAND (*t, 0);
4391
4392 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4393 of invariant addresses into a SSA name MEM_REF address. */
4394 if (TREE_CODE (*t) == MEM_REF
4395 || TREE_CODE (*t) == TARGET_MEM_REF)
4396 {
4397 tree addr = TREE_OPERAND (*t, 0);
4398 if (TREE_CODE (addr) == ADDR_EXPR
4399 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4400 || handled_component_p (TREE_OPERAND (addr, 0))))
4401 {
4402 tree base;
4403 HOST_WIDE_INT coffset;
4404 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4405 &coffset);
4406 if (!base)
4407 gcc_unreachable ();
4408
4409 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4410 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4411 TREE_OPERAND (*t, 1),
4412 size_int (coffset));
4413 res = true;
4414 }
4415 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4416 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4417 }
4418
4419 /* Canonicalize back MEM_REFs to plain reference trees if the object
4420 accessed is a decl that has the same access semantics as the MEM_REF. */
4421 if (TREE_CODE (*t) == MEM_REF
4422 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
f3dccf50
RB
4423 && integer_zerop (TREE_OPERAND (*t, 1))
4424 && MR_DEPENDENCE_CLIQUE (*t) == 0)
040292e7
RB
4425 {
4426 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4427 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4428 if (/* Same volatile qualification. */
4429 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4430 /* Same TBAA behavior with -fstrict-aliasing. */
4431 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4432 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4433 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4434 /* Same alignment. */
4435 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4436 /* We have to look out here to not drop a required conversion
4437 from the rhs to the lhs if *t appears on the lhs or vice-versa
4438 if it appears on the rhs. Thus require strict type
4439 compatibility. */
4440 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4441 {
4442 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4443 res = true;
4444 }
4445 }
4446
4447 /* Canonicalize TARGET_MEM_REF in particular with respect to
4448 the indexes becoming constant. */
4449 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4450 {
4451 tree tem = maybe_fold_tmr (*t);
4452 if (tem)
4453 {
4454 *t = tem;
4455 res = true;
4456 }
4457 }
4458
4459 return res;
4460}
4461
cbdd87d4
RG
4462/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4463 distinguishes both cases. */
4464
4465static bool
e0ee10ed 4466fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
cbdd87d4
RG
4467{
4468 bool changed = false;
355fe088 4469 gimple *stmt = gsi_stmt (*gsi);
a8b85ce9 4470 bool nowarning = gimple_no_warning_p (stmt);
cbdd87d4 4471 unsigned i;
a8b85ce9 4472 fold_defer_overflow_warnings ();
cbdd87d4 4473
040292e7
RB
4474 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4475 after propagation.
4476 ??? This shouldn't be done in generic folding but in the
4477 propagation helpers which also know whether an address was
89a79e96
RB
4478 propagated.
4479 Also canonicalize operand order. */
040292e7
RB
4480 switch (gimple_code (stmt))
4481 {
4482 case GIMPLE_ASSIGN:
4483 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4484 {
4485 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4486 if ((REFERENCE_CLASS_P (*rhs)
4487 || TREE_CODE (*rhs) == ADDR_EXPR)
4488 && maybe_canonicalize_mem_ref_addr (rhs))
4489 changed = true;
4490 tree *lhs = gimple_assign_lhs_ptr (stmt);
4491 if (REFERENCE_CLASS_P (*lhs)
4492 && maybe_canonicalize_mem_ref_addr (lhs))
4493 changed = true;
4494 }
89a79e96
RB
4495 else
4496 {
4497 /* Canonicalize operand order. */
4498 enum tree_code code = gimple_assign_rhs_code (stmt);
4499 if (TREE_CODE_CLASS (code) == tcc_comparison
4500 || commutative_tree_code (code)
4501 || commutative_ternary_tree_code (code))
4502 {
4503 tree rhs1 = gimple_assign_rhs1 (stmt);
4504 tree rhs2 = gimple_assign_rhs2 (stmt);
14e72812 4505 if (tree_swap_operands_p (rhs1, rhs2))
89a79e96
RB
4506 {
4507 gimple_assign_set_rhs1 (stmt, rhs2);
4508 gimple_assign_set_rhs2 (stmt, rhs1);
4509 if (TREE_CODE_CLASS (code) == tcc_comparison)
4510 gimple_assign_set_rhs_code (stmt,
4511 swap_tree_comparison (code));
4512 changed = true;
4513 }
4514 }
4515 }
040292e7
RB
4516 break;
4517 case GIMPLE_CALL:
4518 {
4519 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4520 {
4521 tree *arg = gimple_call_arg_ptr (stmt, i);
4522 if (REFERENCE_CLASS_P (*arg)
4523 && maybe_canonicalize_mem_ref_addr (arg))
4524 changed = true;
4525 }
4526 tree *lhs = gimple_call_lhs_ptr (stmt);
4527 if (*lhs
4528 && REFERENCE_CLASS_P (*lhs)
4529 && maybe_canonicalize_mem_ref_addr (lhs))
4530 changed = true;
4531 break;
4532 }
4533 case GIMPLE_ASM:
4534 {
538dd0b7
DM
4535 gasm *asm_stmt = as_a <gasm *> (stmt);
4536 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
040292e7 4537 {
538dd0b7 4538 tree link = gimple_asm_output_op (asm_stmt, i);
040292e7
RB
4539 tree op = TREE_VALUE (link);
4540 if (REFERENCE_CLASS_P (op)
4541 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4542 changed = true;
4543 }
538dd0b7 4544 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
040292e7 4545 {
538dd0b7 4546 tree link = gimple_asm_input_op (asm_stmt, i);
040292e7
RB
4547 tree op = TREE_VALUE (link);
4548 if ((REFERENCE_CLASS_P (op)
4549 || TREE_CODE (op) == ADDR_EXPR)
4550 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4551 changed = true;
4552 }
4553 }
4554 break;
4555 case GIMPLE_DEBUG:
4556 if (gimple_debug_bind_p (stmt))
4557 {
4558 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4559 if (*val
4560 && (REFERENCE_CLASS_P (*val)
4561 || TREE_CODE (*val) == ADDR_EXPR)
4562 && maybe_canonicalize_mem_ref_addr (val))
4563 changed = true;
4564 }
4565 break;
89a79e96
RB
4566 case GIMPLE_COND:
4567 {
4568 /* Canonicalize operand order. */
4569 tree lhs = gimple_cond_lhs (stmt);
4570 tree rhs = gimple_cond_rhs (stmt);
14e72812 4571 if (tree_swap_operands_p (lhs, rhs))
89a79e96
RB
4572 {
4573 gcond *gc = as_a <gcond *> (stmt);
4574 gimple_cond_set_lhs (gc, rhs);
4575 gimple_cond_set_rhs (gc, lhs);
4576 gimple_cond_set_code (gc,
4577 swap_tree_comparison (gimple_cond_code (gc)));
4578 changed = true;
4579 }
4580 }
040292e7
RB
4581 default:;
4582 }
4583
e0ee10ed
RB
4584 /* Dispatch to pattern-based folding. */
4585 if (!inplace
4586 || is_gimple_assign (stmt)
4587 || gimple_code (stmt) == GIMPLE_COND)
4588 {
4589 gimple_seq seq = NULL;
4590 code_helper rcode;
4591 tree ops[3] = {};
0ff093d8
RB
4592 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
4593 valueize, valueize))
e0ee10ed
RB
4594 {
4595 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
4596 changed = true;
4597 else
4598 gimple_seq_discard (seq);
4599 }
4600 }
4601
4602 stmt = gsi_stmt (*gsi);
4603
cbdd87d4
RG
4604 /* Fold the main computation performed by the statement. */
4605 switch (gimple_code (stmt))
4606 {
4607 case GIMPLE_ASSIGN:
4608 {
819ec64c
RB
4609 /* Try to canonicalize for boolean-typed X the comparisons
4610 X == 0, X == 1, X != 0, and X != 1. */
4611 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4612 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5fbcc0ed 4613 {
819ec64c
RB
4614 tree lhs = gimple_assign_lhs (stmt);
4615 tree op1 = gimple_assign_rhs1 (stmt);
4616 tree op2 = gimple_assign_rhs2 (stmt);
4617 tree type = TREE_TYPE (op1);
4618
4619 /* Check whether the comparison operands are of the same boolean
4620 type as the result type is.
4621 Check that second operand is an integer-constant with value
4622 one or zero. */
4623 if (TREE_CODE (op2) == INTEGER_CST
4624 && (integer_zerop (op2) || integer_onep (op2))
4625 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4626 {
4627 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4628 bool is_logical_not = false;
4629
4630 /* X == 0 and X != 1 is a logical-not.of X
4631 X == 1 and X != 0 is X */
4632 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4633 || (cmp_code == NE_EXPR && integer_onep (op2)))
4634 is_logical_not = true;
4635
4636 if (is_logical_not == false)
4637 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4638 /* Only for one-bit precision typed X the transformation
4639 !X -> ~X is valied. */
4640 else if (TYPE_PRECISION (type) == 1)
4641 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4642 /* Otherwise we use !X -> X ^ 1. */
4643 else
4644 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4645 build_int_cst (type, 1));
4646 changed = true;
4647 break;
4648 }
5fbcc0ed 4649 }
819ec64c
RB
4650
4651 unsigned old_num_ops = gimple_num_ops (stmt);
4652 tree lhs = gimple_assign_lhs (stmt);
4653 tree new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
4654 if (new_rhs
4655 && !useless_type_conversion_p (TREE_TYPE (lhs),
4656 TREE_TYPE (new_rhs)))
4657 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4658 if (new_rhs
4659 && (!inplace
4660 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4661 {
4662 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4663 changed = true;
4664 }
4665 break;
4666 }
4667
cbdd87d4 4668 case GIMPLE_CALL:
ceeffab0 4669 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
4670 break;
4671
4672 case GIMPLE_ASM:
4673 /* Fold *& in asm operands. */
38384150 4674 {
538dd0b7 4675 gasm *asm_stmt = as_a <gasm *> (stmt);
38384150
JJ
4676 size_t noutputs;
4677 const char **oconstraints;
4678 const char *constraint;
4679 bool allows_mem, allows_reg;
4680
538dd0b7 4681 noutputs = gimple_asm_noutputs (asm_stmt);
38384150
JJ
4682 oconstraints = XALLOCAVEC (const char *, noutputs);
4683
538dd0b7 4684 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
38384150 4685 {
538dd0b7 4686 tree link = gimple_asm_output_op (asm_stmt, i);
38384150
JJ
4687 tree op = TREE_VALUE (link);
4688 oconstraints[i]
4689 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4690 if (REFERENCE_CLASS_P (op)
4691 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4692 {
4693 TREE_VALUE (link) = op;
4694 changed = true;
4695 }
4696 }
538dd0b7 4697 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
38384150 4698 {
538dd0b7 4699 tree link = gimple_asm_input_op (asm_stmt, i);
38384150
JJ
4700 tree op = TREE_VALUE (link);
4701 constraint
4702 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4703 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4704 oconstraints, &allows_mem, &allows_reg);
4705 if (REFERENCE_CLASS_P (op)
4706 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4707 != NULL_TREE)
4708 {
4709 TREE_VALUE (link) = op;
4710 changed = true;
4711 }
4712 }
4713 }
cbdd87d4
RG
4714 break;
4715
bd422c4a
RG
4716 case GIMPLE_DEBUG:
4717 if (gimple_debug_bind_p (stmt))
4718 {
4719 tree val = gimple_debug_bind_get_value (stmt);
4720 if (val
4721 && REFERENCE_CLASS_P (val))
4722 {
4723 tree tem = maybe_fold_reference (val, false);
4724 if (tem)
4725 {
4726 gimple_debug_bind_set_value (stmt, tem);
4727 changed = true;
4728 }
4729 }
3e888a5e
RG
4730 else if (val
4731 && TREE_CODE (val) == ADDR_EXPR)
4732 {
4733 tree ref = TREE_OPERAND (val, 0);
4734 tree tem = maybe_fold_reference (ref, false);
4735 if (tem)
4736 {
4737 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4738 gimple_debug_bind_set_value (stmt, tem);
4739 changed = true;
4740 }
4741 }
bd422c4a
RG
4742 }
4743 break;
4744
cfe3d653
PK
4745 case GIMPLE_RETURN:
4746 {
4747 greturn *ret_stmt = as_a<greturn *> (stmt);
4748 tree ret = gimple_return_retval(ret_stmt);
4749
4750 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4751 {
4752 tree val = valueize (ret);
1af928db
RB
4753 if (val && val != ret
4754 && may_propagate_copy (ret, val))
cfe3d653
PK
4755 {
4756 gimple_return_set_retval (ret_stmt, val);
4757 changed = true;
4758 }
4759 }
4760 }
4761 break;
4762
cbdd87d4
RG
4763 default:;
4764 }
4765
4766 stmt = gsi_stmt (*gsi);
4767
37376165
RB
4768 /* Fold *& on the lhs. */
4769 if (gimple_has_lhs (stmt))
cbdd87d4
RG
4770 {
4771 tree lhs = gimple_get_lhs (stmt);
4772 if (lhs && REFERENCE_CLASS_P (lhs))
4773 {
4774 tree new_lhs = maybe_fold_reference (lhs, true);
4775 if (new_lhs)
4776 {
4777 gimple_set_lhs (stmt, new_lhs);
4778 changed = true;
4779 }
4780 }
4781 }
4782
a8b85ce9 4783 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
cbdd87d4
RG
4784 return changed;
4785}
4786
e0ee10ed
RB
4787/* Valueziation callback that ends up not following SSA edges. */
4788
4789tree
4790no_follow_ssa_edges (tree)
4791{
4792 return NULL_TREE;
4793}
4794
45cc9f96
RB
4795/* Valueization callback that ends up following single-use SSA edges only. */
4796
4797tree
4798follow_single_use_edges (tree val)
4799{
4800 if (TREE_CODE (val) == SSA_NAME
4801 && !has_single_use (val))
4802 return NULL_TREE;
4803 return val;
4804}
4805
cbdd87d4
RG
4806/* Fold the statement pointed to by GSI. In some cases, this function may
4807 replace the whole statement with a new one. Returns true iff folding
4808 makes any changes.
4809 The statement pointed to by GSI should be in valid gimple form but may
4810 be in unfolded state as resulting from for example constant propagation
4811 which can produce *&x = 0. */
4812
4813bool
4814fold_stmt (gimple_stmt_iterator *gsi)
4815{
e0ee10ed
RB
4816 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4817}
4818
4819bool
4820fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4821{
4822 return fold_stmt_1 (gsi, false, valueize);
cbdd87d4
RG
4823}
4824
59401b92 4825/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
4826 *&x created by constant propagation are handled. The statement cannot
4827 be replaced with a new one. Return true if the statement was
4828 changed, false otherwise.
59401b92 4829 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
4830 be in unfolded state as resulting from for example constant propagation
4831 which can produce *&x = 0. */
4832
4833bool
59401b92 4834fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 4835{
355fe088 4836 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 4837 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 4838 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
4839 return changed;
4840}
4841
e89065a1
SL
4842/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4843 if EXPR is null or we don't know how.
4844 If non-null, the result always has boolean type. */
4845
4846static tree
4847canonicalize_bool (tree expr, bool invert)
4848{
4849 if (!expr)
4850 return NULL_TREE;
4851 else if (invert)
4852 {
4853 if (integer_nonzerop (expr))
4854 return boolean_false_node;
4855 else if (integer_zerop (expr))
4856 return boolean_true_node;
4857 else if (TREE_CODE (expr) == SSA_NAME)
4858 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4859 build_int_cst (TREE_TYPE (expr), 0));
98209db3 4860 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
4861 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4862 boolean_type_node,
4863 TREE_OPERAND (expr, 0),
4864 TREE_OPERAND (expr, 1));
4865 else
4866 return NULL_TREE;
4867 }
4868 else
4869 {
4870 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4871 return expr;
4872 if (integer_nonzerop (expr))
4873 return boolean_true_node;
4874 else if (integer_zerop (expr))
4875 return boolean_false_node;
4876 else if (TREE_CODE (expr) == SSA_NAME)
4877 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4878 build_int_cst (TREE_TYPE (expr), 0));
98209db3 4879 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
4880 return fold_build2 (TREE_CODE (expr),
4881 boolean_type_node,
4882 TREE_OPERAND (expr, 0),
4883 TREE_OPERAND (expr, 1));
4884 else
4885 return NULL_TREE;
4886 }
4887}
4888
4889/* Check to see if a boolean expression EXPR is logically equivalent to the
4890 comparison (OP1 CODE OP2). Check for various identities involving
4891 SSA_NAMEs. */
4892
4893static bool
4894same_bool_comparison_p (const_tree expr, enum tree_code code,
4895 const_tree op1, const_tree op2)
4896{
355fe088 4897 gimple *s;
e89065a1
SL
4898
4899 /* The obvious case. */
4900 if (TREE_CODE (expr) == code
4901 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
4902 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
4903 return true;
4904
4905 /* Check for comparing (name, name != 0) and the case where expr
4906 is an SSA_NAME with a definition matching the comparison. */
4907 if (TREE_CODE (expr) == SSA_NAME
4908 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4909 {
4910 if (operand_equal_p (expr, op1, 0))
4911 return ((code == NE_EXPR && integer_zerop (op2))
4912 || (code == EQ_EXPR && integer_nonzerop (op2)));
4913 s = SSA_NAME_DEF_STMT (expr);
4914 if (is_gimple_assign (s)
4915 && gimple_assign_rhs_code (s) == code
4916 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
4917 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
4918 return true;
4919 }
4920
4921 /* If op1 is of the form (name != 0) or (name == 0), and the definition
4922 of name is a comparison, recurse. */
4923 if (TREE_CODE (op1) == SSA_NAME
4924 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
4925 {
4926 s = SSA_NAME_DEF_STMT (op1);
4927 if (is_gimple_assign (s)
4928 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
4929 {
4930 enum tree_code c = gimple_assign_rhs_code (s);
4931 if ((c == NE_EXPR && integer_zerop (op2))
4932 || (c == EQ_EXPR && integer_nonzerop (op2)))
4933 return same_bool_comparison_p (expr, c,
4934 gimple_assign_rhs1 (s),
4935 gimple_assign_rhs2 (s));
4936 if ((c == EQ_EXPR && integer_zerop (op2))
4937 || (c == NE_EXPR && integer_nonzerop (op2)))
4938 return same_bool_comparison_p (expr,
4939 invert_tree_comparison (c, false),
4940 gimple_assign_rhs1 (s),
4941 gimple_assign_rhs2 (s));
4942 }
4943 }
4944 return false;
4945}
4946
4947/* Check to see if two boolean expressions OP1 and OP2 are logically
4948 equivalent. */
4949
4950static bool
4951same_bool_result_p (const_tree op1, const_tree op2)
4952{
4953 /* Simple cases first. */
4954 if (operand_equal_p (op1, op2, 0))
4955 return true;
4956
4957 /* Check the cases where at least one of the operands is a comparison.
4958 These are a bit smarter than operand_equal_p in that they apply some
4959 identifies on SSA_NAMEs. */
98209db3 4960 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
4961 && same_bool_comparison_p (op1, TREE_CODE (op2),
4962 TREE_OPERAND (op2, 0),
4963 TREE_OPERAND (op2, 1)))
4964 return true;
98209db3 4965 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
4966 && same_bool_comparison_p (op2, TREE_CODE (op1),
4967 TREE_OPERAND (op1, 0),
4968 TREE_OPERAND (op1, 1)))
4969 return true;
4970
4971 /* Default case. */
4972 return false;
4973}
4974
4975/* Forward declarations for some mutually recursive functions. */
4976
4977static tree
4978and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4979 enum tree_code code2, tree op2a, tree op2b);
4980static tree
4981and_var_with_comparison (tree var, bool invert,
4982 enum tree_code code2, tree op2a, tree op2b);
4983static tree
355fe088 4984and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
4985 enum tree_code code2, tree op2a, tree op2b);
4986static tree
4987or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4988 enum tree_code code2, tree op2a, tree op2b);
4989static tree
4990or_var_with_comparison (tree var, bool invert,
4991 enum tree_code code2, tree op2a, tree op2b);
4992static tree
355fe088 4993or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
4994 enum tree_code code2, tree op2a, tree op2b);
4995
4996/* Helper function for and_comparisons_1: try to simplify the AND of the
4997 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4998 If INVERT is true, invert the value of the VAR before doing the AND.
4999 Return NULL_EXPR if we can't simplify this to a single expression. */
5000
5001static tree
5002and_var_with_comparison (tree var, bool invert,
5003 enum tree_code code2, tree op2a, tree op2b)
5004{
5005 tree t;
355fe088 5006 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5007
5008 /* We can only deal with variables whose definitions are assignments. */
5009 if (!is_gimple_assign (stmt))
5010 return NULL_TREE;
5011
5012 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5013 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5014 Then we only have to consider the simpler non-inverted cases. */
5015 if (invert)
5016 t = or_var_with_comparison_1 (stmt,
5017 invert_tree_comparison (code2, false),
5018 op2a, op2b);
5019 else
5020 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5021 return canonicalize_bool (t, invert);
5022}
5023
5024/* Try to simplify the AND of the ssa variable defined by the assignment
5025 STMT with the comparison specified by (OP2A CODE2 OP2B).
5026 Return NULL_EXPR if we can't simplify this to a single expression. */
5027
5028static tree
355fe088 5029and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5030 enum tree_code code2, tree op2a, tree op2b)
5031{
5032 tree var = gimple_assign_lhs (stmt);
5033 tree true_test_var = NULL_TREE;
5034 tree false_test_var = NULL_TREE;
5035 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5036
5037 /* Check for identities like (var AND (var == 0)) => false. */
5038 if (TREE_CODE (op2a) == SSA_NAME
5039 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5040 {
5041 if ((code2 == NE_EXPR && integer_zerop (op2b))
5042 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5043 {
5044 true_test_var = op2a;
5045 if (var == true_test_var)
5046 return var;
5047 }
5048 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5049 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5050 {
5051 false_test_var = op2a;
5052 if (var == false_test_var)
5053 return boolean_false_node;
5054 }
5055 }
5056
5057 /* If the definition is a comparison, recurse on it. */
5058 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5059 {
5060 tree t = and_comparisons_1 (innercode,
5061 gimple_assign_rhs1 (stmt),
5062 gimple_assign_rhs2 (stmt),
5063 code2,
5064 op2a,
5065 op2b);
5066 if (t)
5067 return t;
5068 }
5069
5070 /* If the definition is an AND or OR expression, we may be able to
5071 simplify by reassociating. */
eb9820c0
KT
5072 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5073 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
5074 {
5075 tree inner1 = gimple_assign_rhs1 (stmt);
5076 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 5077 gimple *s;
e89065a1
SL
5078 tree t;
5079 tree partial = NULL_TREE;
eb9820c0 5080 bool is_and = (innercode == BIT_AND_EXPR);
e89065a1
SL
5081
5082 /* Check for boolean identities that don't require recursive examination
5083 of inner1/inner2:
5084 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5085 inner1 AND (inner1 OR inner2) => inner1
5086 !inner1 AND (inner1 AND inner2) => false
5087 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5088 Likewise for similar cases involving inner2. */
5089 if (inner1 == true_test_var)
5090 return (is_and ? var : inner1);
5091 else if (inner2 == true_test_var)
5092 return (is_and ? var : inner2);
5093 else if (inner1 == false_test_var)
5094 return (is_and
5095 ? boolean_false_node
5096 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5097 else if (inner2 == false_test_var)
5098 return (is_and
5099 ? boolean_false_node
5100 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5101
5102 /* Next, redistribute/reassociate the AND across the inner tests.
5103 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5104 if (TREE_CODE (inner1) == SSA_NAME
5105 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5106 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5107 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5108 gimple_assign_rhs1 (s),
5109 gimple_assign_rhs2 (s),
5110 code2, op2a, op2b)))
5111 {
5112 /* Handle the AND case, where we are reassociating:
5113 (inner1 AND inner2) AND (op2a code2 op2b)
5114 => (t AND inner2)
5115 If the partial result t is a constant, we win. Otherwise
5116 continue on to try reassociating with the other inner test. */
5117 if (is_and)
5118 {
5119 if (integer_onep (t))
5120 return inner2;
5121 else if (integer_zerop (t))
5122 return boolean_false_node;
5123 }
5124
5125 /* Handle the OR case, where we are redistributing:
5126 (inner1 OR inner2) AND (op2a code2 op2b)
5127 => (t OR (inner2 AND (op2a code2 op2b))) */
8236c8eb
JJ
5128 else if (integer_onep (t))
5129 return boolean_true_node;
5130
5131 /* Save partial result for later. */
5132 partial = t;
e89065a1
SL
5133 }
5134
5135 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5136 if (TREE_CODE (inner2) == SSA_NAME
5137 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5138 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5139 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5140 gimple_assign_rhs1 (s),
5141 gimple_assign_rhs2 (s),
5142 code2, op2a, op2b)))
5143 {
5144 /* Handle the AND case, where we are reassociating:
5145 (inner1 AND inner2) AND (op2a code2 op2b)
5146 => (inner1 AND t) */
5147 if (is_and)
5148 {
5149 if (integer_onep (t))
5150 return inner1;
5151 else if (integer_zerop (t))
5152 return boolean_false_node;
8236c8eb
JJ
5153 /* If both are the same, we can apply the identity
5154 (x AND x) == x. */
5155 else if (partial && same_bool_result_p (t, partial))
5156 return t;
e89065a1
SL
5157 }
5158
5159 /* Handle the OR case. where we are redistributing:
5160 (inner1 OR inner2) AND (op2a code2 op2b)
5161 => (t OR (inner1 AND (op2a code2 op2b)))
5162 => (t OR partial) */
5163 else
5164 {
5165 if (integer_onep (t))
5166 return boolean_true_node;
5167 else if (partial)
5168 {
5169 /* We already got a simplification for the other
5170 operand to the redistributed OR expression. The
5171 interesting case is when at least one is false.
5172 Or, if both are the same, we can apply the identity
5173 (x OR x) == x. */
5174 if (integer_zerop (partial))
5175 return t;
5176 else if (integer_zerop (t))
5177 return partial;
5178 else if (same_bool_result_p (t, partial))
5179 return t;
5180 }
5181 }
5182 }
5183 }
5184 return NULL_TREE;
5185}
5186
5187/* Try to simplify the AND of two comparisons defined by
5188 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5189 If this can be done without constructing an intermediate value,
5190 return the resulting tree; otherwise NULL_TREE is returned.
5191 This function is deliberately asymmetric as it recurses on SSA_DEFs
5192 in the first comparison but not the second. */
5193
5194static tree
5195and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5196 enum tree_code code2, tree op2a, tree op2b)
5197{
ae22ac3c 5198 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 5199
e89065a1
SL
5200 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5201 if (operand_equal_p (op1a, op2a, 0)
5202 && operand_equal_p (op1b, op2b, 0))
5203 {
eb9820c0 5204 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5205 tree t = combine_comparisons (UNKNOWN_LOCATION,
5206 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 5207 truth_type, op1a, op1b);
e89065a1
SL
5208 if (t)
5209 return t;
5210 }
5211
5212 /* Likewise the swapped case of the above. */
5213 if (operand_equal_p (op1a, op2b, 0)
5214 && operand_equal_p (op1b, op2a, 0))
5215 {
eb9820c0 5216 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5217 tree t = combine_comparisons (UNKNOWN_LOCATION,
5218 TRUTH_ANDIF_EXPR, code1,
5219 swap_tree_comparison (code2),
31ed6226 5220 truth_type, op1a, op1b);
e89065a1
SL
5221 if (t)
5222 return t;
5223 }
5224
5225 /* If both comparisons are of the same value against constants, we might
5226 be able to merge them. */
5227 if (operand_equal_p (op1a, op2a, 0)
5228 && TREE_CODE (op1b) == INTEGER_CST
5229 && TREE_CODE (op2b) == INTEGER_CST)
5230 {
5231 int cmp = tree_int_cst_compare (op1b, op2b);
5232
5233 /* If we have (op1a == op1b), we should either be able to
5234 return that or FALSE, depending on whether the constant op1b
5235 also satisfies the other comparison against op2b. */
5236 if (code1 == EQ_EXPR)
5237 {
5238 bool done = true;
5239 bool val;
5240 switch (code2)
5241 {
5242 case EQ_EXPR: val = (cmp == 0); break;
5243 case NE_EXPR: val = (cmp != 0); break;
5244 case LT_EXPR: val = (cmp < 0); break;
5245 case GT_EXPR: val = (cmp > 0); break;
5246 case LE_EXPR: val = (cmp <= 0); break;
5247 case GE_EXPR: val = (cmp >= 0); break;
5248 default: done = false;
5249 }
5250 if (done)
5251 {
5252 if (val)
5253 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5254 else
5255 return boolean_false_node;
5256 }
5257 }
5258 /* Likewise if the second comparison is an == comparison. */
5259 else if (code2 == EQ_EXPR)
5260 {
5261 bool done = true;
5262 bool val;
5263 switch (code1)
5264 {
5265 case EQ_EXPR: val = (cmp == 0); break;
5266 case NE_EXPR: val = (cmp != 0); break;
5267 case LT_EXPR: val = (cmp > 0); break;
5268 case GT_EXPR: val = (cmp < 0); break;
5269 case LE_EXPR: val = (cmp >= 0); break;
5270 case GE_EXPR: val = (cmp <= 0); break;
5271 default: done = false;
5272 }
5273 if (done)
5274 {
5275 if (val)
5276 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5277 else
5278 return boolean_false_node;
5279 }
5280 }
5281
5282 /* Same business with inequality tests. */
5283 else if (code1 == NE_EXPR)
5284 {
5285 bool val;
5286 switch (code2)
5287 {
5288 case EQ_EXPR: val = (cmp != 0); break;
5289 case NE_EXPR: val = (cmp == 0); break;
5290 case LT_EXPR: val = (cmp >= 0); break;
5291 case GT_EXPR: val = (cmp <= 0); break;
5292 case LE_EXPR: val = (cmp > 0); break;
5293 case GE_EXPR: val = (cmp < 0); break;
5294 default:
5295 val = false;
5296 }
5297 if (val)
5298 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5299 }
5300 else if (code2 == NE_EXPR)
5301 {
5302 bool val;
5303 switch (code1)
5304 {
5305 case EQ_EXPR: val = (cmp == 0); break;
5306 case NE_EXPR: val = (cmp != 0); break;
5307 case LT_EXPR: val = (cmp <= 0); break;
5308 case GT_EXPR: val = (cmp >= 0); break;
5309 case LE_EXPR: val = (cmp < 0); break;
5310 case GE_EXPR: val = (cmp > 0); break;
5311 default:
5312 val = false;
5313 }
5314 if (val)
5315 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5316 }
5317
5318 /* Chose the more restrictive of two < or <= comparisons. */
5319 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5320 && (code2 == LT_EXPR || code2 == LE_EXPR))
5321 {
5322 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5323 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5324 else
5325 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5326 }
5327
5328 /* Likewise chose the more restrictive of two > or >= comparisons. */
5329 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5330 && (code2 == GT_EXPR || code2 == GE_EXPR))
5331 {
5332 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5333 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5334 else
5335 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5336 }
5337
5338 /* Check for singleton ranges. */
5339 else if (cmp == 0
5340 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5341 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5342 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5343
5344 /* Check for disjoint ranges. */
5345 else if (cmp <= 0
5346 && (code1 == LT_EXPR || code1 == LE_EXPR)
5347 && (code2 == GT_EXPR || code2 == GE_EXPR))
5348 return boolean_false_node;
5349 else if (cmp >= 0
5350 && (code1 == GT_EXPR || code1 == GE_EXPR)
5351 && (code2 == LT_EXPR || code2 == LE_EXPR))
5352 return boolean_false_node;
5353 }
5354
5355 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5356 NAME's definition is a truth value. See if there are any simplifications
5357 that can be done against the NAME's definition. */
5358 if (TREE_CODE (op1a) == SSA_NAME
5359 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5360 && (integer_zerop (op1b) || integer_onep (op1b)))
5361 {
5362 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5363 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 5364 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
5365 switch (gimple_code (stmt))
5366 {
5367 case GIMPLE_ASSIGN:
5368 /* Try to simplify by copy-propagating the definition. */
5369 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5370
5371 case GIMPLE_PHI:
5372 /* If every argument to the PHI produces the same result when
5373 ANDed with the second comparison, we win.
5374 Do not do this unless the type is bool since we need a bool
5375 result here anyway. */
5376 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5377 {
5378 tree result = NULL_TREE;
5379 unsigned i;
5380 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5381 {
5382 tree arg = gimple_phi_arg_def (stmt, i);
5383
5384 /* If this PHI has itself as an argument, ignore it.
5385 If all the other args produce the same result,
5386 we're still OK. */
5387 if (arg == gimple_phi_result (stmt))
5388 continue;
5389 else if (TREE_CODE (arg) == INTEGER_CST)
5390 {
5391 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5392 {
5393 if (!result)
5394 result = boolean_false_node;
5395 else if (!integer_zerop (result))
5396 return NULL_TREE;
5397 }
5398 else if (!result)
5399 result = fold_build2 (code2, boolean_type_node,
5400 op2a, op2b);
5401 else if (!same_bool_comparison_p (result,
5402 code2, op2a, op2b))
5403 return NULL_TREE;
5404 }
0e8b84ec
JJ
5405 else if (TREE_CODE (arg) == SSA_NAME
5406 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 5407 {
6c66f733 5408 tree temp;
355fe088 5409 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
5410 /* In simple cases we can look through PHI nodes,
5411 but we have to be careful with loops.
5412 See PR49073. */
5413 if (! dom_info_available_p (CDI_DOMINATORS)
5414 || gimple_bb (def_stmt) == gimple_bb (stmt)
5415 || dominated_by_p (CDI_DOMINATORS,
5416 gimple_bb (def_stmt),
5417 gimple_bb (stmt)))
5418 return NULL_TREE;
5419 temp = and_var_with_comparison (arg, invert, code2,
5420 op2a, op2b);
e89065a1
SL
5421 if (!temp)
5422 return NULL_TREE;
5423 else if (!result)
5424 result = temp;
5425 else if (!same_bool_result_p (result, temp))
5426 return NULL_TREE;
5427 }
5428 else
5429 return NULL_TREE;
5430 }
5431 return result;
5432 }
5433
5434 default:
5435 break;
5436 }
5437 }
5438 return NULL_TREE;
5439}
5440
5441/* Try to simplify the AND of two comparisons, specified by
5442 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5443 If this can be simplified to a single expression (without requiring
5444 introducing more SSA variables to hold intermediate values),
5445 return the resulting tree. Otherwise return NULL_TREE.
5446 If the result expression is non-null, it has boolean type. */
5447
5448tree
5449maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5450 enum tree_code code2, tree op2a, tree op2b)
5451{
5452 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5453 if (t)
5454 return t;
5455 else
5456 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5457}
5458
5459/* Helper function for or_comparisons_1: try to simplify the OR of the
5460 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5461 If INVERT is true, invert the value of VAR before doing the OR.
5462 Return NULL_EXPR if we can't simplify this to a single expression. */
5463
5464static tree
5465or_var_with_comparison (tree var, bool invert,
5466 enum tree_code code2, tree op2a, tree op2b)
5467{
5468 tree t;
355fe088 5469 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5470
5471 /* We can only deal with variables whose definitions are assignments. */
5472 if (!is_gimple_assign (stmt))
5473 return NULL_TREE;
5474
5475 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5476 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5477 Then we only have to consider the simpler non-inverted cases. */
5478 if (invert)
5479 t = and_var_with_comparison_1 (stmt,
5480 invert_tree_comparison (code2, false),
5481 op2a, op2b);
5482 else
5483 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5484 return canonicalize_bool (t, invert);
5485}
5486
5487/* Try to simplify the OR of the ssa variable defined by the assignment
5488 STMT with the comparison specified by (OP2A CODE2 OP2B).
5489 Return NULL_EXPR if we can't simplify this to a single expression. */
5490
5491static tree
355fe088 5492or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5493 enum tree_code code2, tree op2a, tree op2b)
5494{
5495 tree var = gimple_assign_lhs (stmt);
5496 tree true_test_var = NULL_TREE;
5497 tree false_test_var = NULL_TREE;
5498 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5499
5500 /* Check for identities like (var OR (var != 0)) => true . */
5501 if (TREE_CODE (op2a) == SSA_NAME
5502 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5503 {
5504 if ((code2 == NE_EXPR && integer_zerop (op2b))
5505 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5506 {
5507 true_test_var = op2a;
5508 if (var == true_test_var)
5509 return var;
5510 }
5511 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5512 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5513 {
5514 false_test_var = op2a;
5515 if (var == false_test_var)
5516 return boolean_true_node;
5517 }
5518 }
5519
5520 /* If the definition is a comparison, recurse on it. */
5521 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5522 {
5523 tree t = or_comparisons_1 (innercode,
5524 gimple_assign_rhs1 (stmt),
5525 gimple_assign_rhs2 (stmt),
5526 code2,
5527 op2a,
5528 op2b);
5529 if (t)
5530 return t;
5531 }
5532
5533 /* If the definition is an AND or OR expression, we may be able to
5534 simplify by reassociating. */
eb9820c0
KT
5535 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5536 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
5537 {
5538 tree inner1 = gimple_assign_rhs1 (stmt);
5539 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 5540 gimple *s;
e89065a1
SL
5541 tree t;
5542 tree partial = NULL_TREE;
eb9820c0 5543 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
5544
5545 /* Check for boolean identities that don't require recursive examination
5546 of inner1/inner2:
5547 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5548 inner1 OR (inner1 AND inner2) => inner1
5549 !inner1 OR (inner1 OR inner2) => true
5550 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5551 */
5552 if (inner1 == true_test_var)
5553 return (is_or ? var : inner1);
5554 else if (inner2 == true_test_var)
5555 return (is_or ? var : inner2);
5556 else if (inner1 == false_test_var)
5557 return (is_or
5558 ? boolean_true_node
5559 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5560 else if (inner2 == false_test_var)
5561 return (is_or
5562 ? boolean_true_node
5563 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5564
5565 /* Next, redistribute/reassociate the OR across the inner tests.
5566 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5567 if (TREE_CODE (inner1) == SSA_NAME
5568 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5569 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5570 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5571 gimple_assign_rhs1 (s),
5572 gimple_assign_rhs2 (s),
5573 code2, op2a, op2b)))
5574 {
5575 /* Handle the OR case, where we are reassociating:
5576 (inner1 OR inner2) OR (op2a code2 op2b)
5577 => (t OR inner2)
5578 If the partial result t is a constant, we win. Otherwise
5579 continue on to try reassociating with the other inner test. */
8236c8eb 5580 if (is_or)
e89065a1
SL
5581 {
5582 if (integer_onep (t))
5583 return boolean_true_node;
5584 else if (integer_zerop (t))
5585 return inner2;
5586 }
5587
5588 /* Handle the AND case, where we are redistributing:
5589 (inner1 AND inner2) OR (op2a code2 op2b)
5590 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
5591 else if (integer_zerop (t))
5592 return boolean_false_node;
5593
5594 /* Save partial result for later. */
5595 partial = t;
e89065a1
SL
5596 }
5597
5598 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5599 if (TREE_CODE (inner2) == SSA_NAME
5600 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5601 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5602 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5603 gimple_assign_rhs1 (s),
5604 gimple_assign_rhs2 (s),
5605 code2, op2a, op2b)))
5606 {
5607 /* Handle the OR case, where we are reassociating:
5608 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
5609 => (inner1 OR t)
5610 => (t OR partial) */
5611 if (is_or)
e89065a1
SL
5612 {
5613 if (integer_zerop (t))
5614 return inner1;
5615 else if (integer_onep (t))
5616 return boolean_true_node;
8236c8eb
JJ
5617 /* If both are the same, we can apply the identity
5618 (x OR x) == x. */
5619 else if (partial && same_bool_result_p (t, partial))
5620 return t;
e89065a1
SL
5621 }
5622
5623 /* Handle the AND case, where we are redistributing:
5624 (inner1 AND inner2) OR (op2a code2 op2b)
5625 => (t AND (inner1 OR (op2a code2 op2b)))
5626 => (t AND partial) */
5627 else
5628 {
5629 if (integer_zerop (t))
5630 return boolean_false_node;
5631 else if (partial)
5632 {
5633 /* We already got a simplification for the other
5634 operand to the redistributed AND expression. The
5635 interesting case is when at least one is true.
5636 Or, if both are the same, we can apply the identity
8236c8eb 5637 (x AND x) == x. */
e89065a1
SL
5638 if (integer_onep (partial))
5639 return t;
5640 else if (integer_onep (t))
5641 return partial;
5642 else if (same_bool_result_p (t, partial))
8236c8eb 5643 return t;
e89065a1
SL
5644 }
5645 }
5646 }
5647 }
5648 return NULL_TREE;
5649}
5650
5651/* Try to simplify the OR of two comparisons defined by
5652 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5653 If this can be done without constructing an intermediate value,
5654 return the resulting tree; otherwise NULL_TREE is returned.
5655 This function is deliberately asymmetric as it recurses on SSA_DEFs
5656 in the first comparison but not the second. */
5657
5658static tree
5659or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5660 enum tree_code code2, tree op2a, tree op2b)
5661{
ae22ac3c 5662 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 5663
e89065a1
SL
5664 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5665 if (operand_equal_p (op1a, op2a, 0)
5666 && operand_equal_p (op1b, op2b, 0))
5667 {
eb9820c0 5668 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5669 tree t = combine_comparisons (UNKNOWN_LOCATION,
5670 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 5671 truth_type, op1a, op1b);
e89065a1
SL
5672 if (t)
5673 return t;
5674 }
5675
5676 /* Likewise the swapped case of the above. */
5677 if (operand_equal_p (op1a, op2b, 0)
5678 && operand_equal_p (op1b, op2a, 0))
5679 {
eb9820c0 5680 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5681 tree t = combine_comparisons (UNKNOWN_LOCATION,
5682 TRUTH_ORIF_EXPR, code1,
5683 swap_tree_comparison (code2),
31ed6226 5684 truth_type, op1a, op1b);
e89065a1
SL
5685 if (t)
5686 return t;
5687 }
5688
5689 /* If both comparisons are of the same value against constants, we might
5690 be able to merge them. */
5691 if (operand_equal_p (op1a, op2a, 0)
5692 && TREE_CODE (op1b) == INTEGER_CST
5693 && TREE_CODE (op2b) == INTEGER_CST)
5694 {
5695 int cmp = tree_int_cst_compare (op1b, op2b);
5696
5697 /* If we have (op1a != op1b), we should either be able to
5698 return that or TRUE, depending on whether the constant op1b
5699 also satisfies the other comparison against op2b. */
5700 if (code1 == NE_EXPR)
5701 {
5702 bool done = true;
5703 bool val;
5704 switch (code2)
5705 {
5706 case EQ_EXPR: val = (cmp == 0); break;
5707 case NE_EXPR: val = (cmp != 0); break;
5708 case LT_EXPR: val = (cmp < 0); break;
5709 case GT_EXPR: val = (cmp > 0); break;
5710 case LE_EXPR: val = (cmp <= 0); break;
5711 case GE_EXPR: val = (cmp >= 0); break;
5712 default: done = false;
5713 }
5714 if (done)
5715 {
5716 if (val)
5717 return boolean_true_node;
5718 else
5719 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5720 }
5721 }
5722 /* Likewise if the second comparison is a != comparison. */
5723 else if (code2 == NE_EXPR)
5724 {
5725 bool done = true;
5726 bool val;
5727 switch (code1)
5728 {
5729 case EQ_EXPR: val = (cmp == 0); break;
5730 case NE_EXPR: val = (cmp != 0); break;
5731 case LT_EXPR: val = (cmp > 0); break;
5732 case GT_EXPR: val = (cmp < 0); break;
5733 case LE_EXPR: val = (cmp >= 0); break;
5734 case GE_EXPR: val = (cmp <= 0); break;
5735 default: done = false;
5736 }
5737 if (done)
5738 {
5739 if (val)
5740 return boolean_true_node;
5741 else
5742 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5743 }
5744 }
5745
5746 /* See if an equality test is redundant with the other comparison. */
5747 else if (code1 == EQ_EXPR)
5748 {
5749 bool val;
5750 switch (code2)
5751 {
5752 case EQ_EXPR: val = (cmp == 0); break;
5753 case NE_EXPR: val = (cmp != 0); break;
5754 case LT_EXPR: val = (cmp < 0); break;
5755 case GT_EXPR: val = (cmp > 0); break;
5756 case LE_EXPR: val = (cmp <= 0); break;
5757 case GE_EXPR: val = (cmp >= 0); break;
5758 default:
5759 val = false;
5760 }
5761 if (val)
5762 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5763 }
5764 else if (code2 == EQ_EXPR)
5765 {
5766 bool val;
5767 switch (code1)
5768 {
5769 case EQ_EXPR: val = (cmp == 0); break;
5770 case NE_EXPR: val = (cmp != 0); break;
5771 case LT_EXPR: val = (cmp > 0); break;
5772 case GT_EXPR: val = (cmp < 0); break;
5773 case LE_EXPR: val = (cmp >= 0); break;
5774 case GE_EXPR: val = (cmp <= 0); break;
5775 default:
5776 val = false;
5777 }
5778 if (val)
5779 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5780 }
5781
5782 /* Chose the less restrictive of two < or <= comparisons. */
5783 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5784 && (code2 == LT_EXPR || code2 == LE_EXPR))
5785 {
5786 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5787 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5788 else
5789 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5790 }
5791
5792 /* Likewise chose the less restrictive of two > or >= comparisons. */
5793 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5794 && (code2 == GT_EXPR || code2 == GE_EXPR))
5795 {
5796 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5797 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5798 else
5799 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5800 }
5801
5802 /* Check for singleton ranges. */
5803 else if (cmp == 0
5804 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5805 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5806 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5807
5808 /* Check for less/greater pairs that don't restrict the range at all. */
5809 else if (cmp >= 0
5810 && (code1 == LT_EXPR || code1 == LE_EXPR)
5811 && (code2 == GT_EXPR || code2 == GE_EXPR))
5812 return boolean_true_node;
5813 else if (cmp <= 0
5814 && (code1 == GT_EXPR || code1 == GE_EXPR)
5815 && (code2 == LT_EXPR || code2 == LE_EXPR))
5816 return boolean_true_node;
5817 }
5818
5819 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5820 NAME's definition is a truth value. See if there are any simplifications
5821 that can be done against the NAME's definition. */
5822 if (TREE_CODE (op1a) == SSA_NAME
5823 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5824 && (integer_zerop (op1b) || integer_onep (op1b)))
5825 {
5826 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5827 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 5828 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
5829 switch (gimple_code (stmt))
5830 {
5831 case GIMPLE_ASSIGN:
5832 /* Try to simplify by copy-propagating the definition. */
5833 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5834
5835 case GIMPLE_PHI:
5836 /* If every argument to the PHI produces the same result when
5837 ORed with the second comparison, we win.
5838 Do not do this unless the type is bool since we need a bool
5839 result here anyway. */
5840 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5841 {
5842 tree result = NULL_TREE;
5843 unsigned i;
5844 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5845 {
5846 tree arg = gimple_phi_arg_def (stmt, i);
5847
5848 /* If this PHI has itself as an argument, ignore it.
5849 If all the other args produce the same result,
5850 we're still OK. */
5851 if (arg == gimple_phi_result (stmt))
5852 continue;
5853 else if (TREE_CODE (arg) == INTEGER_CST)
5854 {
5855 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5856 {
5857 if (!result)
5858 result = boolean_true_node;
5859 else if (!integer_onep (result))
5860 return NULL_TREE;
5861 }
5862 else if (!result)
5863 result = fold_build2 (code2, boolean_type_node,
5864 op2a, op2b);
5865 else if (!same_bool_comparison_p (result,
5866 code2, op2a, op2b))
5867 return NULL_TREE;
5868 }
0e8b84ec
JJ
5869 else if (TREE_CODE (arg) == SSA_NAME
5870 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 5871 {
6c66f733 5872 tree temp;
355fe088 5873 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
5874 /* In simple cases we can look through PHI nodes,
5875 but we have to be careful with loops.
5876 See PR49073. */
5877 if (! dom_info_available_p (CDI_DOMINATORS)
5878 || gimple_bb (def_stmt) == gimple_bb (stmt)
5879 || dominated_by_p (CDI_DOMINATORS,
5880 gimple_bb (def_stmt),
5881 gimple_bb (stmt)))
5882 return NULL_TREE;
5883 temp = or_var_with_comparison (arg, invert, code2,
5884 op2a, op2b);
e89065a1
SL
5885 if (!temp)
5886 return NULL_TREE;
5887 else if (!result)
5888 result = temp;
5889 else if (!same_bool_result_p (result, temp))
5890 return NULL_TREE;
5891 }
5892 else
5893 return NULL_TREE;
5894 }
5895 return result;
5896 }
5897
5898 default:
5899 break;
5900 }
5901 }
5902 return NULL_TREE;
5903}
5904
5905/* Try to simplify the OR of two comparisons, specified by
5906 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5907 If this can be simplified to a single expression (without requiring
5908 introducing more SSA variables to hold intermediate values),
5909 return the resulting tree. Otherwise return NULL_TREE.
5910 If the result expression is non-null, it has boolean type. */
5911
5912tree
5913maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5914 enum tree_code code2, tree op2a, tree op2b)
5915{
5916 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5917 if (t)
5918 return t;
5919 else
5920 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5921}
cfef45c8
RG
5922
5923
5924/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5925
5926 Either NULL_TREE, a simplified but non-constant or a constant
5927 is returned.
5928
5929 ??? This should go into a gimple-fold-inline.h file to be eventually
5930 privatized with the single valueize function used in the various TUs
5931 to avoid the indirect function call overhead. */
5932
5933tree
355fe088 5934gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
d2a85801 5935 tree (*gvalueize) (tree))
cfef45c8 5936{
45cc9f96
RB
5937 code_helper rcode;
5938 tree ops[3] = {};
5939 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5940 edges if there are intermediate VARYING defs. For this reason
5941 do not follow SSA edges here even though SCCVN can technically
5942 just deal fine with that. */
34050b6b 5943 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
45cc9f96 5944 {
34050b6b 5945 tree res = NULL_TREE;
c0f62740 5946 if (gimple_simplified_result_is_gimple_val (rcode, ops))
34050b6b
RB
5947 res = ops[0];
5948 else if (mprts_hook)
5949 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5950 if (res)
45cc9f96 5951 {
34050b6b
RB
5952 if (dump_file && dump_flags & TDF_DETAILS)
5953 {
5954 fprintf (dump_file, "Match-and-simplified ");
5955 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
5956 fprintf (dump_file, " to ");
ef6cb4c7 5957 print_generic_expr (dump_file, res);
34050b6b
RB
5958 fprintf (dump_file, "\n");
5959 }
5960 return res;
45cc9f96 5961 }
45cc9f96
RB
5962 }
5963
cfef45c8
RG
5964 location_t loc = gimple_location (stmt);
5965 switch (gimple_code (stmt))
5966 {
5967 case GIMPLE_ASSIGN:
5968 {
5969 enum tree_code subcode = gimple_assign_rhs_code (stmt);
5970
5971 switch (get_gimple_rhs_class (subcode))
5972 {
5973 case GIMPLE_SINGLE_RHS:
5974 {
5975 tree rhs = gimple_assign_rhs1 (stmt);
5976 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5977
5978 if (TREE_CODE (rhs) == SSA_NAME)
5979 {
5980 /* If the RHS is an SSA_NAME, return its known constant value,
5981 if any. */
5982 return (*valueize) (rhs);
5983 }
5984 /* Handle propagating invariant addresses into address
5985 operations. */
5986 else if (TREE_CODE (rhs) == ADDR_EXPR
5987 && !is_gimple_min_invariant (rhs))
5988 {
d25c4172 5989 HOST_WIDE_INT offset = 0;
cfef45c8
RG
5990 tree base;
5991 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5992 &offset,
5993 valueize);
5994 if (base
5995 && (CONSTANT_CLASS_P (base)
5996 || decl_address_invariant_p (base)))
5997 return build_invariant_address (TREE_TYPE (rhs),
5998 base, offset);
5999 }
6000 else if (TREE_CODE (rhs) == CONSTRUCTOR
6001 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6002 && (CONSTRUCTOR_NELTS (rhs)
6003 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6004 {
794e3180
RS
6005 unsigned i, nelts;
6006 tree val;
cfef45c8 6007
794e3180
RS
6008 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs));
6009 auto_vec<tree, 32> vec (nelts);
cfef45c8
RG
6010 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6011 {
6012 val = (*valueize) (val);
6013 if (TREE_CODE (val) == INTEGER_CST
6014 || TREE_CODE (val) == REAL_CST
6015 || TREE_CODE (val) == FIXED_CST)
794e3180 6016 vec.quick_push (val);
cfef45c8
RG
6017 else
6018 return NULL_TREE;
6019 }
6020
d2a12ae7 6021 return build_vector (TREE_TYPE (rhs), vec);
cfef45c8 6022 }
bdf37f7a
JH
6023 if (subcode == OBJ_TYPE_REF)
6024 {
6025 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6026 /* If callee is constant, we can fold away the wrapper. */
6027 if (is_gimple_min_invariant (val))
6028 return val;
6029 }
cfef45c8
RG
6030
6031 if (kind == tcc_reference)
6032 {
6033 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6034 || TREE_CODE (rhs) == REALPART_EXPR
6035 || TREE_CODE (rhs) == IMAGPART_EXPR)
6036 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6037 {
6038 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6039 return fold_unary_loc (EXPR_LOCATION (rhs),
6040 TREE_CODE (rhs),
6041 TREE_TYPE (rhs), val);
6042 }
6043 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6044 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6045 {
6046 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6047 return fold_ternary_loc (EXPR_LOCATION (rhs),
6048 TREE_CODE (rhs),
6049 TREE_TYPE (rhs), val,
6050 TREE_OPERAND (rhs, 1),
6051 TREE_OPERAND (rhs, 2));
6052 }
6053 else if (TREE_CODE (rhs) == MEM_REF
6054 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6055 {
6056 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6057 if (TREE_CODE (val) == ADDR_EXPR
6058 && is_gimple_min_invariant (val))
6059 {
6060 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6061 unshare_expr (val),
6062 TREE_OPERAND (rhs, 1));
6063 if (tem)
6064 rhs = tem;
6065 }
6066 }
6067 return fold_const_aggregate_ref_1 (rhs, valueize);
6068 }
6069 else if (kind == tcc_declaration)
6070 return get_symbol_constant_value (rhs);
6071 return rhs;
6072 }
6073
6074 case GIMPLE_UNARY_RHS:
f3582e54 6075 return NULL_TREE;
cfef45c8
RG
6076
6077 case GIMPLE_BINARY_RHS:
4b1b9e64
RB
6078 /* Translate &x + CST into an invariant form suitable for
6079 further propagation. */
6080 if (subcode == POINTER_PLUS_EXPR)
6081 {
4b1b9e64
RB
6082 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6083 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4b1b9e64
RB
6084 if (TREE_CODE (op0) == ADDR_EXPR
6085 && TREE_CODE (op1) == INTEGER_CST)
6086 {
6087 tree off = fold_convert (ptr_type_node, op1);
6088 return build_fold_addr_expr_loc
6089 (loc,
6090 fold_build2 (MEM_REF,
6091 TREE_TYPE (TREE_TYPE (op0)),
6092 unshare_expr (op0), off));
6093 }
6094 }
59c20dc7
RB
6095 /* Canonicalize bool != 0 and bool == 0 appearing after
6096 valueization. While gimple_simplify handles this
6097 it can get confused by the ~X == 1 -> X == 0 transform
6098 which we cant reduce to a SSA name or a constant
6099 (and we have no way to tell gimple_simplify to not
6100 consider those transforms in the first place). */
6101 else if (subcode == EQ_EXPR
6102 || subcode == NE_EXPR)
6103 {
6104 tree lhs = gimple_assign_lhs (stmt);
6105 tree op0 = gimple_assign_rhs1 (stmt);
6106 if (useless_type_conversion_p (TREE_TYPE (lhs),
6107 TREE_TYPE (op0)))
6108 {
6109 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6110 op0 = (*valueize) (op0);
8861704d
RB
6111 if (TREE_CODE (op0) == INTEGER_CST)
6112 std::swap (op0, op1);
6113 if (TREE_CODE (op1) == INTEGER_CST
6114 && ((subcode == NE_EXPR && integer_zerop (op1))
6115 || (subcode == EQ_EXPR && integer_onep (op1))))
6116 return op0;
59c20dc7
RB
6117 }
6118 }
4b1b9e64 6119 return NULL_TREE;
cfef45c8
RG
6120
6121 case GIMPLE_TERNARY_RHS:
6122 {
6123 /* Handle ternary operators that can appear in GIMPLE form. */
6124 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6125 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6126 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
cfef45c8
RG
6127 return fold_ternary_loc (loc, subcode,
6128 gimple_expr_type (stmt), op0, op1, op2);
6129 }
6130
6131 default:
6132 gcc_unreachable ();
6133 }
6134 }
6135
6136 case GIMPLE_CALL:
6137 {
25583c4f 6138 tree fn;
538dd0b7 6139 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
6140
6141 if (gimple_call_internal_p (stmt))
31e071ae
MP
6142 {
6143 enum tree_code subcode = ERROR_MARK;
6144 switch (gimple_call_internal_fn (stmt))
6145 {
6146 case IFN_UBSAN_CHECK_ADD:
6147 subcode = PLUS_EXPR;
6148 break;
6149 case IFN_UBSAN_CHECK_SUB:
6150 subcode = MINUS_EXPR;
6151 break;
6152 case IFN_UBSAN_CHECK_MUL:
6153 subcode = MULT_EXPR;
6154 break;
68fa96d6
ML
6155 case IFN_BUILTIN_EXPECT:
6156 {
6157 tree arg0 = gimple_call_arg (stmt, 0);
6158 tree op0 = (*valueize) (arg0);
6159 if (TREE_CODE (op0) == INTEGER_CST)
6160 return op0;
6161 return NULL_TREE;
6162 }
31e071ae
MP
6163 default:
6164 return NULL_TREE;
6165 }
368b454d
JJ
6166 tree arg0 = gimple_call_arg (stmt, 0);
6167 tree arg1 = gimple_call_arg (stmt, 1);
6168 tree op0 = (*valueize) (arg0);
6169 tree op1 = (*valueize) (arg1);
31e071ae
MP
6170
6171 if (TREE_CODE (op0) != INTEGER_CST
6172 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
6173 {
6174 switch (subcode)
6175 {
6176 case MULT_EXPR:
6177 /* x * 0 = 0 * x = 0 without overflow. */
6178 if (integer_zerop (op0) || integer_zerop (op1))
6179 return build_zero_cst (TREE_TYPE (arg0));
6180 break;
6181 case MINUS_EXPR:
6182 /* y - y = 0 without overflow. */
6183 if (operand_equal_p (op0, op1, 0))
6184 return build_zero_cst (TREE_TYPE (arg0));
6185 break;
6186 default:
6187 break;
6188 }
6189 }
6190 tree res
6191 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
6192 if (res
6193 && TREE_CODE (res) == INTEGER_CST
6194 && !TREE_OVERFLOW (res))
6195 return res;
6196 return NULL_TREE;
6197 }
25583c4f
RS
6198
6199 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8
RG
6200 if (TREE_CODE (fn) == ADDR_EXPR
6201 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5c944c6c
RB
6202 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6203 && gimple_builtin_call_types_compatible_p (stmt,
6204 TREE_OPERAND (fn, 0)))
cfef45c8
RG
6205 {
6206 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 6207 tree retval;
cfef45c8
RG
6208 unsigned i;
6209 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6210 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 6211 retval = fold_builtin_call_array (loc,
538dd0b7 6212 gimple_call_return_type (call_stmt),
cfef45c8 6213 fn, gimple_call_num_args (stmt), args);
cfef45c8 6214 if (retval)
5c944c6c
RB
6215 {
6216 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6217 STRIP_NOPS (retval);
538dd0b7
DM
6218 retval = fold_convert (gimple_call_return_type (call_stmt),
6219 retval);
5c944c6c 6220 }
cfef45c8
RG
6221 return retval;
6222 }
6223 return NULL_TREE;
6224 }
6225
6226 default:
6227 return NULL_TREE;
6228 }
6229}
6230
6231/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6232 Returns NULL_TREE if folding to a constant is not possible, otherwise
6233 returns a constant according to is_gimple_min_invariant. */
6234
6235tree
355fe088 6236gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
6237{
6238 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6239 if (res && is_gimple_min_invariant (res))
6240 return res;
6241 return NULL_TREE;
6242}
6243
6244
6245/* The following set of functions are supposed to fold references using
6246 their constant initializers. */
6247
cfef45c8
RG
6248/* See if we can find constructor defining value of BASE.
6249 When we know the consructor with constant offset (such as
6250 base is array[40] and we do know constructor of array), then
6251 BIT_OFFSET is adjusted accordingly.
6252
6253 As a special case, return error_mark_node when constructor
6254 is not explicitly available, but it is known to be zero
6255 such as 'static const int a;'. */
6256static tree
6257get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
6258 tree (*valueize)(tree))
6259{
6260 HOST_WIDE_INT bit_offset2, size, max_size;
ee45a32d
EB
6261 bool reverse;
6262
cfef45c8
RG
6263 if (TREE_CODE (base) == MEM_REF)
6264 {
6265 if (!integer_zerop (TREE_OPERAND (base, 1)))
6266 {
9541ffee 6267 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
cfef45c8 6268 return NULL_TREE;
807e902e 6269 *bit_offset += (mem_ref_offset (base).to_short_addr ()
cfef45c8
RG
6270 * BITS_PER_UNIT);
6271 }
6272
6273 if (valueize
6274 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6275 base = valueize (TREE_OPERAND (base, 0));
6276 if (!base || TREE_CODE (base) != ADDR_EXPR)
6277 return NULL_TREE;
6278 base = TREE_OPERAND (base, 0);
6279 }
13e88953
RB
6280 else if (valueize
6281 && TREE_CODE (base) == SSA_NAME)
6282 base = valueize (base);
cfef45c8
RG
6283
6284 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6285 DECL_INITIAL. If BASE is a nested reference into another
6286 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6287 the inner reference. */
6288 switch (TREE_CODE (base))
6289 {
6290 case VAR_DECL:
cfef45c8 6291 case CONST_DECL:
6a6dac52
JH
6292 {
6293 tree init = ctor_for_folding (base);
6294
688010ba 6295 /* Our semantic is exact opposite of ctor_for_folding;
6a6dac52
JH
6296 NULL means unknown, while error_mark_node is 0. */
6297 if (init == error_mark_node)
6298 return NULL_TREE;
6299 if (!init)
6300 return error_mark_node;
6301 return init;
6302 }
cfef45c8 6303
13e88953
RB
6304 case VIEW_CONVERT_EXPR:
6305 return get_base_constructor (TREE_OPERAND (base, 0),
6306 bit_offset, valueize);
6307
cfef45c8
RG
6308 case ARRAY_REF:
6309 case COMPONENT_REF:
ee45a32d
EB
6310 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6311 &reverse);
cfef45c8
RG
6312 if (max_size == -1 || size != max_size)
6313 return NULL_TREE;
6314 *bit_offset += bit_offset2;
6315 return get_base_constructor (base, bit_offset, valueize);
6316
cfef45c8
RG
6317 case CONSTRUCTOR:
6318 return base;
6319
6320 default:
13e88953
RB
6321 if (CONSTANT_CLASS_P (base))
6322 return base;
6323
cfef45c8
RG
6324 return NULL_TREE;
6325 }
6326}
6327
cfef45c8
RG
6328/* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
6329 SIZE to the memory at bit OFFSET. */
6330
6331static tree
6332fold_array_ctor_reference (tree type, tree ctor,
6333 unsigned HOST_WIDE_INT offset,
c44c2088
JH
6334 unsigned HOST_WIDE_INT size,
6335 tree from_decl)
cfef45c8 6336{
807e902e
KZ
6337 offset_int low_bound;
6338 offset_int elt_size;
807e902e 6339 offset_int access_index;
6a636014 6340 tree domain_type = NULL_TREE;
cfef45c8
RG
6341 HOST_WIDE_INT inner_offset;
6342
6343 /* Compute low bound and elt size. */
eb8f1123
RG
6344 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6345 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
6346 if (domain_type && TYPE_MIN_VALUE (domain_type))
6347 {
6348 /* Static constructors for variably sized objects makes no sense. */
9ef2eff0
RB
6349 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6350 return NULL_TREE;
807e902e 6351 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
6352 }
6353 else
807e902e 6354 low_bound = 0;
cfef45c8 6355 /* Static constructors for variably sized objects makes no sense. */
9ef2eff0
RB
6356 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6357 return NULL_TREE;
807e902e 6358 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8
RG
6359
6360 /* We can handle only constantly sized accesses that are known to not
6361 be larger than size of array element. */
6362 if (!TYPE_SIZE_UNIT (type)
6363 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
032c80e9 6364 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
807e902e 6365 || elt_size == 0)
cfef45c8
RG
6366 return NULL_TREE;
6367
6368 /* Compute the array index we look for. */
807e902e
KZ
6369 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6370 elt_size);
27bcd47c 6371 access_index += low_bound;
cfef45c8
RG
6372
6373 /* And offset within the access. */
27bcd47c 6374 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8
RG
6375
6376 /* See if the array field is large enough to span whole access. We do not
6377 care to fold accesses spanning multiple array indexes. */
27bcd47c 6378 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
cfef45c8 6379 return NULL_TREE;
6a636014
AL
6380 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6381 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
cfef45c8 6382
cfef45c8
RG
6383 /* When memory is not explicitely mentioned in constructor,
6384 it is 0 (or out of range). */
6385 return build_zero_cst (type);
6386}
6387
6388/* CTOR is CONSTRUCTOR of an aggregate or vector.
6389 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
6390
6391static tree
6392fold_nonarray_ctor_reference (tree type, tree ctor,
6393 unsigned HOST_WIDE_INT offset,
c44c2088
JH
6394 unsigned HOST_WIDE_INT size,
6395 tree from_decl)
cfef45c8
RG
6396{
6397 unsigned HOST_WIDE_INT cnt;
6398 tree cfield, cval;
6399
6400 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6401 cval)
6402 {
6403 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6404 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6405 tree field_size = DECL_SIZE (cfield);
807e902e
KZ
6406 offset_int bitoffset;
6407 offset_int bitoffset_end, access_end;
cfef45c8
RG
6408
6409 /* Variable sized objects in static constructors makes no sense,
6410 but field_size can be NULL for flexible array members. */
6411 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6412 && TREE_CODE (byte_offset) == INTEGER_CST
6413 && (field_size != NULL_TREE
6414 ? TREE_CODE (field_size) == INTEGER_CST
6415 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6416
6417 /* Compute bit offset of the field. */
807e902e 6418 bitoffset = (wi::to_offset (field_offset)
8de73453 6419 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
cfef45c8
RG
6420 /* Compute bit offset where the field ends. */
6421 if (field_size != NULL_TREE)
807e902e 6422 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 6423 else
807e902e 6424 bitoffset_end = 0;
cfef45c8 6425
807e902e 6426 access_end = offset_int (offset) + size;
b8b2b009
JJ
6427
6428 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
6429 [BITOFFSET, BITOFFSET_END)? */
807e902e 6430 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 6431 && (field_size == NULL_TREE
807e902e 6432 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 6433 {
807e902e 6434 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8
RG
6435 /* We do have overlap. Now see if field is large enough to
6436 cover the access. Give up for accesses spanning multiple
6437 fields. */
807e902e 6438 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 6439 return NULL_TREE;
032c80e9 6440 if (offset < bitoffset)
b8b2b009 6441 return NULL_TREE;
cfef45c8 6442 return fold_ctor_reference (type, cval,
27bcd47c 6443 inner_offset.to_uhwi (), size,
c44c2088 6444 from_decl);
cfef45c8
RG
6445 }
6446 }
6447 /* When memory is not explicitely mentioned in constructor, it is 0. */
6448 return build_zero_cst (type);
6449}
6450
6451/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
6452 to the memory at bit OFFSET. */
6453
8403c2cf 6454tree
cfef45c8 6455fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
c44c2088 6456 unsigned HOST_WIDE_INT size, tree from_decl)
cfef45c8
RG
6457{
6458 tree ret;
6459
6460 /* We found the field with exact match. */
6461 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
6462 && !offset)
9d60be38 6463 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8
RG
6464
6465 /* We are at the end of walk, see if we can view convert the
6466 result. */
6467 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6468 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3d8208ce
TP
6469 && !compare_tree_int (TYPE_SIZE (type), size)
6470 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
cfef45c8 6471 {
9d60be38 6472 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 6473 if (ret)
672d9f8e
RB
6474 {
6475 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6476 if (ret)
6477 STRIP_USELESS_TYPE_CONVERSION (ret);
6478 }
cfef45c8
RG
6479 return ret;
6480 }
b2505143
RB
6481 /* For constants and byte-aligned/sized reads try to go through
6482 native_encode/interpret. */
6483 if (CONSTANT_CLASS_P (ctor)
6484 && BITS_PER_UNIT == 8
6485 && offset % BITS_PER_UNIT == 0
6486 && size % BITS_PER_UNIT == 0
6487 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6488 {
6489 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1ff0a84c
JJ
6490 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6491 offset / BITS_PER_UNIT);
6492 if (len > 0)
6493 return native_interpret_expr (type, buf, len);
b2505143 6494 }
cfef45c8
RG
6495 if (TREE_CODE (ctor) == CONSTRUCTOR)
6496 {
6497
eb8f1123
RG
6498 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6499 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
c44c2088
JH
6500 return fold_array_ctor_reference (type, ctor, offset, size,
6501 from_decl);
cfef45c8 6502 else
c44c2088
JH
6503 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6504 from_decl);
cfef45c8
RG
6505 }
6506
6507 return NULL_TREE;
6508}
6509
6510/* Return the tree representing the element referenced by T if T is an
6511 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
6512 names using VALUEIZE. Return NULL_TREE otherwise. */
6513
6514tree
6515fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6516{
6517 tree ctor, idx, base;
6518 HOST_WIDE_INT offset, size, max_size;
6519 tree tem;
ee45a32d 6520 bool reverse;
cfef45c8 6521
f8a7df45
RG
6522 if (TREE_THIS_VOLATILE (t))
6523 return NULL_TREE;
6524
3a65ee74 6525 if (DECL_P (t))
cfef45c8
RG
6526 return get_symbol_constant_value (t);
6527
6528 tem = fold_read_from_constant_string (t);
6529 if (tem)
6530 return tem;
6531
6532 switch (TREE_CODE (t))
6533 {
6534 case ARRAY_REF:
6535 case ARRAY_RANGE_REF:
6536 /* Constant indexes are handled well by get_base_constructor.
6537 Only special case variable offsets.
6538 FIXME: This code can't handle nested references with variable indexes
6539 (they will be handled only by iteration of ccp). Perhaps we can bring
6540 get_ref_base_and_extent here and make it use a valueize callback. */
6541 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6542 && valueize
6543 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
b48e22b2 6544 && TREE_CODE (idx) == INTEGER_CST)
cfef45c8
RG
6545 {
6546 tree low_bound, unit_size;
6547
6548 /* If the resulting bit-offset is constant, track it. */
6549 if ((low_bound = array_ref_low_bound (t),
b48e22b2 6550 TREE_CODE (low_bound) == INTEGER_CST)
cfef45c8 6551 && (unit_size = array_ref_element_size (t),
807e902e 6552 tree_fits_uhwi_p (unit_size)))
cfef45c8 6553 {
807e902e
KZ
6554 offset_int woffset
6555 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
6556 TYPE_PRECISION (TREE_TYPE (idx)));
6557
6558 if (wi::fits_shwi_p (woffset))
6559 {
6560 offset = woffset.to_shwi ();
6561 /* TODO: This code seems wrong, multiply then check
6562 to see if it fits. */
6563 offset *= tree_to_uhwi (unit_size);
6564 offset *= BITS_PER_UNIT;
6565
6566 base = TREE_OPERAND (t, 0);
6567 ctor = get_base_constructor (base, &offset, valueize);
6568 /* Empty constructor. Always fold to 0. */
6569 if (ctor == error_mark_node)
6570 return build_zero_cst (TREE_TYPE (t));
6571 /* Out of bound array access. Value is undefined,
6572 but don't fold. */
6573 if (offset < 0)
6574 return NULL_TREE;
6575 /* We can not determine ctor. */
6576 if (!ctor)
6577 return NULL_TREE;
6578 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6579 tree_to_uhwi (unit_size)
6580 * BITS_PER_UNIT,
6581 base);
6582 }
cfef45c8
RG
6583 }
6584 }
6585 /* Fallthru. */
6586
6587 case COMPONENT_REF:
6588 case BIT_FIELD_REF:
6589 case TARGET_MEM_REF:
6590 case MEM_REF:
ee45a32d 6591 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
cfef45c8
RG
6592 ctor = get_base_constructor (base, &offset, valueize);
6593
6594 /* Empty constructor. Always fold to 0. */
6595 if (ctor == error_mark_node)
6596 return build_zero_cst (TREE_TYPE (t));
6597 /* We do not know precise address. */
6598 if (max_size == -1 || max_size != size)
6599 return NULL_TREE;
6600 /* We can not determine ctor. */
6601 if (!ctor)
6602 return NULL_TREE;
6603
6604 /* Out of bound array access. Value is undefined, but don't fold. */
6605 if (offset < 0)
6606 return NULL_TREE;
6607
c44c2088
JH
6608 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6609 base);
cfef45c8
RG
6610
6611 case REALPART_EXPR:
6612 case IMAGPART_EXPR:
6613 {
6614 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6615 if (c && TREE_CODE (c) == COMPLEX_CST)
6616 return fold_build1_loc (EXPR_LOCATION (t),
6617 TREE_CODE (t), TREE_TYPE (t), c);
6618 break;
6619 }
6620
6621 default:
6622 break;
6623 }
6624
6625 return NULL_TREE;
6626}
6627
6628tree
6629fold_const_aggregate_ref (tree t)
6630{
6631 return fold_const_aggregate_ref_1 (t, NULL);
6632}
06bc3ec7 6633
85942f45 6634/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
6635 at OFFSET.
6636 Set CAN_REFER if non-NULL to false if method
6637 is not referable or if the virtual table is ill-formed (such as rewriten
6638 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
81fa35bd
MJ
6639
6640tree
85942f45
JH
6641gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6642 tree v,
ec77d61f
JH
6643 unsigned HOST_WIDE_INT offset,
6644 bool *can_refer)
81fa35bd 6645{
85942f45
JH
6646 tree vtable = v, init, fn;
6647 unsigned HOST_WIDE_INT size;
8c311b50
JH
6648 unsigned HOST_WIDE_INT elt_size, access_index;
6649 tree domain_type;
81fa35bd 6650
ec77d61f
JH
6651 if (can_refer)
6652 *can_refer = true;
6653
9de2f554 6654 /* First of all double check we have virtual table. */
8813a647 6655 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
ec77d61f 6656 {
ec77d61f
JH
6657 /* Pass down that we lost track of the target. */
6658 if (can_refer)
6659 *can_refer = false;
6660 return NULL_TREE;
6661 }
9de2f554 6662
2aa3da06
JH
6663 init = ctor_for_folding (v);
6664
9de2f554 6665 /* The virtual tables should always be born with constructors
2aa3da06
JH
6666 and we always should assume that they are avaialble for
6667 folding. At the moment we do not stream them in all cases,
6668 but it should never happen that ctor seem unreachable. */
6669 gcc_assert (init);
6670 if (init == error_mark_node)
6671 {
ec77d61f
JH
6672 /* Pass down that we lost track of the target. */
6673 if (can_refer)
6674 *can_refer = false;
2aa3da06
JH
6675 return NULL_TREE;
6676 }
81fa35bd 6677 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 6678 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 6679 offset *= BITS_PER_UNIT;
81fa35bd 6680 offset += token * size;
9de2f554 6681
8c311b50
JH
6682 /* Lookup the value in the constructor that is assumed to be array.
6683 This is equivalent to
6684 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6685 offset, size, NULL);
6686 but in a constant time. We expect that frontend produced a simple
6687 array without indexed initializers. */
6688
6689 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6690 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6691 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6692 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6693
6694 access_index = offset / BITS_PER_UNIT / elt_size;
6695 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6696
6697 /* This code makes an assumption that there are no
6698 indexed fileds produced by C++ FE, so we can directly index the array. */
6699 if (access_index < CONSTRUCTOR_NELTS (init))
6700 {
6701 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6702 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6703 STRIP_NOPS (fn);
6704 }
6705 else
6706 fn = NULL;
9de2f554
JH
6707
6708 /* For type inconsistent program we may end up looking up virtual method
6709 in virtual table that does not contain TOKEN entries. We may overrun
6710 the virtual table and pick up a constant or RTTI info pointer.
6711 In any case the call is undefined. */
6712 if (!fn
6713 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6714 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6715 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6716 else
6717 {
6718 fn = TREE_OPERAND (fn, 0);
6719
6720 /* When cgraph node is missing and function is not public, we cannot
6721 devirtualize. This can happen in WHOPR when the actual method
6722 ends up in other partition, because we found devirtualization
6723 possibility too late. */
6724 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
6725 {
6726 if (can_refer)
6727 {
6728 *can_refer = false;
6729 return fn;
6730 }
6731 return NULL_TREE;
6732 }
9de2f554 6733 }
81fa35bd 6734
7501ca28
RG
6735 /* Make sure we create a cgraph node for functions we'll reference.
6736 They can be non-existent if the reference comes from an entry
6737 of an external vtable for example. */
d52f5295 6738 cgraph_node::get_create (fn);
7501ca28 6739
81fa35bd
MJ
6740 return fn;
6741}
6742
85942f45
JH
6743/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6744 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6745 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
6746 OBJ_TYPE_REF_OBJECT(REF).
6747 Set CAN_REFER if non-NULL to false if method
6748 is not referable or if the virtual table is ill-formed (such as rewriten
6749 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
6750
6751tree
ec77d61f
JH
6752gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6753 bool *can_refer)
85942f45
JH
6754{
6755 unsigned HOST_WIDE_INT offset;
6756 tree v;
6757
6758 v = BINFO_VTABLE (known_binfo);
6759 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6760 if (!v)
6761 return NULL_TREE;
6762
6763 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
6764 {
6765 if (can_refer)
6766 *can_refer = false;
6767 return NULL_TREE;
6768 }
6769 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
6770}
6771
737f500a
RB
6772/* Given a pointer value T, return a simplified version of an
6773 indirection through T, or NULL_TREE if no simplification is
b184c8f1
AM
6774 possible. Note that the resulting type may be different from
6775 the type pointed to in the sense that it is still compatible
6776 from the langhooks point of view. */
6777
6778tree
6779gimple_fold_indirect_ref (tree t)
6780{
6781 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6782 tree sub = t;
6783 tree subtype;
6784
6785 STRIP_NOPS (sub);
6786 subtype = TREE_TYPE (sub);
737f500a
RB
6787 if (!POINTER_TYPE_P (subtype)
6788 || TYPE_REF_CAN_ALIAS_ALL (ptype))
b184c8f1
AM
6789 return NULL_TREE;
6790
6791 if (TREE_CODE (sub) == ADDR_EXPR)
6792 {
6793 tree op = TREE_OPERAND (sub, 0);
6794 tree optype = TREE_TYPE (op);
6795 /* *&p => p */
6796 if (useless_type_conversion_p (type, optype))
6797 return op;
6798
6799 /* *(foo *)&fooarray => fooarray[0] */
6800 if (TREE_CODE (optype) == ARRAY_TYPE
6801 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
6802 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6803 {
6804 tree type_domain = TYPE_DOMAIN (optype);
6805 tree min_val = size_zero_node;
6806 if (type_domain && TYPE_MIN_VALUE (type_domain))
6807 min_val = TYPE_MIN_VALUE (type_domain);
6808 if (TREE_CODE (min_val) == INTEGER_CST)
6809 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
6810 }
6811 /* *(foo *)&complexfoo => __real__ complexfoo */
6812 else if (TREE_CODE (optype) == COMPLEX_TYPE
6813 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6814 return fold_build1 (REALPART_EXPR, type, op);
6815 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6816 else if (TREE_CODE (optype) == VECTOR_TYPE
6817 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6818 {
6819 tree part_width = TYPE_SIZE (type);
6820 tree index = bitsize_int (0);
6821 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
6822 }
6823 }
6824
6825 /* *(p + CST) -> ... */
6826 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
6827 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
6828 {
6829 tree addr = TREE_OPERAND (sub, 0);
6830 tree off = TREE_OPERAND (sub, 1);
6831 tree addrtype;
6832
6833 STRIP_NOPS (addr);
6834 addrtype = TREE_TYPE (addr);
6835
6836 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
6837 if (TREE_CODE (addr) == ADDR_EXPR
6838 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
6839 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
cc269bb6 6840 && tree_fits_uhwi_p (off))
b184c8f1 6841 {
ae7e9ddd 6842 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
b184c8f1
AM
6843 tree part_width = TYPE_SIZE (type);
6844 unsigned HOST_WIDE_INT part_widthi
9439e9a1 6845 = tree_to_shwi (part_width) / BITS_PER_UNIT;
b184c8f1
AM
6846 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
6847 tree index = bitsize_int (indexi);
6848 if (offset / part_widthi
e934916c 6849 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
b184c8f1
AM
6850 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
6851 part_width, index);
6852 }
6853
6854 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
6855 if (TREE_CODE (addr) == ADDR_EXPR
6856 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
6857 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
6858 {
6859 tree size = TYPE_SIZE_UNIT (type);
6860 if (tree_int_cst_equal (size, off))
6861 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
6862 }
6863
6864 /* *(p + CST) -> MEM_REF <p, CST>. */
6865 if (TREE_CODE (addr) != ADDR_EXPR
6866 || DECL_P (TREE_OPERAND (addr, 0)))
6867 return fold_build2 (MEM_REF, type,
6868 addr,
8e6cdc90 6869 wide_int_to_tree (ptype, wi::to_wide (off)));
b184c8f1
AM
6870 }
6871
6872 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
6873 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
6874 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
6875 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
6876 {
6877 tree type_domain;
6878 tree min_val = size_zero_node;
6879 tree osub = sub;
6880 sub = gimple_fold_indirect_ref (sub);
6881 if (! sub)
6882 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
6883 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
6884 if (type_domain && TYPE_MIN_VALUE (type_domain))
6885 min_val = TYPE_MIN_VALUE (type_domain);
6886 if (TREE_CODE (min_val) == INTEGER_CST)
6887 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
6888 }
6889
6890 return NULL_TREE;
6891}
19e51b40
JJ
6892
6893/* Return true if CODE is an operation that when operating on signed
6894 integer types involves undefined behavior on overflow and the
6895 operation can be expressed with unsigned arithmetic. */
6896
6897bool
6898arith_code_with_undefined_signed_overflow (tree_code code)
6899{
6900 switch (code)
6901 {
6902 case PLUS_EXPR:
6903 case MINUS_EXPR:
6904 case MULT_EXPR:
6905 case NEGATE_EXPR:
6906 case POINTER_PLUS_EXPR:
6907 return true;
6908 default:
6909 return false;
6910 }
6911}
6912
6913/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6914 operation that can be transformed to unsigned arithmetic by converting
6915 its operand, carrying out the operation in the corresponding unsigned
6916 type and converting the result back to the original type.
6917
6918 Returns a sequence of statements that replace STMT and also contain
6919 a modified form of STMT itself. */
6920
6921gimple_seq
355fe088 6922rewrite_to_defined_overflow (gimple *stmt)
19e51b40
JJ
6923{
6924 if (dump_file && (dump_flags & TDF_DETAILS))
6925 {
6926 fprintf (dump_file, "rewriting stmt with undefined signed "
6927 "overflow ");
6928 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6929 }
6930
6931 tree lhs = gimple_assign_lhs (stmt);
6932 tree type = unsigned_type_for (TREE_TYPE (lhs));
6933 gimple_seq stmts = NULL;
6934 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6935 {
74e3c262
RB
6936 tree op = gimple_op (stmt, i);
6937 op = gimple_convert (&stmts, type, op);
6938 gimple_set_op (stmt, i, op);
19e51b40
JJ
6939 }
6940 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
6941 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6942 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6943 gimple_seq_add_stmt (&stmts, stmt);
355fe088 6944 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
19e51b40
JJ
6945 gimple_seq_add_stmt (&stmts, cvt);
6946
6947 return stmts;
6948}
d4f5cd5e 6949
3d2cf79f 6950
c26de36d
RB
6951/* The valueization hook we use for the gimple_build API simplification.
6952 This makes us match fold_buildN behavior by only combining with
6953 statements in the sequence(s) we are currently building. */
6954
6955static tree
6956gimple_build_valueize (tree op)
6957{
6958 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6959 return op;
6960 return NULL_TREE;
6961}
6962
3d2cf79f 6963/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 6964 simplifying it first if possible. Returns the built
3d2cf79f
RB
6965 expression value and appends statements possibly defining it
6966 to SEQ. */
6967
6968tree
6969gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6970 enum tree_code code, tree type, tree op0)
3d2cf79f 6971{
c26de36d 6972 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
6973 if (!res)
6974 {
a15ebbcd 6975 res = create_tmp_reg_or_ssa_name (type);
355fe088 6976 gimple *stmt;
3d2cf79f
RB
6977 if (code == REALPART_EXPR
6978 || code == IMAGPART_EXPR
6979 || code == VIEW_CONVERT_EXPR)
0d0e4a03 6980 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 6981 else
0d0e4a03 6982 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
6983 gimple_set_location (stmt, loc);
6984 gimple_seq_add_stmt_without_update (seq, stmt);
6985 }
6986 return res;
6987}
6988
6989/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 6990 simplifying it first if possible. Returns the built
3d2cf79f
RB
6991 expression value and appends statements possibly defining it
6992 to SEQ. */
6993
6994tree
6995gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6996 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 6997{
c26de36d 6998 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
6999 if (!res)
7000 {
a15ebbcd 7001 res = create_tmp_reg_or_ssa_name (type);
355fe088 7002 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
7003 gimple_set_location (stmt, loc);
7004 gimple_seq_add_stmt_without_update (seq, stmt);
7005 }
7006 return res;
7007}
7008
7009/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 7010 simplifying it first if possible. Returns the built
3d2cf79f
RB
7011 expression value and appends statements possibly defining it
7012 to SEQ. */
7013
7014tree
7015gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7016 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
7017{
7018 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 7019 seq, gimple_build_valueize);
3d2cf79f
RB
7020 if (!res)
7021 {
a15ebbcd 7022 res = create_tmp_reg_or_ssa_name (type);
355fe088 7023 gimple *stmt;
3d2cf79f 7024 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
7025 stmt = gimple_build_assign (res, code,
7026 build3 (code, type, op0, op1, op2));
3d2cf79f 7027 else
0d0e4a03 7028 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
7029 gimple_set_location (stmt, loc);
7030 gimple_seq_add_stmt_without_update (seq, stmt);
7031 }
7032 return res;
7033}
7034
7035/* Build the call FN (ARG0) with a result of type TYPE
7036 (or no result if TYPE is void) with location LOC,
c26de36d 7037 simplifying it first if possible. Returns the built
3d2cf79f
RB
7038 expression value (or NULL_TREE if TYPE is void) and appends
7039 statements possibly defining it to SEQ. */
7040
7041tree
7042gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7043 enum built_in_function fn, tree type, tree arg0)
3d2cf79f 7044{
c26de36d 7045 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
7046 if (!res)
7047 {
7048 tree decl = builtin_decl_implicit (fn);
355fe088 7049 gimple *stmt = gimple_build_call (decl, 1, arg0);
3d2cf79f
RB
7050 if (!VOID_TYPE_P (type))
7051 {
a15ebbcd 7052 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7053 gimple_call_set_lhs (stmt, res);
7054 }
7055 gimple_set_location (stmt, loc);
7056 gimple_seq_add_stmt_without_update (seq, stmt);
7057 }
7058 return res;
7059}
7060
7061/* Build the call FN (ARG0, ARG1) with a result of type TYPE
7062 (or no result if TYPE is void) with location LOC,
c26de36d 7063 simplifying it first if possible. Returns the built
3d2cf79f
RB
7064 expression value (or NULL_TREE if TYPE is void) and appends
7065 statements possibly defining it to SEQ. */
7066
7067tree
7068gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7069 enum built_in_function fn, tree type, tree arg0, tree arg1)
3d2cf79f 7070{
c26de36d 7071 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
7072 if (!res)
7073 {
7074 tree decl = builtin_decl_implicit (fn);
355fe088 7075 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
3d2cf79f
RB
7076 if (!VOID_TYPE_P (type))
7077 {
a15ebbcd 7078 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7079 gimple_call_set_lhs (stmt, res);
7080 }
7081 gimple_set_location (stmt, loc);
7082 gimple_seq_add_stmt_without_update (seq, stmt);
7083 }
7084 return res;
7085}
7086
7087/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7088 (or no result if TYPE is void) with location LOC,
c26de36d 7089 simplifying it first if possible. Returns the built
3d2cf79f
RB
7090 expression value (or NULL_TREE if TYPE is void) and appends
7091 statements possibly defining it to SEQ. */
7092
7093tree
7094gimple_build (gimple_seq *seq, location_t loc,
7095 enum built_in_function fn, tree type,
c26de36d 7096 tree arg0, tree arg1, tree arg2)
3d2cf79f 7097{
c26de36d
RB
7098 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7099 seq, gimple_build_valueize);
3d2cf79f
RB
7100 if (!res)
7101 {
7102 tree decl = builtin_decl_implicit (fn);
355fe088 7103 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
3d2cf79f
RB
7104 if (!VOID_TYPE_P (type))
7105 {
a15ebbcd 7106 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7107 gimple_call_set_lhs (stmt, res);
7108 }
7109 gimple_set_location (stmt, loc);
7110 gimple_seq_add_stmt_without_update (seq, stmt);
7111 }
7112 return res;
7113}
7114
7115/* Build the conversion (TYPE) OP with a result of type TYPE
7116 with location LOC if such conversion is neccesary in GIMPLE,
7117 simplifying it first.
7118 Returns the built expression value and appends
7119 statements possibly defining it to SEQ. */
d4f5cd5e
RB
7120
7121tree
7122gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7123{
7124 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7125 return op;
3d2cf79f 7126 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 7127}
68e57f04 7128
74e3c262
RB
7129/* Build the conversion (ptrofftype) OP with a result of a type
7130 compatible with ptrofftype with location LOC if such conversion
7131 is neccesary in GIMPLE, simplifying it first.
7132 Returns the built expression value and appends
7133 statements possibly defining it to SEQ. */
7134
7135tree
7136gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7137{
7138 if (ptrofftype_p (TREE_TYPE (op)))
7139 return op;
7140 return gimple_convert (seq, loc, sizetype, op);
7141}
7142
e7c45b66
RS
7143/* Build a vector of type TYPE in which each element has the value OP.
7144 Return a gimple value for the result, appending any new statements
7145 to SEQ. */
7146
7147tree
7148gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7149 tree op)
7150{
7151 tree res, vec = build_vector_from_val (type, op);
7152 if (is_gimple_val (vec))
7153 return vec;
7154 if (gimple_in_ssa_p (cfun))
7155 res = make_ssa_name (type);
7156 else
7157 res = create_tmp_reg (type);
7158 gimple *stmt = gimple_build_assign (res, vec);
7159 gimple_set_location (stmt, loc);
7160 gimple_seq_add_stmt_without_update (seq, stmt);
7161 return res;
7162}
7163
7164/* Build a vector of type TYPE in which the elements have the values
7165 given by ELTS. Return a gimple value for the result, appending any
7166 new instructions to SEQ. */
7167
7168tree
7169gimple_build_vector (gimple_seq *seq, location_t loc, tree type,
7170 vec<tree> elts)
7171{
7172 unsigned int nelts = elts.length ();
7173 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
7174 for (unsigned int i = 0; i < nelts; ++i)
7175 if (!TREE_CONSTANT (elts[i]))
7176 {
7177 vec<constructor_elt, va_gc> *v;
7178 vec_alloc (v, nelts);
7179 for (i = 0; i < nelts; ++i)
7180 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[i]);
7181
7182 tree res;
7183 if (gimple_in_ssa_p (cfun))
7184 res = make_ssa_name (type);
7185 else
7186 res = create_tmp_reg (type);
7187 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7188 gimple_set_location (stmt, loc);
7189 gimple_seq_add_stmt_without_update (seq, stmt);
7190 return res;
7191 }
7192 return build_vector (type, elts);
7193}
7194
68e57f04
RS
7195/* Return true if the result of assignment STMT is known to be non-negative.
7196 If the return value is based on the assumption that signed overflow is
7197 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7198 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7199
7200static bool
7201gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7202 int depth)
7203{
7204 enum tree_code code = gimple_assign_rhs_code (stmt);
7205 switch (get_gimple_rhs_class (code))
7206 {
7207 case GIMPLE_UNARY_RHS:
7208 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7209 gimple_expr_type (stmt),
7210 gimple_assign_rhs1 (stmt),
7211 strict_overflow_p, depth);
7212 case GIMPLE_BINARY_RHS:
7213 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7214 gimple_expr_type (stmt),
7215 gimple_assign_rhs1 (stmt),
7216 gimple_assign_rhs2 (stmt),
7217 strict_overflow_p, depth);
7218 case GIMPLE_TERNARY_RHS:
7219 return false;
7220 case GIMPLE_SINGLE_RHS:
7221 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7222 strict_overflow_p, depth);
7223 case GIMPLE_INVALID_RHS:
7224 break;
7225 }
7226 gcc_unreachable ();
7227}
7228
7229/* Return true if return value of call STMT is known to be non-negative.
7230 If the return value is based on the assumption that signed overflow is
7231 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7232 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7233
7234static bool
7235gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7236 int depth)
7237{
7238 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7239 gimple_call_arg (stmt, 0) : NULL_TREE;
7240 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7241 gimple_call_arg (stmt, 1) : NULL_TREE;
7242
7243 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
1d9da71f 7244 gimple_call_combined_fn (stmt),
68e57f04
RS
7245 arg0,
7246 arg1,
7247 strict_overflow_p, depth);
7248}
7249
4534c203
RB
7250/* Return true if return value of call STMT is known to be non-negative.
7251 If the return value is based on the assumption that signed overflow is
7252 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7253 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7254
7255static bool
7256gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7257 int depth)
7258{
7259 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7260 {
7261 tree arg = gimple_phi_arg_def (stmt, i);
7262 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7263 return false;
7264 }
7265 return true;
7266}
7267
68e57f04
RS
7268/* Return true if STMT is known to compute a non-negative value.
7269 If the return value is based on the assumption that signed overflow is
7270 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7271 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7272
7273bool
7274gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7275 int depth)
7276{
7277 switch (gimple_code (stmt))
7278 {
7279 case GIMPLE_ASSIGN:
7280 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7281 depth);
7282 case GIMPLE_CALL:
7283 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7284 depth);
4534c203
RB
7285 case GIMPLE_PHI:
7286 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7287 depth);
68e57f04
RS
7288 default:
7289 return false;
7290 }
7291}
67dbe582
RS
7292
7293/* Return true if the floating-point value computed by assignment STMT
7294 is known to have an integer value. We also allow +Inf, -Inf and NaN
5a00b0aa 7295 to be considered integer values. Return false for signaling NaN.
67dbe582
RS
7296
7297 DEPTH is the current nesting depth of the query. */
7298
7299static bool
7300gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7301{
7302 enum tree_code code = gimple_assign_rhs_code (stmt);
7303 switch (get_gimple_rhs_class (code))
7304 {
7305 case GIMPLE_UNARY_RHS:
7306 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7307 gimple_assign_rhs1 (stmt), depth);
7308 case GIMPLE_BINARY_RHS:
7309 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7310 gimple_assign_rhs1 (stmt),
7311 gimple_assign_rhs2 (stmt), depth);
7312 case GIMPLE_TERNARY_RHS:
7313 return false;
7314 case GIMPLE_SINGLE_RHS:
7315 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7316 case GIMPLE_INVALID_RHS:
7317 break;
7318 }
7319 gcc_unreachable ();
7320}
7321
7322/* Return true if the floating-point value computed by call STMT is known
7323 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7324 considered integer values. Return false for signaling NaN.
67dbe582
RS
7325
7326 DEPTH is the current nesting depth of the query. */
7327
7328static bool
7329gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7330{
7331 tree arg0 = (gimple_call_num_args (stmt) > 0
7332 ? gimple_call_arg (stmt, 0)
7333 : NULL_TREE);
7334 tree arg1 = (gimple_call_num_args (stmt) > 1
7335 ? gimple_call_arg (stmt, 1)
7336 : NULL_TREE);
1d9da71f 7337 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
7338 arg0, arg1, depth);
7339}
7340
7341/* Return true if the floating-point result of phi STMT is known to have
7342 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 7343 integer values. Return false for signaling NaN.
67dbe582
RS
7344
7345 DEPTH is the current nesting depth of the query. */
7346
7347static bool
7348gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7349{
7350 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7351 {
7352 tree arg = gimple_phi_arg_def (stmt, i);
7353 if (!integer_valued_real_single_p (arg, depth + 1))
7354 return false;
7355 }
7356 return true;
7357}
7358
7359/* Return true if the floating-point value computed by STMT is known
7360 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7361 considered integer values. Return false for signaling NaN.
67dbe582
RS
7362
7363 DEPTH is the current nesting depth of the query. */
7364
7365bool
7366gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7367{
7368 switch (gimple_code (stmt))
7369 {
7370 case GIMPLE_ASSIGN:
7371 return gimple_assign_integer_valued_real_p (stmt, depth);
7372 case GIMPLE_CALL:
7373 return gimple_call_integer_valued_real_p (stmt, depth);
7374 case GIMPLE_PHI:
7375 return gimple_phi_integer_valued_real_p (stmt, depth);
7376 default:
7377 return false;
7378 }
7379}