]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
GCOV: create one intermediate file per a gcno file (PR gcov-profile/82702).
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
cbe34bb5 2 Copyright (C) 2010-2017 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5
AM
25#include "target.h"
26#include "rtl.h"
cbdd87d4 27#include "tree.h"
c7131fb2 28#include "gimple.h"
957060b5 29#include "predict.h"
c7131fb2 30#include "ssa.h"
957060b5
AM
31#include "cgraph.h"
32#include "gimple-pretty-print.h"
c7131fb2 33#include "fold-const.h"
36566b39
PK
34#include "stmt.h"
35#include "expr.h"
36#include "stor-layout.h"
7ee2468b 37#include "dumpfile.h"
2fb9a547 38#include "gimple-fold.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
442b4905
AM
41#include "tree-into-ssa.h"
42#include "tree-dfa.h"
7a300452 43#include "tree-ssa.h"
cbdd87d4 44#include "tree-ssa-propagate.h"
450ad0cd 45#include "ipa-utils.h"
4484a35a 46#include "tree-ssa-address.h"
862d0b35 47#include "langhooks.h"
19e51b40 48#include "gimplify-me.h"
2b5f0895 49#include "dbgcnt.h"
9b2b7279 50#include "builtins.h"
e0ee10ed
RB
51#include "tree-eh.h"
52#include "gimple-match.h"
48126138 53#include "gomp-constants.h"
f869c12f 54#include "optabs-query.h"
629b3d75 55#include "omp-general.h"
3de2a40e 56#include "ipa-chkp.h"
abd3a68c 57#include "tree-cfg.h"
a918bfbf 58#include "fold-const-call.h"
314e6352
ML
59#include "stringpool.h"
60#include "attribs.h"
45b2222a 61#include "asan.h"
cbdd87d4 62
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable DECL
   was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (e.g. cloned-away) declarations never have a body to refer
     to.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      /* An inlined function body may no longer be output on its own.  */
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
162
a15ebbcd
ML
163/* Create a temporary for TYPE for a statement STMT. If the current function
164 is in SSA form, a SSA name is created. Otherwise a temporary register
165 is made. */
166
edc19e03
WS
167tree
168create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
a15ebbcd
ML
169{
170 if (gimple_in_ssa_p (cfun))
171 return make_ssa_name (type, stmt);
172 else
173 return create_tmp_reg (type);
174}
175
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor contains
   CVAL.  Returns the canonicalized value, or NULL_TREE when the reference
   cannot be represented in the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Canonicalize an invariant pointer plus constant offset into the
     &MEM_REF[ptr, off] form so the result is a valid gimple invariant.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal by its underlying decl.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up on addresses of symbols the current unit may not
	 legitimately reference (see can_refer_decl_in_current_unit_p).  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      /* We are taking the address of BASE; it must be addressable.  */
      if (VAR_P (base))
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      /* Preserve the type of the original expression.  */
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Constants carrying TREE_OVERFLOW are not valid gimple invariants;
     drop the flag.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
cbdd87d4
RG
237
238/* If SYM is a constant variable with known value, return the value.
239 NULL_TREE is returned otherwise. */
240
241tree
242get_symbol_constant_value (tree sym)
243{
6a6dac52
JH
244 tree val = ctor_for_folding (sym);
245 if (val != error_mark_node)
cbdd87d4 246 {
cbdd87d4
RG
247 if (val)
248 {
9d60be38 249 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 250 if (val && is_gimple_min_invariant (val))
17f39a39 251 return val;
1389294c
JH
252 else
253 return NULL_TREE;
cbdd87d4
RG
254 }
255 /* Variables declared 'const' without an initializer
256 have zero as the initializer if they may not be
257 overridden at link or run time. */
258 if (!val
b8a8c472 259 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 260 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
261 }
262
263 return NULL_TREE;
264}
265
266
cbdd87d4
RG
267
268/* Subroutine of fold_stmt. We perform several simplifications of the
269 memory reference tree EXPR and make sure to re-gimplify them properly
270 after propagation of constant addresses. IS_LHS is true if the
271 reference is supposed to be an lvalue. */
272
273static tree
274maybe_fold_reference (tree expr, bool is_lhs)
275{
17f39a39 276 tree result;
cbdd87d4 277
f0eddb90
RG
278 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
279 || TREE_CODE (expr) == REALPART_EXPR
280 || TREE_CODE (expr) == IMAGPART_EXPR)
281 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
282 return fold_unary_loc (EXPR_LOCATION (expr),
283 TREE_CODE (expr),
284 TREE_TYPE (expr),
285 TREE_OPERAND (expr, 0));
286 else if (TREE_CODE (expr) == BIT_FIELD_REF
287 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
288 return fold_ternary_loc (EXPR_LOCATION (expr),
289 TREE_CODE (expr),
290 TREE_TYPE (expr),
291 TREE_OPERAND (expr, 0),
292 TREE_OPERAND (expr, 1),
293 TREE_OPERAND (expr, 2));
294
f0eddb90
RG
295 if (!is_lhs
296 && (result = fold_const_aggregate_ref (expr))
297 && is_gimple_min_invariant (result))
298 return result;
cbdd87d4 299
cbdd87d4
RG
300 return NULL_TREE;
301}
302
303
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers carry no value to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		/* Devirtualize only when the target list is complete and
		   at most one candidate remains (guarded by the devirt
		   debug counter).  */
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p, 0] simplifies to p (converted to the rhs type).  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
443
fef5a0d9
RB
444
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence (seen first in this backward
	     walk) takes over the VDEF of the replaced statement; earlier
	     stores get fresh SSA names for the virtual operand.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace (si_p, stmts, false);
}
516
cbdd87d4
RG
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  /* Sequence of statements EXPR gimplifies to.  */
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  /* Nothing to insert; just drop the call.  */
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* The call produces a result: gimplify EXPR to a value and append
	 an assignment of it to the original lhs.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  /* Splice the sequence in, rewiring virtual operands.  */
  gsi_replace_with_seq_vops (si_p, stmts);
}
cbdd87d4 572
fef5a0d9
RB
573
574/* Replace the call at *GSI with the gimple value VAL. */
575
e3174bdf 576void
fef5a0d9
RB
577replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
578{
355fe088 579 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 580 tree lhs = gimple_call_lhs (stmt);
355fe088 581 gimple *repl;
fef5a0d9 582 if (lhs)
e256dfce 583 {
fef5a0d9
RB
584 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
585 val = fold_convert (TREE_TYPE (lhs), val);
586 repl = gimple_build_assign (lhs, val);
587 }
588 else
589 repl = gimple_build_nop ();
590 tree vdef = gimple_vdef (stmt);
591 if (vdef && TREE_CODE (vdef) == SSA_NAME)
592 {
593 unlink_stmt_vdef (stmt);
594 release_ssa_name (vdef);
595 }
f6b4dc28 596 gsi_replace (gsi, repl, false);
fef5a0d9
RB
597}
598
599/* Replace the call at *GSI with the new call REPL and fold that
600 again. */
601
602static void
355fe088 603replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
fef5a0d9 604{
355fe088 605 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
606 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
607 gimple_set_location (repl, gimple_location (stmt));
608 if (gimple_vdef (stmt)
609 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
610 {
611 gimple_set_vdef (repl, gimple_vdef (stmt));
fef5a0d9
RB
612 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
613 }
00296d7f
JJ
614 if (gimple_vuse (stmt))
615 gimple_set_vuse (repl, gimple_vuse (stmt));
f6b4dc28 616 gsi_replace (gsi, repl, false);
fef5a0d9
RB
617 fold_stmt (gsi);
618}
619
620/* Return true if VAR is a VAR_DECL or a component thereof. */
621
622static bool
623var_decl_component_p (tree var)
624{
625 tree inner = var;
626 while (handled_component_p (inner))
627 inner = TREE_OPERAND (inner, 0);
628 return SSA_VAR_P (inner);
629}
630
6512c0f1
MS
631/* If the SIZE argument representing the size of an object is in a range
632 of values of which exactly one is valid (and that is zero), return
633 true, otherwise false. */
634
635static bool
636size_must_be_zero_p (tree size)
637{
638 if (integer_zerop (size))
639 return true;
640
641 if (TREE_CODE (size) != SSA_NAME)
642 return false;
643
644 wide_int min, max;
645 enum value_range_type rtype = get_range_info (size, &min, &max);
646 if (rtype != VR_ANTI_RANGE)
647 return false;
648
649 tree type = TREE_TYPE (size);
650 int prec = TYPE_PRECISION (type);
651
652 wide_int wone = wi::one (prec);
653
654 /* Compute the value of SSIZE_MAX, the largest positive value that
655 can be stored in ssize_t, the signed counterpart of size_t. */
656 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
657
658 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
659}
660
fef5a0d9 661/* Fold function call to builtin mem{{,p}cpy,move}. Return
86c5a5c3 662 false if no simplification can be made.
fef5a0d9
RB
663 If ENDP is 0, return DEST (like memcpy).
664 If ENDP is 1, return DEST+LEN (like mempcpy).
665 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
666 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
667 (memmove). */
668
669static bool
670gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
671 tree dest, tree src, int endp)
672{
355fe088 673 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
674 tree lhs = gimple_call_lhs (stmt);
675 tree len = gimple_call_arg (stmt, 2);
676 tree destvar, srcvar;
677 location_t loc = gimple_location (stmt);
678
6512c0f1
MS
679 /* If the LEN parameter is a constant zero or in range where
680 the only valid value is zero, return DEST. */
681 if (size_must_be_zero_p (len))
fef5a0d9 682 {
355fe088 683 gimple *repl;
fef5a0d9
RB
684 if (gimple_call_lhs (stmt))
685 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
686 else
687 repl = gimple_build_nop ();
688 tree vdef = gimple_vdef (stmt);
689 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 690 {
fef5a0d9
RB
691 unlink_stmt_vdef (stmt);
692 release_ssa_name (vdef);
693 }
f6b4dc28 694 gsi_replace (gsi, repl, false);
fef5a0d9
RB
695 return true;
696 }
697
698 /* If SRC and DEST are the same (and not volatile), return
699 DEST{,+LEN,+LEN-1}. */
700 if (operand_equal_p (src, dest, 0))
701 {
702 unlink_stmt_vdef (stmt);
703 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
704 release_ssa_name (gimple_vdef (stmt));
705 if (!lhs)
706 {
f6b4dc28 707 gsi_replace (gsi, gimple_build_nop (), false);
fef5a0d9
RB
708 return true;
709 }
710 goto done;
711 }
712 else
713 {
714 tree srctype, desttype;
715 unsigned int src_align, dest_align;
716 tree off0;
717
3de2a40e
IE
718 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
719 pointers as wide integer) and also may result in huge function
720 size because of inlined bounds copy. Thus don't inline for
721 functions we want to instrument. */
722 if (flag_check_pointer_bounds
723 && chkp_instrumentable_p (cfun->decl)
724 /* Even if data may contain pointers we can inline if copy
725 less than a pointer size. */
726 && (!tree_fits_uhwi_p (len)
727 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
728 return false;
729
fef5a0d9
RB
730 /* Build accesses at offset zero with a ref-all character type. */
731 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
732 ptr_mode, true), 0);
733
734 /* If we can perform the copy efficiently with first doing all loads
735 and then all stores inline it that way. Currently efficiently
736 means that we can load all the memory into a single integer
737 register which is what MOVE_MAX gives us. */
738 src_align = get_pointer_alignment (src);
739 dest_align = get_pointer_alignment (dest);
740 if (tree_fits_uhwi_p (len)
741 && compare_tree_int (len, MOVE_MAX) <= 0
742 /* ??? Don't transform copies from strings with known length this
743 confuses the tree-ssa-strlen.c. This doesn't handle
744 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
745 reason. */
746 && !c_strlen (src, 2))
747 {
748 unsigned ilen = tree_to_uhwi (len);
146ec50f 749 if (pow2p_hwi (ilen))
fef5a0d9 750 {
64ab8765 751 scalar_int_mode mode;
fef5a0d9
RB
752 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
753 if (type
64ab8765
RS
754 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
755 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
fef5a0d9
RB
756 /* If the destination pointer is not aligned we must be able
757 to emit an unaligned store. */
64ab8765 758 && (dest_align >= GET_MODE_ALIGNMENT (mode)
e0bd6c9f 759 || !targetm.slow_unaligned_access (mode, dest_align)
64ab8765 760 || (optab_handler (movmisalign_optab, mode)
f869c12f 761 != CODE_FOR_nothing)))
fef5a0d9
RB
762 {
763 tree srctype = type;
764 tree desttype = type;
64ab8765 765 if (src_align < GET_MODE_ALIGNMENT (mode))
fef5a0d9
RB
766 srctype = build_aligned_type (type, src_align);
767 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
768 tree tem = fold_const_aggregate_ref (srcmem);
769 if (tem)
770 srcmem = tem;
64ab8765 771 else if (src_align < GET_MODE_ALIGNMENT (mode)
e0bd6c9f 772 && targetm.slow_unaligned_access (mode, src_align)
64ab8765 773 && (optab_handler (movmisalign_optab, mode)
f869c12f 774 == CODE_FOR_nothing))
fef5a0d9
RB
775 srcmem = NULL_TREE;
776 if (srcmem)
777 {
355fe088 778 gimple *new_stmt;
fef5a0d9
RB
779 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
780 {
781 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
a15ebbcd
ML
782 srcmem
783 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
784 new_stmt);
fef5a0d9
RB
785 gimple_assign_set_lhs (new_stmt, srcmem);
786 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
787 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
788 }
64ab8765 789 if (dest_align < GET_MODE_ALIGNMENT (mode))
fef5a0d9
RB
790 desttype = build_aligned_type (type, dest_align);
791 new_stmt
792 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
793 dest, off0),
794 srcmem);
795 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
796 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
797 if (gimple_vdef (new_stmt)
798 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
799 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
800 if (!lhs)
801 {
f6b4dc28 802 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
803 return true;
804 }
805 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
806 goto done;
807 }
808 }
809 }
810 }
811
812 if (endp == 3)
813 {
814 /* Both DEST and SRC must be pointer types.
815 ??? This is what old code did. Is the testing for pointer types
816 really mandatory?
817
818 If either SRC is readonly or length is 1, we can use memcpy. */
819 if (!dest_align || !src_align)
820 return false;
821 if (readonly_data_expr (src)
822 || (tree_fits_uhwi_p (len)
823 && (MIN (src_align, dest_align) / BITS_PER_UNIT
824 >= tree_to_uhwi (len))))
825 {
826 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
827 if (!fn)
828 return false;
829 gimple_call_set_fndecl (stmt, fn);
830 gimple_call_set_arg (stmt, 0, dest);
831 gimple_call_set_arg (stmt, 1, src);
832 fold_stmt (gsi);
833 return true;
834 }
835
836 /* If *src and *dest can't overlap, optimize into memcpy as well. */
837 if (TREE_CODE (src) == ADDR_EXPR
838 && TREE_CODE (dest) == ADDR_EXPR)
839 {
840 tree src_base, dest_base, fn;
841 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
4fda19ef 842 HOST_WIDE_INT maxsize;
fef5a0d9
RB
843
844 srcvar = TREE_OPERAND (src, 0);
4fda19ef
JJ
845 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
846 if (src_base == NULL)
847 src_base = srcvar;
fef5a0d9 848 destvar = TREE_OPERAND (dest, 0);
4fda19ef
JJ
849 dest_base = get_addr_base_and_unit_offset (destvar,
850 &dest_offset);
851 if (dest_base == NULL)
852 dest_base = destvar;
fef5a0d9
RB
853 if (tree_fits_uhwi_p (len))
854 maxsize = tree_to_uhwi (len);
855 else
856 maxsize = -1;
fef5a0d9
RB
857 if (SSA_VAR_P (src_base)
858 && SSA_VAR_P (dest_base))
859 {
860 if (operand_equal_p (src_base, dest_base, 0)
861 && ranges_overlap_p (src_offset, maxsize,
862 dest_offset, maxsize))
863 return false;
864 }
865 else if (TREE_CODE (src_base) == MEM_REF
866 && TREE_CODE (dest_base) == MEM_REF)
867 {
868 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
869 TREE_OPERAND (dest_base, 0), 0))
870 return false;
871 offset_int off = mem_ref_offset (src_base) + src_offset;
872 if (!wi::fits_shwi_p (off))
873 return false;
874 src_offset = off.to_shwi ();
875
876 off = mem_ref_offset (dest_base) + dest_offset;
877 if (!wi::fits_shwi_p (off))
878 return false;
879 dest_offset = off.to_shwi ();
880 if (ranges_overlap_p (src_offset, maxsize,
881 dest_offset, maxsize))
882 return false;
883 }
884 else
885 return false;
886
887 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
888 if (!fn)
889 return false;
890 gimple_call_set_fndecl (stmt, fn);
891 gimple_call_set_arg (stmt, 0, dest);
892 gimple_call_set_arg (stmt, 1, src);
893 fold_stmt (gsi);
894 return true;
895 }
896
897 /* If the destination and source do not alias optimize into
898 memcpy as well. */
899 if ((is_gimple_min_invariant (dest)
900 || TREE_CODE (dest) == SSA_NAME)
901 && (is_gimple_min_invariant (src)
902 || TREE_CODE (src) == SSA_NAME))
903 {
904 ao_ref destr, srcr;
905 ao_ref_init_from_ptr_and_size (&destr, dest, len);
906 ao_ref_init_from_ptr_and_size (&srcr, src, len);
907 if (!refs_may_alias_p_1 (&destr, &srcr, false))
908 {
909 tree fn;
910 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
911 if (!fn)
912 return false;
913 gimple_call_set_fndecl (stmt, fn);
914 gimple_call_set_arg (stmt, 0, dest);
915 gimple_call_set_arg (stmt, 1, src);
916 fold_stmt (gsi);
917 return true;
918 }
919 }
920
921 return false;
922 }
923
924 if (!tree_fits_shwi_p (len))
925 return false;
926 /* FIXME:
927 This logic lose for arguments like (type *)malloc (sizeof (type)),
928 since we strip the casts of up to VOID return value from malloc.
929 Perhaps we ought to inherit type from non-VOID argument here? */
930 STRIP_NOPS (src);
931 STRIP_NOPS (dest);
932 if (!POINTER_TYPE_P (TREE_TYPE (src))
933 || !POINTER_TYPE_P (TREE_TYPE (dest)))
934 return false;
935 /* In the following try to find a type that is most natural to be
936 used for the memcpy source and destination and that allows
937 the most optimization when memcpy is turned into a plain assignment
938 using that type. In theory we could always use a char[len] type
939 but that only gains us that the destination and source possibly
940 no longer will have their address taken. */
941 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
942 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
943 {
944 tree tem = TREE_OPERAND (src, 0);
945 STRIP_NOPS (tem);
946 if (tem != TREE_OPERAND (src, 0))
947 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
948 }
949 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
950 {
951 tree tem = TREE_OPERAND (dest, 0);
952 STRIP_NOPS (tem);
953 if (tem != TREE_OPERAND (dest, 0))
954 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
955 }
956 srctype = TREE_TYPE (TREE_TYPE (src));
957 if (TREE_CODE (srctype) == ARRAY_TYPE
958 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
959 {
960 srctype = TREE_TYPE (srctype);
961 STRIP_NOPS (src);
962 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
963 }
964 desttype = TREE_TYPE (TREE_TYPE (dest));
965 if (TREE_CODE (desttype) == ARRAY_TYPE
966 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
967 {
968 desttype = TREE_TYPE (desttype);
969 STRIP_NOPS (dest);
970 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
971 }
972 if (TREE_ADDRESSABLE (srctype)
973 || TREE_ADDRESSABLE (desttype))
974 return false;
975
976 /* Make sure we are not copying using a floating-point mode or
977 a type whose size possibly does not match its precision. */
978 if (FLOAT_MODE_P (TYPE_MODE (desttype))
979 || TREE_CODE (desttype) == BOOLEAN_TYPE
980 || TREE_CODE (desttype) == ENUMERAL_TYPE)
981 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
982 if (FLOAT_MODE_P (TYPE_MODE (srctype))
983 || TREE_CODE (srctype) == BOOLEAN_TYPE
984 || TREE_CODE (srctype) == ENUMERAL_TYPE)
985 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
986 if (!srctype)
987 srctype = desttype;
988 if (!desttype)
989 desttype = srctype;
990 if (!srctype)
991 return false;
992
993 src_align = get_pointer_alignment (src);
994 dest_align = get_pointer_alignment (dest);
995 if (dest_align < TYPE_ALIGN (desttype)
996 || src_align < TYPE_ALIGN (srctype))
997 return false;
998
999 destvar = dest;
1000 STRIP_NOPS (destvar);
1001 if (TREE_CODE (destvar) == ADDR_EXPR
1002 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1003 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1004 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1005 else
1006 destvar = NULL_TREE;
1007
1008 srcvar = src;
1009 STRIP_NOPS (srcvar);
1010 if (TREE_CODE (srcvar) == ADDR_EXPR
1011 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1012 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1013 {
1014 if (!destvar
1015 || src_align >= TYPE_ALIGN (desttype))
1016 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1017 srcvar, off0);
1018 else if (!STRICT_ALIGNMENT)
1019 {
1020 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1021 src_align);
1022 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1023 }
e256dfce 1024 else
fef5a0d9
RB
1025 srcvar = NULL_TREE;
1026 }
1027 else
1028 srcvar = NULL_TREE;
1029
1030 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1031 return false;
1032
1033 if (srcvar == NULL_TREE)
1034 {
1035 STRIP_NOPS (src);
1036 if (src_align >= TYPE_ALIGN (desttype))
1037 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1038 else
1039 {
1040 if (STRICT_ALIGNMENT)
1041 return false;
1042 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1043 src_align);
1044 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1045 }
1046 }
1047 else if (destvar == NULL_TREE)
1048 {
1049 STRIP_NOPS (dest);
1050 if (dest_align >= TYPE_ALIGN (srctype))
1051 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1052 else
1053 {
1054 if (STRICT_ALIGNMENT)
1055 return false;
1056 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1057 dest_align);
1058 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1059 }
1060 }
1061
355fe088 1062 gimple *new_stmt;
fef5a0d9
RB
1063 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1064 {
921b13d0
RB
1065 tree tem = fold_const_aggregate_ref (srcvar);
1066 if (tem)
1067 srcvar = tem;
1068 if (! is_gimple_min_invariant (srcvar))
1069 {
1070 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
a15ebbcd
ML
1071 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1072 new_stmt);
921b13d0
RB
1073 gimple_assign_set_lhs (new_stmt, srcvar);
1074 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1075 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1076 }
fef5a0d9
RB
1077 }
1078 new_stmt = gimple_build_assign (destvar, srcvar);
1079 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1080 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1081 if (gimple_vdef (new_stmt)
1082 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1083 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1084 if (!lhs)
1085 {
f6b4dc28 1086 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
1087 return true;
1088 }
1089 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1090 }
1091
1092done:
74e3c262 1093 gimple_seq stmts = NULL;
fef5a0d9
RB
1094 if (endp == 0 || endp == 3)
1095 len = NULL_TREE;
1096 else if (endp == 2)
74e3c262
RB
1097 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1098 ssize_int (1));
fef5a0d9 1099 if (endp == 2 || endp == 1)
74e3c262
RB
1100 {
1101 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1102 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1103 TREE_TYPE (dest), dest, len);
1104 }
fef5a0d9 1105
74e3c262 1106 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 1107 gimple *repl = gimple_build_assign (lhs, dest);
f6b4dc28 1108 gsi_replace (gsi, repl, false);
fef5a0d9
RB
1109 return true;
1110}
1111
b3d8d88e
MS
1112/* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1113 to built-in memcmp (a, b, len). */
1114
1115static bool
1116gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1117{
1118 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1119
1120 if (!fn)
1121 return false;
1122
1123 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1124
1125 gimple *stmt = gsi_stmt (*gsi);
1126 tree a = gimple_call_arg (stmt, 0);
1127 tree b = gimple_call_arg (stmt, 1);
1128 tree len = gimple_call_arg (stmt, 2);
1129
1130 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1131 replace_call_with_call_and_fold (gsi, repl);
1132
1133 return true;
1134}
1135
1136/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1137 to built-in memmove (dest, src, len). */
1138
1139static bool
1140gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1141{
1142 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1143
1144 if (!fn)
1145 return false;
1146
1147 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1148 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1149 len) into memmove (dest, src, len). */
1150
1151 gimple *stmt = gsi_stmt (*gsi);
1152 tree src = gimple_call_arg (stmt, 0);
1153 tree dest = gimple_call_arg (stmt, 1);
1154 tree len = gimple_call_arg (stmt, 2);
1155
1156 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1157 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1158 replace_call_with_call_and_fold (gsi, repl);
1159
1160 return true;
1161}
1162
1163/* Transform a call to built-in bzero (dest, len) at *GSI into one
1164 to built-in memset (dest, 0, len). */
1165
1166static bool
1167gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1168{
1169 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1170
1171 if (!fn)
1172 return false;
1173
1174 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1175
1176 gimple *stmt = gsi_stmt (*gsi);
1177 tree dest = gimple_call_arg (stmt, 0);
1178 tree len = gimple_call_arg (stmt, 1);
1179
1180 gimple_seq seq = NULL;
1181 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1182 gimple_seq_add_stmt_without_update (&seq, repl);
1183 gsi_replace_with_seq_vops (gsi, seq);
1184 fold_stmt (gsi);
1185
1186 return true;
1187}
1188
fef5a0d9
RB
1189/* Fold function call to builtin memset or bzero at *GSI setting the
1190 memory of size LEN to VAL. Return whether a simplification was made. */
1191
1192static bool
1193gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1194{
355fe088 1195 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1196 tree etype;
1197 unsigned HOST_WIDE_INT length, cval;
1198
1199 /* If the LEN parameter is zero, return DEST. */
1200 if (integer_zerop (len))
1201 {
1202 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1203 return true;
1204 }
1205
1206 if (! tree_fits_uhwi_p (len))
1207 return false;
1208
1209 if (TREE_CODE (c) != INTEGER_CST)
1210 return false;
1211
1212 tree dest = gimple_call_arg (stmt, 0);
1213 tree var = dest;
1214 if (TREE_CODE (var) != ADDR_EXPR)
1215 return false;
1216
1217 var = TREE_OPERAND (var, 0);
1218 if (TREE_THIS_VOLATILE (var))
1219 return false;
1220
1221 etype = TREE_TYPE (var);
1222 if (TREE_CODE (etype) == ARRAY_TYPE)
1223 etype = TREE_TYPE (etype);
1224
1225 if (!INTEGRAL_TYPE_P (etype)
1226 && !POINTER_TYPE_P (etype))
1227 return NULL_TREE;
1228
1229 if (! var_decl_component_p (var))
1230 return NULL_TREE;
1231
1232 length = tree_to_uhwi (len);
7a504f33 1233 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
fef5a0d9
RB
1234 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1235 return NULL_TREE;
1236
1237 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1238 return NULL_TREE;
1239
1240 if (integer_zerop (c))
1241 cval = 0;
1242 else
1243 {
1244 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1245 return NULL_TREE;
1246
1247 cval = TREE_INT_CST_LOW (c);
1248 cval &= 0xff;
1249 cval |= cval << 8;
1250 cval |= cval << 16;
1251 cval |= (cval << 31) << 1;
1252 }
1253
1254 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1255 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
fef5a0d9
RB
1256 gimple_set_vuse (store, gimple_vuse (stmt));
1257 tree vdef = gimple_vdef (stmt);
1258 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1259 {
1260 gimple_set_vdef (store, gimple_vdef (stmt));
1261 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1262 }
1263 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1264 if (gimple_call_lhs (stmt))
1265 {
355fe088 1266 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1267 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1268 }
1269 else
1270 {
1271 gimple_stmt_iterator gsi2 = *gsi;
1272 gsi_prev (gsi);
1273 gsi_remove (&gsi2, true);
1274 }
1275
1276 return true;
1277}
1278
1279
88d0c3f0
MS
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables (lazily allocated here on
   first use; the caller frees it).
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is set and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  bool fuzzy, bool *flexp)
{
  tree var, val;
  gimple *def_stmt;

  /* The minimum and maximum length.  The MAXLEN pointer stays unchanged
     but MINLEN may be cleared during the execution of the function.  */
  tree *minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    /* Recurse on the SSA name the dereference is based on.  */
	    return get_range_strlen (TREE_OPERAND (aop0, 0),
				     length, visited, type, fuzzy, flexp);
	}

      if (type == 2)
	{
	  /* TYPE == 2: ARG itself is the value; it must be a
	     non-negative integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);

      if (!val && fuzzy)
	{
	  /* The exact length is unknown; in fuzzy mode fall back to
	     structural upper bounds.  */
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy, flexp);

	  if (TREE_CODE (arg) == COMPONENT_REF
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL.
		 Set *FLEXP to true if the array whose bound is being
		 used is at the end of a struct.  */
	      if (array_at_struct_end_p (arg))
		*flexp = true;

	      /* Upper bound is array size minus one (for the NUL).  */
	      arg = TREE_OPERAND (arg, 1);
	      val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
	      if (!val || integer_zerop (val))
		return false;
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }
	}

      if (!val)
	return false;

      /* Lower the recorded minimum when VAL is smaller (range mode),
	 or seed it when unset.  */
      if (minlen
	  && (!*minlen
	      || (type > 0
		  && TREE_CODE (*minlen) == INTEGER_CST
		  && TREE_CODE (val) == INTEGER_CST
		  && tree_int_cst_lt (val, *minlen))))
	*minlen = val;

      if (*maxlen)
	{
	  if (type > 0)
	    {
	      /* Maximum-length mode: widen *MAXLEN to cover VAL.  */
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  /* TYPE == 0: every reaching length must agree exactly.  */
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* A COND_EXPR is bounded only if both arms are.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
	    && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
	      {
		/* In fuzzy mode an unknown arm makes the bound
		   "unlimited" (all-ones) rather than a failure.  */
		if (fuzzy)
		  *maxlen = build_all_ones_cst (size_type_node);
		else
		  return false;
	      }
	  }
      }
      return true;

    default:
      return false;
    }
}
1463
88d0c3f0
MS
1464/* Determine the minimum and maximum value or string length that ARG
1465 refers to and store each in the first two elements of MINMAXLEN.
1466 For expressions that point to strings of unknown lengths that are
1467 character arrays, use the upper bound of the array as the maximum
1468 length. For example, given an expression like 'x ? array : "xyz"'
1469 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1470 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1471 stored in array.
3f343040
MS
1472 Return true if the range of the string lengths has been obtained
1473 from the upper bound of an array at the end of a struct. Such
1474 an array may hold a string that's longer than its upper bound
1475 due to it being used as a poor-man's flexible array member. */
88d0c3f0 1476
3f343040
MS
1477bool
1478get_range_strlen (tree arg, tree minmaxlen[2])
88d0c3f0
MS
1479{
1480 bitmap visited = NULL;
1481
1482 minmaxlen[0] = NULL_TREE;
1483 minmaxlen[1] = NULL_TREE;
1484
3f343040
MS
1485 bool flexarray = false;
1486 get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);
88d0c3f0
MS
1487
1488 if (visited)
1489 BITMAP_FREE (visited);
3f343040
MS
1490
1491 return flexarray;
88d0c3f0
MS
1492}
1493
dcb7fae2
RB
1494tree
1495get_maxval_strlen (tree arg, int type)
1496{
1497 bitmap visited = NULL;
88d0c3f0 1498 tree len[2] = { NULL_TREE, NULL_TREE };
3f343040
MS
1499
1500 bool dummy;
1501 if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
88d0c3f0 1502 len[1] = NULL_TREE;
dcb7fae2
RB
1503 if (visited)
1504 BITMAP_FREE (visited);
1505
88d0c3f0 1506 return len[1];
dcb7fae2
RB
1507}
1508
fef5a0d9
RB
1509
1510/* Fold function call to builtin strcpy with arguments DEST and SRC.
1511 If LEN is not NULL, it represents the length of the string to be
1512 copied. Return NULL_TREE if no simplification can be made. */
1513
1514static bool
1515gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 1516 tree dest, tree src)
fef5a0d9 1517{
dcb7fae2 1518 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1519 tree fn;
1520
1521 /* If SRC and DEST are the same (and not volatile), return DEST. */
1522 if (operand_equal_p (src, dest, 0))
1523 {
1524 replace_call_with_value (gsi, dest);
1525 return true;
1526 }
1527
1528 if (optimize_function_for_size_p (cfun))
1529 return false;
1530
1531 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1532 if (!fn)
1533 return false;
1534
1579e1f8 1535 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1536 if (!len)
dcb7fae2 1537 return false;
fef5a0d9
RB
1538
1539 len = fold_convert_loc (loc, size_type_node, len);
1540 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1541 len = force_gimple_operand_gsi (gsi, len, true,
1542 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1543 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1544 replace_call_with_call_and_fold (gsi, repl);
1545 return true;
1546}
1547
1548/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1549 If SLEN is not NULL, it represents the length of the source string.
1550 Return NULL_TREE if no simplification can be made. */
1551
1552static bool
dcb7fae2
RB
1553gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1554 tree dest, tree src, tree len)
fef5a0d9 1555{
dcb7fae2 1556 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1557 tree fn;
1558
1559 /* If the LEN parameter is zero, return DEST. */
1560 if (integer_zerop (len))
1561 {
1562 replace_call_with_value (gsi, dest);
1563 return true;
1564 }
1565
1566 /* We can't compare slen with len as constants below if len is not a
1567 constant. */
dcb7fae2 1568 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
1569 return false;
1570
fef5a0d9 1571 /* Now, we must be passed a constant src ptr parameter. */
1579e1f8 1572 tree slen = get_maxval_strlen (src, 0);
dcb7fae2 1573 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
1574 return false;
1575
1576 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1577
1578 /* We do not support simplification of this case, though we do
1579 support it when expanding trees into RTL. */
1580 /* FIXME: generate a call to __builtin_memset. */
1581 if (tree_int_cst_lt (slen, len))
1582 return false;
1583
1584 /* OK transform into builtin memcpy. */
1585 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1586 if (!fn)
1587 return false;
1588
1589 len = fold_convert_loc (loc, size_type_node, len);
1590 len = force_gimple_operand_gsi (gsi, len, true,
1591 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1592 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1593 replace_call_with_call_and_fold (gsi, repl);
1594 return true;
1595}
1596
71dea1dd
WD
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.
   IS_STRRCHR selects between the two built-ins.  Return true if the
   call at *GSI was replaced.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* Without a LHS the call has no usable result; leave it alone
     (it has no other side effects worth preserving).  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Both arguments constant: evaluate at compile time using the host
     strchr/strrchr.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  /* Character not found: the result is a null pointer.  */
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Found: the result is STR plus the constant offset.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The remaining transforms only apply when searching for NUL.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  /* Otherwise rewrite str(r)chr (str, 0) as str + strlen (str).  */
  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ??? We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1680
c8952930
JJ
1681/* Fold function call to builtin strstr.
1682 If both arguments are constant, evaluate and fold the result,
1683 additionally fold strstr (x, "") into x and strstr (x, "c")
1684 into strchr (x, 'c'). */
1685static bool
1686gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1687{
1688 gimple *stmt = gsi_stmt (*gsi);
1689 tree haystack = gimple_call_arg (stmt, 0);
1690 tree needle = gimple_call_arg (stmt, 1);
1691 const char *p, *q;
1692
1693 if (!gimple_call_lhs (stmt))
1694 return false;
1695
1696 q = c_getstr (needle);
1697 if (q == NULL)
1698 return false;
1699
1700 if ((p = c_getstr (haystack)))
1701 {
1702 const char *r = strstr (p, q);
1703
1704 if (r == NULL)
1705 {
1706 replace_call_with_value (gsi, integer_zero_node);
1707 return true;
1708 }
1709
1710 tree len = build_int_cst (size_type_node, r - p);
1711 gimple_seq stmts = NULL;
1712 gimple *new_stmt
1713 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1714 haystack, len);
1715 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1716 gsi_replace_with_seq_vops (gsi, stmts);
1717 return true;
1718 }
1719
1720 /* For strstr (x, "") return x. */
1721 if (q[0] == '\0')
1722 {
1723 replace_call_with_value (gsi, haystack);
1724 return true;
1725 }
1726
1727 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1728 if (q[1] == '\0')
1729 {
1730 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1731 if (strchr_fn)
1732 {
1733 tree c = build_int_cst (integer_type_node, q[0]);
1734 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1735 replace_call_with_call_and_fold (gsi, repl);
1736 return true;
1737 }
1738 }
1739
1740 return false;
1741}
1742
fef5a0d9
RB
/* Simplify a call to the strcat builtin.  DST and SRC are the
   arguments to the call.

   Return false if no simplification was possible.  Otherwise the call
   at *GSI is replaced -- either with DST directly when SRC is the
   empty string, or with an explicit strlen/memcpy sequence that can
   be further expanded by store-by-pieces -- and true is returned.
   (The text below about COMPOUND_EXPR chains described an older
   tree-returning interface; the current function returns bool.)  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen+memcpy expansion grows code; only do it when
     optimizing this block for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy LEN + 1 bytes so the terminating NUL is included.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; materialize that for the LHS.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1834
07f1cf56
RB
1835/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1836 are the arguments to the call. */
1837
1838static bool
1839gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1840{
355fe088 1841 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
1842 tree dest = gimple_call_arg (stmt, 0);
1843 tree src = gimple_call_arg (stmt, 1);
1844 tree size = gimple_call_arg (stmt, 2);
1845 tree fn;
1846 const char *p;
1847
1848
1849 p = c_getstr (src);
1850 /* If the SRC parameter is "", return DEST. */
1851 if (p && *p == '\0')
1852 {
1853 replace_call_with_value (gsi, dest);
1854 return true;
1855 }
1856
1857 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1858 return false;
1859
1860 /* If __builtin_strcat_chk is used, assume strcat is available. */
1861 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1862 if (!fn)
1863 return false;
1864
355fe088 1865 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
1866 replace_call_with_call_and_fold (gsi, repl);
1867 return true;
1868}
1869
ad03a744
RB
/* Simplify a call to the strncat builtin.  Returns true if the call
   was replaced, false if no simplification applied.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* If the requested len is greater than or equal to the string
     length, call strcat: the bound cannot truncate the copy.  */
  if (TREE_CODE (len) == INTEGER_CST && p
      && compare_tree_int (len, strlen (p)) >= 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

      /* If the replacement _DECL isn't initialized, don't do the
	 transformation.  */
      if (!fn)
	return false;

      gcall *repl = gimple_build_call (fn, 2, dst, src);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
1909
745583f9
RB
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  Returns true if the call was simplified.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* SIZE of all-ones means the destination object size is unknown.  */
  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk:
	     the bound is a no-op but the object-size check must stay.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1965
a918bfbf
ML
1966/* Build and append gimple statements to STMTS that would load a first
1967 character of a memory location identified by STR. LOC is location
1968 of the statement. */
1969
1970static tree
1971gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
1972{
1973 tree var;
1974
1975 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
1976 tree cst_uchar_ptr_node
1977 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
1978 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
1979
1980 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
1981 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
1982 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
1983
1984 gimple_assign_set_lhs (stmt, var);
1985 gimple_seq_add_stmt_without_update (stmts, stmt);
1986
1987 return var;
1988}
1989
1990/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
1991 FCODE is the name of the builtin. */
1992
1993static bool
1994gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
1995{
1996 gimple *stmt = gsi_stmt (*gsi);
1997 tree callee = gimple_call_fndecl (stmt);
1998 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
1999
2000 tree type = integer_type_node;
2001 tree str1 = gimple_call_arg (stmt, 0);
2002 tree str2 = gimple_call_arg (stmt, 1);
2003 tree lhs = gimple_call_lhs (stmt);
2004 HOST_WIDE_INT length = -1;
2005
2006 /* Handle strncmp and strncasecmp functions. */
2007 if (gimple_call_num_args (stmt) == 3)
2008 {
2009 tree len = gimple_call_arg (stmt, 2);
2010 if (tree_fits_uhwi_p (len))
2011 length = tree_to_uhwi (len);
2012 }
2013
2014 /* If the LEN parameter is zero, return zero. */
2015 if (length == 0)
2016 {
2017 replace_call_with_value (gsi, integer_zero_node);
2018 return true;
2019 }
2020
2021 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2022 if (operand_equal_p (str1, str2, 0))
2023 {
2024 replace_call_with_value (gsi, integer_zero_node);
2025 return true;
2026 }
2027
2028 const char *p1 = c_getstr (str1);
2029 const char *p2 = c_getstr (str2);
2030
2031 /* For known strings, return an immediate value. */
2032 if (p1 && p2)
2033 {
2034 int r = 0;
2035 bool known_result = false;
2036
2037 switch (fcode)
2038 {
2039 case BUILT_IN_STRCMP:
2040 {
2041 r = strcmp (p1, p2);
2042 known_result = true;
2043 break;
2044 }
2045 case BUILT_IN_STRNCMP:
2046 {
2047 if (length == -1)
2048 break;
2049 r = strncmp (p1, p2, length);
2050 known_result = true;
2051 break;
2052 }
2053 /* Only handleable situation is where the string are equal (result 0),
2054 which is already handled by operand_equal_p case. */
2055 case BUILT_IN_STRCASECMP:
2056 break;
2057 case BUILT_IN_STRNCASECMP:
2058 {
2059 if (length == -1)
2060 break;
2061 r = strncmp (p1, p2, length);
2062 if (r == 0)
2063 known_result = true;
2064 break;;
2065 }
2066 default:
2067 gcc_unreachable ();
2068 }
2069
2070 if (known_result)
2071 {
2072 replace_call_with_value (gsi, build_cmp_result (type, r));
2073 return true;
2074 }
2075 }
2076
2077 bool nonzero_length = length >= 1
2078 || fcode == BUILT_IN_STRCMP
2079 || fcode == BUILT_IN_STRCASECMP;
2080
2081 location_t loc = gimple_location (stmt);
2082
2083 /* If the second arg is "", return *(const unsigned char*)arg1. */
2084 if (p2 && *p2 == '\0' && nonzero_length)
2085 {
2086 gimple_seq stmts = NULL;
2087 tree var = gimple_load_first_char (loc, str1, &stmts);
2088 if (lhs)
2089 {
2090 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2091 gimple_seq_add_stmt_without_update (&stmts, stmt);
2092 }
2093
2094 gsi_replace_with_seq_vops (gsi, stmts);
2095 return true;
2096 }
2097
2098 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2099 if (p1 && *p1 == '\0' && nonzero_length)
2100 {
2101 gimple_seq stmts = NULL;
2102 tree var = gimple_load_first_char (loc, str2, &stmts);
2103
2104 if (lhs)
2105 {
2106 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2107 stmt = gimple_build_assign (c, NOP_EXPR, var);
2108 gimple_seq_add_stmt_without_update (&stmts, stmt);
2109
2110 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2111 gimple_seq_add_stmt_without_update (&stmts, stmt);
2112 }
2113
2114 gsi_replace_with_seq_vops (gsi, stmts);
2115 return true;
2116 }
2117
2118 /* If len parameter is one, return an expression corresponding to
2119 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2120 if (fcode == BUILT_IN_STRNCMP && length == 1)
2121 {
2122 gimple_seq stmts = NULL;
2123 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2124 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2125
2126 if (lhs)
2127 {
2128 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2129 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2130 gimple_seq_add_stmt_without_update (&stmts, convert1);
2131
2132 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2133 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2134 gimple_seq_add_stmt_without_update (&stmts, convert2);
2135
2136 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2137 gimple_seq_add_stmt_without_update (&stmts, stmt);
2138 }
2139
2140 gsi_replace_with_seq_vops (gsi, stmts);
2141 return true;
2142 }
2143
2144 return false;
2145}
2146
488c6247
ML
/* Fold a call to the memchr builtin pointed to by GSI.  Returns true
   if the call was replaced by a constant null pointer or by a pointer
   arithmetic expression, false otherwise.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* We need a constant searched-for character (in the target's
     character set) and a constant length to fold.  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = c_getstr (arg1, &string_length);

  if (p1)
    {
      /* Only search the part of the string literal we actually know;
	 LENGTH may extend beyond it.  */
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  /* If the search did not go past the known string, the result
	     is a definite NULL; otherwise we cannot tell.  */
	  if (length <= string_length)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  /* Found: replace the call with ARG1 + OFFSET.  */
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
a918bfbf 2208
fef5a0d9
RB
/* Fold a call to the fputs builtin.  ARG0 is the string and ARG1 the
   stream argument of the call.  UNLOCKED is true if this is actually
   a call to fputs_unlocked.  Returns true if the call was simplified
   (into fputc/fwrite or deleted), false otherwise.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
2285
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.  FCODE is
   the BUILT_IN_* code of the builtin.  Returns true if the call was
   simplified into the unchecked variant (or into a plain value),
   false otherwise.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound on the actual LEN value, usable when
     LEN itself is not constant.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* The copy could overflow the destination object: keep the check.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2386
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.  FCODE is the
   BUILT_IN_* code of the builtin.  Returns true if the call was
   simplified into an unchecked (or differently checked) variant,
   false otherwise.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound on the length of the SRC string.  */
  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy LEN + 1 bytes to include the terminating NUL.  */
	      gimple_seq stmts = NULL;
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* strcpy needs strictly SIZE > MAXLEN to leave room for the NUL.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2477
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin.  Returns true if the call was simplified, false otherwise.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound on the actual LEN value.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* The copy could overflow the destination: keep the check.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2537
2625bb5d
RB
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Returns true if the call was simplified (into strcpy, or into
   memcpy plus pointer arithmetic), false otherwise.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Copy LEN + 1 bytes to include the terminating NUL.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* Transfer the virtual operands from the original call to the
     memcpy replacement by hand, as it is inserted, not gsi_replace'd.  */
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2601
fef5a0d9
RB
/* Fold a call to {,v}snprintf_chk pointed to by GSI.  FCODE is either
   BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  Returns true if
   the call was rewritten into plain {,v}snprintf, false if a normal
   call should be emitted instead.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      /* MAXLEN is an upper bound on the actual LEN value.  */
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
cbdd87d4 2682
fef5a0d9
RB
/* Fold a call to __{,v}sprintf_chk pointed to by GSI.  FCODE is either
   BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  Returns true if the
   call was rewritten into plain {,v}sprintf, false if a normal call
   should be emitted instead.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  if (! integer_all_onesp (size))
    {
      /* Require LEN strictly below SIZE: room for the terminating NUL.  */
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2778
35770bb2
RB
2779/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2780 ORIG may be null if this is a 2-argument call. We don't attempt to
2781 simplify calls with more than 3 arguments.
2782
a104bd88 2783 Return true if simplification was possible, otherwise false. */
35770bb2 2784
a104bd88 2785bool
dcb7fae2 2786gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
35770bb2 2787{
355fe088 2788 gimple *stmt = gsi_stmt (*gsi);
35770bb2
RB
2789 tree dest = gimple_call_arg (stmt, 0);
2790 tree fmt = gimple_call_arg (stmt, 1);
2791 tree orig = NULL_TREE;
2792 const char *fmt_str = NULL;
2793
2794 /* Verify the required arguments in the original call. We deal with two
2795 types of sprintf() calls: 'sprintf (str, fmt)' and
2796 'sprintf (dest, "%s", orig)'. */
2797 if (gimple_call_num_args (stmt) > 3)
2798 return false;
2799
2800 if (gimple_call_num_args (stmt) == 3)
2801 orig = gimple_call_arg (stmt, 2);
2802
2803 /* Check whether the format is a literal string constant. */
2804 fmt_str = c_getstr (fmt);
2805 if (fmt_str == NULL)
2806 return false;
2807
2808 if (!init_target_chars ())
2809 return false;
2810
2811 /* If the format doesn't contain % args or %%, use strcpy. */
2812 if (strchr (fmt_str, target_percent) == NULL)
2813 {
2814 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2815
2816 if (!fn)
2817 return false;
2818
2819 /* Don't optimize sprintf (buf, "abc", ptr++). */
2820 if (orig)
2821 return false;
2822
2823 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2824 'format' is known to contain no % formats. */
2825 gimple_seq stmts = NULL;
355fe088 2826 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
35770bb2
RB
2827 gimple_seq_add_stmt_without_update (&stmts, repl);
2828 if (gimple_call_lhs (stmt))
2829 {
2830 repl = gimple_build_assign (gimple_call_lhs (stmt),
2831 build_int_cst (integer_type_node,
2832 strlen (fmt_str)));
2833 gimple_seq_add_stmt_without_update (&stmts, repl);
2834 gsi_replace_with_seq_vops (gsi, stmts);
2835 /* gsi now points at the assignment to the lhs, get a
2836 stmt iterator to the memcpy call.
2837 ??? We can't use gsi_for_stmt as that doesn't work when the
2838 CFG isn't built yet. */
2839 gimple_stmt_iterator gsi2 = *gsi;
2840 gsi_prev (&gsi2);
2841 fold_stmt (&gsi2);
2842 }
2843 else
2844 {
2845 gsi_replace_with_seq_vops (gsi, stmts);
2846 fold_stmt (gsi);
2847 }
2848 return true;
2849 }
2850
2851 /* If the format is "%s", use strcpy if the result isn't used. */
2852 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2853 {
2854 tree fn;
2855 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2856
2857 if (!fn)
2858 return false;
2859
2860 /* Don't crash on sprintf (str1, "%s"). */
2861 if (!orig)
2862 return false;
2863
dcb7fae2
RB
2864 tree orig_len = NULL_TREE;
2865 if (gimple_call_lhs (stmt))
35770bb2 2866 {
dcb7fae2 2867 orig_len = get_maxval_strlen (orig, 0);
d7e78447 2868 if (!orig_len)
35770bb2
RB
2869 return false;
2870 }
2871
2872 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2873 gimple_seq stmts = NULL;
355fe088 2874 gimple *repl = gimple_build_call (fn, 2, dest, orig);
35770bb2
RB
2875 gimple_seq_add_stmt_without_update (&stmts, repl);
2876 if (gimple_call_lhs (stmt))
2877 {
d7e78447
RB
2878 if (!useless_type_conversion_p (integer_type_node,
2879 TREE_TYPE (orig_len)))
2880 orig_len = fold_convert (integer_type_node, orig_len);
2881 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
35770bb2
RB
2882 gimple_seq_add_stmt_without_update (&stmts, repl);
2883 gsi_replace_with_seq_vops (gsi, stmts);
2884 /* gsi now points at the assignment to the lhs, get a
2885 stmt iterator to the memcpy call.
2886 ??? We can't use gsi_for_stmt as that doesn't work when the
2887 CFG isn't built yet. */
2888 gimple_stmt_iterator gsi2 = *gsi;
2889 gsi_prev (&gsi2);
2890 fold_stmt (&gsi2);
2891 }
2892 else
2893 {
2894 gsi_replace_with_seq_vops (gsi, stmts);
2895 fold_stmt (gsi);
2896 }
2897 return true;
2898 }
2899 return false;
2900}
2901
d7e78447
RB
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Punt on calls with more than one argument after the format.  */
  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a compile-time constant so the
     bound checks below can be decided here.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters that would have
	     been written; for a literal format with no % that is its
	     length.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      /* The source length must be a known constant to compare it
	 against the destination size below.  */
      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* The call's result is strlen (str2); convert the known
	     length to the expected integer type if needed.  */
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
35770bb2 3038
edd7ae68
RB
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if the call was simplified (replaced in the IL), otherwise
   false.  FCODE is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A %-free format with a trailing argument is suspicious; only
	 the va_list variants legitimately carry an extra argument.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3139
ad03a744
RB
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if the call was simplified (replaced in the IL), otherwise
   false.  FCODE is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle both printf ("%s", str) and printf (string-with-no-%):
     in either case STR below is the string actually printed.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* "%s" with a va_list variant cannot be folded this way.  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  /* The %s argument itself must be a literal string.  */
	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3301
edd7ae68 3302
fef5a0d9
RB
3303
/* Fold a call to __builtin_strlen when the maximum length of its
   argument can be computed.  Replaces the call with that constant
   (gimplified into GSI if needed) and returns true; returns false
   if the length is unknown.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* Ask the strlen machinery for the maximal known length of arg 0.  */
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  /* Make the length a valid GIMPLE operand, inserting any needed
     statements before the call.  */
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
3317
48126138
NS
/* Fold a call to __builtin_acc_on_device (ARG0) into the boolean
   expression ARG0 == val_host || ARG0 == val_dev, where the two
   constants depend on whether this is the host or the accelerator
   compiler.  Returns true if folded.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  /* In the offload compiler the "host" answer inverts and the device
     constant is the one this accelerator was configured for.  */
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  /* host_eq = (arg0 == val_host)  */
  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  /* dev_eq = (arg0 == val_dev)  */
  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  /* result = host_eq | dev_eq  */
  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}
cbdd87d4 3359
fe75f732
PK
3360/* Fold realloc (0, n) -> malloc (n). */
3361
3362static bool
3363gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3364{
3365 gimple *stmt = gsi_stmt (*gsi);
3366 tree arg = gimple_call_arg (stmt, 0);
3367 tree size = gimple_call_arg (stmt, 1);
3368
3369 if (operand_equal_p (arg, null_pointer_node, 0))
3370 {
3371 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3372 if (fn_malloc)
3373 {
3374 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3375 replace_call_with_call_and_fold (gsi, repl);
3376 return true;
3377 }
3378 }
3379 return false;
3380}
3381
dcb7fae2
RB
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  Dispatches to the per-builtin folders above; builtins without
   a dedicated folder fall through to the generic tree-level folder at the
   bottom.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    /* The trailing constant selects the memory-op flavor:
       0 = memcpy, 1 = mempcpy, 3 = memmove.  */
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    /* index/strchr and rindex/strrchr share folders; the bool selects
       the last-occurrence variant.  */
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    /* The printf family folders only handle the small-arity forms;
       other arities fall through to the generic folder below.  */
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
3560
451e8dae
NS
3561/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3562 function calls to constants, where possible. */
3563
3564static tree
3565fold_internal_goacc_dim (const gimple *call)
3566{
629b3d75
MJ
3567 int axis = oacc_get_ifn_dim_arg (call);
3568 int size = oacc_get_fn_dim_size (current_function_decl, axis);
451e8dae
NS
3569 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
3570 tree result = NULL_TREE;
3571
3572 /* If the size is 1, or we only want the size and it is not dynamic,
3573 we know the answer. */
3574 if (size == 1 || (!is_pos && size))
3575 {
3576 tree type = TREE_TYPE (gimple_call_lhs (call));
3577 result = build_int_cst (type, size - is_pos);
3578 }
3579
3580 return result;
3581}
3582
849a76a5
JJ
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  /* Basic shape checks: exactly the 6-argument builtin form, atomics
     inlining enabled, optimizing, no TSan/ASan instrumentation that
     would be confused, and the call carries virtual operands.  */
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  /* The "expected" argument must be the address of a local variable.  */
  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
    return false;

  /* The "weak" argument must be a literal 0 or 1.  */
  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  /* Extract the integral type of the third parameter from the builtin's
     prototype and check that the target can expand a compare-and-swap
     in its mode.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  /* The expected variable must occupy exactly the mode's size so the
     VIEW_CONVERT_EXPRs in the transformation are lossless.  */
  if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
    return false;

  return true;
}
3647
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;
   so that E need not be addressable.  Only called on statements for
   which optimize_atomic_compare_exchange_p returned true.  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  /* ITYPE is the integral type of the builtin's third parameter;
     CTYPE packs old value and success flag as a complex pair.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the expected value into an SSA name before the call.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      /* Reinterpret the expected value in the integral type.  */
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode weak flag and access size in one constant: w * 256 + N.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  /* The internal call takes over the original call's virtual operands.  */
  gimple_set_vdef (g, gimple_vdef (stmt));
  gimple_set_vuse (g, gimple_vuse (stmt));
  SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (stmt))
    {
      /* If the call can throw, follow-up statements must go on the
	 fall-through edge rather than after the throwing call.  */
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      /* i = IMAGPART_EXPR <t>; r = (_Bool) i;  */
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* e = REALPART_EXPR <t>;  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      /* Convert the old value back to the expected variable's type.  */
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  /* Leave *gsi at the statement loading the expected value.  */
  *gsi = gsiret;
}
3737
1304953e
JJ
3738/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3739 doesn't fit into TYPE. The test for overflow should be regardless of
3740 -fwrapv, and even for unsigned types. */
3741
3742bool
3743arith_overflowed_p (enum tree_code code, const_tree type,
3744 const_tree arg0, const_tree arg1)
3745{
3746 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3747 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3748 widest2_int_cst;
3749 widest2_int warg0 = widest2_int_cst (arg0);
3750 widest2_int warg1 = widest2_int_cst (arg1);
3751 widest2_int wres;
3752 switch (code)
3753 {
3754 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3755 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3756 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3757 default: gcc_unreachable ();
3758 }
3759 signop sign = TYPE_SIGN (type);
3760 if (sign == UNSIGNED && wi::neg_p (wres))
3761 return true;
3762 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3763}
3764
cbdd87d4
RG
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   If INPLACE is true the statement is only simplified in place and is
   never replaced or extended with newly built statements (the function
   returns right after the cheap transforms in that case).  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already wraps a known function address;
	     strip the wrapper.  Diagnose targets the type-inheritance
	     analysis considers impossible.  */
          if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						 (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Try speculative devirtualization: if the polymorphic call
	     provably reaches at most one target, fold it to a direct
	     call (one target) or to __builtin_unreachable (no target).  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name valid by giving it a
			     default definition of a fresh variable.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible target: the call site is unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  /* Carry over the virtual operands to the replacement.  */
		  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise still fold *& in the static chain operand.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      /* Machine-dependent builtins are folded by the target hook.  */
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* Fold internal functions: __builtin_expect lowering, UBSan
	 checks whose condition is statically known to hold, OpenACC
	 dimension queries, and the arithmetic overflow-check IFNs
	 (SUBCODE selects the arithmetic operation, CPLX_RESULT is
	 true for the IFN_*_OVERFLOW variants whose result is a
	 complex value of {result, overflow-flag}).  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    /* Drop the check when the object size is unknown (all-ones)
	       or the constant offset is within the constant size.  */
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  /* A pointer offset of zero can never overflow.  */
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    /* Drop the bounds check when the constant index is known
	       to be within the constant bound.  */
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      /* For the complex-result IFNs the arithmetic type is the
		 element type of the complex lhs; without a lhs there is
		 nothing to fold to.  */
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  /* On overflow either record the flag (complex result)
		     or give up folding (UBSan checks must still trap).  */
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      /* Re-check the constant for overflow in the
			 narrower result type.  */
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    /* The conversion could change the value; punt.  */
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Package {result, overflow flag} into the complex value
		 the IFN_*_OVERFLOW functions return.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
4092
e0ee10ed 4093
89a79e96
RB
4094/* Return true whether NAME has a use on STMT. */
4095
4096static bool
355fe088 4097has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
4098{
4099 imm_use_iterator iter;
4100 use_operand_p use_p;
4101 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4102 if (USE_STMT (use_p) == stmt)
4103 return true;
4104 return false;
4105}
4106
e0ee10ed
RB
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt))
      || (COMPARISON_CLASS_P (ops[0])
	  && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
	       && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
	      || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
		  && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
    return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* For a GIMPLE_COND the simplification result must become the
	 condition again; only tree codes can appear here.  */
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	/* Simplified to a bare SSA name: compare it against zero.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  /* Simplified to a constant: the condition is statically
	     known to be false or true.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Otherwise materialize the result as a new SSA name and
	     compare that against zero.  */
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      /* Replacing an assign rhs in place is only possible when the new
	 rhs does not need more operand slots than the old one had.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)), ops);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (rcode.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == rcode)
    {
      /* Same function, possibly simplified arguments: update the
	 argument list of the existing call.  */
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      /* For any other statement with a lhs build the result into SEQ
	 and replace the statement with it wholesale.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
4257
040292e7
RB
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the reference tree to canonicalize; it is rewritten in
   place.  Return true if anything was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the accessed element and
		     check that the access is fully within the vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip handled components down to the base reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component access into the MEM_REF offset.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
4378
cbdd87d4
RG
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE is used to look up the current
   value of SSA operands during simplification.  Return true if the
   statement was changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  /* Remember whether warnings were suppressed on the original statement
     so overflow warnings deferred below are emitted consistently.  */
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires inverting
		     the comparison direction as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* FALLTHRU -- the default case is empty.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      /* Do not build a side-effect sequence when folding in place.  */
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding must not grow the number of operands.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* Whether an input may be folded to a memory reference
	       depends on its constraint.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference under the ADDR_EXPR and re-wrap it.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	/* Replace a returned SSA name by its current value if the
	   valueization callback provides one.  */
	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
4703
e0ee10ed
RB
4704/* Valueziation callback that ends up not following SSA edges. */
4705
4706tree
4707no_follow_ssa_edges (tree)
4708{
4709 return NULL_TREE;
4710}
4711
45cc9f96
RB
4712/* Valueization callback that ends up following single-use SSA edges only. */
4713
4714tree
4715follow_single_use_edges (tree val)
4716{
4717 if (TREE_CODE (val) == SSA_NAME
4718 && !has_single_use (val))
4719 return NULL_TREE;
4720 return val;
4721}
4722
cbdd87d4
RG
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

/* As above, but pass VALUEIZE through to the simplification machinery
   so SSA operands are looked up through the callback while folding.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
4741
59401b92 4742/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
4743 *&x created by constant propagation are handled. The statement cannot
4744 be replaced with a new one. Return true if the statement was
4745 changed, false otherwise.
59401b92 4746 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
4747 be in unfolded state as resulting from for example constant propagation
4748 which can produce *&x = 0. */
4749
4750bool
59401b92 4751fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 4752{
355fe088 4753 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 4754 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 4755 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
4756 return changed;
4757}
4758
e89065a1
SL
4759/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4760 if EXPR is null or we don't know how.
4761 If non-null, the result always has boolean type. */
4762
4763static tree
4764canonicalize_bool (tree expr, bool invert)
4765{
4766 if (!expr)
4767 return NULL_TREE;
4768 else if (invert)
4769 {
4770 if (integer_nonzerop (expr))
4771 return boolean_false_node;
4772 else if (integer_zerop (expr))
4773 return boolean_true_node;
4774 else if (TREE_CODE (expr) == SSA_NAME)
4775 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4776 build_int_cst (TREE_TYPE (expr), 0));
98209db3 4777 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
4778 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4779 boolean_type_node,
4780 TREE_OPERAND (expr, 0),
4781 TREE_OPERAND (expr, 1));
4782 else
4783 return NULL_TREE;
4784 }
4785 else
4786 {
4787 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4788 return expr;
4789 if (integer_nonzerop (expr))
4790 return boolean_true_node;
4791 else if (integer_zerop (expr))
4792 return boolean_false_node;
4793 else if (TREE_CODE (expr) == SSA_NAME)
4794 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4795 build_int_cst (TREE_TYPE (expr), 0));
98209db3 4796 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
4797 return fold_build2 (TREE_CODE (expr),
4798 boolean_type_node,
4799 TREE_OPERAND (expr, 0),
4800 TREE_OPERAND (expr, 1));
4801 else
4802 return NULL_TREE;
4803 }
4804}
4805
4806/* Check to see if a boolean expression EXPR is logically equivalent to the
4807 comparison (OP1 CODE OP2). Check for various identities involving
4808 SSA_NAMEs. */
4809
4810static bool
4811same_bool_comparison_p (const_tree expr, enum tree_code code,
4812 const_tree op1, const_tree op2)
4813{
355fe088 4814 gimple *s;
e89065a1
SL
4815
4816 /* The obvious case. */
4817 if (TREE_CODE (expr) == code
4818 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
4819 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
4820 return true;
4821
4822 /* Check for comparing (name, name != 0) and the case where expr
4823 is an SSA_NAME with a definition matching the comparison. */
4824 if (TREE_CODE (expr) == SSA_NAME
4825 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4826 {
4827 if (operand_equal_p (expr, op1, 0))
4828 return ((code == NE_EXPR && integer_zerop (op2))
4829 || (code == EQ_EXPR && integer_nonzerop (op2)));
4830 s = SSA_NAME_DEF_STMT (expr);
4831 if (is_gimple_assign (s)
4832 && gimple_assign_rhs_code (s) == code
4833 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
4834 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
4835 return true;
4836 }
4837
4838 /* If op1 is of the form (name != 0) or (name == 0), and the definition
4839 of name is a comparison, recurse. */
4840 if (TREE_CODE (op1) == SSA_NAME
4841 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
4842 {
4843 s = SSA_NAME_DEF_STMT (op1);
4844 if (is_gimple_assign (s)
4845 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
4846 {
4847 enum tree_code c = gimple_assign_rhs_code (s);
4848 if ((c == NE_EXPR && integer_zerop (op2))
4849 || (c == EQ_EXPR && integer_nonzerop (op2)))
4850 return same_bool_comparison_p (expr, c,
4851 gimple_assign_rhs1 (s),
4852 gimple_assign_rhs2 (s));
4853 if ((c == EQ_EXPR && integer_zerop (op2))
4854 || (c == NE_EXPR && integer_nonzerop (op2)))
4855 return same_bool_comparison_p (expr,
4856 invert_tree_comparison (c, false),
4857 gimple_assign_rhs1 (s),
4858 gimple_assign_rhs2 (s));
4859 }
4860 }
4861 return false;
4862}
4863
4864/* Check to see if two boolean expressions OP1 and OP2 are logically
4865 equivalent. */
4866
4867static bool
4868same_bool_result_p (const_tree op1, const_tree op2)
4869{
4870 /* Simple cases first. */
4871 if (operand_equal_p (op1, op2, 0))
4872 return true;
4873
4874 /* Check the cases where at least one of the operands is a comparison.
4875 These are a bit smarter than operand_equal_p in that they apply some
4876 identifies on SSA_NAMEs. */
98209db3 4877 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
4878 && same_bool_comparison_p (op1, TREE_CODE (op2),
4879 TREE_OPERAND (op2, 0),
4880 TREE_OPERAND (op2, 1)))
4881 return true;
98209db3 4882 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
4883 && same_bool_comparison_p (op2, TREE_CODE (op1),
4884 TREE_OPERAND (op1, 0),
4885 TREE_OPERAND (op1, 1)))
4886 return true;
4887
4888 /* Default case. */
4889 return false;
4890}
4891
4892/* Forward declarations for some mutually recursive functions. */
4893
4894static tree
4895and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4896 enum tree_code code2, tree op2a, tree op2b);
4897static tree
4898and_var_with_comparison (tree var, bool invert,
4899 enum tree_code code2, tree op2a, tree op2b);
4900static tree
355fe088 4901and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
4902 enum tree_code code2, tree op2a, tree op2b);
4903static tree
4904or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4905 enum tree_code code2, tree op2a, tree op2b);
4906static tree
4907or_var_with_comparison (tree var, bool invert,
4908 enum tree_code code2, tree op2a, tree op2b);
4909static tree
355fe088 4910or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
4911 enum tree_code code2, tree op2a, tree op2b);
4912
4913/* Helper function for and_comparisons_1: try to simplify the AND of the
4914 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4915 If INVERT is true, invert the value of the VAR before doing the AND.
4916 Return NULL_EXPR if we can't simplify this to a single expression. */
4917
4918static tree
4919and_var_with_comparison (tree var, bool invert,
4920 enum tree_code code2, tree op2a, tree op2b)
4921{
4922 tree t;
355fe088 4923 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
4924
4925 /* We can only deal with variables whose definitions are assignments. */
4926 if (!is_gimple_assign (stmt))
4927 return NULL_TREE;
4928
4929 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4930 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4931 Then we only have to consider the simpler non-inverted cases. */
4932 if (invert)
4933 t = or_var_with_comparison_1 (stmt,
4934 invert_tree_comparison (code2, false),
4935 op2a, op2b);
4936 else
4937 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4938 return canonicalize_bool (t, invert);
4939}
4940
4941/* Try to simplify the AND of the ssa variable defined by the assignment
4942 STMT with the comparison specified by (OP2A CODE2 OP2B).
4943 Return NULL_EXPR if we can't simplify this to a single expression. */
4944
4945static tree
355fe088 4946and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
4947 enum tree_code code2, tree op2a, tree op2b)
4948{
4949 tree var = gimple_assign_lhs (stmt);
4950 tree true_test_var = NULL_TREE;
4951 tree false_test_var = NULL_TREE;
4952 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4953
4954 /* Check for identities like (var AND (var == 0)) => false. */
4955 if (TREE_CODE (op2a) == SSA_NAME
4956 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4957 {
4958 if ((code2 == NE_EXPR && integer_zerop (op2b))
4959 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4960 {
4961 true_test_var = op2a;
4962 if (var == true_test_var)
4963 return var;
4964 }
4965 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4966 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4967 {
4968 false_test_var = op2a;
4969 if (var == false_test_var)
4970 return boolean_false_node;
4971 }
4972 }
4973
4974 /* If the definition is a comparison, recurse on it. */
4975 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4976 {
4977 tree t = and_comparisons_1 (innercode,
4978 gimple_assign_rhs1 (stmt),
4979 gimple_assign_rhs2 (stmt),
4980 code2,
4981 op2a,
4982 op2b);
4983 if (t)
4984 return t;
4985 }
4986
4987 /* If the definition is an AND or OR expression, we may be able to
4988 simplify by reassociating. */
eb9820c0
KT
4989 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4990 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
4991 {
4992 tree inner1 = gimple_assign_rhs1 (stmt);
4993 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 4994 gimple *s;
e89065a1
SL
4995 tree t;
4996 tree partial = NULL_TREE;
eb9820c0 4997 bool is_and = (innercode == BIT_AND_EXPR);
e89065a1
SL
4998
4999 /* Check for boolean identities that don't require recursive examination
5000 of inner1/inner2:
5001 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5002 inner1 AND (inner1 OR inner2) => inner1
5003 !inner1 AND (inner1 AND inner2) => false
5004 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5005 Likewise for similar cases involving inner2. */
5006 if (inner1 == true_test_var)
5007 return (is_and ? var : inner1);
5008 else if (inner2 == true_test_var)
5009 return (is_and ? var : inner2);
5010 else if (inner1 == false_test_var)
5011 return (is_and
5012 ? boolean_false_node
5013 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5014 else if (inner2 == false_test_var)
5015 return (is_and
5016 ? boolean_false_node
5017 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5018
5019 /* Next, redistribute/reassociate the AND across the inner tests.
5020 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5021 if (TREE_CODE (inner1) == SSA_NAME
5022 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5023 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5024 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5025 gimple_assign_rhs1 (s),
5026 gimple_assign_rhs2 (s),
5027 code2, op2a, op2b)))
5028 {
5029 /* Handle the AND case, where we are reassociating:
5030 (inner1 AND inner2) AND (op2a code2 op2b)
5031 => (t AND inner2)
5032 If the partial result t is a constant, we win. Otherwise
5033 continue on to try reassociating with the other inner test. */
5034 if (is_and)
5035 {
5036 if (integer_onep (t))
5037 return inner2;
5038 else if (integer_zerop (t))
5039 return boolean_false_node;
5040 }
5041
5042 /* Handle the OR case, where we are redistributing:
5043 (inner1 OR inner2) AND (op2a code2 op2b)
5044 => (t OR (inner2 AND (op2a code2 op2b))) */
8236c8eb
JJ
5045 else if (integer_onep (t))
5046 return boolean_true_node;
5047
5048 /* Save partial result for later. */
5049 partial = t;
e89065a1
SL
5050 }
5051
5052 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5053 if (TREE_CODE (inner2) == SSA_NAME
5054 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5055 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5056 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5057 gimple_assign_rhs1 (s),
5058 gimple_assign_rhs2 (s),
5059 code2, op2a, op2b)))
5060 {
5061 /* Handle the AND case, where we are reassociating:
5062 (inner1 AND inner2) AND (op2a code2 op2b)
5063 => (inner1 AND t) */
5064 if (is_and)
5065 {
5066 if (integer_onep (t))
5067 return inner1;
5068 else if (integer_zerop (t))
5069 return boolean_false_node;
8236c8eb
JJ
5070 /* If both are the same, we can apply the identity
5071 (x AND x) == x. */
5072 else if (partial && same_bool_result_p (t, partial))
5073 return t;
e89065a1
SL
5074 }
5075
5076 /* Handle the OR case. where we are redistributing:
5077 (inner1 OR inner2) AND (op2a code2 op2b)
5078 => (t OR (inner1 AND (op2a code2 op2b)))
5079 => (t OR partial) */
5080 else
5081 {
5082 if (integer_onep (t))
5083 return boolean_true_node;
5084 else if (partial)
5085 {
5086 /* We already got a simplification for the other
5087 operand to the redistributed OR expression. The
5088 interesting case is when at least one is false.
5089 Or, if both are the same, we can apply the identity
5090 (x OR x) == x. */
5091 if (integer_zerop (partial))
5092 return t;
5093 else if (integer_zerop (t))
5094 return partial;
5095 else if (same_bool_result_p (t, partial))
5096 return t;
5097 }
5098 }
5099 }
5100 }
5101 return NULL_TREE;
5102}
5103
5104/* Try to simplify the AND of two comparisons defined by
5105 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5106 If this can be done without constructing an intermediate value,
5107 return the resulting tree; otherwise NULL_TREE is returned.
5108 This function is deliberately asymmetric as it recurses on SSA_DEFs
5109 in the first comparison but not the second. */
5110
5111static tree
5112and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5113 enum tree_code code2, tree op2a, tree op2b)
5114{
ae22ac3c 5115 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 5116
e89065a1
SL
5117 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5118 if (operand_equal_p (op1a, op2a, 0)
5119 && operand_equal_p (op1b, op2b, 0))
5120 {
eb9820c0 5121 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5122 tree t = combine_comparisons (UNKNOWN_LOCATION,
5123 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 5124 truth_type, op1a, op1b);
e89065a1
SL
5125 if (t)
5126 return t;
5127 }
5128
5129 /* Likewise the swapped case of the above. */
5130 if (operand_equal_p (op1a, op2b, 0)
5131 && operand_equal_p (op1b, op2a, 0))
5132 {
eb9820c0 5133 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5134 tree t = combine_comparisons (UNKNOWN_LOCATION,
5135 TRUTH_ANDIF_EXPR, code1,
5136 swap_tree_comparison (code2),
31ed6226 5137 truth_type, op1a, op1b);
e89065a1
SL
5138 if (t)
5139 return t;
5140 }
5141
5142 /* If both comparisons are of the same value against constants, we might
5143 be able to merge them. */
5144 if (operand_equal_p (op1a, op2a, 0)
5145 && TREE_CODE (op1b) == INTEGER_CST
5146 && TREE_CODE (op2b) == INTEGER_CST)
5147 {
5148 int cmp = tree_int_cst_compare (op1b, op2b);
5149
5150 /* If we have (op1a == op1b), we should either be able to
5151 return that or FALSE, depending on whether the constant op1b
5152 also satisfies the other comparison against op2b. */
5153 if (code1 == EQ_EXPR)
5154 {
5155 bool done = true;
5156 bool val;
5157 switch (code2)
5158 {
5159 case EQ_EXPR: val = (cmp == 0); break;
5160 case NE_EXPR: val = (cmp != 0); break;
5161 case LT_EXPR: val = (cmp < 0); break;
5162 case GT_EXPR: val = (cmp > 0); break;
5163 case LE_EXPR: val = (cmp <= 0); break;
5164 case GE_EXPR: val = (cmp >= 0); break;
5165 default: done = false;
5166 }
5167 if (done)
5168 {
5169 if (val)
5170 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5171 else
5172 return boolean_false_node;
5173 }
5174 }
5175 /* Likewise if the second comparison is an == comparison. */
5176 else if (code2 == EQ_EXPR)
5177 {
5178 bool done = true;
5179 bool val;
5180 switch (code1)
5181 {
5182 case EQ_EXPR: val = (cmp == 0); break;
5183 case NE_EXPR: val = (cmp != 0); break;
5184 case LT_EXPR: val = (cmp > 0); break;
5185 case GT_EXPR: val = (cmp < 0); break;
5186 case LE_EXPR: val = (cmp >= 0); break;
5187 case GE_EXPR: val = (cmp <= 0); break;
5188 default: done = false;
5189 }
5190 if (done)
5191 {
5192 if (val)
5193 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5194 else
5195 return boolean_false_node;
5196 }
5197 }
5198
5199 /* Same business with inequality tests. */
5200 else if (code1 == NE_EXPR)
5201 {
5202 bool val;
5203 switch (code2)
5204 {
5205 case EQ_EXPR: val = (cmp != 0); break;
5206 case NE_EXPR: val = (cmp == 0); break;
5207 case LT_EXPR: val = (cmp >= 0); break;
5208 case GT_EXPR: val = (cmp <= 0); break;
5209 case LE_EXPR: val = (cmp > 0); break;
5210 case GE_EXPR: val = (cmp < 0); break;
5211 default:
5212 val = false;
5213 }
5214 if (val)
5215 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5216 }
5217 else if (code2 == NE_EXPR)
5218 {
5219 bool val;
5220 switch (code1)
5221 {
5222 case EQ_EXPR: val = (cmp == 0); break;
5223 case NE_EXPR: val = (cmp != 0); break;
5224 case LT_EXPR: val = (cmp <= 0); break;
5225 case GT_EXPR: val = (cmp >= 0); break;
5226 case LE_EXPR: val = (cmp < 0); break;
5227 case GE_EXPR: val = (cmp > 0); break;
5228 default:
5229 val = false;
5230 }
5231 if (val)
5232 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5233 }
5234
5235 /* Chose the more restrictive of two < or <= comparisons. */
5236 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5237 && (code2 == LT_EXPR || code2 == LE_EXPR))
5238 {
5239 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5240 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5241 else
5242 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5243 }
5244
5245 /* Likewise chose the more restrictive of two > or >= comparisons. */
5246 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5247 && (code2 == GT_EXPR || code2 == GE_EXPR))
5248 {
5249 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5250 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5251 else
5252 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5253 }
5254
5255 /* Check for singleton ranges. */
5256 else if (cmp == 0
5257 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5258 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5259 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5260
5261 /* Check for disjoint ranges. */
5262 else if (cmp <= 0
5263 && (code1 == LT_EXPR || code1 == LE_EXPR)
5264 && (code2 == GT_EXPR || code2 == GE_EXPR))
5265 return boolean_false_node;
5266 else if (cmp >= 0
5267 && (code1 == GT_EXPR || code1 == GE_EXPR)
5268 && (code2 == LT_EXPR || code2 == LE_EXPR))
5269 return boolean_false_node;
5270 }
5271
5272 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5273 NAME's definition is a truth value. See if there are any simplifications
5274 that can be done against the NAME's definition. */
5275 if (TREE_CODE (op1a) == SSA_NAME
5276 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5277 && (integer_zerop (op1b) || integer_onep (op1b)))
5278 {
5279 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5280 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 5281 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
5282 switch (gimple_code (stmt))
5283 {
5284 case GIMPLE_ASSIGN:
5285 /* Try to simplify by copy-propagating the definition. */
5286 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5287
5288 case GIMPLE_PHI:
5289 /* If every argument to the PHI produces the same result when
5290 ANDed with the second comparison, we win.
5291 Do not do this unless the type is bool since we need a bool
5292 result here anyway. */
5293 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5294 {
5295 tree result = NULL_TREE;
5296 unsigned i;
5297 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5298 {
5299 tree arg = gimple_phi_arg_def (stmt, i);
5300
5301 /* If this PHI has itself as an argument, ignore it.
5302 If all the other args produce the same result,
5303 we're still OK. */
5304 if (arg == gimple_phi_result (stmt))
5305 continue;
5306 else if (TREE_CODE (arg) == INTEGER_CST)
5307 {
5308 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5309 {
5310 if (!result)
5311 result = boolean_false_node;
5312 else if (!integer_zerop (result))
5313 return NULL_TREE;
5314 }
5315 else if (!result)
5316 result = fold_build2 (code2, boolean_type_node,
5317 op2a, op2b);
5318 else if (!same_bool_comparison_p (result,
5319 code2, op2a, op2b))
5320 return NULL_TREE;
5321 }
0e8b84ec
JJ
5322 else if (TREE_CODE (arg) == SSA_NAME
5323 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 5324 {
6c66f733 5325 tree temp;
355fe088 5326 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
5327 /* In simple cases we can look through PHI nodes,
5328 but we have to be careful with loops.
5329 See PR49073. */
5330 if (! dom_info_available_p (CDI_DOMINATORS)
5331 || gimple_bb (def_stmt) == gimple_bb (stmt)
5332 || dominated_by_p (CDI_DOMINATORS,
5333 gimple_bb (def_stmt),
5334 gimple_bb (stmt)))
5335 return NULL_TREE;
5336 temp = and_var_with_comparison (arg, invert, code2,
5337 op2a, op2b);
e89065a1
SL
5338 if (!temp)
5339 return NULL_TREE;
5340 else if (!result)
5341 result = temp;
5342 else if (!same_bool_result_p (result, temp))
5343 return NULL_TREE;
5344 }
5345 else
5346 return NULL_TREE;
5347 }
5348 return result;
5349 }
5350
5351 default:
5352 break;
5353 }
5354 }
5355 return NULL_TREE;
5356}
5357
5358/* Try to simplify the AND of two comparisons, specified by
5359 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5360 If this can be simplified to a single expression (without requiring
5361 introducing more SSA variables to hold intermediate values),
5362 return the resulting tree. Otherwise return NULL_TREE.
5363 If the result expression is non-null, it has boolean type. */
5364
5365tree
5366maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5367 enum tree_code code2, tree op2a, tree op2b)
5368{
5369 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5370 if (t)
5371 return t;
5372 else
5373 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5374}
5375
5376/* Helper function for or_comparisons_1: try to simplify the OR of the
5377 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5378 If INVERT is true, invert the value of VAR before doing the OR.
5379 Return NULL_EXPR if we can't simplify this to a single expression. */
5380
5381static tree
5382or_var_with_comparison (tree var, bool invert,
5383 enum tree_code code2, tree op2a, tree op2b)
5384{
5385 tree t;
355fe088 5386 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5387
5388 /* We can only deal with variables whose definitions are assignments. */
5389 if (!is_gimple_assign (stmt))
5390 return NULL_TREE;
5391
5392 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5393 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5394 Then we only have to consider the simpler non-inverted cases. */
5395 if (invert)
5396 t = and_var_with_comparison_1 (stmt,
5397 invert_tree_comparison (code2, false),
5398 op2a, op2b);
5399 else
5400 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5401 return canonicalize_bool (t, invert);
5402}
5403
5404/* Try to simplify the OR of the ssa variable defined by the assignment
5405 STMT with the comparison specified by (OP2A CODE2 OP2B).
5406 Return NULL_EXPR if we can't simplify this to a single expression. */
5407
5408static tree
355fe088 5409or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5410 enum tree_code code2, tree op2a, tree op2b)
5411{
5412 tree var = gimple_assign_lhs (stmt);
5413 tree true_test_var = NULL_TREE;
5414 tree false_test_var = NULL_TREE;
5415 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5416
5417 /* Check for identities like (var OR (var != 0)) => true . */
5418 if (TREE_CODE (op2a) == SSA_NAME
5419 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5420 {
5421 if ((code2 == NE_EXPR && integer_zerop (op2b))
5422 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5423 {
5424 true_test_var = op2a;
5425 if (var == true_test_var)
5426 return var;
5427 }
5428 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5429 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5430 {
5431 false_test_var = op2a;
5432 if (var == false_test_var)
5433 return boolean_true_node;
5434 }
5435 }
5436
5437 /* If the definition is a comparison, recurse on it. */
5438 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5439 {
5440 tree t = or_comparisons_1 (innercode,
5441 gimple_assign_rhs1 (stmt),
5442 gimple_assign_rhs2 (stmt),
5443 code2,
5444 op2a,
5445 op2b);
5446 if (t)
5447 return t;
5448 }
5449
5450 /* If the definition is an AND or OR expression, we may be able to
5451 simplify by reassociating. */
eb9820c0
KT
5452 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5453 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
5454 {
5455 tree inner1 = gimple_assign_rhs1 (stmt);
5456 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 5457 gimple *s;
e89065a1
SL
5458 tree t;
5459 tree partial = NULL_TREE;
eb9820c0 5460 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
5461
5462 /* Check for boolean identities that don't require recursive examination
5463 of inner1/inner2:
5464 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5465 inner1 OR (inner1 AND inner2) => inner1
5466 !inner1 OR (inner1 OR inner2) => true
5467 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5468 */
5469 if (inner1 == true_test_var)
5470 return (is_or ? var : inner1);
5471 else if (inner2 == true_test_var)
5472 return (is_or ? var : inner2);
5473 else if (inner1 == false_test_var)
5474 return (is_or
5475 ? boolean_true_node
5476 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5477 else if (inner2 == false_test_var)
5478 return (is_or
5479 ? boolean_true_node
5480 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5481
5482 /* Next, redistribute/reassociate the OR across the inner tests.
5483 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5484 if (TREE_CODE (inner1) == SSA_NAME
5485 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5486 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5487 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5488 gimple_assign_rhs1 (s),
5489 gimple_assign_rhs2 (s),
5490 code2, op2a, op2b)))
5491 {
5492 /* Handle the OR case, where we are reassociating:
5493 (inner1 OR inner2) OR (op2a code2 op2b)
5494 => (t OR inner2)
5495 If the partial result t is a constant, we win. Otherwise
5496 continue on to try reassociating with the other inner test. */
8236c8eb 5497 if (is_or)
e89065a1
SL
5498 {
5499 if (integer_onep (t))
5500 return boolean_true_node;
5501 else if (integer_zerop (t))
5502 return inner2;
5503 }
5504
5505 /* Handle the AND case, where we are redistributing:
5506 (inner1 AND inner2) OR (op2a code2 op2b)
5507 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
5508 else if (integer_zerop (t))
5509 return boolean_false_node;
5510
5511 /* Save partial result for later. */
5512 partial = t;
e89065a1
SL
5513 }
5514
5515 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5516 if (TREE_CODE (inner2) == SSA_NAME
5517 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5518 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5519 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5520 gimple_assign_rhs1 (s),
5521 gimple_assign_rhs2 (s),
5522 code2, op2a, op2b)))
5523 {
5524 /* Handle the OR case, where we are reassociating:
5525 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
5526 => (inner1 OR t)
5527 => (t OR partial) */
5528 if (is_or)
e89065a1
SL
5529 {
5530 if (integer_zerop (t))
5531 return inner1;
5532 else if (integer_onep (t))
5533 return boolean_true_node;
8236c8eb
JJ
5534 /* If both are the same, we can apply the identity
5535 (x OR x) == x. */
5536 else if (partial && same_bool_result_p (t, partial))
5537 return t;
e89065a1
SL
5538 }
5539
5540 /* Handle the AND case, where we are redistributing:
5541 (inner1 AND inner2) OR (op2a code2 op2b)
5542 => (t AND (inner1 OR (op2a code2 op2b)))
5543 => (t AND partial) */
5544 else
5545 {
5546 if (integer_zerop (t))
5547 return boolean_false_node;
5548 else if (partial)
5549 {
5550 /* We already got a simplification for the other
5551 operand to the redistributed AND expression. The
5552 interesting case is when at least one is true.
5553 Or, if both are the same, we can apply the identity
8236c8eb 5554 (x AND x) == x. */
e89065a1
SL
5555 if (integer_onep (partial))
5556 return t;
5557 else if (integer_onep (t))
5558 return partial;
5559 else if (same_bool_result_p (t, partial))
8236c8eb 5560 return t;
e89065a1
SL
5561 }
5562 }
5563 }
5564 }
5565 return NULL_TREE;
5566}
5567
5568/* Try to simplify the OR of two comparisons defined by
5569 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5570 If this can be done without constructing an intermediate value,
5571 return the resulting tree; otherwise NULL_TREE is returned.
5572 This function is deliberately asymmetric as it recurses on SSA_DEFs
5573 in the first comparison but not the second. */
5574
5575static tree
5576or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5577 enum tree_code code2, tree op2a, tree op2b)
5578{
ae22ac3c 5579 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 5580
e89065a1
SL
5581 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5582 if (operand_equal_p (op1a, op2a, 0)
5583 && operand_equal_p (op1b, op2b, 0))
5584 {
eb9820c0 5585 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5586 tree t = combine_comparisons (UNKNOWN_LOCATION,
5587 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 5588 truth_type, op1a, op1b);
e89065a1
SL
5589 if (t)
5590 return t;
5591 }
5592
5593 /* Likewise the swapped case of the above. */
5594 if (operand_equal_p (op1a, op2b, 0)
5595 && operand_equal_p (op1b, op2a, 0))
5596 {
eb9820c0 5597 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5598 tree t = combine_comparisons (UNKNOWN_LOCATION,
5599 TRUTH_ORIF_EXPR, code1,
5600 swap_tree_comparison (code2),
31ed6226 5601 truth_type, op1a, op1b);
e89065a1
SL
5602 if (t)
5603 return t;
5604 }
5605
5606 /* If both comparisons are of the same value against constants, we might
5607 be able to merge them. */
5608 if (operand_equal_p (op1a, op2a, 0)
5609 && TREE_CODE (op1b) == INTEGER_CST
5610 && TREE_CODE (op2b) == INTEGER_CST)
5611 {
5612 int cmp = tree_int_cst_compare (op1b, op2b);
5613
5614 /* If we have (op1a != op1b), we should either be able to
5615 return that or TRUE, depending on whether the constant op1b
5616 also satisfies the other comparison against op2b. */
5617 if (code1 == NE_EXPR)
5618 {
5619 bool done = true;
5620 bool val;
5621 switch (code2)
5622 {
5623 case EQ_EXPR: val = (cmp == 0); break;
5624 case NE_EXPR: val = (cmp != 0); break;
5625 case LT_EXPR: val = (cmp < 0); break;
5626 case GT_EXPR: val = (cmp > 0); break;
5627 case LE_EXPR: val = (cmp <= 0); break;
5628 case GE_EXPR: val = (cmp >= 0); break;
5629 default: done = false;
5630 }
5631 if (done)
5632 {
5633 if (val)
5634 return boolean_true_node;
5635 else
5636 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5637 }
5638 }
5639 /* Likewise if the second comparison is a != comparison. */
5640 else if (code2 == NE_EXPR)
5641 {
5642 bool done = true;
5643 bool val;
5644 switch (code1)
5645 {
5646 case EQ_EXPR: val = (cmp == 0); break;
5647 case NE_EXPR: val = (cmp != 0); break;
5648 case LT_EXPR: val = (cmp > 0); break;
5649 case GT_EXPR: val = (cmp < 0); break;
5650 case LE_EXPR: val = (cmp >= 0); break;
5651 case GE_EXPR: val = (cmp <= 0); break;
5652 default: done = false;
5653 }
5654 if (done)
5655 {
5656 if (val)
5657 return boolean_true_node;
5658 else
5659 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5660 }
5661 }
5662
5663 /* See if an equality test is redundant with the other comparison. */
5664 else if (code1 == EQ_EXPR)
5665 {
5666 bool val;
5667 switch (code2)
5668 {
5669 case EQ_EXPR: val = (cmp == 0); break;
5670 case NE_EXPR: val = (cmp != 0); break;
5671 case LT_EXPR: val = (cmp < 0); break;
5672 case GT_EXPR: val = (cmp > 0); break;
5673 case LE_EXPR: val = (cmp <= 0); break;
5674 case GE_EXPR: val = (cmp >= 0); break;
5675 default:
5676 val = false;
5677 }
5678 if (val)
5679 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5680 }
5681 else if (code2 == EQ_EXPR)
5682 {
5683 bool val;
5684 switch (code1)
5685 {
5686 case EQ_EXPR: val = (cmp == 0); break;
5687 case NE_EXPR: val = (cmp != 0); break;
5688 case LT_EXPR: val = (cmp > 0); break;
5689 case GT_EXPR: val = (cmp < 0); break;
5690 case LE_EXPR: val = (cmp >= 0); break;
5691 case GE_EXPR: val = (cmp <= 0); break;
5692 default:
5693 val = false;
5694 }
5695 if (val)
5696 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5697 }
5698
5699 /* Chose the less restrictive of two < or <= comparisons. */
5700 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5701 && (code2 == LT_EXPR || code2 == LE_EXPR))
5702 {
5703 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5704 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5705 else
5706 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5707 }
5708
5709 /* Likewise chose the less restrictive of two > or >= comparisons. */
5710 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5711 && (code2 == GT_EXPR || code2 == GE_EXPR))
5712 {
5713 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5714 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5715 else
5716 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5717 }
5718
5719 /* Check for singleton ranges. */
5720 else if (cmp == 0
5721 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5722 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5723 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5724
5725 /* Check for less/greater pairs that don't restrict the range at all. */
5726 else if (cmp >= 0
5727 && (code1 == LT_EXPR || code1 == LE_EXPR)
5728 && (code2 == GT_EXPR || code2 == GE_EXPR))
5729 return boolean_true_node;
5730 else if (cmp <= 0
5731 && (code1 == GT_EXPR || code1 == GE_EXPR)
5732 && (code2 == LT_EXPR || code2 == LE_EXPR))
5733 return boolean_true_node;
5734 }
5735
5736 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5737 NAME's definition is a truth value. See if there are any simplifications
5738 that can be done against the NAME's definition. */
5739 if (TREE_CODE (op1a) == SSA_NAME
5740 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5741 && (integer_zerop (op1b) || integer_onep (op1b)))
5742 {
5743 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5744 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 5745 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
5746 switch (gimple_code (stmt))
5747 {
5748 case GIMPLE_ASSIGN:
5749 /* Try to simplify by copy-propagating the definition. */
5750 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5751
5752 case GIMPLE_PHI:
5753 /* If every argument to the PHI produces the same result when
5754 ORed with the second comparison, we win.
5755 Do not do this unless the type is bool since we need a bool
5756 result here anyway. */
5757 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5758 {
5759 tree result = NULL_TREE;
5760 unsigned i;
5761 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5762 {
5763 tree arg = gimple_phi_arg_def (stmt, i);
5764
5765 /* If this PHI has itself as an argument, ignore it.
5766 If all the other args produce the same result,
5767 we're still OK. */
5768 if (arg == gimple_phi_result (stmt))
5769 continue;
5770 else if (TREE_CODE (arg) == INTEGER_CST)
5771 {
5772 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5773 {
5774 if (!result)
5775 result = boolean_true_node;
5776 else if (!integer_onep (result))
5777 return NULL_TREE;
5778 }
5779 else if (!result)
5780 result = fold_build2 (code2, boolean_type_node,
5781 op2a, op2b);
5782 else if (!same_bool_comparison_p (result,
5783 code2, op2a, op2b))
5784 return NULL_TREE;
5785 }
0e8b84ec
JJ
5786 else if (TREE_CODE (arg) == SSA_NAME
5787 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 5788 {
6c66f733 5789 tree temp;
355fe088 5790 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
5791 /* In simple cases we can look through PHI nodes,
5792 but we have to be careful with loops.
5793 See PR49073. */
5794 if (! dom_info_available_p (CDI_DOMINATORS)
5795 || gimple_bb (def_stmt) == gimple_bb (stmt)
5796 || dominated_by_p (CDI_DOMINATORS,
5797 gimple_bb (def_stmt),
5798 gimple_bb (stmt)))
5799 return NULL_TREE;
5800 temp = or_var_with_comparison (arg, invert, code2,
5801 op2a, op2b);
e89065a1
SL
5802 if (!temp)
5803 return NULL_TREE;
5804 else if (!result)
5805 result = temp;
5806 else if (!same_bool_result_p (result, temp))
5807 return NULL_TREE;
5808 }
5809 else
5810 return NULL_TREE;
5811 }
5812 return result;
5813 }
5814
5815 default:
5816 break;
5817 }
5818 }
5819 return NULL_TREE;
5820}
5821
5822/* Try to simplify the OR of two comparisons, specified by
5823 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5824 If this can be simplified to a single expression (without requiring
5825 introducing more SSA variables to hold intermediate values),
5826 return the resulting tree. Otherwise return NULL_TREE.
5827 If the result expression is non-null, it has boolean type. */
5828
5829tree
5830maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5831 enum tree_code code2, tree op2a, tree op2b)
5832{
5833 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5834 if (t)
5835 return t;
5836 else
5837 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5838}
cfef45c8
RG
5839
5840
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* First give the generic match-and-simplify machinery a chance.
     ??? The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (rcode, ops))
	res = ops[0];
      else if (mprts_hook)
	res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  /* Fall back to statement-kind specific handling.  */
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      /* A fully-populated vector CONSTRUCTOR whose valueized
		 elements are all constants folds to a VECTOR_CST.  */
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i, nelts;
		  tree val;

		  nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs));
		  auto_vec<tree, 32> vec (nelts);
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec.quick_push (val);
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      /* Rewrite *&decl style MEM_REFs with a constant
			 address so the aggregate folder below can see
			 through them.  */
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build_fold_addr_expr_loc
			(loc,
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we cant reduce to a SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    /* Put a constant, if any, on the RHS.  */
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    /* Map the UBSAN overflow-checking internal calls to their
	       plain arithmetic counterpart and fold that, accepting the
	       result only when it did not overflow.  */
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		{
		  /* __builtin_expect folds to its first argument when
		     that is constant.  */
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree op0 = (*valueize) (arg0);
		  if (TREE_CODE (op0) == INTEGER_CST)
		    return op0;
		  return NULL_TREE;
		}
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	/* A direct call to a builtin with constant arguments may fold
	   via the generic builtin folders.  */
	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
6147
6148/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6149 Returns NULL_TREE if folding to a constant is not possible, otherwise
6150 returns a constant according to is_gimple_min_invariant. */
6151
6152tree
355fe088 6153gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
6154{
6155 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6156 if (res && is_gimple_min_invariant (res))
6157 return res;
6158 return NULL_TREE;
6159}
6160
6161
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant byte offset into *BIT_OFFSET and
	 strip the address operation.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only exact, constant-position accesses can be resolved.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
6244
cfef45c8
RG
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  FROM_DECL, if non-NULL, is the decl the
   constructor comes from and is passed along for canonicalization.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
6304
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   FROM_DECL, if non-NULL, is the decl the constructor comes from.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  /* Scan the fields for one whose bit range covers the whole access.  */
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;
	  /* Recurse into the field's own initializer value.  */
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
6367
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  FROM_DECL, if non-NULL, is the decl CTOR
   belongs to.  Returns the folded value or NULL_TREE on failure.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  /* Dispatch on the shape of the CONSTRUCTOR.  */
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {

      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
6426
6427/* Return the tree representing the element referenced by T if T is an
6428 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
6429 names using VALUEIZE. Return NULL_TREE otherwise. */
6430
6431tree
6432fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6433{
6434 tree ctor, idx, base;
6435 HOST_WIDE_INT offset, size, max_size;
6436 tree tem;
ee45a32d 6437 bool reverse;
cfef45c8 6438
f8a7df45
RG
6439 if (TREE_THIS_VOLATILE (t))
6440 return NULL_TREE;
6441
3a65ee74 6442 if (DECL_P (t))
cfef45c8
RG
6443 return get_symbol_constant_value (t);
6444
6445 tem = fold_read_from_constant_string (t);
6446 if (tem)
6447 return tem;
6448
6449 switch (TREE_CODE (t))
6450 {
6451 case ARRAY_REF:
6452 case ARRAY_RANGE_REF:
6453 /* Constant indexes are handled well by get_base_constructor.
6454 Only special case variable offsets.
6455 FIXME: This code can't handle nested references with variable indexes
6456 (they will be handled only by iteration of ccp). Perhaps we can bring
6457 get_ref_base_and_extent here and make it use a valueize callback. */
6458 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6459 && valueize
6460 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
b48e22b2 6461 && TREE_CODE (idx) == INTEGER_CST)
cfef45c8
RG
6462 {
6463 tree low_bound, unit_size;
6464
6465 /* If the resulting bit-offset is constant, track it. */
6466 if ((low_bound = array_ref_low_bound (t),
b48e22b2 6467 TREE_CODE (low_bound) == INTEGER_CST)
cfef45c8 6468 && (unit_size = array_ref_element_size (t),
807e902e 6469 tree_fits_uhwi_p (unit_size)))
cfef45c8 6470 {
807e902e
KZ
6471 offset_int woffset
6472 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
6473 TYPE_PRECISION (TREE_TYPE (idx)));
6474
6475 if (wi::fits_shwi_p (woffset))
6476 {
6477 offset = woffset.to_shwi ();
6478 /* TODO: This code seems wrong, multiply then check
6479 to see if it fits. */
6480 offset *= tree_to_uhwi (unit_size);
6481 offset *= BITS_PER_UNIT;
6482
6483 base = TREE_OPERAND (t, 0);
6484 ctor = get_base_constructor (base, &offset, valueize);
6485 /* Empty constructor. Always fold to 0. */
6486 if (ctor == error_mark_node)
6487 return build_zero_cst (TREE_TYPE (t));
6488 /* Out of bound array access. Value is undefined,
6489 but don't fold. */
6490 if (offset < 0)
6491 return NULL_TREE;
6492 /* We can not determine ctor. */
6493 if (!ctor)
6494 return NULL_TREE;
6495 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6496 tree_to_uhwi (unit_size)
6497 * BITS_PER_UNIT,
6498 base);
6499 }
cfef45c8
RG
6500 }
6501 }
6502 /* Fallthru. */
6503
6504 case COMPONENT_REF:
6505 case BIT_FIELD_REF:
6506 case TARGET_MEM_REF:
6507 case MEM_REF:
ee45a32d 6508 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
cfef45c8
RG
6509 ctor = get_base_constructor (base, &offset, valueize);
6510
6511 /* Empty constructor. Always fold to 0. */
6512 if (ctor == error_mark_node)
6513 return build_zero_cst (TREE_TYPE (t));
6514 /* We do not know precise address. */
6515 if (max_size == -1 || max_size != size)
6516 return NULL_TREE;
6517 /* We can not determine ctor. */
6518 if (!ctor)
6519 return NULL_TREE;
6520
6521 /* Out of bound array access. Value is undefined, but don't fold. */
6522 if (offset < 0)
6523 return NULL_TREE;
6524
c44c2088
JH
6525 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6526 base);
cfef45c8
RG
6527
6528 case REALPART_EXPR:
6529 case IMAGPART_EXPR:
6530 {
6531 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6532 if (c && TREE_CODE (c) == COMPLEX_CST)
6533 return fold_build1_loc (EXPR_LOCATION (t),
6534 TREE_CODE (t), TREE_TYPE (t), c);
6535 break;
6536 }
6537
6538 default:
6539 break;
6540 }
6541
6542 return NULL_TREE;
6543}
6544
6545tree
6546fold_const_aggregate_ref (tree t)
6547{
6548 return fold_const_aggregate_ref_1 (t, NULL);
6549}
06bc3ec7 6550
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Convert OFFSET plus the TOKEN'th slot into a bit offset into the
     vtable array.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
6659
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* Decompose the vtable pointer expression into the vtable VAR_DECL and
     a byte offset into it; fail (ill-formed table) if that isn't possible.  */
  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  /* Delegate the actual slot lookup to the vtable-based variant.  */
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
6688
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view. */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* Give up on non-pointers and on may-alias-anything reference types,
     where folding to a component ref would lose the aliasing property.  */
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
       {
         tree type_domain = TYPE_DOMAIN (optype);
         tree min_val = size_zero_node;
	 /* Use the array's lower bound as the index when it is constant.  */
         if (type_domain && TYPE_MIN_VALUE (type_domain))
           min_val = TYPE_MIN_VALUE (type_domain);
	 if (TREE_CODE (min_val) == INTEGER_CST)
	   return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
       }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
	  /* Only fold when the selected element lies inside the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
	  /* The offset must name exactly the imaginary part.  */
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recurse to simplify the inner indirection first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
19e51b40
JJ
6809
6810/* Return true if CODE is an operation that when operating on signed
6811 integer types involves undefined behavior on overflow and the
6812 operation can be expressed with unsigned arithmetic. */
6813
6814bool
6815arith_code_with_undefined_signed_overflow (tree_code code)
6816{
6817 switch (code)
6818 {
6819 case PLUS_EXPR:
6820 case MINUS_EXPR:
6821 case MULT_EXPR:
6822 case NEGATE_EXPR:
6823 case POINTER_PLUS_EXPR:
6824 return true;
6825 default:
6826 return false;
6827 }
6828}
6829
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  /* The unsigned counterpart of the LHS type; unsigned arithmetic wraps,
     so overflow becomes defined.  */
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert every operand (ops 1..n; op 0 is the LHS) to the unsigned
     type, accumulating the conversion statements in STMTS.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  /* Give STMT a fresh unsigned LHS; the original LHS is reassigned by the
     conversion statement added below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR has no meaning on unsigned integers; it becomes a
     plain addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original (signed/pointer)
     LHS so existing uses are unaffected.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
d4f5cd5e 6866
3d2cf79f 6867
c26de36d
RB
6868/* The valueization hook we use for the gimple_build API simplification.
6869 This makes us match fold_buildN behavior by only combining with
6870 statements in the sequence(s) we are currently building. */
6871
6872static tree
6873gimple_build_valueize (tree op)
6874{
6875 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6876 return op;
6877 return NULL_TREE;
6878}
6879
3d2cf79f 6880/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 6881 simplifying it first if possible. Returns the built
3d2cf79f
RB
6882 expression value and appends statements possibly defining it
6883 to SEQ. */
6884
6885tree
6886gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6887 enum tree_code code, tree type, tree op0)
3d2cf79f 6888{
c26de36d 6889 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
6890 if (!res)
6891 {
a15ebbcd 6892 res = create_tmp_reg_or_ssa_name (type);
355fe088 6893 gimple *stmt;
3d2cf79f
RB
6894 if (code == REALPART_EXPR
6895 || code == IMAGPART_EXPR
6896 || code == VIEW_CONVERT_EXPR)
0d0e4a03 6897 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 6898 else
0d0e4a03 6899 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
6900 gimple_set_location (stmt, loc);
6901 gimple_seq_add_stmt_without_update (seq, stmt);
6902 }
6903 return res;
6904}
6905
6906/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 6907 simplifying it first if possible. Returns the built
3d2cf79f
RB
6908 expression value and appends statements possibly defining it
6909 to SEQ. */
6910
6911tree
6912gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6913 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 6914{
c26de36d 6915 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
6916 if (!res)
6917 {
a15ebbcd 6918 res = create_tmp_reg_or_ssa_name (type);
355fe088 6919 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
6920 gimple_set_location (stmt, loc);
6921 gimple_seq_add_stmt_without_update (seq, stmt);
6922 }
6923 return res;
6924}
6925
6926/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 6927 simplifying it first if possible. Returns the built
3d2cf79f
RB
6928 expression value and appends statements possibly defining it
6929 to SEQ. */
6930
6931tree
6932gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6933 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
6934{
6935 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 6936 seq, gimple_build_valueize);
3d2cf79f
RB
6937 if (!res)
6938 {
a15ebbcd 6939 res = create_tmp_reg_or_ssa_name (type);
355fe088 6940 gimple *stmt;
3d2cf79f 6941 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
6942 stmt = gimple_build_assign (res, code,
6943 build3 (code, type, op0, op1, op2));
3d2cf79f 6944 else
0d0e4a03 6945 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
6946 gimple_set_location (stmt, loc);
6947 gimple_seq_add_stmt_without_update (seq, stmt);
6948 }
6949 return res;
6950}
6951
6952/* Build the call FN (ARG0) with a result of type TYPE
6953 (or no result if TYPE is void) with location LOC,
c26de36d 6954 simplifying it first if possible. Returns the built
3d2cf79f
RB
6955 expression value (or NULL_TREE if TYPE is void) and appends
6956 statements possibly defining it to SEQ. */
6957
6958tree
6959gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6960 enum built_in_function fn, tree type, tree arg0)
3d2cf79f 6961{
c26de36d 6962 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
6963 if (!res)
6964 {
6965 tree decl = builtin_decl_implicit (fn);
355fe088 6966 gimple *stmt = gimple_build_call (decl, 1, arg0);
3d2cf79f
RB
6967 if (!VOID_TYPE_P (type))
6968 {
a15ebbcd 6969 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
6970 gimple_call_set_lhs (stmt, res);
6971 }
6972 gimple_set_location (stmt, loc);
6973 gimple_seq_add_stmt_without_update (seq, stmt);
6974 }
6975 return res;
6976}
6977
6978/* Build the call FN (ARG0, ARG1) with a result of type TYPE
6979 (or no result if TYPE is void) with location LOC,
c26de36d 6980 simplifying it first if possible. Returns the built
3d2cf79f
RB
6981 expression value (or NULL_TREE if TYPE is void) and appends
6982 statements possibly defining it to SEQ. */
6983
6984tree
6985gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6986 enum built_in_function fn, tree type, tree arg0, tree arg1)
3d2cf79f 6987{
c26de36d 6988 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
6989 if (!res)
6990 {
6991 tree decl = builtin_decl_implicit (fn);
355fe088 6992 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
3d2cf79f
RB
6993 if (!VOID_TYPE_P (type))
6994 {
a15ebbcd 6995 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
6996 gimple_call_set_lhs (stmt, res);
6997 }
6998 gimple_set_location (stmt, loc);
6999 gimple_seq_add_stmt_without_update (seq, stmt);
7000 }
7001 return res;
7002}
7003
7004/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7005 (or no result if TYPE is void) with location LOC,
c26de36d 7006 simplifying it first if possible. Returns the built
3d2cf79f
RB
7007 expression value (or NULL_TREE if TYPE is void) and appends
7008 statements possibly defining it to SEQ. */
7009
7010tree
7011gimple_build (gimple_seq *seq, location_t loc,
7012 enum built_in_function fn, tree type,
c26de36d 7013 tree arg0, tree arg1, tree arg2)
3d2cf79f 7014{
c26de36d
RB
7015 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7016 seq, gimple_build_valueize);
3d2cf79f
RB
7017 if (!res)
7018 {
7019 tree decl = builtin_decl_implicit (fn);
355fe088 7020 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
3d2cf79f
RB
7021 if (!VOID_TYPE_P (type))
7022 {
a15ebbcd 7023 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7024 gimple_call_set_lhs (stmt, res);
7025 }
7026 gimple_set_location (stmt, loc);
7027 gimple_seq_add_stmt_without_update (seq, stmt);
7028 }
7029 return res;
7030}
7031
7032/* Build the conversion (TYPE) OP with a result of type TYPE
7033 with location LOC if such conversion is neccesary in GIMPLE,
7034 simplifying it first.
7035 Returns the built expression value and appends
7036 statements possibly defining it to SEQ. */
d4f5cd5e
RB
7037
7038tree
7039gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7040{
7041 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7042 return op;
3d2cf79f 7043 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 7044}
68e57f04 7045
74e3c262
RB
7046/* Build the conversion (ptrofftype) OP with a result of a type
7047 compatible with ptrofftype with location LOC if such conversion
7048 is neccesary in GIMPLE, simplifying it first.
7049 Returns the built expression value and appends
7050 statements possibly defining it to SEQ. */
7051
7052tree
7053gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7054{
7055 if (ptrofftype_p (TREE_TYPE (op)))
7056 return op;
7057 return gimple_convert (seq, loc, sizetype, op);
7058}
7059
e7c45b66
RS
7060/* Build a vector of type TYPE in which each element has the value OP.
7061 Return a gimple value for the result, appending any new statements
7062 to SEQ. */
7063
7064tree
7065gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7066 tree op)
7067{
7068 tree res, vec = build_vector_from_val (type, op);
7069 if (is_gimple_val (vec))
7070 return vec;
7071 if (gimple_in_ssa_p (cfun))
7072 res = make_ssa_name (type);
7073 else
7074 res = create_tmp_reg (type);
7075 gimple *stmt = gimple_build_assign (res, vec);
7076 gimple_set_location (stmt, loc);
7077 gimple_seq_add_stmt_without_update (seq, stmt);
7078 return res;
7079}
7080
7081/* Build a vector of type TYPE in which the elements have the values
7082 given by ELTS. Return a gimple value for the result, appending any
7083 new instructions to SEQ. */
7084
7085tree
7086gimple_build_vector (gimple_seq *seq, location_t loc, tree type,
7087 vec<tree> elts)
7088{
7089 unsigned int nelts = elts.length ();
7090 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
7091 for (unsigned int i = 0; i < nelts; ++i)
7092 if (!TREE_CONSTANT (elts[i]))
7093 {
7094 vec<constructor_elt, va_gc> *v;
7095 vec_alloc (v, nelts);
7096 for (i = 0; i < nelts; ++i)
7097 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[i]);
7098
7099 tree res;
7100 if (gimple_in_ssa_p (cfun))
7101 res = make_ssa_name (type);
7102 else
7103 res = create_tmp_reg (type);
7104 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7105 gimple_set_location (stmt, loc);
7106 gimple_seq_add_stmt_without_update (seq, stmt);
7107 return res;
7108 }
7109 return build_vector (type, elts);
7110}
7111
68e57f04
RS
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  /* Dispatch on the shape of the RHS and defer to the corresponding
     fold-const.c predicate.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     gimple_expr_type (stmt),
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      gimple_expr_type (stmt),
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      /* Conservatively assume nothing about ternary RHS codes.  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
7145
7146/* Return true if return value of call STMT is known to be non-negative.
7147 If the return value is based on the assumption that signed overflow is
7148 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7149 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7150
7151static bool
7152gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7153 int depth)
7154{
7155 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7156 gimple_call_arg (stmt, 0) : NULL_TREE;
7157 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7158 gimple_call_arg (stmt, 1) : NULL_TREE;
7159
7160 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
1d9da71f 7161 gimple_call_combined_fn (stmt),
68e57f04
RS
7162 arg0,
7163 arg1,
7164 strict_overflow_p, depth);
7165}
7166
4534c203
RB
7167/* Return true if return value of call STMT is known to be non-negative.
7168 If the return value is based on the assumption that signed overflow is
7169 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7170 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7171
7172static bool
7173gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7174 int depth)
7175{
7176 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7177 {
7178 tree arg = gimple_phi_arg_def (stmt, i);
7179 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7180 return false;
7181 }
7182 return true;
7183}
7184
68e57f04
RS
7185/* Return true if STMT is known to compute a non-negative value.
7186 If the return value is based on the assumption that signed overflow is
7187 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7188 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7189
7190bool
7191gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7192 int depth)
7193{
7194 switch (gimple_code (stmt))
7195 {
7196 case GIMPLE_ASSIGN:
7197 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7198 depth);
7199 case GIMPLE_CALL:
7200 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7201 depth);
4534c203
RB
7202 case GIMPLE_PHI:
7203 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7204 depth);
68e57f04
RS
7205 default:
7206 return false;
7207 }
7208}
67dbe582
RS
7209
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  /* Dispatch on the shape of the RHS and defer to the corresponding
     fold-const.c predicate.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      /* Conservatively assume nothing about ternary RHS codes.  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
7238
7239/* Return true if the floating-point value computed by call STMT is known
7240 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7241 considered integer values. Return false for signaling NaN.
67dbe582
RS
7242
7243 DEPTH is the current nesting depth of the query. */
7244
7245static bool
7246gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7247{
7248 tree arg0 = (gimple_call_num_args (stmt) > 0
7249 ? gimple_call_arg (stmt, 0)
7250 : NULL_TREE);
7251 tree arg1 = (gimple_call_num_args (stmt) > 1
7252 ? gimple_call_arg (stmt, 1)
7253 : NULL_TREE);
1d9da71f 7254 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
7255 arg0, arg1, depth);
7256}
7257
7258/* Return true if the floating-point result of phi STMT is known to have
7259 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 7260 integer values. Return false for signaling NaN.
67dbe582
RS
7261
7262 DEPTH is the current nesting depth of the query. */
7263
7264static bool
7265gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7266{
7267 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7268 {
7269 tree arg = gimple_phi_arg_def (stmt, i);
7270 if (!integer_valued_real_single_p (arg, depth + 1))
7271 return false;
7272 }
7273 return true;
7274}
7275
7276/* Return true if the floating-point value computed by STMT is known
7277 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7278 considered integer values. Return false for signaling NaN.
67dbe582
RS
7279
7280 DEPTH is the current nesting depth of the query. */
7281
7282bool
7283gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7284{
7285 switch (gimple_code (stmt))
7286 {
7287 case GIMPLE_ASSIGN:
7288 return gimple_assign_integer_valued_real_p (stmt, depth);
7289 case GIMPLE_CALL:
7290 return gimple_call_integer_valued_real_p (stmt, depth);
7291 case GIMPLE_PHI:
7292 return gimple_phi_integer_valued_real_p (stmt, depth);
7293 default:
7294 return false;
7295 }
7296}