]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
Update copyright years.
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
85ec4feb 2 Copyright (C) 2010-2018 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5
AM
25#include "target.h"
26#include "rtl.h"
cbdd87d4 27#include "tree.h"
c7131fb2 28#include "gimple.h"
957060b5 29#include "predict.h"
c7131fb2 30#include "ssa.h"
957060b5
AM
31#include "cgraph.h"
32#include "gimple-pretty-print.h"
cc8bea0a 33#include "gimple-ssa-warn-restrict.h"
c7131fb2 34#include "fold-const.h"
36566b39
PK
35#include "stmt.h"
36#include "expr.h"
37#include "stor-layout.h"
7ee2468b 38#include "dumpfile.h"
2fb9a547 39#include "gimple-fold.h"
45b0be94 40#include "gimplify.h"
5be5c238 41#include "gimple-iterator.h"
442b4905
AM
42#include "tree-into-ssa.h"
43#include "tree-dfa.h"
025d57f0 44#include "tree-object-size.h"
7a300452 45#include "tree-ssa.h"
cbdd87d4 46#include "tree-ssa-propagate.h"
450ad0cd 47#include "ipa-utils.h"
4484a35a 48#include "tree-ssa-address.h"
862d0b35 49#include "langhooks.h"
19e51b40 50#include "gimplify-me.h"
2b5f0895 51#include "dbgcnt.h"
9b2b7279 52#include "builtins.h"
e0ee10ed
RB
53#include "tree-eh.h"
54#include "gimple-match.h"
48126138 55#include "gomp-constants.h"
f869c12f 56#include "optabs-query.h"
629b3d75 57#include "omp-general.h"
3de2a40e 58#include "ipa-chkp.h"
abd3a68c 59#include "tree-cfg.h"
a918bfbf 60#include "fold-const-call.h"
314e6352
ML
61#include "stringpool.h"
62#include "attribs.h"
45b2222a 63#include "asan.h"
025d57f0
MS
64#include "diagnostic-core.h"
65#include "intl.h"
6a33d0ff 66#include "calls.h"
5ebaa477 67#include "tree-vector-builder.h"
cbdd87d4 68
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies constructor of variable DECL was taken
   from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract decls (e.g. abstract origins for inlining) never have a body
     to reference.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they were not optimized out
     yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      /* No symtab node or no definition means the body was already
	 removed.  */
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* Functions whose body was inlined everywhere no longer exist as
	 standalone symbols.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
168
a15ebbcd
ML
169/* Create a temporary for TYPE for a statement STMT. If the current function
170 is in SSA form, a SSA name is created. Otherwise a temporary register
171 is made. */
172
edc19e03
WS
173tree
174create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
a15ebbcd
ML
175{
176 if (gimple_in_ssa_p (cfun))
177 return make_ssa_name (type, stmt);
178 else
179 return create_tmp_reg (type);
180}
181
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies variable whose constructor contains
   CVAL.  Returns the canonicalized value, or NULL_TREE when the value
   cannot be referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite ptr + cst into &MEM[ptr, cst] so the result is a plain
     ADDR_EXPR that is_gimple_min_invariant accepts.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* For compound literals take the address of the underlying
	     anonymous decl instead.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up when the referenced decl cannot be used from here (e.g.
	 optimized-out COMDAT, hidden symbol in other DSO).  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* Taking the address of BASE makes it addressable.  */
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CVAL is a constant with overflow, return a variant without the
     overflow flag set.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
cbdd87d4
RG
243
244/* If SYM is a constant variable with known value, return the value.
245 NULL_TREE is returned otherwise. */
246
247tree
248get_symbol_constant_value (tree sym)
249{
6a6dac52
JH
250 tree val = ctor_for_folding (sym);
251 if (val != error_mark_node)
cbdd87d4 252 {
cbdd87d4
RG
253 if (val)
254 {
9d60be38 255 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 256 if (val && is_gimple_min_invariant (val))
17f39a39 257 return val;
1389294c
JH
258 else
259 return NULL_TREE;
cbdd87d4
RG
260 }
261 /* Variables declared 'const' without an initializer
262 have zero as the initializer if they may not be
263 overridden at link or run time. */
264 if (!val
b8a8c472 265 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 266 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
267 }
268
269 return NULL_TREE;
270}
271
272
cbdd87d4
RG
273
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  Returns the folded reference
   or NULL_TREE when no simplification applies.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  /* Wrappers around a constant operand fold directly via fold_unary /
     fold_ternary.  */
  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  /* Only an rvalue reference may be replaced by a constant read from
     an aggregate initializer.  */
  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
308
309
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers carry no value to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    /* Try to devirtualize: when the polymorphic call has a single
	       (or no) possible target, replace the OBJ_TYPE_REF by the
	       target's address.  */
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p, 0] simplifies to p converted to the rhs type.  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
449
fef5a0d9
RB
450
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF when it stores to memory: a non-register
	 assignment, or a call that may clobber memory.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence (first one seen walking
	     backward) reuses the replaced statement's VDEF so downstream
	     uses stay valid; earlier stores get fresh names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace (si_p, stmts, false);
}
522
cbdd87d4
RG
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a value and assign it to the call's LHS as the
	 last statement of the sequence.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
cbdd87d4 578
fef5a0d9
RB
579
580/* Replace the call at *GSI with the gimple value VAL. */
581
e3174bdf 582void
fef5a0d9
RB
583replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
584{
355fe088 585 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 586 tree lhs = gimple_call_lhs (stmt);
355fe088 587 gimple *repl;
fef5a0d9 588 if (lhs)
e256dfce 589 {
fef5a0d9
RB
590 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
591 val = fold_convert (TREE_TYPE (lhs), val);
592 repl = gimple_build_assign (lhs, val);
593 }
594 else
595 repl = gimple_build_nop ();
596 tree vdef = gimple_vdef (stmt);
597 if (vdef && TREE_CODE (vdef) == SSA_NAME)
598 {
599 unlink_stmt_vdef (stmt);
600 release_ssa_name (vdef);
601 }
f6b4dc28 602 gsi_replace (gsi, repl, false);
fef5a0d9
RB
603}
604
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* Carry over the result, location and virtual operands from the old
     call before swapping the statements.  */
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  if (gimple_vuse (stmt))
    gimple_set_vuse (repl, gimple_vuse (stmt));
  gsi_replace (gsi, repl, false);
  /* Give the replacement a chance to fold further.  */
  fold_stmt (gsi);
}
625
626/* Return true if VAR is a VAR_DECL or a component thereof. */
627
628static bool
629var_decl_component_p (tree var)
630{
631 tree inner = var;
632 while (handled_component_p (inner))
633 inner = TREE_OPERAND (inner, 0);
634 return SSA_VAR_P (inner);
635}
636
/* If the SIZE argument representing the size of an object is in a range
   of values of which exactly one is valid (and that is zero), return
   true, otherwise false.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  /* Range information is only available for SSA names.  */
  if (TREE_CODE (size) != SSA_NAME)
    return false;

  wide_int min, max;
  enum value_range_type rtype = get_range_info (size, &min, &max);
  if (rtype != VR_ANTI_RANGE)
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  wide_int wone = wi::one (prec);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;

  /* SIZE is known to be in the anti-range ~[min, max].  When that range
     excludes [1, SSIZE_MAX] (and everything above SSIZE_MAX is an invalid
     object size) zero is the only valid value left.  */
  return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
}
666
cc8bea0a
MS
667/* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
668 diagnose (otherwise undefined) overlapping copies without preventing
669 folding. When folded, GCC guarantees that overlapping memcpy has
670 the same semantics as memmove. Call to the library memcpy need not
671 provide the same guarantee. Return false if no simplification can
672 be made. */
fef5a0d9
RB
673
674static bool
675gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
676 tree dest, tree src, int endp)
677{
355fe088 678 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
679 tree lhs = gimple_call_lhs (stmt);
680 tree len = gimple_call_arg (stmt, 2);
681 tree destvar, srcvar;
682 location_t loc = gimple_location (stmt);
683
cc8bea0a
MS
684 tree func = gimple_call_fndecl (stmt);
685 bool nowarn = gimple_no_warning_p (stmt);
686 bool check_overlap = (DECL_FUNCTION_CODE (func) != BUILT_IN_MEMMOVE
687 && DECL_FUNCTION_CODE (func) != BUILT_IN_MEMMOVE_CHK
688 && !nowarn);
689
6512c0f1
MS
690 /* If the LEN parameter is a constant zero or in range where
691 the only valid value is zero, return DEST. */
692 if (size_must_be_zero_p (len))
fef5a0d9 693 {
355fe088 694 gimple *repl;
fef5a0d9
RB
695 if (gimple_call_lhs (stmt))
696 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
697 else
698 repl = gimple_build_nop ();
699 tree vdef = gimple_vdef (stmt);
700 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 701 {
fef5a0d9
RB
702 unlink_stmt_vdef (stmt);
703 release_ssa_name (vdef);
704 }
f6b4dc28 705 gsi_replace (gsi, repl, false);
fef5a0d9
RB
706 return true;
707 }
708
709 /* If SRC and DEST are the same (and not volatile), return
710 DEST{,+LEN,+LEN-1}. */
711 if (operand_equal_p (src, dest, 0))
712 {
cc8bea0a
MS
713 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
714 It's safe and may even be emitted by GCC itself (see bug
715 32667). However, diagnose it in explicit calls to the memcpy
716 function. */
717 if (check_overlap && *IDENTIFIER_POINTER (DECL_NAME (func)) != '_')
718 warning_at (loc, OPT_Wrestrict,
719 "%qD source argument is the same as destination",
720 func);
721
fef5a0d9
RB
722 unlink_stmt_vdef (stmt);
723 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
724 release_ssa_name (gimple_vdef (stmt));
725 if (!lhs)
726 {
f6b4dc28 727 gsi_replace (gsi, gimple_build_nop (), false);
fef5a0d9
RB
728 return true;
729 }
730 goto done;
731 }
732 else
733 {
734 tree srctype, desttype;
735 unsigned int src_align, dest_align;
736 tree off0;
737
3de2a40e
IE
738 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
739 pointers as wide integer) and also may result in huge function
740 size because of inlined bounds copy. Thus don't inline for
741 functions we want to instrument. */
742 if (flag_check_pointer_bounds
743 && chkp_instrumentable_p (cfun->decl)
744 /* Even if data may contain pointers we can inline if copy
745 less than a pointer size. */
746 && (!tree_fits_uhwi_p (len)
747 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
748 return false;
749
fef5a0d9
RB
750 /* Build accesses at offset zero with a ref-all character type. */
751 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
752 ptr_mode, true), 0);
753
754 /* If we can perform the copy efficiently with first doing all loads
755 and then all stores inline it that way. Currently efficiently
756 means that we can load all the memory into a single integer
757 register which is what MOVE_MAX gives us. */
758 src_align = get_pointer_alignment (src);
759 dest_align = get_pointer_alignment (dest);
760 if (tree_fits_uhwi_p (len)
761 && compare_tree_int (len, MOVE_MAX) <= 0
762 /* ??? Don't transform copies from strings with known length this
763 confuses the tree-ssa-strlen.c. This doesn't handle
764 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
765 reason. */
766 && !c_strlen (src, 2))
767 {
768 unsigned ilen = tree_to_uhwi (len);
146ec50f 769 if (pow2p_hwi (ilen))
fef5a0d9 770 {
cc8bea0a
MS
771 /* Detect invalid bounds and overlapping copies and issue
772 either -Warray-bounds or -Wrestrict. */
773 if (!nowarn
774 && check_bounds_or_overlap (as_a <gcall *>(stmt),
775 dest, src, len, len))
776 gimple_set_no_warning (stmt, true);
777
64ab8765 778 scalar_int_mode mode;
fef5a0d9
RB
779 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
780 if (type
64ab8765
RS
781 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
782 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
fef5a0d9
RB
783 /* If the destination pointer is not aligned we must be able
784 to emit an unaligned store. */
64ab8765 785 && (dest_align >= GET_MODE_ALIGNMENT (mode)
e0bd6c9f 786 || !targetm.slow_unaligned_access (mode, dest_align)
64ab8765 787 || (optab_handler (movmisalign_optab, mode)
f869c12f 788 != CODE_FOR_nothing)))
fef5a0d9
RB
789 {
790 tree srctype = type;
791 tree desttype = type;
64ab8765 792 if (src_align < GET_MODE_ALIGNMENT (mode))
fef5a0d9
RB
793 srctype = build_aligned_type (type, src_align);
794 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
795 tree tem = fold_const_aggregate_ref (srcmem);
796 if (tem)
797 srcmem = tem;
64ab8765 798 else if (src_align < GET_MODE_ALIGNMENT (mode)
e0bd6c9f 799 && targetm.slow_unaligned_access (mode, src_align)
64ab8765 800 && (optab_handler (movmisalign_optab, mode)
f869c12f 801 == CODE_FOR_nothing))
fef5a0d9
RB
802 srcmem = NULL_TREE;
803 if (srcmem)
804 {
355fe088 805 gimple *new_stmt;
fef5a0d9
RB
806 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
807 {
808 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
a15ebbcd
ML
809 srcmem
810 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
811 new_stmt);
fef5a0d9
RB
812 gimple_assign_set_lhs (new_stmt, srcmem);
813 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
814 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
815 }
64ab8765 816 if (dest_align < GET_MODE_ALIGNMENT (mode))
fef5a0d9
RB
817 desttype = build_aligned_type (type, dest_align);
818 new_stmt
819 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
820 dest, off0),
821 srcmem);
822 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
823 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
824 if (gimple_vdef (new_stmt)
825 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
826 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
827 if (!lhs)
828 {
f6b4dc28 829 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
830 return true;
831 }
832 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
833 goto done;
834 }
835 }
836 }
837 }
838
839 if (endp == 3)
840 {
841 /* Both DEST and SRC must be pointer types.
842 ??? This is what old code did. Is the testing for pointer types
843 really mandatory?
844
845 If either SRC is readonly or length is 1, we can use memcpy. */
846 if (!dest_align || !src_align)
847 return false;
848 if (readonly_data_expr (src)
849 || (tree_fits_uhwi_p (len)
850 && (MIN (src_align, dest_align) / BITS_PER_UNIT
851 >= tree_to_uhwi (len))))
852 {
853 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
854 if (!fn)
855 return false;
856 gimple_call_set_fndecl (stmt, fn);
857 gimple_call_set_arg (stmt, 0, dest);
858 gimple_call_set_arg (stmt, 1, src);
859 fold_stmt (gsi);
860 return true;
861 }
862
863 /* If *src and *dest can't overlap, optimize into memcpy as well. */
864 if (TREE_CODE (src) == ADDR_EXPR
865 && TREE_CODE (dest) == ADDR_EXPR)
866 {
867 tree src_base, dest_base, fn;
a90c8804
RS
868 poly_int64 src_offset = 0, dest_offset = 0;
869 poly_uint64 maxsize;
fef5a0d9
RB
870
871 srcvar = TREE_OPERAND (src, 0);
4fda19ef
JJ
872 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
873 if (src_base == NULL)
874 src_base = srcvar;
fef5a0d9 875 destvar = TREE_OPERAND (dest, 0);
4fda19ef
JJ
876 dest_base = get_addr_base_and_unit_offset (destvar,
877 &dest_offset);
878 if (dest_base == NULL)
879 dest_base = destvar;
a90c8804 880 if (!poly_int_tree_p (len, &maxsize))
fef5a0d9 881 maxsize = -1;
fef5a0d9
RB
882 if (SSA_VAR_P (src_base)
883 && SSA_VAR_P (dest_base))
884 {
885 if (operand_equal_p (src_base, dest_base, 0)
a90c8804
RS
886 && ranges_maybe_overlap_p (src_offset, maxsize,
887 dest_offset, maxsize))
fef5a0d9
RB
888 return false;
889 }
890 else if (TREE_CODE (src_base) == MEM_REF
891 && TREE_CODE (dest_base) == MEM_REF)
892 {
893 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
894 TREE_OPERAND (dest_base, 0), 0))
895 return false;
a90c8804
RS
896 poly_offset_int full_src_offset
897 = mem_ref_offset (src_base) + src_offset;
898 poly_offset_int full_dest_offset
899 = mem_ref_offset (dest_base) + dest_offset;
900 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
901 full_dest_offset, maxsize))
fef5a0d9
RB
902 return false;
903 }
904 else
905 return false;
906
907 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
908 if (!fn)
909 return false;
910 gimple_call_set_fndecl (stmt, fn);
911 gimple_call_set_arg (stmt, 0, dest);
912 gimple_call_set_arg (stmt, 1, src);
913 fold_stmt (gsi);
914 return true;
915 }
916
917 /* If the destination and source do not alias optimize into
918 memcpy as well. */
919 if ((is_gimple_min_invariant (dest)
920 || TREE_CODE (dest) == SSA_NAME)
921 && (is_gimple_min_invariant (src)
922 || TREE_CODE (src) == SSA_NAME))
923 {
924 ao_ref destr, srcr;
925 ao_ref_init_from_ptr_and_size (&destr, dest, len);
926 ao_ref_init_from_ptr_and_size (&srcr, src, len);
927 if (!refs_may_alias_p_1 (&destr, &srcr, false))
928 {
929 tree fn;
930 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
931 if (!fn)
932 return false;
933 gimple_call_set_fndecl (stmt, fn);
934 gimple_call_set_arg (stmt, 0, dest);
935 gimple_call_set_arg (stmt, 1, src);
936 fold_stmt (gsi);
937 return true;
938 }
939 }
940
941 return false;
942 }
943
944 if (!tree_fits_shwi_p (len))
945 return false;
fef5a0d9
RB
946 if (!POINTER_TYPE_P (TREE_TYPE (src))
947 || !POINTER_TYPE_P (TREE_TYPE (dest)))
948 return false;
949 /* In the following try to find a type that is most natural to be
950 used for the memcpy source and destination and that allows
951 the most optimization when memcpy is turned into a plain assignment
952 using that type. In theory we could always use a char[len] type
953 but that only gains us that the destination and source possibly
954 no longer will have their address taken. */
fef5a0d9
RB
955 srctype = TREE_TYPE (TREE_TYPE (src));
956 if (TREE_CODE (srctype) == ARRAY_TYPE
957 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
42f74245 958 srctype = TREE_TYPE (srctype);
fef5a0d9
RB
959 desttype = TREE_TYPE (TREE_TYPE (dest));
960 if (TREE_CODE (desttype) == ARRAY_TYPE
961 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
42f74245 962 desttype = TREE_TYPE (desttype);
fef5a0d9
RB
963 if (TREE_ADDRESSABLE (srctype)
964 || TREE_ADDRESSABLE (desttype))
965 return false;
966
967 /* Make sure we are not copying using a floating-point mode or
968 a type whose size possibly does not match its precision. */
969 if (FLOAT_MODE_P (TYPE_MODE (desttype))
970 || TREE_CODE (desttype) == BOOLEAN_TYPE
971 || TREE_CODE (desttype) == ENUMERAL_TYPE)
972 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
973 if (FLOAT_MODE_P (TYPE_MODE (srctype))
974 || TREE_CODE (srctype) == BOOLEAN_TYPE
975 || TREE_CODE (srctype) == ENUMERAL_TYPE)
976 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
977 if (!srctype)
978 srctype = desttype;
979 if (!desttype)
980 desttype = srctype;
981 if (!srctype)
982 return false;
983
984 src_align = get_pointer_alignment (src);
985 dest_align = get_pointer_alignment (dest);
986 if (dest_align < TYPE_ALIGN (desttype)
987 || src_align < TYPE_ALIGN (srctype))
988 return false;
989
42f74245
RB
990 destvar = NULL_TREE;
991 if (TREE_CODE (dest) == ADDR_EXPR
992 && var_decl_component_p (TREE_OPERAND (dest, 0))
fef5a0d9 993 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
42f74245 994 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
fef5a0d9 995
42f74245
RB
996 srcvar = NULL_TREE;
997 if (TREE_CODE (src) == ADDR_EXPR
998 && var_decl_component_p (TREE_OPERAND (src, 0))
fef5a0d9
RB
999 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1000 {
1001 if (!destvar
1002 || src_align >= TYPE_ALIGN (desttype))
1003 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
42f74245 1004 src, off0);
fef5a0d9
RB
1005 else if (!STRICT_ALIGNMENT)
1006 {
1007 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1008 src_align);
42f74245 1009 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
fef5a0d9 1010 }
fef5a0d9 1011 }
fef5a0d9
RB
1012
1013 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1014 return false;
1015
1016 if (srcvar == NULL_TREE)
1017 {
fef5a0d9
RB
1018 if (src_align >= TYPE_ALIGN (desttype))
1019 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1020 else
1021 {
1022 if (STRICT_ALIGNMENT)
1023 return false;
1024 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1025 src_align);
1026 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1027 }
1028 }
1029 else if (destvar == NULL_TREE)
1030 {
fef5a0d9
RB
1031 if (dest_align >= TYPE_ALIGN (srctype))
1032 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1033 else
1034 {
1035 if (STRICT_ALIGNMENT)
1036 return false;
1037 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1038 dest_align);
1039 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1040 }
1041 }
1042
cc8bea0a
MS
1043 /* Detect invalid bounds and overlapping copies and issue either
1044 -Warray-bounds or -Wrestrict. */
1045 if (!nowarn)
1046 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1047
355fe088 1048 gimple *new_stmt;
fef5a0d9
RB
1049 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1050 {
921b13d0
RB
1051 tree tem = fold_const_aggregate_ref (srcvar);
1052 if (tem)
1053 srcvar = tem;
1054 if (! is_gimple_min_invariant (srcvar))
1055 {
1056 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
a15ebbcd
ML
1057 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1058 new_stmt);
921b13d0
RB
1059 gimple_assign_set_lhs (new_stmt, srcvar);
1060 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1061 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1062 }
d7257171
RB
1063 new_stmt = gimple_build_assign (destvar, srcvar);
1064 goto set_vop_and_replace;
fef5a0d9 1065 }
d7257171
RB
1066
1067 /* We get an aggregate copy. Use an unsigned char[] type to
1068 perform the copying to preserve padding and to avoid any issues
1069 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1070 desttype = build_array_type_nelts (unsigned_char_type_node,
1071 tree_to_uhwi (len));
1072 srctype = desttype;
1073 if (src_align > TYPE_ALIGN (srctype))
1074 srctype = build_aligned_type (srctype, src_align);
1075 if (dest_align > TYPE_ALIGN (desttype))
1076 desttype = build_aligned_type (desttype, dest_align);
1077 new_stmt
1078 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1079 fold_build2 (MEM_REF, srctype, src, off0));
1080set_vop_and_replace:
fef5a0d9
RB
1081 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1082 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1083 if (gimple_vdef (new_stmt)
1084 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1085 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1086 if (!lhs)
1087 {
f6b4dc28 1088 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
1089 return true;
1090 }
1091 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1092 }
1093
1094done:
74e3c262 1095 gimple_seq stmts = NULL;
fef5a0d9
RB
1096 if (endp == 0 || endp == 3)
1097 len = NULL_TREE;
1098 else if (endp == 2)
74e3c262
RB
1099 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1100 ssize_int (1));
fef5a0d9 1101 if (endp == 2 || endp == 1)
74e3c262
RB
1102 {
1103 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1104 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1105 TREE_TYPE (dest), dest, len);
1106 }
fef5a0d9 1107
74e3c262 1108 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 1109 gimple *repl = gimple_build_assign (lhs, dest);
f6b4dc28 1110 gsi_replace (gsi, repl, false);
fef5a0d9
RB
1111 return true;
1112}
1113
b3d8d88e
MS
1114/* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1115 to built-in memcmp (a, b, len). */
1116
1117static bool
1118gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1119{
1120 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1121
1122 if (!fn)
1123 return false;
1124
1125 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1126
1127 gimple *stmt = gsi_stmt (*gsi);
1128 tree a = gimple_call_arg (stmt, 0);
1129 tree b = gimple_call_arg (stmt, 1);
1130 tree len = gimple_call_arg (stmt, 2);
1131
1132 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1133 replace_call_with_call_and_fold (gsi, repl);
1134
1135 return true;
1136}
1137
1138/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1139 to built-in memmove (dest, src, len). */
1140
1141static bool
1142gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1143{
1144 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1145
1146 if (!fn)
1147 return false;
1148
1149 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1150 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1151 len) into memmove (dest, src, len). */
1152
1153 gimple *stmt = gsi_stmt (*gsi);
1154 tree src = gimple_call_arg (stmt, 0);
1155 tree dest = gimple_call_arg (stmt, 1);
1156 tree len = gimple_call_arg (stmt, 2);
1157
1158 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1159 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1160 replace_call_with_call_and_fold (gsi, repl);
1161
1162 return true;
1163}
1164
1165/* Transform a call to built-in bzero (dest, len) at *GSI into one
1166 to built-in memset (dest, 0, len). */
1167
1168static bool
1169gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1170{
1171 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1172
1173 if (!fn)
1174 return false;
1175
1176 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1177
1178 gimple *stmt = gsi_stmt (*gsi);
1179 tree dest = gimple_call_arg (stmt, 0);
1180 tree len = gimple_call_arg (stmt, 1);
1181
1182 gimple_seq seq = NULL;
1183 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1184 gimple_seq_add_stmt_without_update (&seq, repl);
1185 gsi_replace_with_seq_vops (gsi, seq);
1186 fold_stmt (gsi);
1187
1188 return true;
1189}
1190
fef5a0d9
RB
1191/* Fold function call to builtin memset or bzero at *GSI setting the
1192 memory of size LEN to VAL. Return whether a simplification was made. */
1193
1194static bool
1195gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1196{
355fe088 1197 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1198 tree etype;
1199 unsigned HOST_WIDE_INT length, cval;
1200
1201 /* If the LEN parameter is zero, return DEST. */
1202 if (integer_zerop (len))
1203 {
1204 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1205 return true;
1206 }
1207
1208 if (! tree_fits_uhwi_p (len))
1209 return false;
1210
1211 if (TREE_CODE (c) != INTEGER_CST)
1212 return false;
1213
1214 tree dest = gimple_call_arg (stmt, 0);
1215 tree var = dest;
1216 if (TREE_CODE (var) != ADDR_EXPR)
1217 return false;
1218
1219 var = TREE_OPERAND (var, 0);
1220 if (TREE_THIS_VOLATILE (var))
1221 return false;
1222
1223 etype = TREE_TYPE (var);
1224 if (TREE_CODE (etype) == ARRAY_TYPE)
1225 etype = TREE_TYPE (etype);
1226
1227 if (!INTEGRAL_TYPE_P (etype)
1228 && !POINTER_TYPE_P (etype))
1229 return NULL_TREE;
1230
1231 if (! var_decl_component_p (var))
1232 return NULL_TREE;
1233
1234 length = tree_to_uhwi (len);
7a504f33 1235 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
fef5a0d9
RB
1236 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1237 return NULL_TREE;
1238
1239 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1240 return NULL_TREE;
1241
1242 if (integer_zerop (c))
1243 cval = 0;
1244 else
1245 {
1246 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1247 return NULL_TREE;
1248
1249 cval = TREE_INT_CST_LOW (c);
1250 cval &= 0xff;
1251 cval |= cval << 8;
1252 cval |= cval << 16;
1253 cval |= (cval << 31) << 1;
1254 }
1255
1256 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1257 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
fef5a0d9
RB
1258 gimple_set_vuse (store, gimple_vuse (stmt));
1259 tree vdef = gimple_vdef (stmt);
1260 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1261 {
1262 gimple_set_vdef (store, gimple_vdef (stmt));
1263 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1264 }
1265 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1266 if (gimple_call_lhs (stmt))
1267 {
355fe088 1268 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1269 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1270 }
1271 else
1272 {
1273 gimple_stmt_iterator gsi2 = *gsi;
1274 gsi_prev (gsi);
1275 gsi_remove (&gsi2, true);
1276 }
1277
1278 return true;
1279}
1280
1281
88d0c3f0
MS
1282/* Obtain the minimum and maximum string length or minimum and maximum
1283 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1284 If ARG is an SSA name variable, follow its use-def chains. When
1285 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1286 if we are unable to determine the length or value, return False.
1287 VISITED is a bitmap of visited variables.
1288 TYPE is 0 if string length should be obtained, 1 for maximum string
1289 length and 2 for maximum value ARG can have.
1290 When FUZZY is set and the length of a string cannot be determined,
1291 the function instead considers as the maximum possible length the
3f343040
MS
1292 size of a character array it may refer to.
1293 Set *FLEXP to true if the range of the string lengths has been
1294 obtained from the upper bound of an array at the end of a struct.
1295 Such an array may hold a string that's longer than its upper bound
1296 due to it being used as a poor-man's flexible array member. */
fef5a0d9
RB
1297
1298static bool
88d0c3f0 1299get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
3f343040 1300 bool fuzzy, bool *flexp)
fef5a0d9
RB
1301{
1302 tree var, val;
355fe088 1303 gimple *def_stmt;
fef5a0d9 1304
88d0c3f0
MS
1305 /* The minimum and maximum length. The MAXLEN pointer stays unchanged
1306 but MINLEN may be cleared during the execution of the function. */
1307 tree *minlen = length;
1308 tree *const maxlen = length + 1;
1309
fef5a0d9
RB
1310 if (TREE_CODE (arg) != SSA_NAME)
1311 {
1312 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1313 if (TREE_CODE (arg) == ADDR_EXPR
1314 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1315 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1316 {
1317 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1318 if (TREE_CODE (aop0) == INDIRECT_REF
1319 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
88d0c3f0 1320 return get_range_strlen (TREE_OPERAND (aop0, 0),
3f343040 1321 length, visited, type, fuzzy, flexp);
fef5a0d9
RB
1322 }
1323
1324 if (type == 2)
1325 {
1326 val = arg;
1327 if (TREE_CODE (val) != INTEGER_CST
1328 || tree_int_cst_sgn (val) < 0)
1329 return false;
1330 }
1331 else
1332 val = c_strlen (arg, 1);
88d0c3f0
MS
1333
1334 if (!val && fuzzy)
1335 {
1336 if (TREE_CODE (arg) == ADDR_EXPR)
1337 return get_range_strlen (TREE_OPERAND (arg, 0), length,
3f343040 1338 visited, type, fuzzy, flexp);
88d0c3f0
MS
1339
1340 if (TREE_CODE (arg) == COMPONENT_REF
1341 && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
1342 {
1343 /* Use the type of the member array to determine the upper
1344 bound on the length of the array. This may be overly
1345 optimistic if the array itself isn't NUL-terminated and
1346 the caller relies on the subsequent member to contain
3f343040
MS
1347 the NUL.
1348 Set *FLEXP to true if the array whose bound is being
1349 used is at the end of a struct. */
c3e46927 1350 if (array_at_struct_end_p (arg))
3f343040
MS
1351 *flexp = true;
1352
88d0c3f0
MS
1353 arg = TREE_OPERAND (arg, 1);
1354 val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
1355 if (!val || integer_zerop (val))
1356 return false;
1357 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1358 integer_one_node);
e495e31a
MS
1359 /* Set the minimum size to zero since the string in
1360 the array could have zero length. */
1361 *minlen = ssize_int (0);
88d0c3f0 1362 }
2004617a
QZ
1363
1364 if (VAR_P (arg)
1365 && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE)
1366 {
1367 val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
1368 if (!val || TREE_CODE (val) != INTEGER_CST || integer_zerop (val))
1369 return false;
1370 val = wide_int_to_tree (TREE_TYPE (val),
1371 wi::sub(wi::to_wide (val), 1));
1372 /* Set the minimum size to zero since the string in
1373 the array could have zero length. */
1374 *minlen = ssize_int (0);
1375 }
88d0c3f0
MS
1376 }
1377
fef5a0d9
RB
1378 if (!val)
1379 return false;
1380
88d0c3f0
MS
1381 if (minlen
1382 && (!*minlen
1383 || (type > 0
1384 && TREE_CODE (*minlen) == INTEGER_CST
1385 && TREE_CODE (val) == INTEGER_CST
1386 && tree_int_cst_lt (val, *minlen))))
1387 *minlen = val;
1388
1389 if (*maxlen)
fef5a0d9
RB
1390 {
1391 if (type > 0)
1392 {
88d0c3f0 1393 if (TREE_CODE (*maxlen) != INTEGER_CST
fef5a0d9
RB
1394 || TREE_CODE (val) != INTEGER_CST)
1395 return false;
1396
88d0c3f0
MS
1397 if (tree_int_cst_lt (*maxlen, val))
1398 *maxlen = val;
fef5a0d9
RB
1399 return true;
1400 }
88d0c3f0 1401 else if (simple_cst_equal (val, *maxlen) != 1)
fef5a0d9
RB
1402 return false;
1403 }
1404
88d0c3f0 1405 *maxlen = val;
fef5a0d9
RB
1406 return true;
1407 }
1408
1409 /* If ARG is registered for SSA update we cannot look at its defining
1410 statement. */
1411 if (name_registered_for_update_p (arg))
1412 return false;
1413
1414 /* If we were already here, break the infinite cycle. */
dcb7fae2
RB
1415 if (!*visited)
1416 *visited = BITMAP_ALLOC (NULL);
1417 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
fef5a0d9
RB
1418 return true;
1419
1420 var = arg;
1421 def_stmt = SSA_NAME_DEF_STMT (var);
1422
1423 switch (gimple_code (def_stmt))
1424 {
1425 case GIMPLE_ASSIGN:
1426 /* The RHS of the statement defining VAR must either have a
1427 constant length or come from another SSA_NAME with a constant
1428 length. */
1429 if (gimple_assign_single_p (def_stmt)
1430 || gimple_assign_unary_nop_p (def_stmt))
1431 {
1432 tree rhs = gimple_assign_rhs1 (def_stmt);
3f343040 1433 return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
fef5a0d9
RB
1434 }
1435 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1436 {
1437 tree op2 = gimple_assign_rhs2 (def_stmt);
1438 tree op3 = gimple_assign_rhs3 (def_stmt);
3f343040
MS
1439 return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
1440 && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
cc8bea0a 1441 }
fef5a0d9
RB
1442 return false;
1443
1444 case GIMPLE_PHI:
1445 {
1446 /* All the arguments of the PHI node must have the same constant
1447 length. */
1448 unsigned i;
1449
1450 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1451 {
1452 tree arg = gimple_phi_arg (def_stmt, i)->def;
1453
1454 /* If this PHI has itself as an argument, we cannot
1455 determine the string length of this argument. However,
1456 if we can find a constant string length for the other
1457 PHI args then we can still be sure that this is a
1458 constant string length. So be optimistic and just
1459 continue with the next argument. */
1460 if (arg == gimple_phi_result (def_stmt))
1461 continue;
1462
3f343040 1463 if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
88d0c3f0
MS
1464 {
1465 if (fuzzy)
1466 *maxlen = build_all_ones_cst (size_type_node);
1467 else
1468 return false;
1469 }
fef5a0d9
RB
1470 }
1471 }
1472 return true;
1473
1474 default:
1475 return false;
1476 }
1477}
1478
88d0c3f0
MS
1479/* Determine the minimum and maximum value or string length that ARG
1480 refers to and store each in the first two elements of MINMAXLEN.
1481 For expressions that point to strings of unknown lengths that are
1482 character arrays, use the upper bound of the array as the maximum
1483 length. For example, given an expression like 'x ? array : "xyz"'
1484 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1485 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1486 stored in array.
3f343040
MS
1487 Return true if the range of the string lengths has been obtained
1488 from the upper bound of an array at the end of a struct. Such
1489 an array may hold a string that's longer than its upper bound
1490 due to it being used as a poor-man's flexible array member. */
88d0c3f0 1491
3f343040
MS
1492bool
1493get_range_strlen (tree arg, tree minmaxlen[2])
88d0c3f0
MS
1494{
1495 bitmap visited = NULL;
1496
1497 minmaxlen[0] = NULL_TREE;
1498 minmaxlen[1] = NULL_TREE;
1499
3f343040
MS
1500 bool flexarray = false;
1501 get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);
88d0c3f0
MS
1502
1503 if (visited)
1504 BITMAP_FREE (visited);
3f343040
MS
1505
1506 return flexarray;
88d0c3f0
MS
1507}
1508
dcb7fae2
RB
1509tree
1510get_maxval_strlen (tree arg, int type)
1511{
1512 bitmap visited = NULL;
88d0c3f0 1513 tree len[2] = { NULL_TREE, NULL_TREE };
3f343040
MS
1514
1515 bool dummy;
1516 if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
88d0c3f0 1517 len[1] = NULL_TREE;
dcb7fae2
RB
1518 if (visited)
1519 BITMAP_FREE (visited);
1520
88d0c3f0 1521 return len[1];
dcb7fae2
RB
1522}
1523
fef5a0d9
RB
1524
1525/* Fold function call to builtin strcpy with arguments DEST and SRC.
1526 If LEN is not NULL, it represents the length of the string to be
1527 copied. Return NULL_TREE if no simplification can be made. */
1528
1529static bool
1530gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 1531 tree dest, tree src)
fef5a0d9 1532{
cc8bea0a
MS
1533 gimple *stmt = gsi_stmt (*gsi);
1534 location_t loc = gimple_location (stmt);
fef5a0d9
RB
1535 tree fn;
1536
1537 /* If SRC and DEST are the same (and not volatile), return DEST. */
1538 if (operand_equal_p (src, dest, 0))
1539 {
cc8bea0a
MS
1540 tree func = gimple_call_fndecl (stmt);
1541
1542 warning_at (loc, OPT_Wrestrict,
1543 "%qD source argument is the same as destination",
1544 func);
1545
fef5a0d9
RB
1546 replace_call_with_value (gsi, dest);
1547 return true;
1548 }
1549
1550 if (optimize_function_for_size_p (cfun))
1551 return false;
1552
1553 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1554 if (!fn)
1555 return false;
1556
1579e1f8 1557 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1558 if (!len)
dcb7fae2 1559 return false;
fef5a0d9
RB
1560
1561 len = fold_convert_loc (loc, size_type_node, len);
1562 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1563 len = force_gimple_operand_gsi (gsi, len, true,
1564 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1565 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1566 replace_call_with_call_and_fold (gsi, repl);
1567 return true;
1568}
1569
1570/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1571 If SLEN is not NULL, it represents the length of the source string.
1572 Return NULL_TREE if no simplification can be made. */
1573
1574static bool
dcb7fae2
RB
1575gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1576 tree dest, tree src, tree len)
fef5a0d9 1577{
025d57f0
MS
1578 gimple *stmt = gsi_stmt (*gsi);
1579 location_t loc = gimple_location (stmt);
6a33d0ff 1580 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
fef5a0d9
RB
1581
1582 /* If the LEN parameter is zero, return DEST. */
1583 if (integer_zerop (len))
1584 {
6a33d0ff
MS
1585 /* Avoid warning if the destination refers to a an array/pointer
1586 decorate with attribute nonstring. */
1587 if (!nonstring)
1588 {
1589 tree fndecl = gimple_call_fndecl (stmt);
1590 gcall *call = as_a <gcall *> (stmt);
1591
1592 /* Warn about the lack of nul termination: the result is not
1593 a (nul-terminated) string. */
1594 tree slen = get_maxval_strlen (src, 0);
1595 if (slen && !integer_zerop (slen))
1596 warning_at (loc, OPT_Wstringop_truncation,
1597 "%G%qD destination unchanged after copying no bytes "
1598 "from a string of length %E",
1599 call, fndecl, slen);
1600 else
1601 warning_at (loc, OPT_Wstringop_truncation,
1602 "%G%qD destination unchanged after copying no bytes",
1603 call, fndecl);
1604 }
025d57f0 1605
fef5a0d9
RB
1606 replace_call_with_value (gsi, dest);
1607 return true;
1608 }
1609
1610 /* We can't compare slen with len as constants below if len is not a
1611 constant. */
dcb7fae2 1612 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
1613 return false;
1614
fef5a0d9 1615 /* Now, we must be passed a constant src ptr parameter. */
1579e1f8 1616 tree slen = get_maxval_strlen (src, 0);
dcb7fae2 1617 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
1618 return false;
1619
025d57f0
MS
1620 /* The size of the source string including the terminating nul. */
1621 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
fef5a0d9
RB
1622
1623 /* We do not support simplification of this case, though we do
1624 support it when expanding trees into RTL. */
1625 /* FIXME: generate a call to __builtin_memset. */
025d57f0 1626 if (tree_int_cst_lt (ssize, len))
fef5a0d9
RB
1627 return false;
1628
6a33d0ff 1629 if (!nonstring)
025d57f0 1630 {
6a33d0ff 1631 if (tree_int_cst_lt (len, slen))
025d57f0 1632 {
6a33d0ff
MS
1633 tree fndecl = gimple_call_fndecl (stmt);
1634 gcall *call = as_a <gcall *> (stmt);
1635
1636 warning_at (loc, OPT_Wstringop_truncation,
1637 (tree_int_cst_equal (size_one_node, len)
1638 ? G_("%G%qD output truncated copying %E byte "
1639 "from a string of length %E")
1640 : G_("%G%qD output truncated copying %E bytes "
1641 "from a string of length %E")),
1642 call, fndecl, len, slen);
1643 }
1644 else if (tree_int_cst_equal (len, slen))
1645 {
1646 tree fndecl = gimple_call_fndecl (stmt);
1647 gcall *call = as_a <gcall *> (stmt);
1648
1649 warning_at (loc, OPT_Wstringop_truncation,
1650 (tree_int_cst_equal (size_one_node, len)
1651 ? G_("%G%qD output truncated before terminating nul "
1652 "copying %E byte from a string of the same "
1653 "length")
1654 : G_("%G%qD output truncated before terminating nul "
1655 "copying %E bytes from a string of the same "
1656 "length")),
1657 call, fndecl, len);
025d57f0 1658 }
025d57f0
MS
1659 }
1660
fef5a0d9 1661 /* OK transform into builtin memcpy. */
025d57f0 1662 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
fef5a0d9
RB
1663 if (!fn)
1664 return false;
1665
1666 len = fold_convert_loc (loc, size_type_node, len);
1667 len = force_gimple_operand_gsi (gsi, len, true,
1668 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1669 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9 1670 replace_call_with_call_and_fold (gsi, repl);
025d57f0 1671
fef5a0d9
RB
1672 return true;
1673}
1674
71dea1dd
WD
1675/* Fold function call to builtin strchr or strrchr.
1676 If both arguments are constant, evaluate and fold the result,
1677 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
912d9ec3
WD
1678 In general strlen is significantly faster than strchr
1679 due to being a simpler operation. */
1680static bool
71dea1dd 1681gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
912d9ec3
WD
1682{
1683 gimple *stmt = gsi_stmt (*gsi);
1684 tree str = gimple_call_arg (stmt, 0);
1685 tree c = gimple_call_arg (stmt, 1);
1686 location_t loc = gimple_location (stmt);
71dea1dd
WD
1687 const char *p;
1688 char ch;
912d9ec3 1689
71dea1dd 1690 if (!gimple_call_lhs (stmt))
912d9ec3
WD
1691 return false;
1692
71dea1dd
WD
1693 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1694 {
1695 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1696
1697 if (p1 == NULL)
1698 {
1699 replace_call_with_value (gsi, integer_zero_node);
1700 return true;
1701 }
1702
1703 tree len = build_int_cst (size_type_node, p1 - p);
1704 gimple_seq stmts = NULL;
1705 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1706 POINTER_PLUS_EXPR, str, len);
1707 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1708 gsi_replace_with_seq_vops (gsi, stmts);
1709 return true;
1710 }
1711
1712 if (!integer_zerop (c))
912d9ec3
WD
1713 return false;
1714
71dea1dd 1715 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
c8952930 1716 if (is_strrchr && optimize_function_for_size_p (cfun))
71dea1dd
WD
1717 {
1718 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1719
c8952930 1720 if (strchr_fn)
71dea1dd
WD
1721 {
1722 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1723 replace_call_with_call_and_fold (gsi, repl);
1724 return true;
1725 }
1726
1727 return false;
1728 }
1729
912d9ec3
WD
1730 tree len;
1731 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1732
1733 if (!strlen_fn)
1734 return false;
1735
1736 /* Create newstr = strlen (str). */
1737 gimple_seq stmts = NULL;
1738 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1739 gimple_set_location (new_stmt, loc);
a15ebbcd 1740 len = create_tmp_reg_or_ssa_name (size_type_node);
912d9ec3
WD
1741 gimple_call_set_lhs (new_stmt, len);
1742 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1743
1744 /* Create (str p+ strlen (str)). */
1745 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1746 POINTER_PLUS_EXPR, str, len);
1747 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1748 gsi_replace_with_seq_vops (gsi, stmts);
1749 /* gsi now points at the assignment to the lhs, get a
1750 stmt iterator to the strlen.
1751 ??? We can't use gsi_for_stmt as that doesn't work when the
1752 CFG isn't built yet. */
1753 gimple_stmt_iterator gsi2 = *gsi;
1754 gsi_prev (&gsi2);
1755 fold_stmt (&gsi2);
1756 return true;
1757}
1758
c8952930
JJ
1759/* Fold function call to builtin strstr.
1760 If both arguments are constant, evaluate and fold the result,
1761 additionally fold strstr (x, "") into x and strstr (x, "c")
1762 into strchr (x, 'c'). */
1763static bool
1764gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1765{
1766 gimple *stmt = gsi_stmt (*gsi);
1767 tree haystack = gimple_call_arg (stmt, 0);
1768 tree needle = gimple_call_arg (stmt, 1);
1769 const char *p, *q;
1770
1771 if (!gimple_call_lhs (stmt))
1772 return false;
1773
1774 q = c_getstr (needle);
1775 if (q == NULL)
1776 return false;
1777
1778 if ((p = c_getstr (haystack)))
1779 {
1780 const char *r = strstr (p, q);
1781
1782 if (r == NULL)
1783 {
1784 replace_call_with_value (gsi, integer_zero_node);
1785 return true;
1786 }
1787
1788 tree len = build_int_cst (size_type_node, r - p);
1789 gimple_seq stmts = NULL;
1790 gimple *new_stmt
1791 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1792 haystack, len);
1793 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1794 gsi_replace_with_seq_vops (gsi, stmts);
1795 return true;
1796 }
1797
1798 /* For strstr (x, "") return x. */
1799 if (q[0] == '\0')
1800 {
1801 replace_call_with_value (gsi, haystack);
1802 return true;
1803 }
1804
1805 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1806 if (q[1] == '\0')
1807 {
1808 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1809 if (strchr_fn)
1810 {
1811 tree c = build_int_cst (integer_type_node, q[0]);
1812 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1813 replace_call_with_call_and_fold (gsi, repl);
1814 return true;
1815 }
1816 }
1817
1818 return false;
1819}
1820
fef5a0d9
RB
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return true if the call was simplified in place (either into a plain
   use of DST when SRC is the empty string, or into an explicit
   strlen/memcpy sequence), false if no simplification was possible.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen+memcpy expansion below trades size for speed; only do
     it when this block is optimized for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (src) + 1 bytes so the terminating NUL is included.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; materialize that as an assignment to the
	 original LHS.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1912
07f1cf56
RB
1913/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1914 are the arguments to the call. */
1915
1916static bool
1917gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1918{
355fe088 1919 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
1920 tree dest = gimple_call_arg (stmt, 0);
1921 tree src = gimple_call_arg (stmt, 1);
1922 tree size = gimple_call_arg (stmt, 2);
1923 tree fn;
1924 const char *p;
1925
1926
1927 p = c_getstr (src);
1928 /* If the SRC parameter is "", return DEST. */
1929 if (p && *p == '\0')
1930 {
1931 replace_call_with_value (gsi, dest);
1932 return true;
1933 }
1934
1935 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1936 return false;
1937
1938 /* If __builtin_strcat_chk is used, assume strcat is available. */
1939 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1940 if (!fn)
1941 return false;
1942
355fe088 1943 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
1944 replace_call_with_call_and_fold (gsi, repl);
1945 return true;
1946}
1947
ad03a744
RB
/* Simplify a call to the strncat builtin.  Lowers
   strncat (dst, src, len) to a use of DST or a call to strcat when the
   bound LEN and the source string are known, diagnosing suspicious
   bounds along the way.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* Everything below needs both a constant bound and a known source
     string.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  /* Suppress any further diagnostics on this statement once one
	     has been issued.  */
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);

      /* To avoid certain truncation the specified bound should also
	 not be equal to (or less than) the length of the source.  */
      location_t loc = gimple_location (stmt);
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  /* LEN >= strlen (SRC) here, so the bound never truncates and the call
     behaves like strcat.  */
  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2034
745583f9
RB
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  Returns true if the call was simplified into a use of
   DEST, a call to __strcat_chk, or a call to strncat.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* SIZE not all-ones: a real object size is known, so only transform
     to another checked function, never to plain strncat.  */
  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2090
a918bfbf
ML
2091/* Build and append gimple statements to STMTS that would load a first
2092 character of a memory location identified by STR. LOC is location
2093 of the statement. */
2094
2095static tree
2096gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2097{
2098 tree var;
2099
2100 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2101 tree cst_uchar_ptr_node
2102 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2103 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2104
2105 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2106 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2107 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2108
2109 gimple_assign_set_lhs (stmt, var);
2110 gimple_seq_add_stmt_without_update (stmts, stmt);
2111
2112 return var;
2113}
2114
/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
   FCODE is the name of the builtin.  Handles constant-operand
   evaluation, empty-string operands, length-one strncmp, and
   strncmp->strcmp conversion when the bound exceeds a known string
   length.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  /* -1 means "bound unknown or not applicable".  */
  HOST_WIDE_INT length = -1;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      tree len = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (len))
	length = tree_to_uhwi (len);
    }

  /* If the LEN parameter is zero, return zero.  */
  if (length == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  const char *p1 = c_getstr (str1);
  const char *p2 = c_getstr (str2);

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	  {
	    r = strcmp (p1, p2);
	    known_result = true;
	    break;
	  }
	case BUILT_IN_STRNCMP:
	  {
	    if (length == -1)
	      break;
	    r = strncmp (p1, p2, length);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the string are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (length == -1)
	      break;
	    /* Case-sensitive equality implies case-insensitive equality,
	       so only a zero strncmp result is conclusive here.  */
	    r = strncmp (p1, p2, length);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  /* Whether at least one character is certainly compared: either the
     bound is positive or the function takes no bound at all.  */
  bool nonzero_length = length >= 1
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  /* Widen to int before negating to match the builtin's
	     return type.  */
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (fcode == BUILT_IN_STRNCMP && length == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If length is larger than the length of one constant string,
     replace strncmp with corresponding strcmp */
  if (fcode == BUILT_IN_STRNCMP
      && length > 0
      && ((p2 && (size_t) length > strlen (p2))
	  || (p1 && (size_t) length > strlen (p1))))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
2286
488c6247
ML
/* Fold a call to the memchr pointed by GSI iterator.  Folds to a null
   pointer or a constant offset into the first argument when both the
   searched character and enough of the string are known at compile
   time.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* Need a constant searched character (representable on the target)
     and a constant length.  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = c_getstr (arg1, &string_length);

  if (p1)
    {
      /* Only search the part of the buffer whose contents are known.  */
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  /* A miss is only conclusive if the whole searched range was
	     known; otherwise C might still occur past STRING_LENGTH.  */
	  if (length <= string_length)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  /* Found: fold to ARG1 + constant offset.  */
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
a918bfbf 2348
fef5a0d9
RB
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return true if the call was replaced (dropped
   entirely for an empty string, or turned into fputc/fwrite), false
   if no simplification was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
2425
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.  FCODE is
   the BUILT_IN_* code of the builtin.  Folds to a use of DEST (or
   DEST+LEN) for self-copies, and drops the object-size check when SIZE
   is unknown or provably large enough.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      /* Overlap is only well-defined for memmove; diagnose it for the
	 other functions.  */
      if (fcode != BUILT_IN_MEMMOVE && fcode != BUILT_IN_MEMMOVE_CHK)
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  /* mempcpy returns DEST + LEN.  */
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* The copy could exceed the object size; keep the check.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2535
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.  FCODE is the
   BUILT_IN_* code of the builtin.  Drops the object-size check when it
   is provably satisfied, or converts to __strcpy_chk/__memcpy_chk when
   only partial information is available.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      tree func = gimple_call_fndecl (stmt);

      warning_at (loc, OPT_Wrestrict,
		  "%qD source argument is the same as destination",
		  func);

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy LEN + 1 bytes to include the terminating NUL.  */
	      gimple_seq stmts = NULL;
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* SIZE must strictly exceed the string length to leave room for
	 the terminating NUL.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2632
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin.  Converts to the unchecked st{r,p}ncpy when the object-size
   check is provably satisfied or the size is unknown.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* The copy could exceed the object size; keep the check.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2692
2625bb5d
RB
2693/* Fold function call to builtin stpcpy with arguments DEST and SRC.
2694 Return NULL_TREE if no simplification can be made. */
2695
2696static bool
2697gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2698{
2699 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2700 location_t loc = gimple_location (stmt);
2701 tree dest = gimple_call_arg (stmt, 0);
2702 tree src = gimple_call_arg (stmt, 1);
2703 tree fn, len, lenp1;
2704
2705 /* If the result is unused, replace stpcpy with strcpy. */
2706 if (gimple_call_lhs (stmt) == NULL_TREE)
2707 {
2708 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2709 if (!fn)
2710 return false;
2711 gimple_call_set_fndecl (stmt, fn);
2712 fold_stmt (gsi);
2713 return true;
2714 }
2715
2716 len = c_strlen (src, 1);
2717 if (!len
2718 || TREE_CODE (len) != INTEGER_CST)
2719 return false;
2720
2721 if (optimize_function_for_size_p (cfun)
2722 /* If length is zero it's small enough. */
2723 && !integer_zerop (len))
2724 return false;
2725
2726 /* If the source has a known length replace stpcpy with memcpy. */
2727 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2728 if (!fn)
2729 return false;
2730
2731 gimple_seq stmts = NULL;
2732 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2733 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2734 tem, build_int_cst (size_type_node, 1));
2735 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2736 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2737 gimple_set_vuse (repl, gimple_vuse (stmt));
2738 gimple_set_vdef (repl, gimple_vdef (stmt));
2739 if (gimple_vdef (repl)
2740 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2741 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2742 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2743 /* Replace the result with dest + len. */
2744 stmts = NULL;
2745 tem = gimple_convert (&stmts, loc, sizetype, len);
2746 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2747 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2748 POINTER_PLUS_EXPR, dest, tem);
f6b4dc28 2749 gsi_replace (gsi, ret, false);
2625bb5d
RB
2750 /* Finally fold the memcpy call. */
2751 gimple_stmt_iterator gsi2 = *gsi;
2752 gsi_prev (&gsi2);
2753 fold_stmt (&gsi2);
2754 return true;
2755}
2756
fef5a0d9
RB
/* Fold a call to {,v}snprintf_chk at *GSI.  Return true if the call
   was converted to a plain {,v}snprintf in place, false if a normal
   call should be emitted instead.  FCODE is either
   BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* The write could exceed the object size; keep the check.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  /* Shift the remaining varargs left over the dropped FLAG and SIZE
     slots, then shrink the statement by two operands.  */
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
cbdd87d4 2837
fef5a0d9
RB
2838/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2839 Return NULL_TREE if a normal call should be emitted rather than
2840 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2841 or BUILT_IN_VSPRINTF_CHK. */
cbdd87d4 2842
fef5a0d9
RB
2843static bool
2844gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2845 enum built_in_function fcode)
2846{
538dd0b7 2847 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2848 tree dest, size, len, fn, fmt, flag;
2849 const char *fmt_str;
2850 unsigned nargs = gimple_call_num_args (stmt);
cbdd87d4 2851
fef5a0d9
RB
2852 /* Verify the required arguments in the original call. */
2853 if (nargs < 4)
2854 return false;
2855 dest = gimple_call_arg (stmt, 0);
2856 flag = gimple_call_arg (stmt, 1);
2857 size = gimple_call_arg (stmt, 2);
2858 fmt = gimple_call_arg (stmt, 3);
2859
2860 if (! tree_fits_uhwi_p (size))
2861 return false;
2862
2863 len = NULL_TREE;
2864
2865 if (!init_target_chars ())
2866 return false;
2867
2868 /* Check whether the format is a literal string constant. */
2869 fmt_str = c_getstr (fmt);
2870 if (fmt_str != NULL)
2871 {
2872 /* If the format doesn't contain % args or %%, we know the size. */
2873 if (strchr (fmt_str, target_percent) == 0)
cbdd87d4 2874 {
fef5a0d9
RB
2875 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2876 len = build_int_cstu (size_type_node, strlen (fmt_str));
2877 }
2878 /* If the format is "%s" and first ... argument is a string literal,
2879 we know the size too. */
2880 else if (fcode == BUILT_IN_SPRINTF_CHK
2881 && strcmp (fmt_str, target_percent_s) == 0)
2882 {
2883 tree arg;
cbdd87d4 2884
fef5a0d9
RB
2885 if (nargs == 5)
2886 {
2887 arg = gimple_call_arg (stmt, 4);
2888 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2889 {
2890 len = c_strlen (arg, 1);
2891 if (! len || ! tree_fits_uhwi_p (len))
2892 len = NULL_TREE;
2893 }
2894 }
2895 }
2896 }
cbdd87d4 2897
fef5a0d9
RB
2898 if (! integer_all_onesp (size))
2899 {
2900 if (! len || ! tree_int_cst_lt (len, size))
2901 return false;
2902 }
cbdd87d4 2903
fef5a0d9
RB
2904 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2905 or if format doesn't contain % chars or is "%s". */
2906 if (! integer_zerop (flag))
2907 {
2908 if (fmt_str == NULL)
2909 return false;
2910 if (strchr (fmt_str, target_percent) != NULL
2911 && strcmp (fmt_str, target_percent_s))
2912 return false;
2913 }
cbdd87d4 2914
fef5a0d9
RB
2915 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2916 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2917 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2918 if (!fn)
2919 return false;
2920
2921 /* Replace the called function and the first 4 argument by 2 retaining
2922 trailing varargs. */
2923 gimple_call_set_fndecl (stmt, fn);
2924 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2925 gimple_call_set_arg (stmt, 0, dest);
2926 gimple_call_set_arg (stmt, 1, fmt);
2927 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2928 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2929 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2930 fold_stmt (gsi);
2931 return true;
2932}
2933
35770bb2
RB
2934/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2935 ORIG may be null if this is a 2-argument call. We don't attempt to
2936 simplify calls with more than 3 arguments.
2937
a104bd88 2938 Return true if simplification was possible, otherwise false. */
35770bb2 2939
a104bd88 2940bool
dcb7fae2 2941gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
35770bb2 2942{
355fe088 2943 gimple *stmt = gsi_stmt (*gsi);
35770bb2
RB
2944 tree dest = gimple_call_arg (stmt, 0);
2945 tree fmt = gimple_call_arg (stmt, 1);
2946 tree orig = NULL_TREE;
2947 const char *fmt_str = NULL;
2948
2949 /* Verify the required arguments in the original call. We deal with two
2950 types of sprintf() calls: 'sprintf (str, fmt)' and
2951 'sprintf (dest, "%s", orig)'. */
2952 if (gimple_call_num_args (stmt) > 3)
2953 return false;
2954
2955 if (gimple_call_num_args (stmt) == 3)
2956 orig = gimple_call_arg (stmt, 2);
2957
2958 /* Check whether the format is a literal string constant. */
2959 fmt_str = c_getstr (fmt);
2960 if (fmt_str == NULL)
2961 return false;
2962
2963 if (!init_target_chars ())
2964 return false;
2965
2966 /* If the format doesn't contain % args or %%, use strcpy. */
2967 if (strchr (fmt_str, target_percent) == NULL)
2968 {
2969 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2970
2971 if (!fn)
2972 return false;
2973
2974 /* Don't optimize sprintf (buf, "abc", ptr++). */
2975 if (orig)
2976 return false;
2977
2978 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2979 'format' is known to contain no % formats. */
2980 gimple_seq stmts = NULL;
355fe088 2981 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
35770bb2
RB
2982 gimple_seq_add_stmt_without_update (&stmts, repl);
2983 if (gimple_call_lhs (stmt))
2984 {
2985 repl = gimple_build_assign (gimple_call_lhs (stmt),
2986 build_int_cst (integer_type_node,
2987 strlen (fmt_str)));
2988 gimple_seq_add_stmt_without_update (&stmts, repl);
2989 gsi_replace_with_seq_vops (gsi, stmts);
2990 /* gsi now points at the assignment to the lhs, get a
2991 stmt iterator to the memcpy call.
2992 ??? We can't use gsi_for_stmt as that doesn't work when the
2993 CFG isn't built yet. */
2994 gimple_stmt_iterator gsi2 = *gsi;
2995 gsi_prev (&gsi2);
2996 fold_stmt (&gsi2);
2997 }
2998 else
2999 {
3000 gsi_replace_with_seq_vops (gsi, stmts);
3001 fold_stmt (gsi);
3002 }
3003 return true;
3004 }
3005
3006 /* If the format is "%s", use strcpy if the result isn't used. */
3007 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3008 {
3009 tree fn;
3010 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3011
3012 if (!fn)
3013 return false;
3014
3015 /* Don't crash on sprintf (str1, "%s"). */
3016 if (!orig)
3017 return false;
3018
dcb7fae2
RB
3019 tree orig_len = NULL_TREE;
3020 if (gimple_call_lhs (stmt))
35770bb2 3021 {
dcb7fae2 3022 orig_len = get_maxval_strlen (orig, 0);
d7e78447 3023 if (!orig_len)
35770bb2
RB
3024 return false;
3025 }
3026
3027 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3028 gimple_seq stmts = NULL;
355fe088 3029 gimple *repl = gimple_build_call (fn, 2, dest, orig);
35770bb2
RB
3030 gimple_seq_add_stmt_without_update (&stmts, repl);
3031 if (gimple_call_lhs (stmt))
3032 {
d7e78447
RB
3033 if (!useless_type_conversion_p (integer_type_node,
3034 TREE_TYPE (orig_len)))
3035 orig_len = fold_convert (integer_type_node, orig_len);
3036 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
35770bb2
RB
3037 gimple_seq_add_stmt_without_update (&stmts, repl);
3038 gsi_replace_with_seq_vops (gsi, stmts);
3039 /* gsi now points at the assignment to the lhs, get a
3040 stmt iterator to the memcpy call.
3041 ??? We can't use gsi_for_stmt as that doesn't work when the
3042 CFG isn't built yet. */
3043 gimple_stmt_iterator gsi2 = *gsi;
3044 gsi_prev (&gsi2);
3045 fold_stmt (&gsi2);
3046 }
3047 else
3048 {
3049 gsi_replace_with_seq_vops (gsi, stmts);
3050 fold_stmt (gsi);
3051 }
3052 return true;
3053 }
3054 return false;
3055}
3056
d7e78447
RB
3057/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3058 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3059 attempt to simplify calls with more than 4 arguments.
35770bb2 3060
a104bd88 3061 Return true if simplification was possible, otherwise false. */
d7e78447 3062
a104bd88 3063bool
dcb7fae2 3064gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
d7e78447 3065{
538dd0b7 3066 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
d7e78447
RB
3067 tree dest = gimple_call_arg (stmt, 0);
3068 tree destsize = gimple_call_arg (stmt, 1);
3069 tree fmt = gimple_call_arg (stmt, 2);
3070 tree orig = NULL_TREE;
3071 const char *fmt_str = NULL;
3072
3073 if (gimple_call_num_args (stmt) > 4)
3074 return false;
3075
3076 if (gimple_call_num_args (stmt) == 4)
3077 orig = gimple_call_arg (stmt, 3);
3078
3079 if (!tree_fits_uhwi_p (destsize))
3080 return false;
3081 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3082
3083 /* Check whether the format is a literal string constant. */
3084 fmt_str = c_getstr (fmt);
3085 if (fmt_str == NULL)
3086 return false;
3087
3088 if (!init_target_chars ())
3089 return false;
3090
3091 /* If the format doesn't contain % args or %%, use strcpy. */
3092 if (strchr (fmt_str, target_percent) == NULL)
3093 {
3094 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3095 if (!fn)
3096 return false;
3097
3098 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3099 if (orig)
3100 return false;
3101
3102 /* We could expand this as
3103 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3104 or to
3105 memcpy (str, fmt_with_nul_at_cstm1, cst);
3106 but in the former case that might increase code size
3107 and in the latter case grow .rodata section too much.
3108 So punt for now. */
3109 size_t len = strlen (fmt_str);
3110 if (len >= destlen)
3111 return false;
3112
3113 gimple_seq stmts = NULL;
355fe088 3114 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
d7e78447
RB
3115 gimple_seq_add_stmt_without_update (&stmts, repl);
3116 if (gimple_call_lhs (stmt))
3117 {
3118 repl = gimple_build_assign (gimple_call_lhs (stmt),
3119 build_int_cst (integer_type_node, len));
3120 gimple_seq_add_stmt_without_update (&stmts, repl);
3121 gsi_replace_with_seq_vops (gsi, stmts);
3122 /* gsi now points at the assignment to the lhs, get a
3123 stmt iterator to the memcpy call.
3124 ??? We can't use gsi_for_stmt as that doesn't work when the
3125 CFG isn't built yet. */
3126 gimple_stmt_iterator gsi2 = *gsi;
3127 gsi_prev (&gsi2);
3128 fold_stmt (&gsi2);
3129 }
3130 else
3131 {
3132 gsi_replace_with_seq_vops (gsi, stmts);
3133 fold_stmt (gsi);
3134 }
3135 return true;
3136 }
3137
3138 /* If the format is "%s", use strcpy if the result isn't used. */
3139 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3140 {
3141 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3142 if (!fn)
3143 return false;
3144
3145 /* Don't crash on snprintf (str1, cst, "%s"). */
3146 if (!orig)
3147 return false;
3148
dcb7fae2 3149 tree orig_len = get_maxval_strlen (orig, 0);
af9db3a7 3150 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
dcb7fae2 3151 return false;
d7e78447
RB
3152
3153 /* We could expand this as
3154 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3155 or to
3156 memcpy (str1, str2_with_nul_at_cstm1, cst);
3157 but in the former case that might increase code size
3158 and in the latter case grow .rodata section too much.
3159 So punt for now. */
3160 if (compare_tree_int (orig_len, destlen) >= 0)
3161 return false;
3162
3163 /* Convert snprintf (str1, cst, "%s", str2) into
3164 strcpy (str1, str2) if strlen (str2) < cst. */
3165 gimple_seq stmts = NULL;
355fe088 3166 gimple *repl = gimple_build_call (fn, 2, dest, orig);
d7e78447
RB
3167 gimple_seq_add_stmt_without_update (&stmts, repl);
3168 if (gimple_call_lhs (stmt))
3169 {
3170 if (!useless_type_conversion_p (integer_type_node,
3171 TREE_TYPE (orig_len)))
3172 orig_len = fold_convert (integer_type_node, orig_len);
3173 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3174 gimple_seq_add_stmt_without_update (&stmts, repl);
3175 gsi_replace_with_seq_vops (gsi, stmts);
3176 /* gsi now points at the assignment to the lhs, get a
3177 stmt iterator to the memcpy call.
3178 ??? We can't use gsi_for_stmt as that doesn't work when the
3179 CFG isn't built yet. */
3180 gimple_stmt_iterator gsi2 = *gsi;
3181 gsi_prev (&gsi2);
3182 fold_stmt (&gsi2);
3183 }
3184 else
3185 {
3186 gsi_replace_with_seq_vops (gsi, stmts);
3187 fold_stmt (gsi);
3188 }
3189 return true;
3190 }
3191 return false;
3192}
35770bb2 3193
edd7ae68
RB
3194/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3195 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3196 more than 3 arguments, and ARG may be null in the 2-argument case.
3197
3198 Return NULL_TREE if no simplification was possible, otherwise return the
3199 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3200 code of the function to be simplified. */
3201
3202static bool
3203gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3204 tree fp, tree fmt, tree arg,
3205 enum built_in_function fcode)
3206{
3207 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3208 tree fn_fputc, fn_fputs;
3209 const char *fmt_str = NULL;
3210
3211 /* If the return value is used, don't do the transformation. */
3212 if (gimple_call_lhs (stmt) != NULL_TREE)
3213 return false;
3214
3215 /* Check whether the format is a literal string constant. */
3216 fmt_str = c_getstr (fmt);
3217 if (fmt_str == NULL)
3218 return false;
3219
3220 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3221 {
3222 /* If we're using an unlocked function, assume the other
3223 unlocked functions exist explicitly. */
3224 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3225 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3226 }
3227 else
3228 {
3229 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3230 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3231 }
3232
3233 if (!init_target_chars ())
3234 return false;
3235
3236 /* If the format doesn't contain % args or %%, use strcpy. */
3237 if (strchr (fmt_str, target_percent) == NULL)
3238 {
3239 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3240 && arg)
3241 return false;
3242
3243 /* If the format specifier was "", fprintf does nothing. */
3244 if (fmt_str[0] == '\0')
3245 {
3246 replace_call_with_value (gsi, NULL_TREE);
3247 return true;
3248 }
3249
3250 /* When "string" doesn't contain %, replace all cases of
3251 fprintf (fp, string) with fputs (string, fp). The fputs
3252 builtin will take care of special cases like length == 1. */
3253 if (fn_fputs)
3254 {
3255 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3256 replace_call_with_call_and_fold (gsi, repl);
3257 return true;
3258 }
3259 }
3260
3261 /* The other optimizations can be done only on the non-va_list variants. */
3262 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3263 return false;
3264
3265 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3266 else if (strcmp (fmt_str, target_percent_s) == 0)
3267 {
3268 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3269 return false;
3270 if (fn_fputs)
3271 {
3272 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3273 replace_call_with_call_and_fold (gsi, repl);
3274 return true;
3275 }
3276 }
3277
3278 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3279 else if (strcmp (fmt_str, target_percent_c) == 0)
3280 {
3281 if (!arg
3282 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3283 return false;
3284 if (fn_fputc)
3285 {
3286 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3287 replace_call_with_call_and_fold (gsi, repl);
3288 return true;
3289 }
3290 }
3291
3292 return false;
3293}
3294
ad03a744
RB
3295/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3296 FMT and ARG are the arguments to the call; we don't fold cases with
3297 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3298
3299 Return NULL_TREE if no simplification was possible, otherwise return the
3300 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3301 code of the function to be simplified. */
3302
3303static bool
3304gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3305 tree arg, enum built_in_function fcode)
3306{
3307 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3308 tree fn_putchar, fn_puts, newarg;
3309 const char *fmt_str = NULL;
3310
3311 /* If the return value is used, don't do the transformation. */
3312 if (gimple_call_lhs (stmt) != NULL_TREE)
3313 return false;
3314
3315 /* Check whether the format is a literal string constant. */
3316 fmt_str = c_getstr (fmt);
3317 if (fmt_str == NULL)
3318 return false;
3319
3320 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3321 {
3322 /* If we're using an unlocked function, assume the other
3323 unlocked functions exist explicitly. */
3324 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3325 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3326 }
3327 else
3328 {
3329 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3330 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3331 }
3332
3333 if (!init_target_chars ())
3334 return false;
3335
3336 if (strcmp (fmt_str, target_percent_s) == 0
3337 || strchr (fmt_str, target_percent) == NULL)
3338 {
3339 const char *str;
3340
3341 if (strcmp (fmt_str, target_percent_s) == 0)
3342 {
3343 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3344 return false;
3345
3346 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3347 return false;
3348
3349 str = c_getstr (arg);
3350 if (str == NULL)
3351 return false;
3352 }
3353 else
3354 {
3355 /* The format specifier doesn't contain any '%' characters. */
3356 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3357 && arg)
3358 return false;
3359 str = fmt_str;
3360 }
3361
3362 /* If the string was "", printf does nothing. */
3363 if (str[0] == '\0')
3364 {
3365 replace_call_with_value (gsi, NULL_TREE);
3366 return true;
3367 }
3368
3369 /* If the string has length of 1, call putchar. */
3370 if (str[1] == '\0')
3371 {
3372 /* Given printf("c"), (where c is any one character,)
3373 convert "c"[0] to an int and pass that to the replacement
3374 function. */
3375 newarg = build_int_cst (integer_type_node, str[0]);
3376 if (fn_putchar)
3377 {
3378 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3379 replace_call_with_call_and_fold (gsi, repl);
3380 return true;
3381 }
3382 }
3383 else
3384 {
3385 /* If the string was "string\n", call puts("string"). */
3386 size_t len = strlen (str);
3387 if ((unsigned char)str[len - 1] == target_newline
3388 && (size_t) (int) len == len
3389 && (int) len > 0)
3390 {
3391 char *newstr;
3392 tree offset_node, string_cst;
3393
3394 /* Create a NUL-terminated string that's one char shorter
3395 than the original, stripping off the trailing '\n'. */
3396 newarg = build_string_literal (len, str);
3397 string_cst = string_constant (newarg, &offset_node);
3398 gcc_checking_assert (string_cst
3399 && (TREE_STRING_LENGTH (string_cst)
3400 == (int) len)
3401 && integer_zerop (offset_node)
3402 && (unsigned char)
3403 TREE_STRING_POINTER (string_cst)[len - 1]
3404 == target_newline);
3405 /* build_string_literal creates a new STRING_CST,
3406 modify it in place to avoid double copying. */
3407 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3408 newstr[len - 1] = '\0';
3409 if (fn_puts)
3410 {
3411 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3412 replace_call_with_call_and_fold (gsi, repl);
3413 return true;
3414 }
3415 }
3416 else
3417 /* We'd like to arrange to call fputs(string,stdout) here,
3418 but we need stdout and don't have a way to get it yet. */
3419 return false;
3420 }
3421 }
3422
3423 /* The other optimizations can be done only on the non-va_list variants. */
3424 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3425 return false;
3426
3427 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3428 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3429 {
3430 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3431 return false;
3432 if (fn_puts)
3433 {
3434 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3435 replace_call_with_call_and_fold (gsi, repl);
3436 return true;
3437 }
3438 }
3439
3440 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3441 else if (strcmp (fmt_str, target_percent_c) == 0)
3442 {
3443 if (!arg || ! useless_type_conversion_p (integer_type_node,
3444 TREE_TYPE (arg)))
3445 return false;
3446 if (fn_putchar)
3447 {
3448 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3449 replace_call_with_call_and_fold (gsi, repl);
3450 return true;
3451 }
3452 }
3453
3454 return false;
3455}
3456
edd7ae68 3457
fef5a0d9
RB
3458
3459/* Fold a call to __builtin_strlen with known length LEN. */
3460
3461static bool
dcb7fae2 3462gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 3463{
355fe088 3464 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 3465 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
fef5a0d9
RB
3466 if (!len)
3467 return false;
2813904b 3468 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
fef5a0d9
RB
3469 replace_call_with_value (gsi, len);
3470 return true;
cbdd87d4
RG
3471}
3472
48126138
NS
3473/* Fold a call to __builtin_acc_on_device. */
3474
3475static bool
3476gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3477{
3478 /* Defer folding until we know which compiler we're in. */
3479 if (symtab->state != EXPANSION)
3480 return false;
3481
3482 unsigned val_host = GOMP_DEVICE_HOST;
3483 unsigned val_dev = GOMP_DEVICE_NONE;
3484
3485#ifdef ACCEL_COMPILER
3486 val_host = GOMP_DEVICE_NOT_HOST;
3487 val_dev = ACCEL_COMPILER_acc_device;
3488#endif
3489
3490 location_t loc = gimple_location (gsi_stmt (*gsi));
3491
3492 tree host_eq = make_ssa_name (boolean_type_node);
3493 gimple *host_ass = gimple_build_assign
3494 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3495 gimple_set_location (host_ass, loc);
3496 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3497
3498 tree dev_eq = make_ssa_name (boolean_type_node);
3499 gimple *dev_ass = gimple_build_assign
3500 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3501 gimple_set_location (dev_ass, loc);
3502 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3503
3504 tree result = make_ssa_name (boolean_type_node);
3505 gimple *result_ass = gimple_build_assign
3506 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3507 gimple_set_location (result_ass, loc);
3508 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3509
3510 replace_call_with_value (gsi, result);
3511
3512 return true;
3513}
cbdd87d4 3514
fe75f732
PK
3515/* Fold realloc (0, n) -> malloc (n). */
3516
3517static bool
3518gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3519{
3520 gimple *stmt = gsi_stmt (*gsi);
3521 tree arg = gimple_call_arg (stmt, 0);
3522 tree size = gimple_call_arg (stmt, 1);
3523
3524 if (operand_equal_p (arg, null_pointer_node, 0))
3525 {
3526 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3527 if (fn_malloc)
3528 {
3529 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3530 replace_call_with_call_and_fold (gsi, repl);
3531 return true;
3532 }
3533 }
3534 return false;
3535}
3536
dcb7fae2
RB
3537/* Fold the non-target builtin at *GSI and return whether any simplification
3538 was made. */
cbdd87d4 3539
fef5a0d9 3540static bool
dcb7fae2 3541gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 3542{
538dd0b7 3543 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 3544 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 3545
dcb7fae2
RB
3546 /* Give up for always_inline inline builtins until they are
3547 inlined. */
3548 if (avoid_folding_inline_builtin (callee))
3549 return false;
cbdd87d4 3550
edd7ae68
RB
3551 unsigned n = gimple_call_num_args (stmt);
3552 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3553 switch (fcode)
cbdd87d4 3554 {
b3d8d88e
MS
3555 case BUILT_IN_BCMP:
3556 return gimple_fold_builtin_bcmp (gsi);
3557 case BUILT_IN_BCOPY:
3558 return gimple_fold_builtin_bcopy (gsi);
dcb7fae2 3559 case BUILT_IN_BZERO:
b3d8d88e
MS
3560 return gimple_fold_builtin_bzero (gsi);
3561
dcb7fae2
RB
3562 case BUILT_IN_MEMSET:
3563 return gimple_fold_builtin_memset (gsi,
3564 gimple_call_arg (stmt, 1),
3565 gimple_call_arg (stmt, 2));
dcb7fae2
RB
3566 case BUILT_IN_MEMCPY:
3567 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3568 gimple_call_arg (stmt, 1), 0);
3569 case BUILT_IN_MEMPCPY:
3570 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3571 gimple_call_arg (stmt, 1), 1);
3572 case BUILT_IN_MEMMOVE:
3573 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3574 gimple_call_arg (stmt, 1), 3);
3575 case BUILT_IN_SPRINTF_CHK:
3576 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 3577 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
3578 case BUILT_IN_STRCAT_CHK:
3579 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
3580 case BUILT_IN_STRNCAT_CHK:
3581 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 3582 case BUILT_IN_STRLEN:
dcb7fae2 3583 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 3584 case BUILT_IN_STRCPY:
dcb7fae2 3585 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 3586 gimple_call_arg (stmt, 0),
dcb7fae2 3587 gimple_call_arg (stmt, 1));
cbdd87d4 3588 case BUILT_IN_STRNCPY:
dcb7fae2 3589 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
3590 gimple_call_arg (stmt, 0),
3591 gimple_call_arg (stmt, 1),
dcb7fae2 3592 gimple_call_arg (stmt, 2));
9a7eefec 3593 case BUILT_IN_STRCAT:
dcb7fae2
RB
3594 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3595 gimple_call_arg (stmt, 1));
ad03a744
RB
3596 case BUILT_IN_STRNCAT:
3597 return gimple_fold_builtin_strncat (gsi);
71dea1dd 3598 case BUILT_IN_INDEX:
912d9ec3 3599 case BUILT_IN_STRCHR:
71dea1dd
WD
3600 return gimple_fold_builtin_strchr (gsi, false);
3601 case BUILT_IN_RINDEX:
3602 case BUILT_IN_STRRCHR:
3603 return gimple_fold_builtin_strchr (gsi, true);
c8952930
JJ
3604 case BUILT_IN_STRSTR:
3605 return gimple_fold_builtin_strstr (gsi);
a918bfbf
ML
3606 case BUILT_IN_STRCMP:
3607 case BUILT_IN_STRCASECMP:
3608 case BUILT_IN_STRNCMP:
3609 case BUILT_IN_STRNCASECMP:
3610 return gimple_fold_builtin_string_compare (gsi);
488c6247
ML
3611 case BUILT_IN_MEMCHR:
3612 return gimple_fold_builtin_memchr (gsi);
cbdd87d4 3613 case BUILT_IN_FPUTS:
dcb7fae2
RB
3614 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3615 gimple_call_arg (stmt, 1), false);
cbdd87d4 3616 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
3617 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3618 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
3619 case BUILT_IN_MEMCPY_CHK:
3620 case BUILT_IN_MEMPCPY_CHK:
3621 case BUILT_IN_MEMMOVE_CHK:
3622 case BUILT_IN_MEMSET_CHK:
dcb7fae2 3623 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
3624 gimple_call_arg (stmt, 0),
3625 gimple_call_arg (stmt, 1),
3626 gimple_call_arg (stmt, 2),
3627 gimple_call_arg (stmt, 3),
edd7ae68 3628 fcode);
2625bb5d
RB
3629 case BUILT_IN_STPCPY:
3630 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
3631 case BUILT_IN_STRCPY_CHK:
3632 case BUILT_IN_STPCPY_CHK:
dcb7fae2 3633 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
3634 gimple_call_arg (stmt, 0),
3635 gimple_call_arg (stmt, 1),
3636 gimple_call_arg (stmt, 2),
edd7ae68 3637 fcode);
cbdd87d4 3638 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 3639 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
3640 return gimple_fold_builtin_stxncpy_chk (gsi,
3641 gimple_call_arg (stmt, 0),
3642 gimple_call_arg (stmt, 1),
3643 gimple_call_arg (stmt, 2),
3644 gimple_call_arg (stmt, 3),
edd7ae68 3645 fcode);
cbdd87d4
RG
3646 case BUILT_IN_SNPRINTF_CHK:
3647 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 3648 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
a104bd88 3649
edd7ae68
RB
3650 case BUILT_IN_FPRINTF:
3651 case BUILT_IN_FPRINTF_UNLOCKED:
3652 case BUILT_IN_VFPRINTF:
3653 if (n == 2 || n == 3)
3654 return gimple_fold_builtin_fprintf (gsi,
3655 gimple_call_arg (stmt, 0),
3656 gimple_call_arg (stmt, 1),
3657 n == 3
3658 ? gimple_call_arg (stmt, 2)
3659 : NULL_TREE,
3660 fcode);
3661 break;
3662 case BUILT_IN_FPRINTF_CHK:
3663 case BUILT_IN_VFPRINTF_CHK:
3664 if (n == 3 || n == 4)
3665 return gimple_fold_builtin_fprintf (gsi,
3666 gimple_call_arg (stmt, 0),
3667 gimple_call_arg (stmt, 2),
3668 n == 4
3669 ? gimple_call_arg (stmt, 3)
3670 : NULL_TREE,
3671 fcode);
3672 break;
ad03a744
RB
3673 case BUILT_IN_PRINTF:
3674 case BUILT_IN_PRINTF_UNLOCKED:
3675 case BUILT_IN_VPRINTF:
3676 if (n == 1 || n == 2)
3677 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3678 n == 2
3679 ? gimple_call_arg (stmt, 1)
3680 : NULL_TREE, fcode);
3681 break;
3682 case BUILT_IN_PRINTF_CHK:
3683 case BUILT_IN_VPRINTF_CHK:
3684 if (n == 2 || n == 3)
3685 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3686 n == 3
3687 ? gimple_call_arg (stmt, 2)
3688 : NULL_TREE, fcode);
242a37f1 3689 break;
48126138
NS
3690 case BUILT_IN_ACC_ON_DEVICE:
3691 return gimple_fold_builtin_acc_on_device (gsi,
3692 gimple_call_arg (stmt, 0));
fe75f732
PK
3693 case BUILT_IN_REALLOC:
3694 return gimple_fold_builtin_realloc (gsi);
3695
fef5a0d9
RB
3696 default:;
3697 }
3698
3699 /* Try the generic builtin folder. */
3700 bool ignore = (gimple_call_lhs (stmt) == NULL);
3701 tree result = fold_call_stmt (stmt, ignore);
3702 if (result)
3703 {
3704 if (ignore)
3705 STRIP_NOPS (result);
3706 else
3707 result = fold_convert (gimple_call_return_type (stmt), result);
3708 if (!update_call_from_tree (gsi, result))
3709 gimplify_and_update_call_from_tree (gsi, result);
3710 return true;
3711 }
3712
3713 return false;
3714}
3715
451e8dae
NS
3716/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3717 function calls to constants, where possible. */
3718
3719static tree
3720fold_internal_goacc_dim (const gimple *call)
3721{
629b3d75
MJ
3722 int axis = oacc_get_ifn_dim_arg (call);
3723 int size = oacc_get_fn_dim_size (current_function_decl, axis);
451e8dae 3724 tree result = NULL_TREE;
67d2229e 3725 tree type = TREE_TYPE (gimple_call_lhs (call));
451e8dae 3726
67d2229e 3727 switch (gimple_call_internal_fn (call))
451e8dae 3728 {
67d2229e
TV
3729 case IFN_GOACC_DIM_POS:
3730 /* If the size is 1, we know the answer. */
3731 if (size == 1)
3732 result = build_int_cst (type, 0);
3733 break;
3734 case IFN_GOACC_DIM_SIZE:
3735 /* If the size is not dynamic, we know the answer. */
3736 if (size)
3737 result = build_int_cst (type, size);
3738 break;
3739 default:
3740 break;
451e8dae
NS
3741 }
3742
3743 return result;
3744}
3745
849a76a5
JJ
3746/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3747 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3748 &var where var is only addressable because of such calls. */
3749
3750bool
3751optimize_atomic_compare_exchange_p (gimple *stmt)
3752{
3753 if (gimple_call_num_args (stmt) != 6
3754 || !flag_inline_atomics
3755 || !optimize
45b2222a 3756 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
849a76a5
JJ
3757 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3758 || !gimple_vdef (stmt)
3759 || !gimple_vuse (stmt))
3760 return false;
3761
3762 tree fndecl = gimple_call_fndecl (stmt);
3763 switch (DECL_FUNCTION_CODE (fndecl))
3764 {
3765 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3766 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3767 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3768 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3769 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3770 break;
3771 default:
3772 return false;
3773 }
3774
3775 tree expected = gimple_call_arg (stmt, 1);
3776 if (TREE_CODE (expected) != ADDR_EXPR
1de3c940
JJ
3777 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3778 return false;
3779
3780 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3781 if (!is_gimple_reg_type (etype)
849a76a5 3782 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
1de3c940
JJ
3783 || TREE_THIS_VOLATILE (etype)
3784 || VECTOR_TYPE_P (etype)
3785 || TREE_CODE (etype) == COMPLEX_TYPE
3786 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3787 might not preserve all the bits. See PR71716. */
3788 || SCALAR_FLOAT_TYPE_P (etype)
3789 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
849a76a5
JJ
3790 return false;
3791
3792 tree weak = gimple_call_arg (stmt, 3);
3793 if (!integer_zerop (weak) && !integer_onep (weak))
3794 return false;
3795
3796 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3797 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3798 machine_mode mode = TYPE_MODE (itype);
3799
3800 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3801 == CODE_FOR_nothing
3802 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3803 return false;
3804
1de3c940 3805 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
849a76a5
JJ
3806 return false;
3807
3808 return true;
3809}
3810
3811/* Fold
3812 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3813 into
3814 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3815 i = IMAGPART_EXPR <t>;
3816 r = (_Bool) i;
3817 e = REALPART_EXPR <t>; */
3818
3819void
3820fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3821{
3822 gimple *stmt = gsi_stmt (*gsi);
3823 tree fndecl = gimple_call_fndecl (stmt);
3824 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3825 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3826 tree ctype = build_complex_type (itype);
3827 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
cc195d46
JJ
3828 bool throws = false;
3829 edge e = NULL;
849a76a5
JJ
3830 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3831 expected);
3832 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3833 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3834 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3835 {
3836 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3837 build1 (VIEW_CONVERT_EXPR, itype,
3838 gimple_assign_lhs (g)));
3839 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3840 }
3841 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3842 + int_size_in_bytes (itype);
3843 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3844 gimple_call_arg (stmt, 0),
3845 gimple_assign_lhs (g),
3846 gimple_call_arg (stmt, 2),
3847 build_int_cst (integer_type_node, flag),
3848 gimple_call_arg (stmt, 4),
3849 gimple_call_arg (stmt, 5));
3850 tree lhs = make_ssa_name (ctype);
3851 gimple_call_set_lhs (g, lhs);
3852 gimple_set_vdef (g, gimple_vdef (stmt));
3853 gimple_set_vuse (g, gimple_vuse (stmt));
3854 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
cc195d46
JJ
3855 tree oldlhs = gimple_call_lhs (stmt);
3856 if (stmt_can_throw_internal (stmt))
3857 {
3858 throws = true;
3859 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3860 }
3861 gimple_call_set_nothrow (as_a <gcall *> (g),
3862 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3863 gimple_call_set_lhs (stmt, NULL_TREE);
3864 gsi_replace (gsi, g, true);
3865 if (oldlhs)
849a76a5 3866 {
849a76a5
JJ
3867 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3868 build1 (IMAGPART_EXPR, itype, lhs));
cc195d46
JJ
3869 if (throws)
3870 {
3871 gsi_insert_on_edge_immediate (e, g);
3872 *gsi = gsi_for_stmt (g);
3873 }
3874 else
3875 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3876 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3877 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5 3878 }
849a76a5
JJ
3879 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3880 build1 (REALPART_EXPR, itype, lhs));
cc195d46
JJ
3881 if (throws && oldlhs == NULL_TREE)
3882 {
3883 gsi_insert_on_edge_immediate (e, g);
3884 *gsi = gsi_for_stmt (g);
3885 }
3886 else
3887 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5
JJ
3888 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3889 {
3890 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3891 VIEW_CONVERT_EXPR,
3892 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3893 gimple_assign_lhs (g)));
3894 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3895 }
3896 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3897 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3898 *gsi = gsiret;
3899}
3900
1304953e
JJ
3901/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3902 doesn't fit into TYPE. The test for overflow should be regardless of
3903 -fwrapv, and even for unsigned types. */
3904
3905bool
3906arith_overflowed_p (enum tree_code code, const_tree type,
3907 const_tree arg0, const_tree arg1)
3908{
3909 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3910 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3911 widest2_int_cst;
3912 widest2_int warg0 = widest2_int_cst (arg0);
3913 widest2_int warg1 = widest2_int_cst (arg1);
3914 widest2_int wres;
3915 switch (code)
3916 {
3917 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3918 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3919 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3920 default: gcc_unreachable ();
3921 }
3922 signop sign = TYPE_SIGN (type);
3923 if (sign == UNSIGNED && wi::neg_p (wres))
3924 return true;
3925 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3926}
3927
cbdd87d4
RG
3928/* Attempt to fold a call statement referenced by the statement iterator GSI.
3929 The statement may be replaced by another statement, e.g., if the call
3930 simplifies to a constant value. Return true if any changes were made.
3931 It is assumed that the operands have been previously folded. */
3932
e021c122 3933static bool
ceeffab0 3934gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 3935{
538dd0b7 3936 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 3937 tree callee;
e021c122
RG
3938 bool changed = false;
3939 unsigned i;
cbdd87d4 3940
e021c122
RG
3941 /* Fold *& in call arguments. */
3942 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3943 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3944 {
3945 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3946 if (tmp)
3947 {
3948 gimple_call_set_arg (stmt, i, tmp);
3949 changed = true;
3950 }
3951 }
3b45a007
RG
3952
3953 /* Check for virtual calls that became direct calls. */
3954 callee = gimple_call_fn (stmt);
25583c4f 3955 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 3956 {
49c471e3
MJ
3957 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3958 {
450ad0cd
JH
3959 if (dump_file && virtual_method_call_p (callee)
3960 && !possible_polymorphic_call_target_p
6f8091fc
JH
3961 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3962 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
3963 {
3964 fprintf (dump_file,
a70e9985 3965 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
3966 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3967 fprintf (dump_file, " to ");
3968 print_generic_expr (dump_file, callee, TDF_SLIM);
3969 fprintf (dump_file, "\n");
3970 }
3971
49c471e3 3972 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
3973 changed = true;
3974 }
a70e9985 3975 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 3976 {
61dd6a2e
JH
3977 bool final;
3978 vec <cgraph_node *>targets
058d0a90 3979 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 3980 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 3981 {
a70e9985 3982 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
3983 if (dump_enabled_p ())
3984 {
807b7d62 3985 location_t loc = gimple_location_safe (stmt);
2b5f0895
XDL
3986 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3987 "folding virtual function call to %s\n",
3988 targets.length () == 1
3989 ? targets[0]->name ()
3990 : "__builtin_unreachable");
3991 }
61dd6a2e 3992 if (targets.length () == 1)
cf3e5a89 3993 {
18954840
JJ
3994 tree fndecl = targets[0]->decl;
3995 gimple_call_set_fndecl (stmt, fndecl);
cf3e5a89 3996 changed = true;
18954840
JJ
3997 /* If changing the call to __cxa_pure_virtual
3998 or similar noreturn function, adjust gimple_call_fntype
3999 too. */
865f7046 4000 if (gimple_call_noreturn_p (stmt)
18954840
JJ
4001 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4002 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4003 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4004 == void_type_node))
4005 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
a70e9985 4006 /* If the call becomes noreturn, remove the lhs. */
3cee7e4e
MP
4007 if (lhs
4008 && gimple_call_noreturn_p (stmt)
18954840 4009 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
abd3a68c 4010 || should_remove_lhs_p (lhs)))
a70e9985
JJ
4011 {
4012 if (TREE_CODE (lhs) == SSA_NAME)
4013 {
b731b390 4014 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 4015 tree def = get_or_create_ssa_default_def (cfun, var);
355fe088 4016 gimple *new_stmt = gimple_build_assign (lhs, def);
a70e9985
JJ
4017 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4018 }
4019 gimple_call_set_lhs (stmt, NULL_TREE);
4020 }
0b986c6a 4021 maybe_remove_unused_call_args (cfun, stmt);
cf3e5a89 4022 }
a70e9985 4023 else
cf3e5a89
JJ
4024 {
4025 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
355fe088 4026 gimple *new_stmt = gimple_build_call (fndecl, 0);
cf3e5a89 4027 gimple_set_location (new_stmt, gimple_location (stmt));
2da6996c
RB
4028 /* If the call had a SSA name as lhs morph that into
4029 an uninitialized value. */
a70e9985
JJ
4030 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4031 {
b731b390 4032 tree var = create_tmp_var (TREE_TYPE (lhs));
2da6996c
RB
4033 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4034 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4035 set_ssa_default_def (cfun, var, lhs);
42e52a51 4036 }
2da6996c
RB
4037 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4038 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4039 gsi_replace (gsi, new_stmt, false);
cf3e5a89
JJ
4040 return true;
4041 }
e021c122 4042 }
49c471e3 4043 }
e021c122 4044 }
49c471e3 4045
f2d3d07e
RH
4046 /* Check for indirect calls that became direct calls, and then
4047 no longer require a static chain. */
4048 if (gimple_call_chain (stmt))
4049 {
4050 tree fn = gimple_call_fndecl (stmt);
4051 if (fn && !DECL_STATIC_CHAIN (fn))
4052 {
4053 gimple_call_set_chain (stmt, NULL);
4054 changed = true;
4055 }
4056 else
4057 {
4058 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4059 if (tmp)
4060 {
4061 gimple_call_set_chain (stmt, tmp);
4062 changed = true;
4063 }
4064 }
4065 }
4066
e021c122
RG
4067 if (inplace)
4068 return changed;
4069
4070 /* Check for builtins that CCP can handle using information not
4071 available in the generic fold routines. */
fef5a0d9
RB
4072 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4073 {
4074 if (gimple_fold_builtin (gsi))
4075 changed = true;
4076 }
4077 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 4078 {
ea679d55 4079 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 4080 }
368b454d 4081 else if (gimple_call_internal_p (stmt))
ed9c79e1 4082 {
368b454d
JJ
4083 enum tree_code subcode = ERROR_MARK;
4084 tree result = NULL_TREE;
1304953e
JJ
4085 bool cplx_result = false;
4086 tree overflow = NULL_TREE;
368b454d
JJ
4087 switch (gimple_call_internal_fn (stmt))
4088 {
4089 case IFN_BUILTIN_EXPECT:
4090 result = fold_builtin_expect (gimple_location (stmt),
4091 gimple_call_arg (stmt, 0),
4092 gimple_call_arg (stmt, 1),
4093 gimple_call_arg (stmt, 2));
4094 break;
0e82f089 4095 case IFN_UBSAN_OBJECT_SIZE:
ca1150f0
JJ
4096 {
4097 tree offset = gimple_call_arg (stmt, 1);
4098 tree objsize = gimple_call_arg (stmt, 2);
4099 if (integer_all_onesp (objsize)
4100 || (TREE_CODE (offset) == INTEGER_CST
4101 && TREE_CODE (objsize) == INTEGER_CST
4102 && tree_int_cst_le (offset, objsize)))
4103 {
4104 replace_call_with_value (gsi, NULL_TREE);
4105 return true;
4106 }
4107 }
4108 break;
4109 case IFN_UBSAN_PTR:
4110 if (integer_zerop (gimple_call_arg (stmt, 1)))
0e82f089 4111 {
ca1150f0 4112 replace_call_with_value (gsi, NULL_TREE);
0e82f089
MP
4113 return true;
4114 }
4115 break;
ca1150f0
JJ
4116 case IFN_UBSAN_BOUNDS:
4117 {
4118 tree index = gimple_call_arg (stmt, 1);
4119 tree bound = gimple_call_arg (stmt, 2);
4120 if (TREE_CODE (index) == INTEGER_CST
4121 && TREE_CODE (bound) == INTEGER_CST)
4122 {
4123 index = fold_convert (TREE_TYPE (bound), index);
4124 if (TREE_CODE (index) == INTEGER_CST
4125 && tree_int_cst_le (index, bound))
4126 {
4127 replace_call_with_value (gsi, NULL_TREE);
4128 return true;
4129 }
4130 }
4131 }
4132 break;
451e8dae
NS
4133 case IFN_GOACC_DIM_SIZE:
4134 case IFN_GOACC_DIM_POS:
4135 result = fold_internal_goacc_dim (stmt);
4136 break;
368b454d
JJ
4137 case IFN_UBSAN_CHECK_ADD:
4138 subcode = PLUS_EXPR;
4139 break;
4140 case IFN_UBSAN_CHECK_SUB:
4141 subcode = MINUS_EXPR;
4142 break;
4143 case IFN_UBSAN_CHECK_MUL:
4144 subcode = MULT_EXPR;
4145 break;
1304953e
JJ
4146 case IFN_ADD_OVERFLOW:
4147 subcode = PLUS_EXPR;
4148 cplx_result = true;
4149 break;
4150 case IFN_SUB_OVERFLOW:
4151 subcode = MINUS_EXPR;
4152 cplx_result = true;
4153 break;
4154 case IFN_MUL_OVERFLOW:
4155 subcode = MULT_EXPR;
4156 cplx_result = true;
4157 break;
368b454d
JJ
4158 default:
4159 break;
4160 }
4161 if (subcode != ERROR_MARK)
4162 {
4163 tree arg0 = gimple_call_arg (stmt, 0);
4164 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
4165 tree type = TREE_TYPE (arg0);
4166 if (cplx_result)
4167 {
4168 tree lhs = gimple_call_lhs (stmt);
4169 if (lhs == NULL_TREE)
4170 type = NULL_TREE;
4171 else
4172 type = TREE_TYPE (TREE_TYPE (lhs));
4173 }
4174 if (type == NULL_TREE)
4175 ;
368b454d 4176 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
4177 else if (integer_zerop (arg1))
4178 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
4179 /* x = 0 + y; x = 0 * y; */
4180 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 4181 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
4182 /* x = y - y; */
4183 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 4184 result = integer_zero_node;
368b454d 4185 /* x = y * 1; x = 1 * y; */
1304953e
JJ
4186 else if (subcode == MULT_EXPR && integer_onep (arg1))
4187 result = arg0;
4188 else if (subcode == MULT_EXPR && integer_onep (arg0))
4189 result = arg1;
4190 else if (TREE_CODE (arg0) == INTEGER_CST
4191 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 4192 {
1304953e
JJ
4193 if (cplx_result)
4194 result = int_const_binop (subcode, fold_convert (type, arg0),
4195 fold_convert (type, arg1));
4196 else
4197 result = int_const_binop (subcode, arg0, arg1);
4198 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4199 {
4200 if (cplx_result)
4201 overflow = build_one_cst (type);
4202 else
4203 result = NULL_TREE;
4204 }
4205 }
4206 if (result)
4207 {
4208 if (result == integer_zero_node)
4209 result = build_zero_cst (type);
4210 else if (cplx_result && TREE_TYPE (result) != type)
4211 {
4212 if (TREE_CODE (result) == INTEGER_CST)
4213 {
4214 if (arith_overflowed_p (PLUS_EXPR, type, result,
4215 integer_zero_node))
4216 overflow = build_one_cst (type);
4217 }
4218 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4219 && TYPE_UNSIGNED (type))
4220 || (TYPE_PRECISION (type)
4221 < (TYPE_PRECISION (TREE_TYPE (result))
4222 + (TYPE_UNSIGNED (TREE_TYPE (result))
4223 && !TYPE_UNSIGNED (type)))))
4224 result = NULL_TREE;
4225 if (result)
4226 result = fold_convert (type, result);
4227 }
368b454d
JJ
4228 }
4229 }
1304953e 4230
ed9c79e1
JJ
4231 if (result)
4232 {
1304953e
JJ
4233 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4234 result = drop_tree_overflow (result);
4235 if (cplx_result)
4236 {
4237 if (overflow == NULL_TREE)
4238 overflow = build_zero_cst (TREE_TYPE (result));
4239 tree ctype = build_complex_type (TREE_TYPE (result));
4240 if (TREE_CODE (result) == INTEGER_CST
4241 && TREE_CODE (overflow) == INTEGER_CST)
4242 result = build_complex (ctype, result, overflow);
4243 else
4244 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4245 ctype, result, overflow);
4246 }
ed9c79e1
JJ
4247 if (!update_call_from_tree (gsi, result))
4248 gimplify_and_update_call_from_tree (gsi, result);
4249 changed = true;
4250 }
4251 }
3b45a007 4252
e021c122 4253 return changed;
cbdd87d4
RG
4254}
4255
e0ee10ed 4256
89a79e96
RB
4257/* Return true whether NAME has a use on STMT. */
4258
4259static bool
355fe088 4260has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
4261{
4262 imm_use_iterator iter;
4263 use_operand_p use_p;
4264 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4265 if (USE_STMT (use_p) == stmt)
4266 return true;
4267 return false;
4268}
4269
e0ee10ed
RB
4270/* Worker for fold_stmt_1 dispatch to pattern based folding with
4271 gimple_simplify.
4272
4273 Replaces *GSI with the simplification result in RCODE and OPS
4274 and the associated statements in *SEQ. Does the replacement
4275 according to INPLACE and returns true if the operation succeeded. */
4276
4277static bool
4278replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4279 code_helper rcode, tree *ops,
4280 gimple_seq *seq, bool inplace)
4281{
355fe088 4282 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed
RB
4283
4284 /* Play safe and do not allow abnormals to be mentioned in
89a79e96
RB
4285 newly created statements. See also maybe_push_res_to_seq.
4286 As an exception allow such uses if there was a use of the
4287 same SSA name on the old stmt. */
e0ee10ed 4288 if ((TREE_CODE (ops[0]) == SSA_NAME
89a79e96
RB
4289 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
4290 && !has_use_on_stmt (ops[0], stmt))
e0ee10ed
RB
4291 || (ops[1]
4292 && TREE_CODE (ops[1]) == SSA_NAME
89a79e96
RB
4293 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
4294 && !has_use_on_stmt (ops[1], stmt))
e0ee10ed
RB
4295 || (ops[2]
4296 && TREE_CODE (ops[2]) == SSA_NAME
89a79e96 4297 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
e0237993
JJ
4298 && !has_use_on_stmt (ops[2], stmt))
4299 || (COMPARISON_CLASS_P (ops[0])
4300 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
4301 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
4302 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
4303 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
4304 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
4305 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
e0ee10ed
RB
4306 return false;
4307
fec40d06
RS
4308 /* Don't insert new statements when INPLACE is true, even if we could
4309 reuse STMT for the final statement. */
4310 if (inplace && !gimple_seq_empty_p (*seq))
4311 return false;
4312
538dd0b7 4313 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
e0ee10ed
RB
4314 {
4315 gcc_assert (rcode.is_tree_code ());
4316 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
4317 /* GIMPLE_CONDs condition may not throw. */
4318 && (!flag_exceptions
4319 || !cfun->can_throw_non_call_exceptions
4320 || !operation_could_trap_p (rcode,
4321 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4322 false, NULL_TREE)))
538dd0b7 4323 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
e0ee10ed 4324 else if (rcode == SSA_NAME)
538dd0b7 4325 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
e0ee10ed
RB
4326 build_zero_cst (TREE_TYPE (ops[0])));
4327 else if (rcode == INTEGER_CST)
4328 {
4329 if (integer_zerop (ops[0]))
538dd0b7 4330 gimple_cond_make_false (cond_stmt);
e0ee10ed 4331 else
538dd0b7 4332 gimple_cond_make_true (cond_stmt);
e0ee10ed
RB
4333 }
4334 else if (!inplace)
4335 {
4336 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
4337 ops, seq);
4338 if (!res)
4339 return false;
538dd0b7 4340 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
e0ee10ed
RB
4341 build_zero_cst (TREE_TYPE (res)));
4342 }
4343 else
4344 return false;
4345 if (dump_file && (dump_flags & TDF_DETAILS))
4346 {
4347 fprintf (dump_file, "gimple_simplified to ");
4348 if (!gimple_seq_empty_p (*seq))
4349 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4350 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4351 0, TDF_SLIM);
4352 }
4353 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4354 return true;
4355 }
4356 else if (is_gimple_assign (stmt)
4357 && rcode.is_tree_code ())
4358 {
4359 if (!inplace
f3582e54 4360 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
e0ee10ed
RB
4361 {
4362 maybe_build_generic_op (rcode,
545cd7ec 4363 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
00d66391 4364 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
e0ee10ed
RB
4365 if (dump_file && (dump_flags & TDF_DETAILS))
4366 {
4367 fprintf (dump_file, "gimple_simplified to ");
4368 if (!gimple_seq_empty_p (*seq))
4369 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4370 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4371 0, TDF_SLIM);
4372 }
4373 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4374 return true;
4375 }
4376 }
37d486ab 4377 else if (rcode.is_fn_code ()
c9e926ce 4378 && gimple_call_combined_fn (stmt) == rcode)
37d486ab
RB
4379 {
4380 unsigned i;
4381 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4382 {
4383 gcc_assert (ops[i] != NULL_TREE);
4384 gimple_call_set_arg (stmt, i, ops[i]);
4385 }
4386 if (i < 3)
4387 gcc_assert (ops[i] == NULL_TREE);
fec40d06
RS
4388 if (dump_file && (dump_flags & TDF_DETAILS))
4389 {
4390 fprintf (dump_file, "gimple_simplified to ");
4391 if (!gimple_seq_empty_p (*seq))
4392 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4393 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4394 }
4395 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
37d486ab
RB
4396 return true;
4397 }
e0ee10ed
RB
4398 else if (!inplace)
4399 {
4400 if (gimple_has_lhs (stmt))
4401 {
4402 tree lhs = gimple_get_lhs (stmt);
de665bbd
RB
4403 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
4404 ops, seq, lhs))
4405 return false;
e0ee10ed
RB
4406 if (dump_file && (dump_flags & TDF_DETAILS))
4407 {
4408 fprintf (dump_file, "gimple_simplified to ");
4409 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4410 }
4411 gsi_replace_with_seq_vops (gsi, *seq);
4412 return true;
4413 }
4414 else
4415 gcc_unreachable ();
4416 }
4417
4418 return false;
4419}
4420
040292e7
RB
4421/* Canonicalize MEM_REFs invariant address operand after propagation. */
4422
4423static bool
4424maybe_canonicalize_mem_ref_addr (tree *t)
4425{
4426 bool res = false;
4427
4428 if (TREE_CODE (*t) == ADDR_EXPR)
4429 t = &TREE_OPERAND (*t, 0);
4430
f17a223d
RB
4431 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4432 generic vector extension. The actual vector referenced is
4433 view-converted to an array type for this purpose. If the index
4434 is constant the canonical representation in the middle-end is a
4435 BIT_FIELD_REF so re-write the former to the latter here. */
4436 if (TREE_CODE (*t) == ARRAY_REF
4437 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4438 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4439 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4440 {
4441 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4442 if (VECTOR_TYPE_P (vtype))
4443 {
4444 tree low = array_ref_low_bound (*t);
4445 if (TREE_CODE (low) == INTEGER_CST)
4446 {
4447 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4448 {
4449 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4450 wi::to_widest (low));
4451 idx = wi::mul (idx, wi::to_widest
4452 (TYPE_SIZE (TREE_TYPE (*t))));
4453 widest_int ext
4454 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4455 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4456 {
4457 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4458 TREE_TYPE (*t),
4459 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4460 TYPE_SIZE (TREE_TYPE (*t)),
92e29a5e 4461 wide_int_to_tree (bitsizetype, idx));
f17a223d
RB
4462 res = true;
4463 }
4464 }
4465 }
4466 }
4467 }
4468
040292e7
RB
4469 while (handled_component_p (*t))
4470 t = &TREE_OPERAND (*t, 0);
4471
4472 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4473 of invariant addresses into a SSA name MEM_REF address. */
4474 if (TREE_CODE (*t) == MEM_REF
4475 || TREE_CODE (*t) == TARGET_MEM_REF)
4476 {
4477 tree addr = TREE_OPERAND (*t, 0);
4478 if (TREE_CODE (addr) == ADDR_EXPR
4479 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4480 || handled_component_p (TREE_OPERAND (addr, 0))))
4481 {
4482 tree base;
a90c8804 4483 poly_int64 coffset;
040292e7
RB
4484 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4485 &coffset);
4486 if (!base)
4487 gcc_unreachable ();
4488
4489 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4490 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4491 TREE_OPERAND (*t, 1),
4492 size_int (coffset));
4493 res = true;
4494 }
4495 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4496 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4497 }
4498
4499 /* Canonicalize back MEM_REFs to plain reference trees if the object
4500 accessed is a decl that has the same access semantics as the MEM_REF. */
4501 if (TREE_CODE (*t) == MEM_REF
4502 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
f3dccf50
RB
4503 && integer_zerop (TREE_OPERAND (*t, 1))
4504 && MR_DEPENDENCE_CLIQUE (*t) == 0)
040292e7
RB
4505 {
4506 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4507 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4508 if (/* Same volatile qualification. */
4509 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4510 /* Same TBAA behavior with -fstrict-aliasing. */
4511 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4512 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4513 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4514 /* Same alignment. */
4515 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4516 /* We have to look out here to not drop a required conversion
4517 from the rhs to the lhs if *t appears on the lhs or vice-versa
4518 if it appears on the rhs. Thus require strict type
4519 compatibility. */
4520 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4521 {
4522 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4523 res = true;
4524 }
4525 }
4526
4527 /* Canonicalize TARGET_MEM_REF in particular with respect to
4528 the indexes becoming constant. */
4529 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4530 {
4531 tree tem = maybe_fold_tmr (*t);
4532 if (tem)
4533 {
4534 *t = tem;
4535 res = true;
4536 }
4537 }
4538
4539 return res;
4540}
4541
cbdd87d4
RG
4542/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4543 distinguishes both cases. */
4544
4545static bool
e0ee10ed 4546fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
cbdd87d4
RG
4547{
4548 bool changed = false;
355fe088 4549 gimple *stmt = gsi_stmt (*gsi);
a8b85ce9 4550 bool nowarning = gimple_no_warning_p (stmt);
cbdd87d4 4551 unsigned i;
a8b85ce9 4552 fold_defer_overflow_warnings ();
cbdd87d4 4553
040292e7
RB
4554 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4555 after propagation.
4556 ??? This shouldn't be done in generic folding but in the
4557 propagation helpers which also know whether an address was
89a79e96
RB
4558 propagated.
4559 Also canonicalize operand order. */
040292e7
RB
4560 switch (gimple_code (stmt))
4561 {
4562 case GIMPLE_ASSIGN:
4563 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4564 {
4565 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4566 if ((REFERENCE_CLASS_P (*rhs)
4567 || TREE_CODE (*rhs) == ADDR_EXPR)
4568 && maybe_canonicalize_mem_ref_addr (rhs))
4569 changed = true;
4570 tree *lhs = gimple_assign_lhs_ptr (stmt);
4571 if (REFERENCE_CLASS_P (*lhs)
4572 && maybe_canonicalize_mem_ref_addr (lhs))
4573 changed = true;
4574 }
89a79e96
RB
4575 else
4576 {
4577 /* Canonicalize operand order. */
4578 enum tree_code code = gimple_assign_rhs_code (stmt);
4579 if (TREE_CODE_CLASS (code) == tcc_comparison
4580 || commutative_tree_code (code)
4581 || commutative_ternary_tree_code (code))
4582 {
4583 tree rhs1 = gimple_assign_rhs1 (stmt);
4584 tree rhs2 = gimple_assign_rhs2 (stmt);
14e72812 4585 if (tree_swap_operands_p (rhs1, rhs2))
89a79e96
RB
4586 {
4587 gimple_assign_set_rhs1 (stmt, rhs2);
4588 gimple_assign_set_rhs2 (stmt, rhs1);
4589 if (TREE_CODE_CLASS (code) == tcc_comparison)
4590 gimple_assign_set_rhs_code (stmt,
4591 swap_tree_comparison (code));
4592 changed = true;
4593 }
4594 }
4595 }
040292e7
RB
4596 break;
4597 case GIMPLE_CALL:
4598 {
4599 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4600 {
4601 tree *arg = gimple_call_arg_ptr (stmt, i);
4602 if (REFERENCE_CLASS_P (*arg)
4603 && maybe_canonicalize_mem_ref_addr (arg))
4604 changed = true;
4605 }
4606 tree *lhs = gimple_call_lhs_ptr (stmt);
4607 if (*lhs
4608 && REFERENCE_CLASS_P (*lhs)
4609 && maybe_canonicalize_mem_ref_addr (lhs))
4610 changed = true;
4611 break;
4612 }
4613 case GIMPLE_ASM:
4614 {
538dd0b7
DM
4615 gasm *asm_stmt = as_a <gasm *> (stmt);
4616 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
040292e7 4617 {
538dd0b7 4618 tree link = gimple_asm_output_op (asm_stmt, i);
040292e7
RB
4619 tree op = TREE_VALUE (link);
4620 if (REFERENCE_CLASS_P (op)
4621 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4622 changed = true;
4623 }
538dd0b7 4624 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
040292e7 4625 {
538dd0b7 4626 tree link = gimple_asm_input_op (asm_stmt, i);
040292e7
RB
4627 tree op = TREE_VALUE (link);
4628 if ((REFERENCE_CLASS_P (op)
4629 || TREE_CODE (op) == ADDR_EXPR)
4630 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4631 changed = true;
4632 }
4633 }
4634 break;
4635 case GIMPLE_DEBUG:
4636 if (gimple_debug_bind_p (stmt))
4637 {
4638 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4639 if (*val
4640 && (REFERENCE_CLASS_P (*val)
4641 || TREE_CODE (*val) == ADDR_EXPR)
4642 && maybe_canonicalize_mem_ref_addr (val))
4643 changed = true;
4644 }
4645 break;
89a79e96
RB
4646 case GIMPLE_COND:
4647 {
4648 /* Canonicalize operand order. */
4649 tree lhs = gimple_cond_lhs (stmt);
4650 tree rhs = gimple_cond_rhs (stmt);
14e72812 4651 if (tree_swap_operands_p (lhs, rhs))
89a79e96
RB
4652 {
4653 gcond *gc = as_a <gcond *> (stmt);
4654 gimple_cond_set_lhs (gc, rhs);
4655 gimple_cond_set_rhs (gc, lhs);
4656 gimple_cond_set_code (gc,
4657 swap_tree_comparison (gimple_cond_code (gc)));
4658 changed = true;
4659 }
4660 }
040292e7
RB
4661 default:;
4662 }
4663
e0ee10ed
RB
4664 /* Dispatch to pattern-based folding. */
4665 if (!inplace
4666 || is_gimple_assign (stmt)
4667 || gimple_code (stmt) == GIMPLE_COND)
4668 {
4669 gimple_seq seq = NULL;
4670 code_helper rcode;
4671 tree ops[3] = {};
0ff093d8
RB
4672 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
4673 valueize, valueize))
e0ee10ed
RB
4674 {
4675 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
4676 changed = true;
4677 else
4678 gimple_seq_discard (seq);
4679 }
4680 }
4681
4682 stmt = gsi_stmt (*gsi);
4683
cbdd87d4
RG
4684 /* Fold the main computation performed by the statement. */
4685 switch (gimple_code (stmt))
4686 {
4687 case GIMPLE_ASSIGN:
4688 {
819ec64c
RB
4689 /* Try to canonicalize for boolean-typed X the comparisons
4690 X == 0, X == 1, X != 0, and X != 1. */
4691 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4692 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5fbcc0ed 4693 {
819ec64c
RB
4694 tree lhs = gimple_assign_lhs (stmt);
4695 tree op1 = gimple_assign_rhs1 (stmt);
4696 tree op2 = gimple_assign_rhs2 (stmt);
4697 tree type = TREE_TYPE (op1);
4698
4699 /* Check whether the comparison operands are of the same boolean
4700 type as the result type is.
4701 Check that second operand is an integer-constant with value
4702 one or zero. */
4703 if (TREE_CODE (op2) == INTEGER_CST
4704 && (integer_zerop (op2) || integer_onep (op2))
4705 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4706 {
4707 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4708 bool is_logical_not = false;
4709
4710 /* X == 0 and X != 1 is a logical-not.of X
4711 X == 1 and X != 0 is X */
4712 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4713 || (cmp_code == NE_EXPR && integer_onep (op2)))
4714 is_logical_not = true;
4715
4716 if (is_logical_not == false)
4717 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4718 /* Only for one-bit precision typed X the transformation
4719 !X -> ~X is valied. */
4720 else if (TYPE_PRECISION (type) == 1)
4721 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4722 /* Otherwise we use !X -> X ^ 1. */
4723 else
4724 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4725 build_int_cst (type, 1));
4726 changed = true;
4727 break;
4728 }
5fbcc0ed 4729 }
819ec64c
RB
4730
4731 unsigned old_num_ops = gimple_num_ops (stmt);
4732 tree lhs = gimple_assign_lhs (stmt);
4733 tree new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
4734 if (new_rhs
4735 && !useless_type_conversion_p (TREE_TYPE (lhs),
4736 TREE_TYPE (new_rhs)))
4737 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4738 if (new_rhs
4739 && (!inplace
4740 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4741 {
4742 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4743 changed = true;
4744 }
4745 break;
4746 }
4747
cbdd87d4 4748 case GIMPLE_CALL:
ceeffab0 4749 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
4750 break;
4751
4752 case GIMPLE_ASM:
4753 /* Fold *& in asm operands. */
38384150 4754 {
538dd0b7 4755 gasm *asm_stmt = as_a <gasm *> (stmt);
38384150
JJ
4756 size_t noutputs;
4757 const char **oconstraints;
4758 const char *constraint;
4759 bool allows_mem, allows_reg;
4760
538dd0b7 4761 noutputs = gimple_asm_noutputs (asm_stmt);
38384150
JJ
4762 oconstraints = XALLOCAVEC (const char *, noutputs);
4763
538dd0b7 4764 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
38384150 4765 {
538dd0b7 4766 tree link = gimple_asm_output_op (asm_stmt, i);
38384150
JJ
4767 tree op = TREE_VALUE (link);
4768 oconstraints[i]
4769 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4770 if (REFERENCE_CLASS_P (op)
4771 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4772 {
4773 TREE_VALUE (link) = op;
4774 changed = true;
4775 }
4776 }
538dd0b7 4777 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
38384150 4778 {
538dd0b7 4779 tree link = gimple_asm_input_op (asm_stmt, i);
38384150
JJ
4780 tree op = TREE_VALUE (link);
4781 constraint
4782 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4783 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4784 oconstraints, &allows_mem, &allows_reg);
4785 if (REFERENCE_CLASS_P (op)
4786 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4787 != NULL_TREE)
4788 {
4789 TREE_VALUE (link) = op;
4790 changed = true;
4791 }
4792 }
4793 }
cbdd87d4
RG
4794 break;
4795
bd422c4a
RG
4796 case GIMPLE_DEBUG:
4797 if (gimple_debug_bind_p (stmt))
4798 {
4799 tree val = gimple_debug_bind_get_value (stmt);
4800 if (val
4801 && REFERENCE_CLASS_P (val))
4802 {
4803 tree tem = maybe_fold_reference (val, false);
4804 if (tem)
4805 {
4806 gimple_debug_bind_set_value (stmt, tem);
4807 changed = true;
4808 }
4809 }
3e888a5e
RG
4810 else if (val
4811 && TREE_CODE (val) == ADDR_EXPR)
4812 {
4813 tree ref = TREE_OPERAND (val, 0);
4814 tree tem = maybe_fold_reference (ref, false);
4815 if (tem)
4816 {
4817 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4818 gimple_debug_bind_set_value (stmt, tem);
4819 changed = true;
4820 }
4821 }
bd422c4a
RG
4822 }
4823 break;
4824
cfe3d653
PK
4825 case GIMPLE_RETURN:
4826 {
4827 greturn *ret_stmt = as_a<greturn *> (stmt);
4828 tree ret = gimple_return_retval(ret_stmt);
4829
4830 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4831 {
4832 tree val = valueize (ret);
1af928db
RB
4833 if (val && val != ret
4834 && may_propagate_copy (ret, val))
cfe3d653
PK
4835 {
4836 gimple_return_set_retval (ret_stmt, val);
4837 changed = true;
4838 }
4839 }
4840 }
4841 break;
4842
cbdd87d4
RG
4843 default:;
4844 }
4845
4846 stmt = gsi_stmt (*gsi);
4847
37376165
RB
4848 /* Fold *& on the lhs. */
4849 if (gimple_has_lhs (stmt))
cbdd87d4
RG
4850 {
4851 tree lhs = gimple_get_lhs (stmt);
4852 if (lhs && REFERENCE_CLASS_P (lhs))
4853 {
4854 tree new_lhs = maybe_fold_reference (lhs, true);
4855 if (new_lhs)
4856 {
4857 gimple_set_lhs (stmt, new_lhs);
4858 changed = true;
4859 }
4860 }
4861 }
4862
a8b85ce9 4863 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
cbdd87d4
RG
4864 return changed;
4865}
4866
e0ee10ed
RB
/* Valueization callback that ends up not following SSA edges.  Passing
   this to fold_stmt_1 restricts simplification to the statement itself,
   without looking at the defining statements of its SSA operands.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}
4874
45cc9f96
RB
4875/* Valueization callback that ends up following single-use SSA edges only. */
4876
4877tree
4878follow_single_use_edges (tree val)
4879{
4880 if (TREE_CODE (val) == SSA_NAME
4881 && !has_single_use (val))
4882 return NULL_TREE;
4883 return val;
4884}
4885
cbdd87d4
RG
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Do not look through SSA use-def edges while simplifying.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

/* As above, but valueize SSA operands through VALUEIZE during
   pattern-based simplification.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
4904
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  /* In-place folding must never have replaced the statement itself.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
4921
e89065a1
SL
4922/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4923 if EXPR is null or we don't know how.
4924 If non-null, the result always has boolean type. */
4925
4926static tree
4927canonicalize_bool (tree expr, bool invert)
4928{
4929 if (!expr)
4930 return NULL_TREE;
4931 else if (invert)
4932 {
4933 if (integer_nonzerop (expr))
4934 return boolean_false_node;
4935 else if (integer_zerop (expr))
4936 return boolean_true_node;
4937 else if (TREE_CODE (expr) == SSA_NAME)
4938 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4939 build_int_cst (TREE_TYPE (expr), 0));
98209db3 4940 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
4941 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4942 boolean_type_node,
4943 TREE_OPERAND (expr, 0),
4944 TREE_OPERAND (expr, 1));
4945 else
4946 return NULL_TREE;
4947 }
4948 else
4949 {
4950 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4951 return expr;
4952 if (integer_nonzerop (expr))
4953 return boolean_true_node;
4954 else if (integer_zerop (expr))
4955 return boolean_false_node;
4956 else if (TREE_CODE (expr) == SSA_NAME)
4957 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4958 build_int_cst (TREE_TYPE (expr), 0));
98209db3 4959 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
4960 return fold_build2 (TREE_CODE (expr),
4961 boolean_type_node,
4962 TREE_OPERAND (expr, 0),
4963 TREE_OPERAND (expr, 1));
4964 else
4965 return NULL_TREE;
4966 }
4967}
4968
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean NAME is equivalent to NAME != 0 and to NAME == 1.  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise look at the statement defining EXPR.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == 1): same polarity, recurse directly.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) or (name != 1): recurse with inverted comparison.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
5026
5027/* Check to see if two boolean expressions OP1 and OP2 are logically
5028 equivalent. */
5029
5030static bool
5031same_bool_result_p (const_tree op1, const_tree op2)
5032{
5033 /* Simple cases first. */
5034 if (operand_equal_p (op1, op2, 0))
5035 return true;
5036
5037 /* Check the cases where at least one of the operands is a comparison.
5038 These are a bit smarter than operand_equal_p in that they apply some
5039 identifies on SSA_NAMEs. */
98209db3 5040 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
5041 && same_bool_comparison_p (op1, TREE_CODE (op2),
5042 TREE_OPERAND (op2, 0),
5043 TREE_OPERAND (op2, 1)))
5044 return true;
98209db3 5045 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
5046 && same_bool_comparison_p (op2, TREE_CODE (op1),
5047 TREE_OPERAND (op1, 0),
5048 TREE_OPERAND (op1, 1)))
5049 return true;
5050
5051 /* Default case. */
5052 return false;
5053}
5054
/* Forward declarations for some mutually recursive functions.
   The and_* and or_* families call into each other via DeMorgan
   rewrites and reassociation.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
5075
/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  /* Undo the DeMorgan inversion (if any) and force boolean type.  */
  return canonicalize_bool (t, invert);
}
5103
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) or (op2a == 1) tests op2a for truth...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ...while (op2a == 0) or (op2a != 1) tests it for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b))) */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t) */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial) */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
5266
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp orders the two constant bounds: sign of op1b - op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* When the NE test is implied by the other one, keep the other.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true for (NAME == 0) and (NAME != 1), i.e. !NAME.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* FALLTHRU - non-boolean PHI types are not handled.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
5520
5521/* Try to simplify the AND of two comparisons, specified by
5522 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5523 If this can be simplified to a single expression (without requiring
5524 introducing more SSA variables to hold intermediate values),
5525 return the resulting tree. Otherwise return NULL_TREE.
5526 If the result expression is non-null, it has boolean type. */
5527
5528tree
5529maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5530 enum tree_code code2, tree op2a, tree op2b)
5531{
5532 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5533 if (t)
5534 return t;
5535 else
5536 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5537}
5538
/* Helper function for or_comparisons_1: try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  /* Undo the DeMorgan inversion (if any) and force boolean type.  */
  return canonicalize_bool (t, invert);
}
5566
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) or (op2a == 1) tests op2a for truth...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ...while (op2a == 0) or (op2a != 1) tests it for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b))) */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial) */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial) */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
5730
5731/* Try to simplify the OR of two comparisons defined by
5732 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5733 If this can be done without constructing an intermediate value,
5734 return the resulting tree; otherwise NULL_TREE is returned.
5735 This function is deliberately asymmetric as it recurses on SSA_DEFs
5736 in the first comparison but not the second. */
5737
5738static tree
5739or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5740 enum tree_code code2, tree op2a, tree op2b)
5741{
ae22ac3c 5742 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 5743
e89065a1
SL
5744 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5745 if (operand_equal_p (op1a, op2a, 0)
5746 && operand_equal_p (op1b, op2b, 0))
5747 {
eb9820c0 5748 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5749 tree t = combine_comparisons (UNKNOWN_LOCATION,
5750 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 5751 truth_type, op1a, op1b);
e89065a1
SL
5752 if (t)
5753 return t;
5754 }
5755
5756 /* Likewise the swapped case of the above. */
5757 if (operand_equal_p (op1a, op2b, 0)
5758 && operand_equal_p (op1b, op2a, 0))
5759 {
eb9820c0 5760 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5761 tree t = combine_comparisons (UNKNOWN_LOCATION,
5762 TRUTH_ORIF_EXPR, code1,
5763 swap_tree_comparison (code2),
31ed6226 5764 truth_type, op1a, op1b);
e89065a1
SL
5765 if (t)
5766 return t;
5767 }
5768
5769 /* If both comparisons are of the same value against constants, we might
5770 be able to merge them. */
5771 if (operand_equal_p (op1a, op2a, 0)
5772 && TREE_CODE (op1b) == INTEGER_CST
5773 && TREE_CODE (op2b) == INTEGER_CST)
5774 {
5775 int cmp = tree_int_cst_compare (op1b, op2b);
5776
5777 /* If we have (op1a != op1b), we should either be able to
5778 return that or TRUE, depending on whether the constant op1b
5779 also satisfies the other comparison against op2b. */
5780 if (code1 == NE_EXPR)
5781 {
5782 bool done = true;
5783 bool val;
5784 switch (code2)
5785 {
5786 case EQ_EXPR: val = (cmp == 0); break;
5787 case NE_EXPR: val = (cmp != 0); break;
5788 case LT_EXPR: val = (cmp < 0); break;
5789 case GT_EXPR: val = (cmp > 0); break;
5790 case LE_EXPR: val = (cmp <= 0); break;
5791 case GE_EXPR: val = (cmp >= 0); break;
5792 default: done = false;
5793 }
5794 if (done)
5795 {
5796 if (val)
5797 return boolean_true_node;
5798 else
5799 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5800 }
5801 }
5802 /* Likewise if the second comparison is a != comparison. */
5803 else if (code2 == NE_EXPR)
5804 {
5805 bool done = true;
5806 bool val;
5807 switch (code1)
5808 {
5809 case EQ_EXPR: val = (cmp == 0); break;
5810 case NE_EXPR: val = (cmp != 0); break;
5811 case LT_EXPR: val = (cmp > 0); break;
5812 case GT_EXPR: val = (cmp < 0); break;
5813 case LE_EXPR: val = (cmp >= 0); break;
5814 case GE_EXPR: val = (cmp <= 0); break;
5815 default: done = false;
5816 }
5817 if (done)
5818 {
5819 if (val)
5820 return boolean_true_node;
5821 else
5822 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5823 }
5824 }
5825
5826 /* See if an equality test is redundant with the other comparison. */
5827 else if (code1 == EQ_EXPR)
5828 {
5829 bool val;
5830 switch (code2)
5831 {
5832 case EQ_EXPR: val = (cmp == 0); break;
5833 case NE_EXPR: val = (cmp != 0); break;
5834 case LT_EXPR: val = (cmp < 0); break;
5835 case GT_EXPR: val = (cmp > 0); break;
5836 case LE_EXPR: val = (cmp <= 0); break;
5837 case GE_EXPR: val = (cmp >= 0); break;
5838 default:
5839 val = false;
5840 }
5841 if (val)
5842 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5843 }
5844 else if (code2 == EQ_EXPR)
5845 {
5846 bool val;
5847 switch (code1)
5848 {
5849 case EQ_EXPR: val = (cmp == 0); break;
5850 case NE_EXPR: val = (cmp != 0); break;
5851 case LT_EXPR: val = (cmp > 0); break;
5852 case GT_EXPR: val = (cmp < 0); break;
5853 case LE_EXPR: val = (cmp >= 0); break;
5854 case GE_EXPR: val = (cmp <= 0); break;
5855 default:
5856 val = false;
5857 }
5858 if (val)
5859 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5860 }
5861
5862 /* Chose the less restrictive of two < or <= comparisons. */
5863 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5864 && (code2 == LT_EXPR || code2 == LE_EXPR))
5865 {
5866 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5867 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5868 else
5869 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5870 }
5871
5872 /* Likewise chose the less restrictive of two > or >= comparisons. */
5873 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5874 && (code2 == GT_EXPR || code2 == GE_EXPR))
5875 {
5876 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5877 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5878 else
5879 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5880 }
5881
5882 /* Check for singleton ranges. */
5883 else if (cmp == 0
5884 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5885 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5886 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5887
5888 /* Check for less/greater pairs that don't restrict the range at all. */
5889 else if (cmp >= 0
5890 && (code1 == LT_EXPR || code1 == LE_EXPR)
5891 && (code2 == GT_EXPR || code2 == GE_EXPR))
5892 return boolean_true_node;
5893 else if (cmp <= 0
5894 && (code1 == GT_EXPR || code1 == GE_EXPR)
5895 && (code2 == LT_EXPR || code2 == LE_EXPR))
5896 return boolean_true_node;
5897 }
5898
5899 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5900 NAME's definition is a truth value. See if there are any simplifications
5901 that can be done against the NAME's definition. */
5902 if (TREE_CODE (op1a) == SSA_NAME
5903 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5904 && (integer_zerop (op1b) || integer_onep (op1b)))
5905 {
5906 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5907 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 5908 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
5909 switch (gimple_code (stmt))
5910 {
5911 case GIMPLE_ASSIGN:
5912 /* Try to simplify by copy-propagating the definition. */
5913 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5914
5915 case GIMPLE_PHI:
5916 /* If every argument to the PHI produces the same result when
5917 ORed with the second comparison, we win.
5918 Do not do this unless the type is bool since we need a bool
5919 result here anyway. */
5920 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5921 {
5922 tree result = NULL_TREE;
5923 unsigned i;
5924 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5925 {
5926 tree arg = gimple_phi_arg_def (stmt, i);
5927
5928 /* If this PHI has itself as an argument, ignore it.
5929 If all the other args produce the same result,
5930 we're still OK. */
5931 if (arg == gimple_phi_result (stmt))
5932 continue;
5933 else if (TREE_CODE (arg) == INTEGER_CST)
5934 {
5935 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5936 {
5937 if (!result)
5938 result = boolean_true_node;
5939 else if (!integer_onep (result))
5940 return NULL_TREE;
5941 }
5942 else if (!result)
5943 result = fold_build2 (code2, boolean_type_node,
5944 op2a, op2b);
5945 else if (!same_bool_comparison_p (result,
5946 code2, op2a, op2b))
5947 return NULL_TREE;
5948 }
0e8b84ec
JJ
5949 else if (TREE_CODE (arg) == SSA_NAME
5950 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 5951 {
6c66f733 5952 tree temp;
355fe088 5953 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
5954 /* In simple cases we can look through PHI nodes,
5955 but we have to be careful with loops.
5956 See PR49073. */
5957 if (! dom_info_available_p (CDI_DOMINATORS)
5958 || gimple_bb (def_stmt) == gimple_bb (stmt)
5959 || dominated_by_p (CDI_DOMINATORS,
5960 gimple_bb (def_stmt),
5961 gimple_bb (stmt)))
5962 return NULL_TREE;
5963 temp = or_var_with_comparison (arg, invert, code2,
5964 op2a, op2b);
e89065a1
SL
5965 if (!temp)
5966 return NULL_TREE;
5967 else if (!result)
5968 result = temp;
5969 else if (!same_bool_result_p (result, temp))
5970 return NULL_TREE;
5971 }
5972 else
5973 return NULL_TREE;
5974 }
5975 return result;
5976 }
5977
5978 default:
5979 break;
5980 }
5981 }
5982 return NULL_TREE;
5983}
5984
5985/* Try to simplify the OR of two comparisons, specified by
5986 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5987 If this can be simplified to a single expression (without requiring
5988 introducing more SSA variables to hold intermediate values),
5989 return the resulting tree. Otherwise return NULL_TREE.
5990 If the result expression is non-null, it has boolean type. */
5991
5992tree
5993maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5994 enum tree_code code2, tree op2a, tree op2b)
5995{
5996 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5997 if (t)
5998 return t;
5999 else
6000 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6001}
cfef45c8
RG
6002
6003
6004/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6005
6006 Either NULL_TREE, a simplified but non-constant or a constant
6007 is returned.
6008
6009 ??? This should go into a gimple-fold-inline.h file to be eventually
6010 privatized with the single valueize function used in the various TUs
6011 to avoid the indirect function call overhead. */
6012
6013tree
355fe088 6014gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
d2a85801 6015 tree (*gvalueize) (tree))
cfef45c8 6016{
45cc9f96
RB
6017 code_helper rcode;
6018 tree ops[3] = {};
6019 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6020 edges if there are intermediate VARYING defs. For this reason
6021 do not follow SSA edges here even though SCCVN can technically
6022 just deal fine with that. */
34050b6b 6023 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
45cc9f96 6024 {
34050b6b 6025 tree res = NULL_TREE;
c0f62740 6026 if (gimple_simplified_result_is_gimple_val (rcode, ops))
34050b6b
RB
6027 res = ops[0];
6028 else if (mprts_hook)
6029 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
6030 if (res)
45cc9f96 6031 {
34050b6b
RB
6032 if (dump_file && dump_flags & TDF_DETAILS)
6033 {
6034 fprintf (dump_file, "Match-and-simplified ");
6035 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6036 fprintf (dump_file, " to ");
ef6cb4c7 6037 print_generic_expr (dump_file, res);
34050b6b
RB
6038 fprintf (dump_file, "\n");
6039 }
6040 return res;
45cc9f96 6041 }
45cc9f96
RB
6042 }
6043
cfef45c8
RG
6044 location_t loc = gimple_location (stmt);
6045 switch (gimple_code (stmt))
6046 {
6047 case GIMPLE_ASSIGN:
6048 {
6049 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6050
6051 switch (get_gimple_rhs_class (subcode))
6052 {
6053 case GIMPLE_SINGLE_RHS:
6054 {
6055 tree rhs = gimple_assign_rhs1 (stmt);
6056 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6057
6058 if (TREE_CODE (rhs) == SSA_NAME)
6059 {
6060 /* If the RHS is an SSA_NAME, return its known constant value,
6061 if any. */
6062 return (*valueize) (rhs);
6063 }
6064 /* Handle propagating invariant addresses into address
6065 operations. */
6066 else if (TREE_CODE (rhs) == ADDR_EXPR
6067 && !is_gimple_min_invariant (rhs))
6068 {
a90c8804 6069 poly_int64 offset = 0;
cfef45c8
RG
6070 tree base;
6071 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6072 &offset,
6073 valueize);
6074 if (base
6075 && (CONSTANT_CLASS_P (base)
6076 || decl_address_invariant_p (base)))
6077 return build_invariant_address (TREE_TYPE (rhs),
6078 base, offset);
6079 }
6080 else if (TREE_CODE (rhs) == CONSTRUCTOR
6081 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6082 && (CONSTRUCTOR_NELTS (rhs)
6083 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6084 {
794e3180
RS
6085 unsigned i, nelts;
6086 tree val;
cfef45c8 6087
794e3180 6088 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs));
5ebaa477 6089 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
cfef45c8
RG
6090 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6091 {
6092 val = (*valueize) (val);
6093 if (TREE_CODE (val) == INTEGER_CST
6094 || TREE_CODE (val) == REAL_CST
6095 || TREE_CODE (val) == FIXED_CST)
794e3180 6096 vec.quick_push (val);
cfef45c8
RG
6097 else
6098 return NULL_TREE;
6099 }
6100
5ebaa477 6101 return vec.build ();
cfef45c8 6102 }
bdf37f7a
JH
6103 if (subcode == OBJ_TYPE_REF)
6104 {
6105 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6106 /* If callee is constant, we can fold away the wrapper. */
6107 if (is_gimple_min_invariant (val))
6108 return val;
6109 }
cfef45c8
RG
6110
6111 if (kind == tcc_reference)
6112 {
6113 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6114 || TREE_CODE (rhs) == REALPART_EXPR
6115 || TREE_CODE (rhs) == IMAGPART_EXPR)
6116 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6117 {
6118 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6119 return fold_unary_loc (EXPR_LOCATION (rhs),
6120 TREE_CODE (rhs),
6121 TREE_TYPE (rhs), val);
6122 }
6123 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6124 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6125 {
6126 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6127 return fold_ternary_loc (EXPR_LOCATION (rhs),
6128 TREE_CODE (rhs),
6129 TREE_TYPE (rhs), val,
6130 TREE_OPERAND (rhs, 1),
6131 TREE_OPERAND (rhs, 2));
6132 }
6133 else if (TREE_CODE (rhs) == MEM_REF
6134 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6135 {
6136 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6137 if (TREE_CODE (val) == ADDR_EXPR
6138 && is_gimple_min_invariant (val))
6139 {
6140 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6141 unshare_expr (val),
6142 TREE_OPERAND (rhs, 1));
6143 if (tem)
6144 rhs = tem;
6145 }
6146 }
6147 return fold_const_aggregate_ref_1 (rhs, valueize);
6148 }
6149 else if (kind == tcc_declaration)
6150 return get_symbol_constant_value (rhs);
6151 return rhs;
6152 }
6153
6154 case GIMPLE_UNARY_RHS:
f3582e54 6155 return NULL_TREE;
cfef45c8
RG
6156
6157 case GIMPLE_BINARY_RHS:
4b1b9e64
RB
6158 /* Translate &x + CST into an invariant form suitable for
6159 further propagation. */
6160 if (subcode == POINTER_PLUS_EXPR)
6161 {
4b1b9e64
RB
6162 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6163 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4b1b9e64
RB
6164 if (TREE_CODE (op0) == ADDR_EXPR
6165 && TREE_CODE (op1) == INTEGER_CST)
6166 {
6167 tree off = fold_convert (ptr_type_node, op1);
6168 return build_fold_addr_expr_loc
6169 (loc,
6170 fold_build2 (MEM_REF,
6171 TREE_TYPE (TREE_TYPE (op0)),
6172 unshare_expr (op0), off));
6173 }
6174 }
59c20dc7
RB
6175 /* Canonicalize bool != 0 and bool == 0 appearing after
6176 valueization. While gimple_simplify handles this
6177 it can get confused by the ~X == 1 -> X == 0 transform
6178 which we cant reduce to a SSA name or a constant
6179 (and we have no way to tell gimple_simplify to not
6180 consider those transforms in the first place). */
6181 else if (subcode == EQ_EXPR
6182 || subcode == NE_EXPR)
6183 {
6184 tree lhs = gimple_assign_lhs (stmt);
6185 tree op0 = gimple_assign_rhs1 (stmt);
6186 if (useless_type_conversion_p (TREE_TYPE (lhs),
6187 TREE_TYPE (op0)))
6188 {
6189 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6190 op0 = (*valueize) (op0);
8861704d
RB
6191 if (TREE_CODE (op0) == INTEGER_CST)
6192 std::swap (op0, op1);
6193 if (TREE_CODE (op1) == INTEGER_CST
6194 && ((subcode == NE_EXPR && integer_zerop (op1))
6195 || (subcode == EQ_EXPR && integer_onep (op1))))
6196 return op0;
59c20dc7
RB
6197 }
6198 }
4b1b9e64 6199 return NULL_TREE;
cfef45c8
RG
6200
6201 case GIMPLE_TERNARY_RHS:
6202 {
6203 /* Handle ternary operators that can appear in GIMPLE form. */
6204 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6205 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6206 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
cfef45c8
RG
6207 return fold_ternary_loc (loc, subcode,
6208 gimple_expr_type (stmt), op0, op1, op2);
6209 }
6210
6211 default:
6212 gcc_unreachable ();
6213 }
6214 }
6215
6216 case GIMPLE_CALL:
6217 {
25583c4f 6218 tree fn;
538dd0b7 6219 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
6220
6221 if (gimple_call_internal_p (stmt))
31e071ae
MP
6222 {
6223 enum tree_code subcode = ERROR_MARK;
6224 switch (gimple_call_internal_fn (stmt))
6225 {
6226 case IFN_UBSAN_CHECK_ADD:
6227 subcode = PLUS_EXPR;
6228 break;
6229 case IFN_UBSAN_CHECK_SUB:
6230 subcode = MINUS_EXPR;
6231 break;
6232 case IFN_UBSAN_CHECK_MUL:
6233 subcode = MULT_EXPR;
6234 break;
68fa96d6
ML
6235 case IFN_BUILTIN_EXPECT:
6236 {
6237 tree arg0 = gimple_call_arg (stmt, 0);
6238 tree op0 = (*valueize) (arg0);
6239 if (TREE_CODE (op0) == INTEGER_CST)
6240 return op0;
6241 return NULL_TREE;
6242 }
31e071ae
MP
6243 default:
6244 return NULL_TREE;
6245 }
368b454d
JJ
6246 tree arg0 = gimple_call_arg (stmt, 0);
6247 tree arg1 = gimple_call_arg (stmt, 1);
6248 tree op0 = (*valueize) (arg0);
6249 tree op1 = (*valueize) (arg1);
31e071ae
MP
6250
6251 if (TREE_CODE (op0) != INTEGER_CST
6252 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
6253 {
6254 switch (subcode)
6255 {
6256 case MULT_EXPR:
6257 /* x * 0 = 0 * x = 0 without overflow. */
6258 if (integer_zerop (op0) || integer_zerop (op1))
6259 return build_zero_cst (TREE_TYPE (arg0));
6260 break;
6261 case MINUS_EXPR:
6262 /* y - y = 0 without overflow. */
6263 if (operand_equal_p (op0, op1, 0))
6264 return build_zero_cst (TREE_TYPE (arg0));
6265 break;
6266 default:
6267 break;
6268 }
6269 }
6270 tree res
6271 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
6272 if (res
6273 && TREE_CODE (res) == INTEGER_CST
6274 && !TREE_OVERFLOW (res))
6275 return res;
6276 return NULL_TREE;
6277 }
25583c4f
RS
6278
6279 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8
RG
6280 if (TREE_CODE (fn) == ADDR_EXPR
6281 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5c944c6c
RB
6282 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6283 && gimple_builtin_call_types_compatible_p (stmt,
6284 TREE_OPERAND (fn, 0)))
cfef45c8
RG
6285 {
6286 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 6287 tree retval;
cfef45c8
RG
6288 unsigned i;
6289 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6290 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 6291 retval = fold_builtin_call_array (loc,
538dd0b7 6292 gimple_call_return_type (call_stmt),
cfef45c8 6293 fn, gimple_call_num_args (stmt), args);
cfef45c8 6294 if (retval)
5c944c6c
RB
6295 {
6296 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6297 STRIP_NOPS (retval);
538dd0b7
DM
6298 retval = fold_convert (gimple_call_return_type (call_stmt),
6299 retval);
5c944c6c 6300 }
cfef45c8
RG
6301 return retval;
6302 }
6303 return NULL_TREE;
6304 }
6305
6306 default:
6307 return NULL_TREE;
6308 }
6309}
6310
6311/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6312 Returns NULL_TREE if folding to a constant is not possible, otherwise
6313 returns a constant according to is_gimple_min_invariant. */
6314
6315tree
355fe088 6316gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
6317{
6318 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6319 if (res && is_gimple_min_invariant (res))
6320 return res;
6321 return NULL_TREE;
6322}
6323
6324
6325/* The following set of functions are supposed to fold references using
6326 their constant initializers. */
6327
cfef45c8
RG
6328/* See if we can find constructor defining value of BASE.
6329 When we know the consructor with constant offset (such as
6330 base is array[40] and we do know constructor of array), then
6331 BIT_OFFSET is adjusted accordingly.
6332
6333 As a special case, return error_mark_node when constructor
6334 is not explicitly available, but it is known to be zero
6335 such as 'static const int a;'. */
6336static tree
588db50c 6337get_base_constructor (tree base, poly_int64_pod *bit_offset,
cfef45c8
RG
6338 tree (*valueize)(tree))
6339{
588db50c 6340 poly_int64 bit_offset2, size, max_size;
ee45a32d
EB
6341 bool reverse;
6342
cfef45c8
RG
6343 if (TREE_CODE (base) == MEM_REF)
6344 {
6345 if (!integer_zerop (TREE_OPERAND (base, 1)))
6346 {
9541ffee 6347 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
cfef45c8 6348 return NULL_TREE;
aca52e6f 6349 *bit_offset += (mem_ref_offset (base).force_shwi ()
cfef45c8
RG
6350 * BITS_PER_UNIT);
6351 }
6352
6353 if (valueize
6354 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6355 base = valueize (TREE_OPERAND (base, 0));
6356 if (!base || TREE_CODE (base) != ADDR_EXPR)
6357 return NULL_TREE;
6358 base = TREE_OPERAND (base, 0);
6359 }
13e88953
RB
6360 else if (valueize
6361 && TREE_CODE (base) == SSA_NAME)
6362 base = valueize (base);
cfef45c8
RG
6363
6364 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6365 DECL_INITIAL. If BASE is a nested reference into another
6366 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6367 the inner reference. */
6368 switch (TREE_CODE (base))
6369 {
6370 case VAR_DECL:
cfef45c8 6371 case CONST_DECL:
6a6dac52
JH
6372 {
6373 tree init = ctor_for_folding (base);
6374
688010ba 6375 /* Our semantic is exact opposite of ctor_for_folding;
6a6dac52
JH
6376 NULL means unknown, while error_mark_node is 0. */
6377 if (init == error_mark_node)
6378 return NULL_TREE;
6379 if (!init)
6380 return error_mark_node;
6381 return init;
6382 }
cfef45c8 6383
13e88953
RB
6384 case VIEW_CONVERT_EXPR:
6385 return get_base_constructor (TREE_OPERAND (base, 0),
6386 bit_offset, valueize);
6387
cfef45c8
RG
6388 case ARRAY_REF:
6389 case COMPONENT_REF:
ee45a32d
EB
6390 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6391 &reverse);
588db50c 6392 if (!known_size_p (max_size) || maybe_ne (size, max_size))
cfef45c8
RG
6393 return NULL_TREE;
6394 *bit_offset += bit_offset2;
6395 return get_base_constructor (base, bit_offset, valueize);
6396
cfef45c8
RG
6397 case CONSTRUCTOR:
6398 return base;
6399
6400 default:
13e88953
RB
6401 if (CONSTANT_CLASS_P (base))
6402 return base;
6403
cfef45c8
RG
6404 return NULL_TREE;
6405 }
6406}
6407
cfef45c8
RG
6408/* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
6409 SIZE to the memory at bit OFFSET. */
6410
6411static tree
6412fold_array_ctor_reference (tree type, tree ctor,
6413 unsigned HOST_WIDE_INT offset,
c44c2088
JH
6414 unsigned HOST_WIDE_INT size,
6415 tree from_decl)
cfef45c8 6416{
807e902e
KZ
6417 offset_int low_bound;
6418 offset_int elt_size;
807e902e 6419 offset_int access_index;
6a636014 6420 tree domain_type = NULL_TREE;
cfef45c8
RG
6421 HOST_WIDE_INT inner_offset;
6422
6423 /* Compute low bound and elt size. */
eb8f1123
RG
6424 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6425 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
6426 if (domain_type && TYPE_MIN_VALUE (domain_type))
6427 {
6428 /* Static constructors for variably sized objects makes no sense. */
9ef2eff0
RB
6429 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6430 return NULL_TREE;
807e902e 6431 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
6432 }
6433 else
807e902e 6434 low_bound = 0;
cfef45c8 6435 /* Static constructors for variably sized objects makes no sense. */
9ef2eff0
RB
6436 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6437 return NULL_TREE;
807e902e 6438 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8
RG
6439
6440 /* We can handle only constantly sized accesses that are known to not
6441 be larger than size of array element. */
6442 if (!TYPE_SIZE_UNIT (type)
6443 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
032c80e9 6444 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
807e902e 6445 || elt_size == 0)
cfef45c8
RG
6446 return NULL_TREE;
6447
6448 /* Compute the array index we look for. */
807e902e
KZ
6449 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6450 elt_size);
27bcd47c 6451 access_index += low_bound;
cfef45c8
RG
6452
6453 /* And offset within the access. */
27bcd47c 6454 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8
RG
6455
6456 /* See if the array field is large enough to span whole access. We do not
6457 care to fold accesses spanning multiple array indexes. */
27bcd47c 6458 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
cfef45c8 6459 return NULL_TREE;
6a636014
AL
6460 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6461 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
cfef45c8 6462
cfef45c8
RG
6463 /* When memory is not explicitely mentioned in constructor,
6464 it is 0 (or out of range). */
6465 return build_zero_cst (type);
6466}
6467
6468/* CTOR is CONSTRUCTOR of an aggregate or vector.
6469 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
6470
6471static tree
6472fold_nonarray_ctor_reference (tree type, tree ctor,
6473 unsigned HOST_WIDE_INT offset,
c44c2088
JH
6474 unsigned HOST_WIDE_INT size,
6475 tree from_decl)
cfef45c8
RG
6476{
6477 unsigned HOST_WIDE_INT cnt;
6478 tree cfield, cval;
6479
6480 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6481 cval)
6482 {
6483 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6484 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6485 tree field_size = DECL_SIZE (cfield);
807e902e
KZ
6486 offset_int bitoffset;
6487 offset_int bitoffset_end, access_end;
cfef45c8
RG
6488
6489 /* Variable sized objects in static constructors makes no sense,
6490 but field_size can be NULL for flexible array members. */
6491 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6492 && TREE_CODE (byte_offset) == INTEGER_CST
6493 && (field_size != NULL_TREE
6494 ? TREE_CODE (field_size) == INTEGER_CST
6495 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6496
6497 /* Compute bit offset of the field. */
807e902e 6498 bitoffset = (wi::to_offset (field_offset)
8de73453 6499 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
cfef45c8
RG
6500 /* Compute bit offset where the field ends. */
6501 if (field_size != NULL_TREE)
807e902e 6502 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 6503 else
807e902e 6504 bitoffset_end = 0;
cfef45c8 6505
807e902e 6506 access_end = offset_int (offset) + size;
b8b2b009
JJ
6507
6508 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
6509 [BITOFFSET, BITOFFSET_END)? */
807e902e 6510 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 6511 && (field_size == NULL_TREE
807e902e 6512 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 6513 {
807e902e 6514 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8
RG
6515 /* We do have overlap. Now see if field is large enough to
6516 cover the access. Give up for accesses spanning multiple
6517 fields. */
807e902e 6518 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 6519 return NULL_TREE;
032c80e9 6520 if (offset < bitoffset)
b8b2b009 6521 return NULL_TREE;
cfef45c8 6522 return fold_ctor_reference (type, cval,
27bcd47c 6523 inner_offset.to_uhwi (), size,
c44c2088 6524 from_decl);
cfef45c8
RG
6525 }
6526 }
6527 /* When memory is not explicitely mentioned in constructor, it is 0. */
6528 return build_zero_cst (type);
6529}
6530
30acf282
RS
6531/* CTOR is value initializing memory, fold reference of type TYPE and
6532 size POLY_SIZE to the memory at bit POLY_OFFSET. */
cfef45c8 6533
8403c2cf 6534tree
30acf282
RS
6535fold_ctor_reference (tree type, tree ctor, poly_uint64 poly_offset,
6536 poly_uint64 poly_size, tree from_decl)
cfef45c8
RG
6537{
6538 tree ret;
6539
6540 /* We found the field with exact match. */
6541 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
30acf282 6542 && known_eq (poly_offset, 0U))
9d60be38 6543 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 6544
30acf282
RS
6545 /* The remaining optimizations need a constant size and offset. */
6546 unsigned HOST_WIDE_INT size, offset;
6547 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6548 return NULL_TREE;
6549
cfef45c8
RG
6550 /* We are at the end of walk, see if we can view convert the
6551 result. */
6552 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6553 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3d8208ce
TP
6554 && !compare_tree_int (TYPE_SIZE (type), size)
6555 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
cfef45c8 6556 {
9d60be38 6557 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 6558 if (ret)
672d9f8e
RB
6559 {
6560 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6561 if (ret)
6562 STRIP_USELESS_TYPE_CONVERSION (ret);
6563 }
cfef45c8
RG
6564 return ret;
6565 }
b2505143
RB
6566 /* For constants and byte-aligned/sized reads try to go through
6567 native_encode/interpret. */
6568 if (CONSTANT_CLASS_P (ctor)
6569 && BITS_PER_UNIT == 8
6570 && offset % BITS_PER_UNIT == 0
6571 && size % BITS_PER_UNIT == 0
6572 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6573 {
6574 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1ff0a84c
JJ
6575 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6576 offset / BITS_PER_UNIT);
6577 if (len > 0)
6578 return native_interpret_expr (type, buf, len);
b2505143 6579 }
cfef45c8
RG
6580 if (TREE_CODE (ctor) == CONSTRUCTOR)
6581 {
6582
eb8f1123
RG
6583 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6584 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
c44c2088
JH
6585 return fold_array_ctor_reference (type, ctor, offset, size,
6586 from_decl);
cfef45c8 6587 else
c44c2088
JH
6588 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6589 from_decl);
cfef45c8
RG
6590 }
6591
6592 return NULL_TREE;
6593}
6594
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold through a volatile reference.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  /* Reads from string literals fold directly.  */
  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Sign-extend the zero-based index in the precision of the
		 index type to get a signed element offset.  */
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (woffset.to_shwi (&offset))
		{
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex-valued operand first, then extract the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
6712
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  Convenience wrapper that performs no SSA name
   valueization.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
06bc3ec7 6718
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Convert OFFSET to bits and add the slot selected by TOKEN.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
6827
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* Decompose the vtable pointer expression into the vtable VAR_DECL
     and a byte offset into it.  */
  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
6856
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view. */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* Give up on ref-all pointers; aliasing-exempt accesses must be
     kept as-is.  */
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ... */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the access lies within the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
19e51b40
JJ
6977
6978/* Return true if CODE is an operation that when operating on signed
6979 integer types involves undefined behavior on overflow and the
6980 operation can be expressed with unsigned arithmetic. */
6981
6982bool
6983arith_code_with_undefined_signed_overflow (tree_code code)
6984{
6985 switch (code)
6986 {
6987 case PLUS_EXPR:
6988 case MINUS_EXPR:
6989 case MULT_EXPR:
6990 case NEGATE_EXPR:
6991 case POINTER_PLUS_EXPR:
6992 return true;
6993 default:
6994 return false;
6995 }
6996}
6997
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each operand (ops 1..n-1; op 0 is the lhs) to the unsigned
     type, accumulating any conversion statements in STMTS.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  /* Give STMT a fresh unsigned lhs; the original LHS is reassigned by
     the conversion statement added below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR is not valid on non-pointer types; use PLUS_EXPR
     once the computation is carried out in the unsigned type.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
d4f5cd5e 7034
3d2cf79f 7035
c26de36d
RB
7036/* The valueization hook we use for the gimple_build API simplification.
7037 This makes us match fold_buildN behavior by only combining with
7038 statements in the sequence(s) we are currently building. */
7039
7040static tree
7041gimple_build_valueize (tree op)
7042{
7043 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7044 return op;
7045 return NULL_TREE;
7046}
7047
3d2cf79f 7048/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 7049 simplifying it first if possible. Returns the built
3d2cf79f
RB
7050 expression value and appends statements possibly defining it
7051 to SEQ. */
7052
7053tree
7054gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7055 enum tree_code code, tree type, tree op0)
3d2cf79f 7056{
c26de36d 7057 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
7058 if (!res)
7059 {
a15ebbcd 7060 res = create_tmp_reg_or_ssa_name (type);
355fe088 7061 gimple *stmt;
3d2cf79f
RB
7062 if (code == REALPART_EXPR
7063 || code == IMAGPART_EXPR
7064 || code == VIEW_CONVERT_EXPR)
0d0e4a03 7065 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 7066 else
0d0e4a03 7067 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
7068 gimple_set_location (stmt, loc);
7069 gimple_seq_add_stmt_without_update (seq, stmt);
7070 }
7071 return res;
7072}
7073
7074/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 7075 simplifying it first if possible. Returns the built
3d2cf79f
RB
7076 expression value and appends statements possibly defining it
7077 to SEQ. */
7078
7079tree
7080gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7081 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 7082{
c26de36d 7083 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
7084 if (!res)
7085 {
a15ebbcd 7086 res = create_tmp_reg_or_ssa_name (type);
355fe088 7087 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
7088 gimple_set_location (stmt, loc);
7089 gimple_seq_add_stmt_without_update (seq, stmt);
7090 }
7091 return res;
7092}
7093
7094/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 7095 simplifying it first if possible. Returns the built
3d2cf79f
RB
7096 expression value and appends statements possibly defining it
7097 to SEQ. */
7098
7099tree
7100gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7101 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
7102{
7103 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 7104 seq, gimple_build_valueize);
3d2cf79f
RB
7105 if (!res)
7106 {
a15ebbcd 7107 res = create_tmp_reg_or_ssa_name (type);
355fe088 7108 gimple *stmt;
3d2cf79f 7109 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
7110 stmt = gimple_build_assign (res, code,
7111 build3 (code, type, op0, op1, op2));
3d2cf79f 7112 else
0d0e4a03 7113 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
7114 gimple_set_location (stmt, loc);
7115 gimple_seq_add_stmt_without_update (seq, stmt);
7116 }
7117 return res;
7118}
7119
7120/* Build the call FN (ARG0) with a result of type TYPE
7121 (or no result if TYPE is void) with location LOC,
c26de36d 7122 simplifying it first if possible. Returns the built
3d2cf79f
RB
7123 expression value (or NULL_TREE if TYPE is void) and appends
7124 statements possibly defining it to SEQ. */
7125
7126tree
7127gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7128 enum built_in_function fn, tree type, tree arg0)
3d2cf79f 7129{
c26de36d 7130 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
7131 if (!res)
7132 {
7133 tree decl = builtin_decl_implicit (fn);
355fe088 7134 gimple *stmt = gimple_build_call (decl, 1, arg0);
3d2cf79f
RB
7135 if (!VOID_TYPE_P (type))
7136 {
a15ebbcd 7137 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7138 gimple_call_set_lhs (stmt, res);
7139 }
7140 gimple_set_location (stmt, loc);
7141 gimple_seq_add_stmt_without_update (seq, stmt);
7142 }
7143 return res;
7144}
7145
7146/* Build the call FN (ARG0, ARG1) with a result of type TYPE
7147 (or no result if TYPE is void) with location LOC,
c26de36d 7148 simplifying it first if possible. Returns the built
3d2cf79f
RB
7149 expression value (or NULL_TREE if TYPE is void) and appends
7150 statements possibly defining it to SEQ. */
7151
7152tree
7153gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7154 enum built_in_function fn, tree type, tree arg0, tree arg1)
3d2cf79f 7155{
c26de36d 7156 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
7157 if (!res)
7158 {
7159 tree decl = builtin_decl_implicit (fn);
355fe088 7160 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
3d2cf79f
RB
7161 if (!VOID_TYPE_P (type))
7162 {
a15ebbcd 7163 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7164 gimple_call_set_lhs (stmt, res);
7165 }
7166 gimple_set_location (stmt, loc);
7167 gimple_seq_add_stmt_without_update (seq, stmt);
7168 }
7169 return res;
7170}
7171
7172/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7173 (or no result if TYPE is void) with location LOC,
c26de36d 7174 simplifying it first if possible. Returns the built
3d2cf79f
RB
7175 expression value (or NULL_TREE if TYPE is void) and appends
7176 statements possibly defining it to SEQ. */
7177
7178tree
7179gimple_build (gimple_seq *seq, location_t loc,
7180 enum built_in_function fn, tree type,
c26de36d 7181 tree arg0, tree arg1, tree arg2)
3d2cf79f 7182{
c26de36d
RB
7183 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7184 seq, gimple_build_valueize);
3d2cf79f
RB
7185 if (!res)
7186 {
7187 tree decl = builtin_decl_implicit (fn);
355fe088 7188 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
3d2cf79f
RB
7189 if (!VOID_TYPE_P (type))
7190 {
a15ebbcd 7191 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7192 gimple_call_set_lhs (stmt, res);
7193 }
7194 gimple_set_location (stmt, loc);
7195 gimple_seq_add_stmt_without_update (seq, stmt);
7196 }
7197 return res;
7198}
7199
7200/* Build the conversion (TYPE) OP with a result of type TYPE
7201 with location LOC if such conversion is neccesary in GIMPLE,
7202 simplifying it first.
7203 Returns the built expression value and appends
7204 statements possibly defining it to SEQ. */
d4f5cd5e
RB
7205
7206tree
7207gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7208{
7209 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7210 return op;
3d2cf79f 7211 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 7212}
68e57f04 7213
74e3c262
RB
7214/* Build the conversion (ptrofftype) OP with a result of a type
7215 compatible with ptrofftype with location LOC if such conversion
7216 is neccesary in GIMPLE, simplifying it first.
7217 Returns the built expression value and appends
7218 statements possibly defining it to SEQ. */
7219
7220tree
7221gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7222{
7223 if (ptrofftype_p (TREE_TYPE (op)))
7224 return op;
7225 return gimple_convert (seq, loc, sizetype, op);
7226}
7227
e7c45b66
RS
7228/* Build a vector of type TYPE in which each element has the value OP.
7229 Return a gimple value for the result, appending any new statements
7230 to SEQ. */
7231
7232tree
7233gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7234 tree op)
7235{
7236 tree res, vec = build_vector_from_val (type, op);
7237 if (is_gimple_val (vec))
7238 return vec;
7239 if (gimple_in_ssa_p (cfun))
7240 res = make_ssa_name (type);
7241 else
7242 res = create_tmp_reg (type);
7243 gimple *stmt = gimple_build_assign (res, vec);
7244 gimple_set_location (stmt, loc);
7245 gimple_seq_add_stmt_without_update (seq, stmt);
7246 return res;
7247}
7248
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* Scanning the encoded elements suffices: with at most two elements
     per pattern every distinct value appears in the encoding.  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!TREE_CONSTANT ((*builder)[i]))
      {
	/* At least one element is non-constant: build a CONSTRUCTOR of
	   all elements and assign it to a temporary.  */
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  /* All elements are constant: no statement needed.  */
  return builder->build ();
}
7286
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  /* Dispatch on the shape of the rhs to the matching fold-const query.  */
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     gimple_expr_type (stmt),
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      gimple_expr_type (stmt),
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      /* No ternary query exists; conservatively answer "don't know".  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
7320
7321/* Return true if return value of call STMT is known to be non-negative.
7322 If the return value is based on the assumption that signed overflow is
7323 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7324 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7325
7326static bool
7327gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7328 int depth)
7329{
7330 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7331 gimple_call_arg (stmt, 0) : NULL_TREE;
7332 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7333 gimple_call_arg (stmt, 1) : NULL_TREE;
7334
7335 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
1d9da71f 7336 gimple_call_combined_fn (stmt),
68e57f04
RS
7337 arg0,
7338 arg1,
7339 strict_overflow_p, depth);
7340}
7341
4534c203
RB
7342/* Return true if return value of call STMT is known to be non-negative.
7343 If the return value is based on the assumption that signed overflow is
7344 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7345 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7346
7347static bool
7348gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7349 int depth)
7350{
7351 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7352 {
7353 tree arg = gimple_phi_arg_def (stmt, i);
7354 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7355 return false;
7356 }
7357 return true;
7358}
7359
68e57f04
RS
7360/* Return true if STMT is known to compute a non-negative value.
7361 If the return value is based on the assumption that signed overflow is
7362 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7363 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7364
7365bool
7366gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7367 int depth)
7368{
7369 switch (gimple_code (stmt))
7370 {
7371 case GIMPLE_ASSIGN:
7372 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7373 depth);
7374 case GIMPLE_CALL:
7375 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7376 depth);
4534c203
RB
7377 case GIMPLE_PHI:
7378 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7379 depth);
68e57f04
RS
7380 default:
7381 return false;
7382 }
7383}

/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  /* Dispatch on the shape of the rhs to the matching fold-const query.  */
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      /* No ternary query exists; conservatively answer "don't know".  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
7413
7414/* Return true if the floating-point value computed by call STMT is known
7415 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7416 considered integer values. Return false for signaling NaN.
67dbe582
RS
7417
7418 DEPTH is the current nesting depth of the query. */
7419
7420static bool
7421gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7422{
7423 tree arg0 = (gimple_call_num_args (stmt) > 0
7424 ? gimple_call_arg (stmt, 0)
7425 : NULL_TREE);
7426 tree arg1 = (gimple_call_num_args (stmt) > 1
7427 ? gimple_call_arg (stmt, 1)
7428 : NULL_TREE);
1d9da71f 7429 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
7430 arg0, arg1, depth);
7431}
7432
7433/* Return true if the floating-point result of phi STMT is known to have
7434 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 7435 integer values. Return false for signaling NaN.
67dbe582
RS
7436
7437 DEPTH is the current nesting depth of the query. */
7438
7439static bool
7440gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7441{
7442 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7443 {
7444 tree arg = gimple_phi_arg_def (stmt, i);
7445 if (!integer_valued_real_single_p (arg, depth + 1))
7446 return false;
7447 }
7448 return true;
7449}
7450
7451/* Return true if the floating-point value computed by STMT is known
7452 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7453 considered integer values. Return false for signaling NaN.
67dbe582
RS
7454
7455 DEPTH is the current nesting depth of the query. */
7456
7457bool
7458gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7459{
7460 switch (gimple_code (stmt))
7461 {
7462 case GIMPLE_ASSIGN:
7463 return gimple_assign_integer_valued_real_p (stmt, depth);
7464 case GIMPLE_CALL:
7465 return gimple_call_integer_valued_real_p (stmt, depth);
7466 case GIMPLE_PHI:
7467 return gimple_phi_integer_valued_real_p (stmt, depth);
7468 default:
7469 return false;
7470 }
7471}