/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2018 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "ipa-chkp.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the variable whose constructor DECL
   was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to another compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to a reference
        to a method that was partitioned elsewhere.
        In this case we have a static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during the final compilation stage.
        At this time we already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they have not been
     optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
         static objects are defined.  */
      if (symtab->function_flags_ready)
        return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
        return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->definition)
      || (flag_ltrans
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When the function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct reference
     implies the need to include the function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in the units where they are used and when the other unit was compiled
     with LTO it is possible that the vtable was kept public while the
     function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
          && (!snode->in_other_partition
              || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, a SSA name is created.  Otherwise a temporary register
   is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
        return NULL_TREE;
      if (VAR_P (base))
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_node::get_create (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && is_gimple_reg_type (TREE_TYPE (sym)))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}

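/* A minimal sketch of the effect of get_symbol_constant_value, assuming
   a hypothetical translation unit:

     static const int answer = 42;
     int f (void) { return answer; }

   Applied to the VAR_DECL for `answer', the function returns the
   INTEGER_CST 42, so the load from `answer' can be replaced by the
   constant.  As the comment above notes, a `const' variable without an
   initializer that cannot be overridden at link or run time folds to
   zero instead.  */
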
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}

/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (TREE_CLOBBER_P (rhs))
          return NULL_TREE;

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (rhs))
              {
                bool final;
                vec <cgraph_node *>targets
                  = possible_polymorphic_call_targets (rhs, stmt, &final);
                if (final && targets.length () <= 1 && dbg_cnt (devirt))
                  {
                    if (dump_enabled_p ())
                      {
                        location_t loc = gimple_location_safe (stmt);
                        dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                         "resolving virtual function address "
                                         "reference to function %s\n",
                                         targets.length () == 1
                                         ? targets[0]->name ()
                                         : "NULL");
                      }
                    if (targets.length () == 1)
                      {
                        val = fold_convert (TREE_TYPE (val),
                                            build_fold_addr_expr_loc
                                              (loc, targets[0]->decl));
                        STRIP_USELESS_TYPE_CONVERSION (val);
                      }
                    else
                      /* We cannot use __builtin_unreachable here because it
                         cannot have its address taken.  */
                      val = build_int_cst (TREE_TYPE (val), 0);
                    return val;
                  }
              }
          }

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

            if (result)
              {
                /* Strip away useless type conversions.  Both the
                   NON_LVALUE_EXPR that may have been added by fold, and
                   "useless" type conversions that might now be apparent
                   due to propagation.  */
                STRIP_USELESS_TYPE_CONVERSION (result);

                if (result != rhs && valid_gimple_rhs_p (result))
                  return result;
              }
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (! CONSTANT_CLASS_P (val))
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
                                 TREE_TYPE (gimple_assign_lhs (stmt)),
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
         SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), false);
          return;
        }
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}

/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
        val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  if (gimple_vuse (stmt))
    gimple_set_vuse (repl, gimple_vuse (stmt));
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}

/* If the SIZE argument representing the size of an object is in a range
   of values of which exactly one is valid (and that is zero), return
   true, otherwise false.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME)
    return false;

  wide_int min, max;
  enum value_range_type rtype = get_range_info (size, &min, &max);
  if (rtype != VR_ANTI_RANGE)
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  wide_int wone = wi::one (prec);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;

  return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
}

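/* A worked example of the anti-range test above, assuming 64-bit
   size_t/ssize_t in hypothetical user code:

     size_t n = (size_t) m;   // with ssize_t m known to satisfy m <= 0

   Value range propagation records the range of N as ~[1, SSIZE_MAX]:
   N is either zero or larger than SSIZE_MAX.  Since object sizes above
   SSIZE_MAX are not valid, zero is the only valid value, the predicate
   returns true, and e.g. a memcpy called with this size is folded to
   simply returning DEST (see gimple_fold_builtin_memory_op below).  */
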
/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
                               tree dest, tree src, int endp)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  bool nowarn = gimple_no_warning_p (stmt);

  /* If the LEN parameter is a constant zero or in a range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
        repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
        repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
         It's safe and may even be emitted by GCC itself (see bug
         32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
        {
          gsi_replace (gsi, gimple_build_nop (), false);
          return true;
        }
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Inlining of memcpy/memmove may cause bounds to be lost (if we copy
         pointers as wide integers) and also may result in huge function
         size because of inlined bounds copies.  Thus don't inline for
         functions we want to instrument.  */
      if (flag_check_pointer_bounds
          && chkp_instrumentable_p (cfun->decl)
          /* Even if data may contain pointers we can inline if copy
             less than a pointer size.  */
          && (!tree_fits_uhwi_p (len)
              || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
        return false;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
                                                         ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
         and then all stores, inline it that way.  Currently efficiently
         means that we can load all the memory into a single integer
         register, which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
          && compare_tree_int (len, MOVE_MAX) <= 0
          /* ??? Don't transform copies from strings with known length, as
             this confuses tree-ssa-strlen.c.  This doesn't handle
             the case in gcc.dg/strlenopt-8.c which is XFAILed for that
             reason.  */
          && !c_strlen (src, 2))
        {
          unsigned ilen = tree_to_uhwi (len);
          if (pow2p_hwi (ilen))
            {
              /* Detect invalid bounds and overlapping copies and issue
                 either -Warray-bounds or -Wrestrict.  */
              if (!nowarn
                  && check_bounds_or_overlap (as_a <gcall *> (stmt),
                                              dest, src, len, len))
                gimple_set_no_warning (stmt, true);

              scalar_int_mode mode;
              tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
              if (type
                  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
                  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
                  /* If the destination pointer is not aligned we must be able
                     to emit an unaligned store.  */
                  && (dest_align >= GET_MODE_ALIGNMENT (mode)
                      || !targetm.slow_unaligned_access (mode, dest_align)
                      || (optab_handler (movmisalign_optab, mode)
                          != CODE_FOR_nothing)))
                {
                  tree srctype = type;
                  tree desttype = type;
                  if (src_align < GET_MODE_ALIGNMENT (mode))
                    srctype = build_aligned_type (type, src_align);
                  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
                  tree tem = fold_const_aggregate_ref (srcmem);
                  if (tem)
                    srcmem = tem;
                  else if (src_align < GET_MODE_ALIGNMENT (mode)
                           && targetm.slow_unaligned_access (mode, src_align)
                           && (optab_handler (movmisalign_optab, mode)
                               == CODE_FOR_nothing))
                    srcmem = NULL_TREE;
                  if (srcmem)
                    {
                      gimple *new_stmt;
                      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
                        {
                          new_stmt = gimple_build_assign (NULL_TREE, srcmem);
                          srcmem
                            = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
                                                          new_stmt);
                          gimple_assign_set_lhs (new_stmt, srcmem);
                          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      if (dest_align < GET_MODE_ALIGNMENT (mode))
                        desttype = build_aligned_type (type, dest_align);
                      new_stmt
                        = gimple_build_assign (fold_build2 (MEM_REF, desttype,
                                                            dest, off0),
                                               srcmem);
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      if (gimple_vdef (new_stmt)
                          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
                        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                      if (!lhs)
                        {
                          gsi_replace (gsi, new_stmt, false);
                          return true;
                        }
                      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                      goto done;
                    }
                }
            }
        }

      if (endp == 3)
        {
          /* Both DEST and SRC must be pointer types.
             ??? This is what old code did.  Is the testing for pointer types
             really mandatory?

             If either SRC is readonly or length is 1, we can use memcpy.  */
          if (!dest_align || !src_align)
            return false;
          if (readonly_data_expr (src)
              || (tree_fits_uhwi_p (len)
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
                      >= tree_to_uhwi (len))))
            {
              tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
          if (TREE_CODE (src) == ADDR_EXPR
              && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              poly_int64 src_offset = 0, dest_offset = 0;
              poly_uint64 maxsize;

              srcvar = TREE_OPERAND (src, 0);
              src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
              if (src_base == NULL)
                src_base = srcvar;
              destvar = TREE_OPERAND (dest, 0);
              dest_base = get_addr_base_and_unit_offset (destvar,
                                                         &dest_offset);
              if (dest_base == NULL)
                dest_base = destvar;
              if (!poly_int_tree_p (len, &maxsize))
                maxsize = -1;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
                  if (operand_equal_p (src_base, dest_base, 0)
                      && ranges_maybe_overlap_p (src_offset, maxsize,
                                                 dest_offset, maxsize))
                    return false;
                }
              else if (TREE_CODE (src_base) == MEM_REF
                       && TREE_CODE (dest_base) == MEM_REF)
                {
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
                                         TREE_OPERAND (dest_base, 0), 0))
                    return false;
                  poly_offset_int full_src_offset
                    = mem_ref_offset (src_base) + src_offset;
                  poly_offset_int full_dest_offset
                    = mem_ref_offset (dest_base) + dest_offset;
                  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
                                              full_dest_offset, maxsize))
                    return false;
                }
              else
                return false;

              fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If the destination and source do not alias optimize into
             memcpy as well.  */
          if ((is_gimple_min_invariant (dest)
               || TREE_CODE (dest) == SSA_NAME)
              && (is_gimple_min_invariant (src)
                  || TREE_CODE (src) == SSA_NAME))
            {
              ao_ref destr, srcr;
              ao_ref_init_from_ptr_and_size (&destr, dest, len);
              ao_ref_init_from_ptr_and_size (&srcr, src, len);
              if (!refs_may_alias_p_1 (&destr, &srcr, false))
                {
                  tree fn;
                  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
                  if (!fn)
                    return false;
                  gimple_call_set_fndecl (stmt, fn);
                  gimple_call_set_arg (stmt, 0, dest);
                  gimple_call_set_arg (stmt, 1, src);
                  fold_stmt (gsi);
                  return true;
                }
            }

          return false;
        }

      if (!tree_fits_shwi_p (len))
        return false;
      if (!POINTER_TYPE_P (TREE_TYPE (src))
          || !POINTER_TYPE_P (TREE_TYPE (dest)))
        return false;
      /* In the following try to find a type that is most natural to be
         used for the memcpy source and destination and that allows
         the most optimization when memcpy is turned into a plain assignment
         using that type.  In theory we could always use a char[len] type
         but that only gains us that the destination and source possibly
         no longer will have their address taken.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        srctype = TREE_TYPE (srctype);
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
          || TREE_ADDRESSABLE (desttype))
        return false;

      /* Make sure we are not copying using a floating-point mode or
         a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
          || TREE_CODE (desttype) == BOOLEAN_TYPE
          || TREE_CODE (desttype) == ENUMERAL_TYPE)
        desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
          || TREE_CODE (srctype) == BOOLEAN_TYPE
          || TREE_CODE (srctype) == ENUMERAL_TYPE)
        srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
        srctype = desttype;
      if (!desttype)
        desttype = srctype;
      if (!srctype)
        return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
          || src_align < TYPE_ALIGN (srctype))
        return false;

      destvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (dest, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        destvar = fold_build2 (MEM_REF, desttype, dest, off0);

      srcvar = NULL_TREE;
      if (TREE_CODE (src) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (src, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          if (!destvar
              || src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
                                  src, off0);
          else if (!STRICT_ALIGNMENT)
            {
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
        return false;

      if (srcvar == NULL_TREE)
        {
          if (src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, desttype, src, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }
      else if (destvar == NULL_TREE)
        {
          if (dest_align >= TYPE_ALIGN (srctype))
            destvar = fold_build2 (MEM_REF, srctype, dest, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
                                             dest_align);
              destvar = fold_build2 (MEM_REF, desttype, dest, off0);
            }
        }

      /* Detect invalid bounds and overlapping copies and issue either
         -Warray-bounds or -Wrestrict.  */
      if (!nowarn)
        check_bounds_or_overlap (as_a <gcall *> (stmt), dest, src, len, len);

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
        {
          tree tem = fold_const_aggregate_ref (srcvar);
          if (tem)
            srcvar = tem;
          if (! is_gimple_min_invariant (srcvar))
            {
              new_stmt = gimple_build_assign (NULL_TREE, srcvar);
              srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
                                                   new_stmt);
              gimple_assign_set_lhs (new_stmt, srcvar);
              gimple_set_vuse (new_stmt, gimple_vuse (stmt));
              gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
            }
          new_stmt = gimple_build_assign (destvar, srcvar);
          goto set_vop_and_replace;
        }

      /* We get an aggregate copy.  Use an unsigned char[] type to
         perform the copying to preserve padding and to avoid any issues
         with TREE_ADDRESSABLE types or float modes behavior on copying.  */
      desttype = build_array_type_nelts (unsigned_char_type_node,
                                         tree_to_uhwi (len));
      srctype = desttype;
      if (src_align > TYPE_ALIGN (srctype))
        srctype = build_aligned_type (srctype, src_align);
      if (dest_align > TYPE_ALIGN (desttype))
        desttype = build_aligned_type (desttype, dest_align);
      new_stmt
        = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
                               fold_build2 (MEM_REF, srctype, src, off0));
set_vop_and_replace:
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
        {
          gsi_replace (gsi, new_stmt, false);
          return true;
        }
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
                        ssize_int (1));
  if (endp == 2 || endp == 1)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
                           TREE_TYPE (dest), dest, len);
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}

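/* A minimal sketch of the load/store fast path above, assuming a
   hypothetical translation unit compiled for a common target:

     #include <string.h>
     int a, b;
     void copy (void) { memcpy (&a, &b, sizeof b); }

   The length 4 is a power of two not larger than MOVE_MAX, so instead
   of keeping the library call the folder emits a single 4-byte integer
   load of `b' into a temporary and a store into `a', built as MEM_REFs
   at offset zero through a ref-all character pointer type.  Overlapping
   or variable-length copies are left as calls (or turned into memcpy
   for the memmove case when the operands provably do not overlap).  */
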
/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}

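/* A minimal sketch of the three legacy-BSD folders above, assuming
   hypothetical user code:

     #include <stddef.h>
     #include <strings.h>
     int  cmp  (const void *a, const void *b, size_t n) { return bcmp (a, b, n); }
     void move (const void *s, void *d, size_t n)       { bcopy (s, d, n); }
     void wipe (void *d, size_t n)                      { bzero (d, n); }

   Each call is rewritten in terms of the ISO C functions:

     bcmp (a, b, n)   becomes  memcmp (a, b, n);
     bcopy (s, d, n)  becomes  memmove (d, s, n);   // note the swapped order
     bzero (d, n)     becomes  memset (d, 0, n);

   and the resulting mem* call is then folded further if possible.  */
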
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}

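/* A worked example of the byte replication above, assuming 8-bit bytes
   and a hypothetical caller with a 32-bit unsigned int:

     unsigned int u;
     memset (&u, 0xab, sizeof u);   // length 4 == mode size of `u'

   The fill value is masked to 0xab and the shifts above replicate the
   byte across cval (the (cval << 31) << 1 form keeps each shift count
   below 32); build_int_cst_type then truncates it to ETYPE, so the call
   becomes a single store of the 32-bit constant 0xabababab into `u',
   followed by the usual replacement of the call's lhs, if any, by DEST.  */
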
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is non-zero and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.  If FUZZY is 2, it handles
   PHIs and COND_EXPRs optimistically: if the string length minimum and
   maximum can be determined, it uses the minimum from the operands
   where it can be determined.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */
static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
                  int fuzzy, bool *flexp)
{
  tree var, val = NULL_TREE;
  gimple *def_stmt;

  /* The minimum and maximum length.  */
  tree *const minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
        {
          tree op = TREE_OPERAND (arg, 0);
          if (integer_zerop (TREE_OPERAND (op, 1)))
            {
              tree aop0 = TREE_OPERAND (op, 0);
              if (TREE_CODE (aop0) == INDIRECT_REF
                  && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
                return get_range_strlen (TREE_OPERAND (aop0, 0),
                                         length, visited, type, fuzzy, flexp);
            }
          else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
            {
              /* Fail if an array is the last member of a struct object
                 since it could be treated as a (fake) flexible array
                 member.  */
              tree idx = TREE_OPERAND (op, 1);

              arg = TREE_OPERAND (op, 0);
              tree optype = TREE_TYPE (arg);
              if (tree dom = TYPE_DOMAIN (optype))
                if (tree bound = TYPE_MAX_VALUE (dom))
                  if (TREE_CODE (bound) == INTEGER_CST
                      && TREE_CODE (idx) == INTEGER_CST
                      && tree_int_cst_lt (bound, idx))
                    return false;
            }
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);

      if (!val && fuzzy)
        {
          if (TREE_CODE (arg) == ADDR_EXPR)
            return get_range_strlen (TREE_OPERAND (arg, 0), length,
                                     visited, type, fuzzy, flexp);

          if (TREE_CODE (arg) == ARRAY_REF)
            {
              tree type = TREE_TYPE (TREE_OPERAND (arg, 0));

              /* Determine the "innermost" array type.  */
              while (TREE_CODE (type) == ARRAY_TYPE
                     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
                type = TREE_TYPE (type);

              /* Avoid arrays of pointers.  */
              tree eltype = TREE_TYPE (type);
              if (TREE_CODE (type) != ARRAY_TYPE
                  || !INTEGRAL_TYPE_P (eltype))
                return false;

              val = TYPE_SIZE_UNIT (type);
              if (!val || integer_zerop (val))
                return false;

              val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                                 integer_one_node);
              /* Set the minimum size to zero since the string in
                 the array could have zero length.  */
              *minlen = ssize_int (0);

              if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
                  && type == TREE_TYPE (TREE_OPERAND (arg, 0))
                  && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
                *flexp = true;
            }
          else if (TREE_CODE (arg) == COMPONENT_REF
                   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
                       == ARRAY_TYPE))
            {
              /* Use the type of the member array to determine the upper
                 bound on the length of the array.  This may be overly
                 optimistic if the array itself isn't NUL-terminated and
                 the caller relies on the subsequent member to contain
                 the NUL but that would only be considered valid if
                 the array were the last member of a struct.
                 Set *FLEXP to true if the array whose bound is being
                 used is at the end of a struct.  */
              if (array_at_struct_end_p (arg))
                *flexp = true;

              arg = TREE_OPERAND (arg, 1);

              tree type = TREE_TYPE (arg);

              while (TREE_CODE (type) == ARRAY_TYPE
                     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
                type = TREE_TYPE (type);

              /* Fail when the array bound is unknown or zero.  */
              val = TYPE_SIZE_UNIT (type);
              if (!val || integer_zerop (val))
                return false;
              val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                                 integer_one_node);
              /* Set the minimum size to zero since the string in
                 the array could have zero length.  */
              *minlen = ssize_int (0);
            }

          if (VAR_P (arg))
            {
              tree type = TREE_TYPE (arg);
              if (POINTER_TYPE_P (type))
                type = TREE_TYPE (type);

              if (TREE_CODE (type) == ARRAY_TYPE)
                {
                  val = TYPE_SIZE_UNIT (type);
                  if (!val
                      || TREE_CODE (val) != INTEGER_CST
                      || integer_zerop (val))
                    return false;
                  val = wide_int_to_tree (TREE_TYPE (val),
                                          wi::sub (wi::to_wide (val), 1));
                  /* Set the minimum size to zero since the string in
                     the array could have zero length.  */
                  *minlen = ssize_int (0);
                }
            }
        }

      if (!val)
        return false;

      if (!*minlen
          || (type > 0
              && TREE_CODE (*minlen) == INTEGER_CST
              && TREE_CODE (val) == INTEGER_CST
              && tree_int_cst_lt (val, *minlen)))
        *minlen = val;

      if (*maxlen)
        {
          if (type > 0)
            {
              if (TREE_CODE (*maxlen) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*maxlen, val))
                *maxlen = val;
              return true;
            }
          else if (simple_cst_equal (val, *maxlen) != 1)
            return false;
        }

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
        }
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
        {
          tree ops[2] = { gimple_assign_rhs2 (def_stmt),
                          gimple_assign_rhs3 (def_stmt) };

          for (unsigned int i = 0; i < 2; i++)
            if (!get_range_strlen (ops[i], length, visited, type, fuzzy,
                                   flexp))
              {
                if (fuzzy == 2)
                  *maxlen = build_all_ones_cst (size_type_node);
                else
                  return false;
              }
          return true;
        }
      return false;

    case GIMPLE_PHI:
      /* All the arguments of the PHI node must have the same constant
         length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
        {
          tree arg = gimple_phi_arg (def_stmt, i)->def;

          /* If this PHI has itself as an argument, we cannot
             determine the string length of this argument.  However,
             if we can find a constant string length for the other
             PHI args then we can still be sure that this is a
             constant string length.  So be optimistic and just
             continue with the next argument.  */
          if (arg == gimple_phi_result (def_stmt))
            continue;

          if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
            {
              if (fuzzy == 2)
                *maxlen = build_all_ones_cst (size_type_node);
              else
                return false;
            }
        }
      return true;

    default:
      return false;
    }
}

/* Determine the minimum and maximum value or string length that ARG
   refers to and store each in the first two elements of MINMAXLEN.
   For expressions that point to strings of unknown lengths that are
   character arrays, use the upper bound of the array as the maximum
   length.  For example, given an expression like 'x ? array : "xyz"'
   and array declared as 'char array[8]', MINMAXLEN[0] will be set
   to 0 and MINMAXLEN[1] to 7, the longest string that could be
   stored in array.
   Return true if the range of the string lengths has been obtained
   from the upper bound of an array at the end of a struct.  Such
   an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.

   STRICT is true if PHIs and COND_EXPRs are to be handled conservatively,
   and false if they are to be handled optimistically: if the string length
   minimum and maximum can be determined, the minimum from the operands
   where it can be determined is used.
   STRICT false should only be used for warning code.  */

bool
get_range_strlen (tree arg, tree minmaxlen[2], bool strict)
{
  bitmap visited = NULL;

  minmaxlen[0] = NULL_TREE;
  minmaxlen[1] = NULL_TREE;

  bool flexarray = false;
  if (!get_range_strlen (arg, minmaxlen, &visited, 1, strict ? 1 : 2,
                         &flexarray))
    {
      minmaxlen[0] = NULL_TREE;
      minmaxlen[1] = NULL_TREE;
    }

  if (visited)
    BITMAP_FREE (visited);

  return flexarray;
}

/* Return the maximum string length or the maximum value that ARG may
   refer to or evaluate to, depending on TYPE (see get_range_strlen above),
   or NULL_TREE if it cannot be determined.  */

tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len[2] = { NULL_TREE, NULL_TREE };

  bool dummy;
  if (!get_range_strlen (arg, len, &visited, type, 0, &dummy))
    len[1] = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len[1];
}

fef5a0d9
RB
1599
1600/* Fold function call to builtin strcpy with arguments DEST and SRC.
1601 If LEN is not NULL, it represents the length of the string to be
1602 copied. Return NULL_TREE if no simplification can be made. */
1603
1604static bool
1605gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 1606 tree dest, tree src)
fef5a0d9 1607{
cc8bea0a
MS
1608 gimple *stmt = gsi_stmt (*gsi);
1609 location_t loc = gimple_location (stmt);
fef5a0d9
RB
1610 tree fn;
1611
1612 /* If SRC and DEST are the same (and not volatile), return DEST. */
1613 if (operand_equal_p (src, dest, 0))
1614 {
8cd95cec
MS
1615 /* Issue -Wrestrict unless the pointers are null (those do
1616 not point to objects and so do not indicate an overlap;
1617 such calls could be the result of sanitization and jump
1618 threading). */
1619 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
e9b9fa4c
MS
1620 {
1621 tree func = gimple_call_fndecl (stmt);
cc8bea0a 1622
e9b9fa4c
MS
1623 warning_at (loc, OPT_Wrestrict,
1624 "%qD source argument is the same as destination",
1625 func);
1626 }
cc8bea0a 1627
fef5a0d9
RB
1628 replace_call_with_value (gsi, dest);
1629 return true;
1630 }
1631
1632 if (optimize_function_for_size_p (cfun))
1633 return false;
1634
1635 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1636 if (!fn)
1637 return false;
1638
1579e1f8 1639 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1640 if (!len)
dcb7fae2 1641 return false;
fef5a0d9
RB
1642
1643 len = fold_convert_loc (loc, size_type_node, len);
1644 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1645 len = force_gimple_operand_gsi (gsi, len, true,
1646 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1647 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1648 replace_call_with_call_and_fold (gsi, repl);
1649 return true;
1650}
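
/* An illustrative sketch (not from the original source) of the fold
   above, assuming the memcpy builtin is available, the source length
   is known and we are not optimizing for size:

     strcpy (buf, "abc");

   becomes

     memcpy (buf, "abc", 4);    // strlen ("abc") + 1 bytes

   while strcpy (p, p) is replaced by its destination P (after a
   -Wrestrict warning unless P is a literal null pointer).  */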
1651
 1652	/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
 1653	   Return true if the call was simplified in place and false if no
 1654	   simplification can be made.  */
1655
1656static bool
dcb7fae2
RB
1657gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1658 tree dest, tree src, tree len)
fef5a0d9 1659{
025d57f0
MS
1660 gimple *stmt = gsi_stmt (*gsi);
1661 location_t loc = gimple_location (stmt);
6a33d0ff 1662 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
fef5a0d9
RB
1663
1664 /* If the LEN parameter is zero, return DEST. */
1665 if (integer_zerop (len))
1666 {
6a33d0ff
MS
 1667	      /* Avoid warning if the destination refers to an array/pointer
 1668	 	 decorated with attribute nonstring.  */
1669 if (!nonstring)
1670 {
1671 tree fndecl = gimple_call_fndecl (stmt);
1672 gcall *call = as_a <gcall *> (stmt);
1673
1674 /* Warn about the lack of nul termination: the result is not
1675 a (nul-terminated) string. */
1676 tree slen = get_maxval_strlen (src, 0);
1677 if (slen && !integer_zerop (slen))
1678 warning_at (loc, OPT_Wstringop_truncation,
1679 "%G%qD destination unchanged after copying no bytes "
1680 "from a string of length %E",
1681 call, fndecl, slen);
1682 else
1683 warning_at (loc, OPT_Wstringop_truncation,
1684 "%G%qD destination unchanged after copying no bytes",
1685 call, fndecl);
1686 }
025d57f0 1687
fef5a0d9
RB
1688 replace_call_with_value (gsi, dest);
1689 return true;
1690 }
1691
1692 /* We can't compare slen with len as constants below if len is not a
1693 constant. */
dcb7fae2 1694 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
1695 return false;
1696
fef5a0d9 1697 /* Now, we must be passed a constant src ptr parameter. */
1579e1f8 1698 tree slen = get_maxval_strlen (src, 0);
dcb7fae2 1699 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
1700 return false;
1701
025d57f0
MS
1702 /* The size of the source string including the terminating nul. */
1703 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
fef5a0d9
RB
1704
1705 /* We do not support simplification of this case, though we do
1706 support it when expanding trees into RTL. */
1707 /* FIXME: generate a call to __builtin_memset. */
025d57f0 1708 if (tree_int_cst_lt (ssize, len))
fef5a0d9
RB
1709 return false;
1710
5d0d5d68
MS
1711 /* Diagnose truncation that leaves the copy unterminated. */
1712 maybe_diag_stxncpy_trunc (*gsi, src, len);
025d57f0 1713
fef5a0d9 1714 /* OK transform into builtin memcpy. */
025d57f0 1715 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
fef5a0d9
RB
1716 if (!fn)
1717 return false;
1718
1719 len = fold_convert_loc (loc, size_type_node, len);
1720 len = force_gimple_operand_gsi (gsi, len, true,
1721 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1722 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9 1723 replace_call_with_call_and_fold (gsi, repl);
025d57f0 1724
fef5a0d9
RB
1725 return true;
1726}
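
/* An illustrative sketch (not from the original source) of the fold
   above, for a constant bound that is at least the source size:

     strncpy (buf, "abc", 4);   // bound 4 >= strlen ("abc") + 1

   becomes

     memcpy (buf, "abc", 4);

   No fold is done when the bound exceeds the source size, since memcpy
   cannot reproduce strncpy's zero padding; strncpy (d, s, 0) is
   replaced by D with a -Wstringop-truncation warning.  */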
1727
71dea1dd
WD
1728/* Fold function call to builtin strchr or strrchr.
1729 If both arguments are constant, evaluate and fold the result,
1730 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
912d9ec3
WD
1731 In general strlen is significantly faster than strchr
1732 due to being a simpler operation. */
1733static bool
71dea1dd 1734gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
912d9ec3
WD
1735{
1736 gimple *stmt = gsi_stmt (*gsi);
1737 tree str = gimple_call_arg (stmt, 0);
1738 tree c = gimple_call_arg (stmt, 1);
1739 location_t loc = gimple_location (stmt);
71dea1dd
WD
1740 const char *p;
1741 char ch;
912d9ec3 1742
71dea1dd 1743 if (!gimple_call_lhs (stmt))
912d9ec3
WD
1744 return false;
1745
71dea1dd
WD
1746 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1747 {
1748 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1749
1750 if (p1 == NULL)
1751 {
1752 replace_call_with_value (gsi, integer_zero_node);
1753 return true;
1754 }
1755
1756 tree len = build_int_cst (size_type_node, p1 - p);
1757 gimple_seq stmts = NULL;
1758 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1759 POINTER_PLUS_EXPR, str, len);
1760 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1761 gsi_replace_with_seq_vops (gsi, stmts);
1762 return true;
1763 }
1764
1765 if (!integer_zerop (c))
912d9ec3
WD
1766 return false;
1767
71dea1dd 1768 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
c8952930 1769 if (is_strrchr && optimize_function_for_size_p (cfun))
71dea1dd
WD
1770 {
1771 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1772
c8952930 1773 if (strchr_fn)
71dea1dd
WD
1774 {
1775 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1776 replace_call_with_call_and_fold (gsi, repl);
1777 return true;
1778 }
1779
1780 return false;
1781 }
1782
912d9ec3
WD
1783 tree len;
1784 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1785
1786 if (!strlen_fn)
1787 return false;
1788
 1789	  /* Create len = strlen (str).  */
1790 gimple_seq stmts = NULL;
1791 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1792 gimple_set_location (new_stmt, loc);
a15ebbcd 1793 len = create_tmp_reg_or_ssa_name (size_type_node);
912d9ec3
WD
1794 gimple_call_set_lhs (new_stmt, len);
1795 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1796
1797 /* Create (str p+ strlen (str)). */
1798 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1799 POINTER_PLUS_EXPR, str, len);
1800 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1801 gsi_replace_with_seq_vops (gsi, stmts);
1802 /* gsi now points at the assignment to the lhs, get a
1803 stmt iterator to the strlen.
1804 ??? We can't use gsi_for_stmt as that doesn't work when the
1805 CFG isn't built yet. */
1806 gimple_stmt_iterator gsi2 = *gsi;
1807 gsi_prev (&gsi2);
1808 fold_stmt (&gsi2);
1809 return true;
1810}
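
/* An illustrative sketch (not from the original source) of the folds
   above:

     p = strchr ("hello", 'l');     // both arguments constant
   becomes
     p = "hello" + 2;               // offset of the first 'l'

   and, for a non-constant S,

     p = strchr (s, 0);
   becomes
     tmp = strlen (s);  p = s + tmp;

   strrchr (s, 0) takes the same strlen path, except that when
   optimizing for size it is simply rewritten as strchr (s, 0).  */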
1811
c8952930
JJ
1812/* Fold function call to builtin strstr.
1813 If both arguments are constant, evaluate and fold the result,
1814 additionally fold strstr (x, "") into x and strstr (x, "c")
1815 into strchr (x, 'c'). */
1816static bool
1817gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1818{
1819 gimple *stmt = gsi_stmt (*gsi);
1820 tree haystack = gimple_call_arg (stmt, 0);
1821 tree needle = gimple_call_arg (stmt, 1);
1822 const char *p, *q;
1823
1824 if (!gimple_call_lhs (stmt))
1825 return false;
1826
1827 q = c_getstr (needle);
1828 if (q == NULL)
1829 return false;
1830
1831 if ((p = c_getstr (haystack)))
1832 {
1833 const char *r = strstr (p, q);
1834
1835 if (r == NULL)
1836 {
1837 replace_call_with_value (gsi, integer_zero_node);
1838 return true;
1839 }
1840
1841 tree len = build_int_cst (size_type_node, r - p);
1842 gimple_seq stmts = NULL;
1843 gimple *new_stmt
1844 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1845 haystack, len);
1846 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1847 gsi_replace_with_seq_vops (gsi, stmts);
1848 return true;
1849 }
1850
1851 /* For strstr (x, "") return x. */
1852 if (q[0] == '\0')
1853 {
1854 replace_call_with_value (gsi, haystack);
1855 return true;
1856 }
1857
1858 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1859 if (q[1] == '\0')
1860 {
1861 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1862 if (strchr_fn)
1863 {
1864 tree c = build_int_cst (integer_type_node, q[0]);
1865 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1866 replace_call_with_call_and_fold (gsi, repl);
1867 return true;
1868 }
1869 }
1870
1871 return false;
1872}
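
/* An illustrative sketch (not from the original source) of the folds
   above:

     q = strstr ("abcde", "cd");    // both constant: q = "abcde" + 2
     q = strstr (x, "");            // empty needle:  q = x
     q = strstr (x, "c");           // one-char needle: q = strchr (x, 'c')
*/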
1873
fef5a0d9
RB
1874/* Simplify a call to the strcat builtin. DST and SRC are the arguments
1875 to the call.
1876
 1877	   Return true if the call was simplified in place and false if no
 1878	   simplification was possible.
 1879	
 1880	   The simplified form either replaces the call with its DST argument
 1881	   (when SRC is the empty string) or expands it into an equivalent
 1882	   sequence of calls to other builtins: a strlen of DST followed by a
 1883	   memcpy of SRC into DST + strlen (DST).  */
1891
1892static bool
dcb7fae2 1893gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
fef5a0d9 1894{
355fe088 1895 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 1896 location_t loc = gimple_location (stmt);
fef5a0d9
RB
1897
1898 const char *p = c_getstr (src);
1899
1900 /* If the string length is zero, return the dst parameter. */
1901 if (p && *p == '\0')
1902 {
1903 replace_call_with_value (gsi, dst);
1904 return true;
1905 }
1906
1907 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1908 return false;
1909
 1910	  /* See if we can split this into strlen (dst) plus a memcpy into (dst + strlen (dst)).  */
1911 tree newdst;
1912 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1913 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1914
1915 if (!strlen_fn || !memcpy_fn)
1916 return false;
1917
1918 /* If the length of the source string isn't computable don't
1919 split strcat into strlen and memcpy. */
dcb7fae2 1920 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1921 if (! len)
fef5a0d9
RB
1922 return false;
1923
1924 /* Create strlen (dst). */
1925 gimple_seq stmts = NULL, stmts2;
355fe088 1926 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
fef5a0d9 1927 gimple_set_location (repl, loc);
a15ebbcd 1928 newdst = create_tmp_reg_or_ssa_name (size_type_node);
fef5a0d9
RB
1929 gimple_call_set_lhs (repl, newdst);
1930 gimple_seq_add_stmt_without_update (&stmts, repl);
1931
1932 /* Create (dst p+ strlen (dst)). */
1933 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1934 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1935 gimple_seq_add_seq_without_update (&stmts, stmts2);
1936
1937 len = fold_convert_loc (loc, size_type_node, len);
1938 len = size_binop_loc (loc, PLUS_EXPR, len,
1939 build_int_cst (size_type_node, 1));
1940 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1941 gimple_seq_add_seq_without_update (&stmts, stmts2);
1942
1943 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1944 gimple_seq_add_stmt_without_update (&stmts, repl);
1945 if (gimple_call_lhs (stmt))
1946 {
1947 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1948 gimple_seq_add_stmt_without_update (&stmts, repl);
1949 gsi_replace_with_seq_vops (gsi, stmts);
1950 /* gsi now points at the assignment to the lhs, get a
1951 stmt iterator to the memcpy call.
1952 ??? We can't use gsi_for_stmt as that doesn't work when the
1953 CFG isn't built yet. */
1954 gimple_stmt_iterator gsi2 = *gsi;
1955 gsi_prev (&gsi2);
1956 fold_stmt (&gsi2);
1957 }
1958 else
1959 {
1960 gsi_replace_with_seq_vops (gsi, stmts);
1961 fold_stmt (gsi);
1962 }
1963 return true;
1964}
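
/* An illustrative sketch (not from the original source) of the fold
   above, assuming strlen (SRC) is known, the strlen and memcpy builtins
   are available and the block is optimized for speed:

     strcat (buf, "ab");
   becomes roughly
     tmp = strlen (buf);
     memcpy (buf + tmp, "ab", 3);   // source length 2 plus the nul

   while strcat (buf, "") is simply replaced by BUF.  */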
1965
07f1cf56
RB
 1966	/* Fold a call to the __strcat_chk builtin.  DEST, SRC, and SIZE
 1967	   are the arguments to the call.  */
1968
1969static bool
1970gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1971{
355fe088 1972 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
1973 tree dest = gimple_call_arg (stmt, 0);
1974 tree src = gimple_call_arg (stmt, 1);
1975 tree size = gimple_call_arg (stmt, 2);
1976 tree fn;
1977 const char *p;
1978
1979
1980 p = c_getstr (src);
1981 /* If the SRC parameter is "", return DEST. */
1982 if (p && *p == '\0')
1983 {
1984 replace_call_with_value (gsi, dest);
1985 return true;
1986 }
1987
1988 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1989 return false;
1990
1991 /* If __builtin_strcat_chk is used, assume strcat is available. */
1992 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1993 if (!fn)
1994 return false;
1995
355fe088 1996 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
1997 replace_call_with_call_and_fold (gsi, repl);
1998 return true;
1999}
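
/* An illustrative sketch (not from the original source) of the fold
   above:

     __strcat_chk (d, "", sz);          // empty source: replaced by D
     __strcat_chk (d, s, (size_t) -1);  // unknown object size
   becomes
     strcat (d, s);
*/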
2000
ad03a744
RB
2001/* Simplify a call to the strncat builtin. */
2002
2003static bool
2004gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2005{
2006 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2007 tree dst = gimple_call_arg (stmt, 0);
2008 tree src = gimple_call_arg (stmt, 1);
2009 tree len = gimple_call_arg (stmt, 2);
2010
2011 const char *p = c_getstr (src);
2012
2013 /* If the requested length is zero, or the src parameter string
2014 length is zero, return the dst parameter. */
2015 if (integer_zerop (len) || (p && *p == '\0'))
2016 {
2017 replace_call_with_value (gsi, dst);
2018 return true;
2019 }
2020
025d57f0
MS
2021 if (TREE_CODE (len) != INTEGER_CST || !p)
2022 return false;
2023
2024 unsigned srclen = strlen (p);
2025
2026 int cmpsrc = compare_tree_int (len, srclen);
2027
2028 /* Return early if the requested len is less than the string length.
2029 Warnings will be issued elsewhere later. */
2030 if (cmpsrc < 0)
2031 return false;
2032
2033 unsigned HOST_WIDE_INT dstsize;
2034
2035 bool nowarn = gimple_no_warning_p (stmt);
2036
2037 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
ad03a744 2038 {
025d57f0 2039 int cmpdst = compare_tree_int (len, dstsize);
ad03a744 2040
025d57f0
MS
2041 if (cmpdst >= 0)
2042 {
2043 tree fndecl = gimple_call_fndecl (stmt);
2044
2045 /* Strncat copies (at most) LEN bytes and always appends
2046 the terminating NUL so the specified bound should never
2047 be equal to (or greater than) the size of the destination.
2048 If it is, the copy could overflow. */
2049 location_t loc = gimple_location (stmt);
2050 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2051 cmpdst == 0
2052 ? G_("%G%qD specified bound %E equals "
2053 "destination size")
2054 : G_("%G%qD specified bound %E exceeds "
2055 "destination size %wu"),
2056 stmt, fndecl, len, dstsize);
2057 if (nowarn)
2058 gimple_set_no_warning (stmt, true);
2059 }
2060 }
ad03a744 2061
025d57f0
MS
2062 if (!nowarn && cmpsrc == 0)
2063 {
2064 tree fndecl = gimple_call_fndecl (stmt);
2065
2066 /* To avoid certain truncation the specified bound should also
2067 not be equal to (or less than) the length of the source. */
2068 location_t loc = gimple_location (stmt);
2069 if (warning_at (loc, OPT_Wstringop_overflow_,
2070 "%G%qD specified bound %E equals source length",
2071 stmt, fndecl, len))
2072 gimple_set_no_warning (stmt, true);
ad03a744
RB
2073 }
2074
025d57f0
MS
2075 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2076
2077 /* If the replacement _DECL isn't initialized, don't do the
2078 transformation. */
2079 if (!fn)
2080 return false;
2081
2082 /* Otherwise, emit a call to strcat. */
2083 gcall *repl = gimple_build_call (fn, 2, dst, src);
2084 replace_call_with_call_and_fold (gsi, repl);
2085 return true;
ad03a744
RB
2086}
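
/* An illustrative sketch (not from the original source) of the fold
   above, for a constant source and bound:

     strncat (buf, "ab", 5);    // bound 5 >= strlen ("ab")
   becomes
     strcat (buf, "ab");

   with -Wstringop-overflow diagnostics when the bound equals the
   source length or is not smaller than the destination size.  */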
2087
745583f9
RB
2088/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2089 LEN, and SIZE. */
2090
2091static bool
2092gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2093{
355fe088 2094 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
2095 tree dest = gimple_call_arg (stmt, 0);
2096 tree src = gimple_call_arg (stmt, 1);
2097 tree len = gimple_call_arg (stmt, 2);
2098 tree size = gimple_call_arg (stmt, 3);
2099 tree fn;
2100 const char *p;
2101
2102 p = c_getstr (src);
2103 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2104 if ((p && *p == '\0')
2105 || integer_zerop (len))
2106 {
2107 replace_call_with_value (gsi, dest);
2108 return true;
2109 }
2110
2111 if (! tree_fits_uhwi_p (size))
2112 return false;
2113
2114 if (! integer_all_onesp (size))
2115 {
2116 tree src_len = c_strlen (src, 1);
2117 if (src_len
2118 && tree_fits_uhwi_p (src_len)
2119 && tree_fits_uhwi_p (len)
2120 && ! tree_int_cst_lt (len, src_len))
2121 {
2122 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2123 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2124 if (!fn)
2125 return false;
2126
355fe088 2127 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
2128 replace_call_with_call_and_fold (gsi, repl);
2129 return true;
2130 }
2131 return false;
2132 }
2133
2134 /* If __builtin_strncat_chk is used, assume strncat is available. */
2135 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2136 if (!fn)
2137 return false;
2138
355fe088 2139 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
2140 replace_call_with_call_and_fold (gsi, repl);
2141 return true;
2142}
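
/* An illustrative sketch (not from the original source) of the fold
   above:

     __strncat_chk (d, s, n, (size_t) -1);   // unknown object size
   becomes
     strncat (d, s, n);

   and with a known object size the call becomes __strcat_chk (d, s, sz)
   when the constant bound N is at least strlen (S).  */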
2143
a918bfbf
ML
 2144	/* Build and append gimple statements to STMTS that load the first
 2145	   character of the memory location identified by STR.  LOC is the
 2146	   location of the statement.  */
2147
2148static tree
2149gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2150{
2151 tree var;
2152
2153 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2154 tree cst_uchar_ptr_node
2155 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2156 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2157
2158 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2159 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2160 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2161
2162 gimple_assign_set_lhs (stmt, var);
2163 gimple_seq_add_stmt_without_update (stmts, stmt);
2164
2165 return var;
2166}
2167
 2168	/* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI
 2169	   iterator.  Return true if the call was simplified in place.  */
2170
2171static bool
2172gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2173{
2174 gimple *stmt = gsi_stmt (*gsi);
2175 tree callee = gimple_call_fndecl (stmt);
2176 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2177
2178 tree type = integer_type_node;
2179 tree str1 = gimple_call_arg (stmt, 0);
2180 tree str2 = gimple_call_arg (stmt, 1);
2181 tree lhs = gimple_call_lhs (stmt);
2182 HOST_WIDE_INT length = -1;
2183
2184 /* Handle strncmp and strncasecmp functions. */
2185 if (gimple_call_num_args (stmt) == 3)
2186 {
2187 tree len = gimple_call_arg (stmt, 2);
2188 if (tree_fits_uhwi_p (len))
2189 length = tree_to_uhwi (len);
2190 }
2191
2192 /* If the LEN parameter is zero, return zero. */
2193 if (length == 0)
2194 {
2195 replace_call_with_value (gsi, integer_zero_node);
2196 return true;
2197 }
2198
2199 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2200 if (operand_equal_p (str1, str2, 0))
2201 {
2202 replace_call_with_value (gsi, integer_zero_node);
2203 return true;
2204 }
2205
2206 const char *p1 = c_getstr (str1);
2207 const char *p2 = c_getstr (str2);
2208
2209 /* For known strings, return an immediate value. */
2210 if (p1 && p2)
2211 {
2212 int r = 0;
2213 bool known_result = false;
2214
2215 switch (fcode)
2216 {
2217 case BUILT_IN_STRCMP:
2218 {
2219 r = strcmp (p1, p2);
2220 known_result = true;
2221 break;
2222 }
2223 case BUILT_IN_STRNCMP:
2224 {
2225 if (length == -1)
2226 break;
2227 r = strncmp (p1, p2, length);
2228 known_result = true;
2229 break;
2230 }
 2231	/* The only handleable situation is where the strings are equal
 2232	   (result 0), which is already handled by the operand_equal_p case above.  */
2233 case BUILT_IN_STRCASECMP:
2234 break;
2235 case BUILT_IN_STRNCASECMP:
2236 {
2237 if (length == -1)
2238 break;
2239 r = strncmp (p1, p2, length);
2240 if (r == 0)
2241 known_result = true;
5de73c05 2242 break;
a918bfbf
ML
2243 }
2244 default:
2245 gcc_unreachable ();
2246 }
2247
2248 if (known_result)
2249 {
2250 replace_call_with_value (gsi, build_cmp_result (type, r));
2251 return true;
2252 }
2253 }
2254
2255 bool nonzero_length = length >= 1
2256 || fcode == BUILT_IN_STRCMP
2257 || fcode == BUILT_IN_STRCASECMP;
2258
2259 location_t loc = gimple_location (stmt);
2260
2261 /* If the second arg is "", return *(const unsigned char*)arg1. */
2262 if (p2 && *p2 == '\0' && nonzero_length)
2263 {
2264 gimple_seq stmts = NULL;
2265 tree var = gimple_load_first_char (loc, str1, &stmts);
2266 if (lhs)
2267 {
2268 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2269 gimple_seq_add_stmt_without_update (&stmts, stmt);
2270 }
2271
2272 gsi_replace_with_seq_vops (gsi, stmts);
2273 return true;
2274 }
2275
2276 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2277 if (p1 && *p1 == '\0' && nonzero_length)
2278 {
2279 gimple_seq stmts = NULL;
2280 tree var = gimple_load_first_char (loc, str2, &stmts);
2281
2282 if (lhs)
2283 {
2284 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2285 stmt = gimple_build_assign (c, NOP_EXPR, var);
2286 gimple_seq_add_stmt_without_update (&stmts, stmt);
2287
2288 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2289 gimple_seq_add_stmt_without_update (&stmts, stmt);
2290 }
2291
2292 gsi_replace_with_seq_vops (gsi, stmts);
2293 return true;
2294 }
2295
 2296	  /* If the len parameter is one, return an expression corresponding to
 2297	     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
2298 if (fcode == BUILT_IN_STRNCMP && length == 1)
2299 {
2300 gimple_seq stmts = NULL;
2301 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2302 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2303
2304 if (lhs)
2305 {
2306 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2307 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2308 gimple_seq_add_stmt_without_update (&stmts, convert1);
2309
2310 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2311 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2312 gimple_seq_add_stmt_without_update (&stmts, convert2);
2313
2314 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2315 gimple_seq_add_stmt_without_update (&stmts, stmt);
2316 }
2317
2318 gsi_replace_with_seq_vops (gsi, stmts);
2319 return true;
2320 }
2321
caed5c92
QZ
 2322	  /* If the length is larger than the length of one of the constant
 2323	     strings, replace strncmp with the corresponding strcmp.  */
2324 if (fcode == BUILT_IN_STRNCMP
2325 && length > 0
2326 && ((p2 && (size_t) length > strlen (p2))
2327 || (p1 && (size_t) length > strlen (p1))))
2328 {
2329 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2330 if (!fn)
2331 return false;
2332 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2333 replace_call_with_call_and_fold (gsi, repl);
2334 return true;
2335 }
2336
a918bfbf
ML
2337 return false;
2338}
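
/* An illustrative sketch (not from the original source) of the folds
   above:

     strcmp ("ab", "ab")    -> 0 (constant folded)
     strncmp (s, s, n)      -> 0 (identical arguments)
     strcmp (s, "")         ->  (int) *(const unsigned char *) s
     strcmp ("", t)         -> -(int) *(const unsigned char *) t
     strncmp (s, t, 1)      ->  *(const unsigned char *) s
                                - *(const unsigned char *) t
     strncmp (s, "ab", 5)   -> strcmp (s, "ab")   // bound > strlen ("ab")
*/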
2339
488c6247
ML
 2340	/* Fold a call to the memchr builtin pointed to by the GSI iterator.  */
2341
2342static bool
2343gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2344{
2345 gimple *stmt = gsi_stmt (*gsi);
2346 tree lhs = gimple_call_lhs (stmt);
2347 tree arg1 = gimple_call_arg (stmt, 0);
2348 tree arg2 = gimple_call_arg (stmt, 1);
2349 tree len = gimple_call_arg (stmt, 2);
2350
2351 /* If the LEN parameter is zero, return zero. */
2352 if (integer_zerop (len))
2353 {
2354 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2355 return true;
2356 }
2357
2358 char c;
2359 if (TREE_CODE (arg2) != INTEGER_CST
2360 || !tree_fits_uhwi_p (len)
2361 || !target_char_cst_p (arg2, &c))
2362 return false;
2363
2364 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2365 unsigned HOST_WIDE_INT string_length;
2366 const char *p1 = c_getstr (arg1, &string_length);
2367
2368 if (p1)
2369 {
2370 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2371 if (r == NULL)
2372 {
2373 if (length <= string_length)
2374 {
2375 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2376 return true;
2377 }
2378 }
2379 else
2380 {
2381 unsigned HOST_WIDE_INT offset = r - p1;
2382 gimple_seq stmts = NULL;
2383 if (lhs != NULL_TREE)
2384 {
2385 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2386 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2387 arg1, offset_cst);
2388 gimple_seq_add_stmt_without_update (&stmts, stmt);
2389 }
2390 else
2391 gimple_seq_add_stmt_without_update (&stmts,
2392 gimple_build_nop ());
2393
2394 gsi_replace_with_seq_vops (gsi, stmts);
2395 return true;
2396 }
2397 }
2398
2399 return false;
2400}
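
/* An illustrative sketch (not from the original source) of the fold
   above:

     p = memchr ("abcd", 'c', 4);   // constant haystack: p = "abcd" + 2
     p = memchr (s, c, 0);          // zero length: p = (void *) 0
*/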
a918bfbf 2401
fef5a0d9
RB
 2402	/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
 2403	   to the call.  UNLOCKED is true if this is actually a call to
 2404	   fputs_unlocked.
 2405	
 2406	   Return true if the call was simplified in place and false if no
 2407	   simplification was possible.  */
2408
2409static bool
2410gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
fef5a0d9 2411 tree arg0, tree arg1,
dcb7fae2 2412 bool unlocked)
fef5a0d9 2413{
355fe088 2414 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2415
fef5a0d9
RB
2416 /* If we're using an unlocked function, assume the other unlocked
2417 functions exist explicitly. */
2418 tree const fn_fputc = (unlocked
2419 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2420 : builtin_decl_implicit (BUILT_IN_FPUTC));
2421 tree const fn_fwrite = (unlocked
2422 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2423 : builtin_decl_implicit (BUILT_IN_FWRITE));
2424
2425 /* If the return value is used, don't do the transformation. */
dcb7fae2 2426 if (gimple_call_lhs (stmt))
fef5a0d9
RB
2427 return false;
2428
fef5a0d9
RB
2429 /* Get the length of the string passed to fputs. If the length
2430 can't be determined, punt. */
dcb7fae2 2431 tree len = get_maxval_strlen (arg0, 0);
fef5a0d9
RB
2432 if (!len
2433 || TREE_CODE (len) != INTEGER_CST)
2434 return false;
2435
2436 switch (compare_tree_int (len, 1))
2437 {
 2438	    case -1: /* length is 0, delete the call entirely.  */
2439 replace_call_with_value (gsi, integer_zero_node);
2440 return true;
2441
2442 case 0: /* length is 1, call fputc. */
2443 {
2444 const char *p = c_getstr (arg0);
2445 if (p != NULL)
2446 {
2447 if (!fn_fputc)
2448 return false;
2449
355fe088 2450 gimple *repl = gimple_build_call (fn_fputc, 2,
fef5a0d9
RB
2451 build_int_cst
2452 (integer_type_node, p[0]), arg1);
2453 replace_call_with_call_and_fold (gsi, repl);
2454 return true;
2455 }
2456 }
2457 /* FALLTHROUGH */
2458 case 1: /* length is greater than 1, call fwrite. */
2459 {
2460 /* If optimizing for size keep fputs. */
2461 if (optimize_function_for_size_p (cfun))
2462 return false;
2463 /* New argument list transforming fputs(string, stream) to
2464 fwrite(string, 1, len, stream). */
2465 if (!fn_fwrite)
2466 return false;
2467
355fe088 2468 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
fef5a0d9
RB
2469 size_one_node, len, arg1);
2470 replace_call_with_call_and_fold (gsi, repl);
2471 return true;
2472 }
2473 default:
2474 gcc_unreachable ();
2475 }
2476 return false;
2477}
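
/* An illustrative sketch (not from the original source) of the fold
   above; the return value of fputs must be unused:

     fputs ("", f);     // call deleted
     fputs ("x", f);    // becomes fputc ('x', f)
     fputs ("ab", f);   // becomes fwrite ("ab", 1, 2, f),
                        // unless optimizing for size
*/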
2478
2479/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2480 DEST, SRC, LEN, and SIZE are the arguments to the call.
 2481	   FCODE is the BUILT_IN_* code of the builtin.  Return true if the
 2482	   call was simplified in place and false if no simplification was
 2483	   possible.  */
2484
2485static bool
2486gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
fef5a0d9 2487 tree dest, tree src, tree len, tree size,
fef5a0d9
RB
2488 enum built_in_function fcode)
2489{
355fe088 2490 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
2491 location_t loc = gimple_location (stmt);
2492 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2493 tree fn;
2494
2495 /* If SRC and DEST are the same (and not volatile), return DEST
2496 (resp. DEST+LEN for __mempcpy_chk). */
2497 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2498 {
2499 if (fcode != BUILT_IN_MEMPCPY_CHK)
2500 {
2501 replace_call_with_value (gsi, dest);
2502 return true;
2503 }
2504 else
2505 {
74e3c262
RB
2506 gimple_seq stmts = NULL;
2507 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
22518428
JJ
2508 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2509 TREE_TYPE (dest), dest, len);
74e3c262 2510 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
fef5a0d9
RB
2511 replace_call_with_value (gsi, temp);
2512 return true;
2513 }
2514 }
2515
2516 if (! tree_fits_uhwi_p (size))
2517 return false;
2518
dcb7fae2 2519 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9
RB
2520 if (! integer_all_onesp (size))
2521 {
2522 if (! tree_fits_uhwi_p (len))
2523 {
2524 /* If LEN is not constant, try MAXLEN too.
2525 For MAXLEN only allow optimizing into non-_ocs function
2526 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2527 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2528 {
2529 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2530 {
2531 /* (void) __mempcpy_chk () can be optimized into
2532 (void) __memcpy_chk (). */
2533 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2534 if (!fn)
2535 return false;
2536
355fe088 2537 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2538 replace_call_with_call_and_fold (gsi, repl);
2539 return true;
2540 }
2541 return false;
2542 }
2543 }
2544 else
2545 maxlen = len;
2546
2547 if (tree_int_cst_lt (size, maxlen))
2548 return false;
2549 }
2550
2551 fn = NULL_TREE;
2552 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2553 mem{cpy,pcpy,move,set} is available. */
2554 switch (fcode)
2555 {
2556 case BUILT_IN_MEMCPY_CHK:
2557 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2558 break;
2559 case BUILT_IN_MEMPCPY_CHK:
2560 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2561 break;
2562 case BUILT_IN_MEMMOVE_CHK:
2563 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2564 break;
2565 case BUILT_IN_MEMSET_CHK:
2566 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2567 break;
2568 default:
2569 break;
2570 }
2571
2572 if (!fn)
2573 return false;
2574
355fe088 2575 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
2576 replace_call_with_call_and_fold (gsi, repl);
2577 return true;
2578}
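
/* An illustrative sketch (not from the original source) of the fold
   above:

     __memcpy_chk (d, s, n, (size_t) -1);    // unknown object size
   becomes
     memcpy (d, s, n);

   and likewise for the mempcpy/memmove/memset variants.  With a known
   object size the call is folded only if N (or its known maximum) does
   not exceed that size; __mempcpy_chk (p, p, n, sz) is replaced by the
   value P + N.  */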
2579
2580/* Fold a call to the __st[rp]cpy_chk builtin.
2581 DEST, SRC, and SIZE are the arguments to the call.
 2582	   FCODE is the BUILT_IN_* code of the builtin.  Return true if the
 2583	   call was simplified in place and false if no simplification was
 2584	   possible.  */
2585
2586static bool
2587gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
dcb7fae2 2588 tree dest,
fef5a0d9 2589 tree src, tree size,
fef5a0d9
RB
2590 enum built_in_function fcode)
2591{
355fe088 2592 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
2593 location_t loc = gimple_location (stmt);
2594 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2595 tree len, fn;
2596
2597 /* If SRC and DEST are the same (and not volatile), return DEST. */
2598 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2599 {
8cd95cec
MS
2600 /* Issue -Wrestrict unless the pointers are null (those do
2601 not point to objects and so do not indicate an overlap;
2602 such calls could be the result of sanitization and jump
2603 threading). */
2604 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
e9b9fa4c
MS
2605 {
2606 tree func = gimple_call_fndecl (stmt);
cc8bea0a 2607
e9b9fa4c
MS
2608 warning_at (loc, OPT_Wrestrict,
2609 "%qD source argument is the same as destination",
2610 func);
2611 }
cc8bea0a 2612
fef5a0d9
RB
2613 replace_call_with_value (gsi, dest);
2614 return true;
2615 }
2616
2617 if (! tree_fits_uhwi_p (size))
2618 return false;
2619
dcb7fae2 2620 tree maxlen = get_maxval_strlen (src, 1);
fef5a0d9
RB
2621 if (! integer_all_onesp (size))
2622 {
2623 len = c_strlen (src, 1);
2624 if (! len || ! tree_fits_uhwi_p (len))
2625 {
2626 /* If LEN is not constant, try MAXLEN too.
2627 For MAXLEN only allow optimizing into non-_ocs function
2628 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2629 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2630 {
2631 if (fcode == BUILT_IN_STPCPY_CHK)
2632 {
2633 if (! ignore)
2634 return false;
2635
2636 /* If return value of __stpcpy_chk is ignored,
2637 optimize into __strcpy_chk. */
2638 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2639 if (!fn)
2640 return false;
2641
355fe088 2642 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
fef5a0d9
RB
2643 replace_call_with_call_and_fold (gsi, repl);
2644 return true;
2645 }
2646
2647 if (! len || TREE_SIDE_EFFECTS (len))
2648 return false;
2649
2650 /* If c_strlen returned something, but not a constant,
2651 transform __strcpy_chk into __memcpy_chk. */
2652 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2653 if (!fn)
2654 return false;
2655
74e3c262
RB
2656 gimple_seq stmts = NULL;
2657 len = gimple_convert (&stmts, loc, size_type_node, len);
2658 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2659 build_int_cst (size_type_node, 1));
2660 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 2661 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2662 replace_call_with_call_and_fold (gsi, repl);
2663 return true;
2664 }
e256dfce 2665 }
fef5a0d9
RB
2666 else
2667 maxlen = len;
2668
2669 if (! tree_int_cst_lt (maxlen, size))
2670 return false;
e256dfce
RG
2671 }
2672
fef5a0d9
RB
2673 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2674 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2675 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2676 if (!fn)
2677 return false;
2678
355fe088 2679 gimple *repl = gimple_build_call (fn, 2, dest, src);
fef5a0d9
RB
2680 replace_call_with_call_and_fold (gsi, repl);
2681 return true;
2682}
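
/* An illustrative sketch (not from the original source) of the fold
   above:

     __strcpy_chk (d, s, (size_t) -1);    // unknown destination size
   becomes
     strcpy (d, s);

   and __strcpy_chk (d, "abc", 8) becomes strcpy (d, "abc") because the
   source length 3 is known to fit; a non-constant but side-effect-free
   source length may instead be handled by rewriting the call into
   __memcpy_chk.  */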
2683
2684/* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
 2685	   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
 2686	   builtin.  Return true if the call was simplified in place and false
 2687	   if no simplification was possible.  */
2688
2689static bool
2690gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2691 tree dest, tree src,
dcb7fae2 2692 tree len, tree size,
fef5a0d9
RB
2693 enum built_in_function fcode)
2694{
355fe088 2695 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2696 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2697 tree fn;
2698
2699 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
cbdd87d4 2700 {
fef5a0d9
RB
2701 /* If return value of __stpncpy_chk is ignored,
2702 optimize into __strncpy_chk. */
2703 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2704 if (fn)
2705 {
355fe088 2706 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2707 replace_call_with_call_and_fold (gsi, repl);
2708 return true;
2709 }
cbdd87d4
RG
2710 }
2711
fef5a0d9
RB
2712 if (! tree_fits_uhwi_p (size))
2713 return false;
2714
dcb7fae2 2715 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9 2716 if (! integer_all_onesp (size))
cbdd87d4 2717 {
fef5a0d9 2718 if (! tree_fits_uhwi_p (len))
fe2ef088 2719 {
fef5a0d9
RB
2720 /* If LEN is not constant, try MAXLEN too.
2721 For MAXLEN only allow optimizing into non-_ocs function
2722 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2723 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2724 return false;
8a1561bc 2725 }
fef5a0d9
RB
2726 else
2727 maxlen = len;
2728
2729 if (tree_int_cst_lt (size, maxlen))
2730 return false;
cbdd87d4
RG
2731 }
2732
fef5a0d9
RB
2733 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2734 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2735 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2736 if (!fn)
2737 return false;
2738
355fe088 2739 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
2740 replace_call_with_call_and_fold (gsi, repl);
2741 return true;
cbdd87d4
RG
2742}
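
/* An illustrative sketch (not from the original source) of the fold
   above:

     __strncpy_chk (d, s, n, (size_t) -1);   // unknown object size
   becomes
     strncpy (d, s, n);

   and (void) __stpncpy_chk (d, s, n, sz) is first rewritten as the
   corresponding __strncpy_chk call.  */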
2743
2625bb5d
RB
2744/* Fold function call to builtin stpcpy with arguments DEST and SRC.
 2745	   Return true if simplified in place, false if no simplification can be made.  */
2746
2747static bool
2748gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2749{
2750 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2751 location_t loc = gimple_location (stmt);
2752 tree dest = gimple_call_arg (stmt, 0);
2753 tree src = gimple_call_arg (stmt, 1);
2754 tree fn, len, lenp1;
2755
2756 /* If the result is unused, replace stpcpy with strcpy. */
2757 if (gimple_call_lhs (stmt) == NULL_TREE)
2758 {
2759 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2760 if (!fn)
2761 return false;
2762 gimple_call_set_fndecl (stmt, fn);
2763 fold_stmt (gsi);
2764 return true;
2765 }
2766
2767 len = c_strlen (src, 1);
2768 if (!len
2769 || TREE_CODE (len) != INTEGER_CST)
2770 return false;
2771
2772 if (optimize_function_for_size_p (cfun)
2773 /* If length is zero it's small enough. */
2774 && !integer_zerop (len))
2775 return false;
2776
2777 /* If the source has a known length replace stpcpy with memcpy. */
2778 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2779 if (!fn)
2780 return false;
2781
2782 gimple_seq stmts = NULL;
2783 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2784 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2785 tem, build_int_cst (size_type_node, 1));
2786 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2787 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2788 gimple_set_vuse (repl, gimple_vuse (stmt));
2789 gimple_set_vdef (repl, gimple_vdef (stmt));
2790 if (gimple_vdef (repl)
2791 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2792 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2793 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2794 /* Replace the result with dest + len. */
2795 stmts = NULL;
2796 tem = gimple_convert (&stmts, loc, sizetype, len);
2797 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2798 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2799 POINTER_PLUS_EXPR, dest, tem);
f6b4dc28 2800 gsi_replace (gsi, ret, false);
2625bb5d
RB
2801 /* Finally fold the memcpy call. */
2802 gimple_stmt_iterator gsi2 = *gsi;
2803 gsi_prev (&gsi2);
2804 fold_stmt (&gsi2);
2805 return true;
2806}
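
/* An illustrative sketch (not from the original source) of the fold
   above, assuming strlen (SRC) is a known constant and we are not
   optimizing for size:

     p = stpcpy (d, "abc");
   becomes
     memcpy (d, "abc", 4);
     p = d + 3;                 // stpcpy returns a pointer to the nul

   and when the result is unused the call is simply turned into a
   strcpy call.  */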
2807
fef5a0d9
RB
 2808	/* Fold a call to __{,v}snprintf_chk pointed to by the GSI iterator.
 2809	   FCODE is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.
 2810	   Return false if a normal call should be emitted rather than
 2811	   simplifying the call in place into the corresponding {,v}snprintf
 2812	   call, and true if the call was simplified.  */
cbdd87d4
RG
2813
2814static bool
fef5a0d9 2815gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
dcb7fae2 2816 enum built_in_function fcode)
cbdd87d4 2817{
538dd0b7 2818 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2819 tree dest, size, len, fn, fmt, flag;
2820 const char *fmt_str;
cbdd87d4 2821
fef5a0d9
RB
2822 /* Verify the required arguments in the original call. */
2823 if (gimple_call_num_args (stmt) < 5)
2824 return false;
cbdd87d4 2825
fef5a0d9
RB
2826 dest = gimple_call_arg (stmt, 0);
2827 len = gimple_call_arg (stmt, 1);
2828 flag = gimple_call_arg (stmt, 2);
2829 size = gimple_call_arg (stmt, 3);
2830 fmt = gimple_call_arg (stmt, 4);
2831
2832 if (! tree_fits_uhwi_p (size))
2833 return false;
2834
2835 if (! integer_all_onesp (size))
2836 {
dcb7fae2 2837 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9 2838 if (! tree_fits_uhwi_p (len))
cbdd87d4 2839 {
fef5a0d9
RB
2840 /* If LEN is not constant, try MAXLEN too.
2841 For MAXLEN only allow optimizing into non-_ocs function
2842 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2843 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
cbdd87d4
RG
2844 return false;
2845 }
2846 else
fef5a0d9 2847 maxlen = len;
cbdd87d4 2848
fef5a0d9
RB
2849 if (tree_int_cst_lt (size, maxlen))
2850 return false;
2851 }
cbdd87d4 2852
fef5a0d9
RB
2853 if (!init_target_chars ())
2854 return false;
cbdd87d4 2855
fef5a0d9
RB
2856 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2857 or if format doesn't contain % chars or is "%s". */
2858 if (! integer_zerop (flag))
2859 {
2860 fmt_str = c_getstr (fmt);
2861 if (fmt_str == NULL)
2862 return false;
2863 if (strchr (fmt_str, target_percent) != NULL
2864 && strcmp (fmt_str, target_percent_s))
2865 return false;
cbdd87d4
RG
2866 }
2867
fef5a0d9
RB
2868 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2869 available. */
2870 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2871 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2872 if (!fn)
491e0b9b
RG
2873 return false;
2874
fef5a0d9
RB
 2875	  /* Replace the called function and the first 5 arguments by 3,
 2876	     retaining the trailing varargs.  */
2877 gimple_call_set_fndecl (stmt, fn);
2878 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2879 gimple_call_set_arg (stmt, 0, dest);
2880 gimple_call_set_arg (stmt, 1, len);
2881 gimple_call_set_arg (stmt, 2, fmt);
2882 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2883 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2884 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2885 fold_stmt (gsi);
2886 return true;
2887}
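
/* An illustrative sketch (not from the original source) of the fold
   above:

     __snprintf_chk (buf, n, 0, (size_t) -1, "%d", i);
   becomes
     snprintf (buf, n, "%d", i);

   i.e. the flag and object-size arguments are dropped once the object
   size is unknown or known to be at least the bound N.  */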
cbdd87d4 2888
fef5a0d9
RB
 2889	/* Fold a call to __{,v}sprintf_chk pointed to by the GSI iterator.
 2890	   FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.
 2891	   Return false if a normal call should be emitted rather than
 2892	   simplifying the call in place, and true if it was simplified.  */
cbdd87d4 2893
fef5a0d9
RB
2894static bool
2895gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2896 enum built_in_function fcode)
2897{
538dd0b7 2898 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2899 tree dest, size, len, fn, fmt, flag;
2900 const char *fmt_str;
2901 unsigned nargs = gimple_call_num_args (stmt);
cbdd87d4 2902
fef5a0d9
RB
2903 /* Verify the required arguments in the original call. */
2904 if (nargs < 4)
2905 return false;
2906 dest = gimple_call_arg (stmt, 0);
2907 flag = gimple_call_arg (stmt, 1);
2908 size = gimple_call_arg (stmt, 2);
2909 fmt = gimple_call_arg (stmt, 3);
2910
2911 if (! tree_fits_uhwi_p (size))
2912 return false;
2913
2914 len = NULL_TREE;
2915
2916 if (!init_target_chars ())
2917 return false;
2918
2919 /* Check whether the format is a literal string constant. */
2920 fmt_str = c_getstr (fmt);
2921 if (fmt_str != NULL)
2922 {
2923 /* If the format doesn't contain % args or %%, we know the size. */
2924 if (strchr (fmt_str, target_percent) == 0)
cbdd87d4 2925 {
fef5a0d9
RB
2926 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2927 len = build_int_cstu (size_type_node, strlen (fmt_str));
2928 }
2929 /* If the format is "%s" and first ... argument is a string literal,
2930 we know the size too. */
2931 else if (fcode == BUILT_IN_SPRINTF_CHK
2932 && strcmp (fmt_str, target_percent_s) == 0)
2933 {
2934 tree arg;
cbdd87d4 2935
fef5a0d9
RB
2936 if (nargs == 5)
2937 {
2938 arg = gimple_call_arg (stmt, 4);
2939 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2940 {
2941 len = c_strlen (arg, 1);
2942 if (! len || ! tree_fits_uhwi_p (len))
2943 len = NULL_TREE;
2944 }
2945 }
2946 }
2947 }
cbdd87d4 2948
fef5a0d9
RB
2949 if (! integer_all_onesp (size))
2950 {
2951 if (! len || ! tree_int_cst_lt (len, size))
2952 return false;
2953 }
cbdd87d4 2954
fef5a0d9
RB
2955 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2956 or if format doesn't contain % chars or is "%s". */
2957 if (! integer_zerop (flag))
2958 {
2959 if (fmt_str == NULL)
2960 return false;
2961 if (strchr (fmt_str, target_percent) != NULL
2962 && strcmp (fmt_str, target_percent_s))
2963 return false;
2964 }
cbdd87d4 2965
fef5a0d9
RB
2966 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2967 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2968 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2969 if (!fn)
2970 return false;
2971
 2972	  /* Replace the called function and the first 4 arguments by 2,
 2973	     retaining the trailing varargs.  */
2974 gimple_call_set_fndecl (stmt, fn);
2975 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2976 gimple_call_set_arg (stmt, 0, dest);
2977 gimple_call_set_arg (stmt, 1, fmt);
2978 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2979 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2980 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2981 fold_stmt (gsi);
2982 return true;
2983}
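
/* An illustrative sketch (not from the original source) of the fold
   above:

     __sprintf_chk (buf, 0, (size_t) -1, "%d", i);
   becomes
     sprintf (buf, "%d", i);

   dropping the flag and object-size arguments once the object size is
   unknown or the formatted output is known to fit.  */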
2984
35770bb2
RB
2985/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2986 ORIG may be null if this is a 2-argument call. We don't attempt to
2987 simplify calls with more than 3 arguments.
2988
a104bd88 2989 Return true if simplification was possible, otherwise false. */
35770bb2 2990
a104bd88 2991bool
dcb7fae2 2992gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
35770bb2 2993{
355fe088 2994 gimple *stmt = gsi_stmt (*gsi);
35770bb2
RB
2995 tree dest = gimple_call_arg (stmt, 0);
2996 tree fmt = gimple_call_arg (stmt, 1);
2997 tree orig = NULL_TREE;
2998 const char *fmt_str = NULL;
2999
3000 /* Verify the required arguments in the original call. We deal with two
3001 types of sprintf() calls: 'sprintf (str, fmt)' and
3002 'sprintf (dest, "%s", orig)'. */
3003 if (gimple_call_num_args (stmt) > 3)
3004 return false;
3005
3006 if (gimple_call_num_args (stmt) == 3)
3007 orig = gimple_call_arg (stmt, 2);
3008
3009 /* Check whether the format is a literal string constant. */
3010 fmt_str = c_getstr (fmt);
3011 if (fmt_str == NULL)
3012 return false;
3013
3014 if (!init_target_chars ())
3015 return false;
3016
3017 /* If the format doesn't contain % args or %%, use strcpy. */
3018 if (strchr (fmt_str, target_percent) == NULL)
3019 {
3020 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3021
3022 if (!fn)
3023 return false;
3024
3025 /* Don't optimize sprintf (buf, "abc", ptr++). */
3026 if (orig)
3027 return false;
3028
3029 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3030 'format' is known to contain no % formats. */
3031 gimple_seq stmts = NULL;
355fe088 3032 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
35770bb2
RB
3033 gimple_seq_add_stmt_without_update (&stmts, repl);
3034 if (gimple_call_lhs (stmt))
3035 {
3036 repl = gimple_build_assign (gimple_call_lhs (stmt),
3037 build_int_cst (integer_type_node,
3038 strlen (fmt_str)));
3039 gimple_seq_add_stmt_without_update (&stmts, repl);
3040 gsi_replace_with_seq_vops (gsi, stmts);
3041 /* gsi now points at the assignment to the lhs, get a
 3042	     stmt iterator to the strcpy call.
3043 ??? We can't use gsi_for_stmt as that doesn't work when the
3044 CFG isn't built yet. */
3045 gimple_stmt_iterator gsi2 = *gsi;
3046 gsi_prev (&gsi2);
3047 fold_stmt (&gsi2);
3048 }
3049 else
3050 {
3051 gsi_replace_with_seq_vops (gsi, stmts);
3052 fold_stmt (gsi);
3053 }
3054 return true;
3055 }
3056
3057 /* If the format is "%s", use strcpy if the result isn't used. */
3058 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3059 {
3060 tree fn;
3061 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3062
3063 if (!fn)
3064 return false;
3065
3066 /* Don't crash on sprintf (str1, "%s"). */
3067 if (!orig)
3068 return false;
3069
dcb7fae2
RB
3070 tree orig_len = NULL_TREE;
3071 if (gimple_call_lhs (stmt))
35770bb2 3072 {
dcb7fae2 3073 orig_len = get_maxval_strlen (orig, 0);
d7e78447 3074 if (!orig_len)
35770bb2
RB
3075 return false;
3076 }
3077
3078 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3079 gimple_seq stmts = NULL;
355fe088 3080 gimple *repl = gimple_build_call (fn, 2, dest, orig);
35770bb2
RB
3081 gimple_seq_add_stmt_without_update (&stmts, repl);
3082 if (gimple_call_lhs (stmt))
3083 {
d7e78447
RB
3084 if (!useless_type_conversion_p (integer_type_node,
3085 TREE_TYPE (orig_len)))
3086 orig_len = fold_convert (integer_type_node, orig_len);
3087 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
35770bb2
RB
3088 gimple_seq_add_stmt_without_update (&stmts, repl);
3089 gsi_replace_with_seq_vops (gsi, stmts);
3090 /* gsi now points at the assignment to the lhs, get a
 3092	     stmt iterator to the strcpy call.
3092 ??? We can't use gsi_for_stmt as that doesn't work when the
3093 CFG isn't built yet. */
3094 gimple_stmt_iterator gsi2 = *gsi;
3095 gsi_prev (&gsi2);
3096 fold_stmt (&gsi2);
3097 }
3098 else
3099 {
3100 gsi_replace_with_seq_vops (gsi, stmts);
3101 fold_stmt (gsi);
3102 }
3103 return true;
3104 }
3105 return false;
3106}
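
/* An illustrative sketch (not from the original source) of the folds
   above:

     n = sprintf (buf, "hello");    // no % directives
   becomes
     strcpy (buf, "hello");  n = 5;

   and, with strlen (S) known to be 3,

     n = sprintf (buf, "%s", s);
   becomes
     strcpy (buf, s);  n = 3;
*/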
3107
d7e78447
RB
3108/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3109 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3110 attempt to simplify calls with more than 4 arguments.
35770bb2 3111
a104bd88 3112 Return true if simplification was possible, otherwise false. */
d7e78447 3113
a104bd88 3114bool
dcb7fae2 3115gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
d7e78447 3116{
538dd0b7 3117 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
d7e78447
RB
3118 tree dest = gimple_call_arg (stmt, 0);
3119 tree destsize = gimple_call_arg (stmt, 1);
3120 tree fmt = gimple_call_arg (stmt, 2);
3121 tree orig = NULL_TREE;
3122 const char *fmt_str = NULL;
3123
3124 if (gimple_call_num_args (stmt) > 4)
3125 return false;
3126
3127 if (gimple_call_num_args (stmt) == 4)
3128 orig = gimple_call_arg (stmt, 3);
3129
3130 if (!tree_fits_uhwi_p (destsize))
3131 return false;
3132 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3133
3134 /* Check whether the format is a literal string constant. */
3135 fmt_str = c_getstr (fmt);
3136 if (fmt_str == NULL)
3137 return false;
3138
3139 if (!init_target_chars ())
3140 return false;
3141
3142 /* If the format doesn't contain % args or %%, use strcpy. */
3143 if (strchr (fmt_str, target_percent) == NULL)
3144 {
3145 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3146 if (!fn)
3147 return false;
3148
3149 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3150 if (orig)
3151 return false;
3152
3153 /* We could expand this as
3154 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3155 or to
3156 memcpy (str, fmt_with_nul_at_cstm1, cst);
3157 but in the former case that might increase code size
3158 and in the latter case grow .rodata section too much.
3159 So punt for now. */
3160 size_t len = strlen (fmt_str);
3161 if (len >= destlen)
3162 return false;
3163
3164 gimple_seq stmts = NULL;
355fe088 3165 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
d7e78447
RB
3166 gimple_seq_add_stmt_without_update (&stmts, repl);
3167 if (gimple_call_lhs (stmt))
3168 {
3169 repl = gimple_build_assign (gimple_call_lhs (stmt),
3170 build_int_cst (integer_type_node, len));
3171 gimple_seq_add_stmt_without_update (&stmts, repl);
3172 gsi_replace_with_seq_vops (gsi, stmts);
3173 /* gsi now points at the assignment to the lhs, get a
 3175	     stmt iterator to the strcpy call.
3175 ??? We can't use gsi_for_stmt as that doesn't work when the
3176 CFG isn't built yet. */
3177 gimple_stmt_iterator gsi2 = *gsi;
3178 gsi_prev (&gsi2);
3179 fold_stmt (&gsi2);
3180 }
3181 else
3182 {
3183 gsi_replace_with_seq_vops (gsi, stmts);
3184 fold_stmt (gsi);
3185 }
3186 return true;
3187 }
3188
3189 /* If the format is "%s", use strcpy if the result isn't used. */
3190 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3191 {
3192 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3193 if (!fn)
3194 return false;
3195
3196 /* Don't crash on snprintf (str1, cst, "%s"). */
3197 if (!orig)
3198 return false;
3199
dcb7fae2 3200 tree orig_len = get_maxval_strlen (orig, 0);
af9db3a7 3201 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
dcb7fae2 3202 return false;
d7e78447
RB
3203
3204 /* We could expand this as
3205 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3206 or to
3207 memcpy (str1, str2_with_nul_at_cstm1, cst);
3208 but in the former case that might increase code size
3209 and in the latter case grow .rodata section too much.
3210 So punt for now. */
3211 if (compare_tree_int (orig_len, destlen) >= 0)
3212 return false;
3213
3214 /* Convert snprintf (str1, cst, "%s", str2) into
3215 strcpy (str1, str2) if strlen (str2) < cst. */
3216 gimple_seq stmts = NULL;
355fe088 3217 gimple *repl = gimple_build_call (fn, 2, dest, orig);
d7e78447
RB
3218 gimple_seq_add_stmt_without_update (&stmts, repl);
3219 if (gimple_call_lhs (stmt))
3220 {
3221 if (!useless_type_conversion_p (integer_type_node,
3222 TREE_TYPE (orig_len)))
3223 orig_len = fold_convert (integer_type_node, orig_len);
3224 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3225 gimple_seq_add_stmt_without_update (&stmts, repl);
3226 gsi_replace_with_seq_vops (gsi, stmts);
3227 /* gsi now points at the assignment to the lhs, get a
 3229	     stmt iterator to the strcpy call.
3229 ??? We can't use gsi_for_stmt as that doesn't work when the
3230 CFG isn't built yet. */
3231 gimple_stmt_iterator gsi2 = *gsi;
3232 gsi_prev (&gsi2);
3233 fold_stmt (&gsi2);
3234 }
3235 else
3236 {
3237 gsi_replace_with_seq_vops (gsi, stmts);
3238 fold_stmt (gsi);
3239 }
3240 return true;
3241 }
3242 return false;
3243}
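
/* An illustrative sketch (not from the original source) of the folds
   above:

     n = snprintf (buf, 8, "hi");   // no %, strlen ("hi") < 8
   becomes
     strcpy (buf, "hi");  n = 2;

   snprintf (buf, 8, "%s", s) is handled the same way when strlen (S)
   is a known constant smaller than 8.  */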
35770bb2 3244
edd7ae68
RB
 3245	/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
3246 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3247 more than 3 arguments, and ARG may be null in the 2-argument case.
3248
 3249	   Return true if the call was simplified in place and false if no
 3250	   simplification was possible.  FCODE is the BUILT_IN_*
 3251	   code of the function to be simplified.  */
3252
3253static bool
3254gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3255 tree fp, tree fmt, tree arg,
3256 enum built_in_function fcode)
3257{
3258 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3259 tree fn_fputc, fn_fputs;
3260 const char *fmt_str = NULL;
3261
3262 /* If the return value is used, don't do the transformation. */
3263 if (gimple_call_lhs (stmt) != NULL_TREE)
3264 return false;
3265
3266 /* Check whether the format is a literal string constant. */
3267 fmt_str = c_getstr (fmt);
3268 if (fmt_str == NULL)
3269 return false;
3270
3271 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3272 {
3273 /* If we're using an unlocked function, assume the other
3274 unlocked functions exist explicitly. */
3275 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3276 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3277 }
3278 else
3279 {
3280 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3281 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3282 }
3283
3284 if (!init_target_chars ())
3285 return false;
3286
3287 /* If the format doesn't contain % args or %%, use strcpy. */
3288 if (strchr (fmt_str, target_percent) == NULL)
3289 {
3290 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3291 && arg)
3292 return false;
3293
3294 /* If the format specifier was "", fprintf does nothing. */
3295 if (fmt_str[0] == '\0')
3296 {
3297 replace_call_with_value (gsi, NULL_TREE);
3298 return true;
3299 }
3300
3301 /* When "string" doesn't contain %, replace all cases of
3302 fprintf (fp, string) with fputs (string, fp). The fputs
3303 builtin will take care of special cases like length == 1. */
3304 if (fn_fputs)
3305 {
3306 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3307 replace_call_with_call_and_fold (gsi, repl);
3308 return true;
3309 }
3310 }
3311
3312 /* The other optimizations can be done only on the non-va_list variants. */
3313 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3314 return false;
3315
3316 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3317 else if (strcmp (fmt_str, target_percent_s) == 0)
3318 {
3319 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3320 return false;
3321 if (fn_fputs)
3322 {
3323 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3324 replace_call_with_call_and_fold (gsi, repl);
3325 return true;
3326 }
3327 }
3328
3329 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3330 else if (strcmp (fmt_str, target_percent_c) == 0)
3331 {
3332 if (!arg
3333 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3334 return false;
3335 if (fn_fputc)
3336 {
3337 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3338 replace_call_with_call_and_fold (gsi, repl);
3339 return true;
3340 }
3341 }
3342
3343 return false;
3344}
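
/* An illustrative sketch (not from the original source) of the folds
   above; the return value must be unused:

     fprintf (f, "hello");    // no % directives: fputs ("hello", f)
     fprintf (f, "%s", s);    // fputs (s, f)
     fprintf (f, "%c", c);    // fputc (c, f)
     fprintf (f, "");         // call deleted
*/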
3345
ad03a744
RB
3346/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3347 FMT and ARG are the arguments to the call; we don't fold cases with
3348 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3349
 3350	   Return true if the call was simplified in place and false if no
 3351	   simplification was possible.  FCODE is the BUILT_IN_*
 3352	   code of the function to be simplified.  */
3353
3354static bool
3355gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3356 tree arg, enum built_in_function fcode)
3357{
3358 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3359 tree fn_putchar, fn_puts, newarg;
3360 const char *fmt_str = NULL;
3361
3362 /* If the return value is used, don't do the transformation. */
3363 if (gimple_call_lhs (stmt) != NULL_TREE)
3364 return false;
3365
3366 /* Check whether the format is a literal string constant. */
3367 fmt_str = c_getstr (fmt);
3368 if (fmt_str == NULL)
3369 return false;
3370
3371 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3372 {
3373 /* If we're using an unlocked function, assume the other
3374 unlocked functions exist explicitly. */
3375 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3376 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3377 }
3378 else
3379 {
3380 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3381 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3382 }
3383
3384 if (!init_target_chars ())
3385 return false;
3386
3387 if (strcmp (fmt_str, target_percent_s) == 0
3388 || strchr (fmt_str, target_percent) == NULL)
3389 {
3390 const char *str;
3391
3392 if (strcmp (fmt_str, target_percent_s) == 0)
3393 {
3394 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3395 return false;
3396
3397 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3398 return false;
3399
3400 str = c_getstr (arg);
3401 if (str == NULL)
3402 return false;
3403 }
3404 else
3405 {
3406 /* The format specifier doesn't contain any '%' characters. */
3407 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3408 && arg)
3409 return false;
3410 str = fmt_str;
3411 }
3412
3413 /* If the string was "", printf does nothing. */
3414 if (str[0] == '\0')
3415 {
3416 replace_call_with_value (gsi, NULL_TREE);
3417 return true;
3418 }
3419
3420 /* If the string has length 1, call putchar. */
3421 if (str[1] == '\0')
3422 {
3423 /* Given printf("c"), where c is any single character,
3424 convert "c"[0] to an int and pass that to the replacement
3425 function. */
3426 newarg = build_int_cst (integer_type_node, str[0]);
3427 if (fn_putchar)
3428 {
3429 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3430 replace_call_with_call_and_fold (gsi, repl);
3431 return true;
3432 }
3433 }
3434 else
3435 {
3436 /* If the string was "string\n", call puts("string"). */
3437 size_t len = strlen (str);
3438 if ((unsigned char)str[len - 1] == target_newline
3439 && (size_t) (int) len == len
3440 && (int) len > 0)
3441 {
3442 char *newstr;
3443 tree offset_node, string_cst;
3444
3445 /* Create a NUL-terminated string that's one char shorter
3446 than the original, stripping off the trailing '\n'. */
3447 newarg = build_string_literal (len, str);
3448 string_cst = string_constant (newarg, &offset_node);
3449 gcc_checking_assert (string_cst
3450 && (TREE_STRING_LENGTH (string_cst)
3451 == (int) len)
3452 && integer_zerop (offset_node)
3453 && (unsigned char)
3454 TREE_STRING_POINTER (string_cst)[len - 1]
3455 == target_newline);
3456 /* build_string_literal creates a new STRING_CST,
3457 modify it in place to avoid double copying. */
3458 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3459 newstr[len - 1] = '\0';
3460 if (fn_puts)
3461 {
3462 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3463 replace_call_with_call_and_fold (gsi, repl);
3464 return true;
3465 }
3466 }
3467 else
3468 /* We'd like to arrange to call fputs(string,stdout) here,
3469 but we need stdout and don't have a way to get it yet. */
3470 return false;
3471 }
3472 }
3473
3474 /* The other optimizations can be done only on the non-va_list variants. */
3475 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3476 return false;
3477
3478 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3479 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3480 {
3481 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3482 return false;
3483 if (fn_puts)
3484 {
3485 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3486 replace_call_with_call_and_fold (gsi, repl);
3487 return true;
3488 }
3489 }
3490
3491 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3492 else if (strcmp (fmt_str, target_percent_c) == 0)
3493 {
3494 if (!arg || ! useless_type_conversion_p (integer_type_node,
3495 TREE_TYPE (arg)))
3496 return false;
3497 if (fn_putchar)
3498 {
3499 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3500 replace_call_with_call_and_fold (gsi, repl);
3501 return true;
3502 }
3503 }
3504
3505 return false;
3506}
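/* Editor's illustration (not part of the original file): the printf folding
   above at the source level.  The helper is hypothetical; each rewrite applies
   only when the return value is ignored and the format is a string literal.  */
#if 0 /* illustration only */
#include <stdio.h>

static void
report (const char *name, int c)
{
  printf ("x");          /* folded to putchar ('x')   */
  printf ("hello\n");    /* folded to puts ("hello")  */
  printf ("%s\n", name); /* folded to puts (name)     */
  printf ("%c", c);      /* folded to putchar (c)     */
}
#endif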
3507
edd7ae68 3508
fef5a0d9
RB
3509
3510/* Fold a call to __builtin_strlen whose argument has a known or bounded length. */
3511
3512static bool
dcb7fae2 3513gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 3514{
355fe088 3515 gimple *stmt = gsi_stmt (*gsi);
c42d0aa0
MS
3516
3517 wide_int minlen;
3518 wide_int maxlen;
3519
3520 tree lenrange[2];
c8602fe6 3521 if (!get_range_strlen (gimple_call_arg (stmt, 0), lenrange, true)
c42d0aa0
MS
3522 && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
3523 && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
3524 {
3525 /* The range of lengths refers to either a single constant
3526 string or to the longest and shortest constant string
3527 referenced by the argument of the strlen() call, or to
3528 the strings that can possibly be stored in the arrays
3529 the argument refers to. */
3530 minlen = wi::to_wide (lenrange[0]);
3531 maxlen = wi::to_wide (lenrange[1]);
3532 }
3533 else
3534 {
3535 unsigned prec = TYPE_PRECISION (sizetype);
3536
3537 minlen = wi::shwi (0, prec);
3538 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3539 }
3540
3541 if (minlen == maxlen)
3542 {
3543 lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
3544 true, GSI_SAME_STMT);
3545 replace_call_with_value (gsi, lenrange[0]);
3546 return true;
3547 }
3548
3549 tree lhs = gimple_call_lhs (stmt);
3550 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3551 set_range_info (lhs, VR_RANGE, minlen, maxlen);
3552
3553 return false;
cbdd87d4
RG
3554}
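/* Editor's illustration (not part of the original file): when the computed
   length range collapses to a single value the strlen call is replaced by that
   constant; otherwise the known bounds are only recorded as a value range on
   the call's SSA result.  A hypothetical example of the first case:  */
#if 0 /* illustration only */
#include <string.h>

static size_t
greeting_length (void)
{
  return strlen ("hello");   /* length range is [5, 5], so this folds to 5 */
}
#endif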
3555
48126138
NS
3556/* Fold a call to __builtin_acc_on_device. */
3557
3558static bool
3559gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3560{
3561 /* Defer folding until we know which compiler we're in. */
3562 if (symtab->state != EXPANSION)
3563 return false;
3564
3565 unsigned val_host = GOMP_DEVICE_HOST;
3566 unsigned val_dev = GOMP_DEVICE_NONE;
3567
3568#ifdef ACCEL_COMPILER
3569 val_host = GOMP_DEVICE_NOT_HOST;
3570 val_dev = ACCEL_COMPILER_acc_device;
3571#endif
3572
3573 location_t loc = gimple_location (gsi_stmt (*gsi));
3574
3575 tree host_eq = make_ssa_name (boolean_type_node);
3576 gimple *host_ass = gimple_build_assign
3577 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3578 gimple_set_location (host_ass, loc);
3579 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3580
3581 tree dev_eq = make_ssa_name (boolean_type_node);
3582 gimple *dev_ass = gimple_build_assign
3583 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3584 gimple_set_location (dev_ass, loc);
3585 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3586
3587 tree result = make_ssa_name (boolean_type_node);
3588 gimple *result_ass = gimple_build_assign
3589 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3590 gimple_set_location (result_ass, loc);
3591 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3592
3593 replace_call_with_value (gsi, result);
3594
3595 return true;
3596}
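/* Editor's illustration (not part of the original file): on the host compiler
   the expansion built above is equivalent to the C helper below, with VAL_HOST
   == GOMP_DEVICE_HOST and VAL_DEV == GOMP_DEVICE_NONE; the accelerator
   compiler substitutes its own pair of constants.  */
#if 0 /* illustration only */
static int
acc_on_device_expansion (int arg0, int val_host, int val_dev)
{
  _Bool host_eq = arg0 == val_host;   /* the first EQ_EXPR assignment  */
  _Bool dev_eq = arg0 == val_dev;     /* the second EQ_EXPR assignment */
  return host_eq | dev_eq;            /* the BIT_IOR_EXPR result       */
}
#endif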
cbdd87d4 3597
fe75f732
PK
3598/* Fold realloc (0, n) -> malloc (n). */
3599
3600static bool
3601gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3602{
3603 gimple *stmt = gsi_stmt (*gsi);
3604 tree arg = gimple_call_arg (stmt, 0);
3605 tree size = gimple_call_arg (stmt, 1);
3606
3607 if (operand_equal_p (arg, null_pointer_node, 0))
3608 {
3609 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3610 if (fn_malloc)
3611 {
3612 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3613 replace_call_with_call_and_fold (gsi, repl);
3614 return true;
3615 }
3616 }
3617 return false;
3618}
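/* Editor's illustration (not part of the original file): the source-level
   effect of the realloc folding above.  The helper is hypothetical.  */
#if 0 /* illustration only */
#include <stdlib.h>

static void *
fresh_buffer (size_t n)
{
  return realloc (NULL, n);   /* folded to malloc (n) */
}
#endif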
3619
dcb7fae2
RB
3620/* Fold the non-target builtin at *GSI and return whether any simplification
3621 was made. */
cbdd87d4 3622
fef5a0d9 3623static bool
dcb7fae2 3624gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 3625{
538dd0b7 3626 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 3627 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 3628
dcb7fae2
RB
3629 /* Give up for always_inline inline builtins until they are
3630 inlined. */
3631 if (avoid_folding_inline_builtin (callee))
3632 return false;
cbdd87d4 3633
edd7ae68
RB
3634 unsigned n = gimple_call_num_args (stmt);
3635 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3636 switch (fcode)
cbdd87d4 3637 {
b3d8d88e
MS
3638 case BUILT_IN_BCMP:
3639 return gimple_fold_builtin_bcmp (gsi);
3640 case BUILT_IN_BCOPY:
3641 return gimple_fold_builtin_bcopy (gsi);
dcb7fae2 3642 case BUILT_IN_BZERO:
b3d8d88e
MS
3643 return gimple_fold_builtin_bzero (gsi);
3644
dcb7fae2
RB
3645 case BUILT_IN_MEMSET:
3646 return gimple_fold_builtin_memset (gsi,
3647 gimple_call_arg (stmt, 1),
3648 gimple_call_arg (stmt, 2));
dcb7fae2
RB
3649 case BUILT_IN_MEMCPY:
3650 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3651 gimple_call_arg (stmt, 1), 0);
3652 case BUILT_IN_MEMPCPY:
3653 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3654 gimple_call_arg (stmt, 1), 1);
3655 case BUILT_IN_MEMMOVE:
3656 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3657 gimple_call_arg (stmt, 1), 3);
3658 case BUILT_IN_SPRINTF_CHK:
3659 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 3660 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
3661 case BUILT_IN_STRCAT_CHK:
3662 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
3663 case BUILT_IN_STRNCAT_CHK:
3664 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 3665 case BUILT_IN_STRLEN:
dcb7fae2 3666 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 3667 case BUILT_IN_STRCPY:
dcb7fae2 3668 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 3669 gimple_call_arg (stmt, 0),
dcb7fae2 3670 gimple_call_arg (stmt, 1));
cbdd87d4 3671 case BUILT_IN_STRNCPY:
dcb7fae2 3672 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
3673 gimple_call_arg (stmt, 0),
3674 gimple_call_arg (stmt, 1),
dcb7fae2 3675 gimple_call_arg (stmt, 2));
9a7eefec 3676 case BUILT_IN_STRCAT:
dcb7fae2
RB
3677 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3678 gimple_call_arg (stmt, 1));
ad03a744
RB
3679 case BUILT_IN_STRNCAT:
3680 return gimple_fold_builtin_strncat (gsi);
71dea1dd 3681 case BUILT_IN_INDEX:
912d9ec3 3682 case BUILT_IN_STRCHR:
71dea1dd
WD
3683 return gimple_fold_builtin_strchr (gsi, false);
3684 case BUILT_IN_RINDEX:
3685 case BUILT_IN_STRRCHR:
3686 return gimple_fold_builtin_strchr (gsi, true);
c8952930
JJ
3687 case BUILT_IN_STRSTR:
3688 return gimple_fold_builtin_strstr (gsi);
a918bfbf
ML
3689 case BUILT_IN_STRCMP:
3690 case BUILT_IN_STRCASECMP:
3691 case BUILT_IN_STRNCMP:
3692 case BUILT_IN_STRNCASECMP:
3693 return gimple_fold_builtin_string_compare (gsi);
488c6247
ML
3694 case BUILT_IN_MEMCHR:
3695 return gimple_fold_builtin_memchr (gsi);
cbdd87d4 3696 case BUILT_IN_FPUTS:
dcb7fae2
RB
3697 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3698 gimple_call_arg (stmt, 1), false);
cbdd87d4 3699 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
3700 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3701 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
3702 case BUILT_IN_MEMCPY_CHK:
3703 case BUILT_IN_MEMPCPY_CHK:
3704 case BUILT_IN_MEMMOVE_CHK:
3705 case BUILT_IN_MEMSET_CHK:
dcb7fae2 3706 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
3707 gimple_call_arg (stmt, 0),
3708 gimple_call_arg (stmt, 1),
3709 gimple_call_arg (stmt, 2),
3710 gimple_call_arg (stmt, 3),
edd7ae68 3711 fcode);
2625bb5d
RB
3712 case BUILT_IN_STPCPY:
3713 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
3714 case BUILT_IN_STRCPY_CHK:
3715 case BUILT_IN_STPCPY_CHK:
dcb7fae2 3716 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
3717 gimple_call_arg (stmt, 0),
3718 gimple_call_arg (stmt, 1),
3719 gimple_call_arg (stmt, 2),
edd7ae68 3720 fcode);
cbdd87d4 3721 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 3722 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
3723 return gimple_fold_builtin_stxncpy_chk (gsi,
3724 gimple_call_arg (stmt, 0),
3725 gimple_call_arg (stmt, 1),
3726 gimple_call_arg (stmt, 2),
3727 gimple_call_arg (stmt, 3),
edd7ae68 3728 fcode);
cbdd87d4
RG
3729 case BUILT_IN_SNPRINTF_CHK:
3730 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 3731 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
a104bd88 3732
edd7ae68
RB
3733 case BUILT_IN_FPRINTF:
3734 case BUILT_IN_FPRINTF_UNLOCKED:
3735 case BUILT_IN_VFPRINTF:
3736 if (n == 2 || n == 3)
3737 return gimple_fold_builtin_fprintf (gsi,
3738 gimple_call_arg (stmt, 0),
3739 gimple_call_arg (stmt, 1),
3740 n == 3
3741 ? gimple_call_arg (stmt, 2)
3742 : NULL_TREE,
3743 fcode);
3744 break;
3745 case BUILT_IN_FPRINTF_CHK:
3746 case BUILT_IN_VFPRINTF_CHK:
3747 if (n == 3 || n == 4)
3748 return gimple_fold_builtin_fprintf (gsi,
3749 gimple_call_arg (stmt, 0),
3750 gimple_call_arg (stmt, 2),
3751 n == 4
3752 ? gimple_call_arg (stmt, 3)
3753 : NULL_TREE,
3754 fcode);
3755 break;
ad03a744
RB
3756 case BUILT_IN_PRINTF:
3757 case BUILT_IN_PRINTF_UNLOCKED:
3758 case BUILT_IN_VPRINTF:
3759 if (n == 1 || n == 2)
3760 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3761 n == 2
3762 ? gimple_call_arg (stmt, 1)
3763 : NULL_TREE, fcode);
3764 break;
3765 case BUILT_IN_PRINTF_CHK:
3766 case BUILT_IN_VPRINTF_CHK:
3767 if (n == 2 || n == 3)
3768 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3769 n == 3
3770 ? gimple_call_arg (stmt, 2)
3771 : NULL_TREE, fcode);
242a37f1 3772 break;
48126138
NS
3773 case BUILT_IN_ACC_ON_DEVICE:
3774 return gimple_fold_builtin_acc_on_device (gsi,
3775 gimple_call_arg (stmt, 0));
fe75f732
PK
3776 case BUILT_IN_REALLOC:
3777 return gimple_fold_builtin_realloc (gsi);
3778
fef5a0d9
RB
3779 default:;
3780 }
3781
3782 /* Try the generic builtin folder. */
3783 bool ignore = (gimple_call_lhs (stmt) == NULL);
3784 tree result = fold_call_stmt (stmt, ignore);
3785 if (result)
3786 {
3787 if (ignore)
3788 STRIP_NOPS (result);
3789 else
3790 result = fold_convert (gimple_call_return_type (stmt), result);
3791 if (!update_call_from_tree (gsi, result))
3792 gimplify_and_update_call_from_tree (gsi, result);
3793 return true;
3794 }
3795
3796 return false;
3797}
3798
451e8dae
NS
3799/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3800 function calls to constants, where possible. */
3801
3802static tree
3803fold_internal_goacc_dim (const gimple *call)
3804{
629b3d75
MJ
3805 int axis = oacc_get_ifn_dim_arg (call);
3806 int size = oacc_get_fn_dim_size (current_function_decl, axis);
451e8dae 3807 tree result = NULL_TREE;
67d2229e 3808 tree type = TREE_TYPE (gimple_call_lhs (call));
451e8dae 3809
67d2229e 3810 switch (gimple_call_internal_fn (call))
451e8dae 3811 {
67d2229e
TV
3812 case IFN_GOACC_DIM_POS:
3813 /* If the size is 1, we know the answer. */
3814 if (size == 1)
3815 result = build_int_cst (type, 0);
3816 break;
3817 case IFN_GOACC_DIM_SIZE:
3818 /* If the size is not dynamic, we know the answer. */
3819 if (size)
3820 result = build_int_cst (type, size);
3821 break;
3822 default:
3823 break;
451e8dae
NS
3824 }
3825
3826 return result;
3827}
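/* Editor's illustration (not part of the original file): a plain-C model of
   the decision made above.  SIZE is the compile-time extent of the axis (0
   when it is dynamic); the helper returns the folded constant, or -1 when the
   internal call has to be kept.  Purely a sketch of the logic, not GCC code.  */
#if 0 /* illustration only */
static int
goacc_dim_fold_model (int is_pos_call, int size)
{
  if (is_pos_call)
    return size == 1 ? 0 : -1;  /* the position is known only for a 1-element axis */
  return size != 0 ? size : -1; /* the size is known whenever it is not dynamic */
}
#endif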
3828
849a76a5
JJ
3829/* Return true if STMT is an __atomic_compare_exchange_N call which is suitable
3830 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3831 &var where var is only addressable because of such calls. */
3832
3833bool
3834optimize_atomic_compare_exchange_p (gimple *stmt)
3835{
3836 if (gimple_call_num_args (stmt) != 6
3837 || !flag_inline_atomics
3838 || !optimize
45b2222a 3839 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
849a76a5
JJ
3840 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3841 || !gimple_vdef (stmt)
3842 || !gimple_vuse (stmt))
3843 return false;
3844
3845 tree fndecl = gimple_call_fndecl (stmt);
3846 switch (DECL_FUNCTION_CODE (fndecl))
3847 {
3848 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3849 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3850 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3851 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3852 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3853 break;
3854 default:
3855 return false;
3856 }
3857
3858 tree expected = gimple_call_arg (stmt, 1);
3859 if (TREE_CODE (expected) != ADDR_EXPR
1de3c940
JJ
3860 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3861 return false;
3862
3863 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3864 if (!is_gimple_reg_type (etype)
849a76a5 3865 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
1de3c940
JJ
3866 || TREE_THIS_VOLATILE (etype)
3867 || VECTOR_TYPE_P (etype)
3868 || TREE_CODE (etype) == COMPLEX_TYPE
3869 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3870 might not preserve all the bits. See PR71716. */
3871 || SCALAR_FLOAT_TYPE_P (etype)
73a699ae
RS
3872 || maybe_ne (TYPE_PRECISION (etype),
3873 GET_MODE_BITSIZE (TYPE_MODE (etype))))
849a76a5
JJ
3874 return false;
3875
3876 tree weak = gimple_call_arg (stmt, 3);
3877 if (!integer_zerop (weak) && !integer_onep (weak))
3878 return false;
3879
3880 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3881 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3882 machine_mode mode = TYPE_MODE (itype);
3883
3884 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3885 == CODE_FOR_nothing
3886 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3887 return false;
3888
cf098191 3889 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
849a76a5
JJ
3890 return false;
3891
3892 return true;
3893}
3894
3895/* Fold
3896 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3897 into
3898 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3899 i = IMAGPART_EXPR <t>;
3900 r = (_Bool) i;
3901 e = REALPART_EXPR <t>; */
3902
3903void
3904fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3905{
3906 gimple *stmt = gsi_stmt (*gsi);
3907 tree fndecl = gimple_call_fndecl (stmt);
3908 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3909 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3910 tree ctype = build_complex_type (itype);
3911 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
cc195d46
JJ
3912 bool throws = false;
3913 edge e = NULL;
849a76a5
JJ
3914 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3915 expected);
3916 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3917 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3918 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3919 {
3920 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3921 build1 (VIEW_CONVERT_EXPR, itype,
3922 gimple_assign_lhs (g)));
3923 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3924 }
3925 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3926 + int_size_in_bytes (itype);
3927 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3928 gimple_call_arg (stmt, 0),
3929 gimple_assign_lhs (g),
3930 gimple_call_arg (stmt, 2),
3931 build_int_cst (integer_type_node, flag),
3932 gimple_call_arg (stmt, 4),
3933 gimple_call_arg (stmt, 5));
3934 tree lhs = make_ssa_name (ctype);
3935 gimple_call_set_lhs (g, lhs);
3936 gimple_set_vdef (g, gimple_vdef (stmt));
3937 gimple_set_vuse (g, gimple_vuse (stmt));
3938 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
cc195d46
JJ
3939 tree oldlhs = gimple_call_lhs (stmt);
3940 if (stmt_can_throw_internal (stmt))
3941 {
3942 throws = true;
3943 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3944 }
3945 gimple_call_set_nothrow (as_a <gcall *> (g),
3946 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3947 gimple_call_set_lhs (stmt, NULL_TREE);
3948 gsi_replace (gsi, g, true);
3949 if (oldlhs)
849a76a5 3950 {
849a76a5
JJ
3951 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3952 build1 (IMAGPART_EXPR, itype, lhs));
cc195d46
JJ
3953 if (throws)
3954 {
3955 gsi_insert_on_edge_immediate (e, g);
3956 *gsi = gsi_for_stmt (g);
3957 }
3958 else
3959 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3960 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3961 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5 3962 }
849a76a5
JJ
3963 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3964 build1 (REALPART_EXPR, itype, lhs));
cc195d46
JJ
3965 if (throws && oldlhs == NULL_TREE)
3966 {
3967 gsi_insert_on_edge_immediate (e, g);
3968 *gsi = gsi_for_stmt (g);
3969 }
3970 else
3971 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5
JJ
3972 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3973 {
3974 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3975 VIEW_CONVERT_EXPR,
3976 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3977 gimple_assign_lhs (g)));
3978 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3979 }
3980 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3981 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3982 *gsi = gsiret;
3983}
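/* Editor's illustration (not part of the original file): a hypothetical call
   that the two functions above recognize and rewrite.  After the rewrite the
   local EXPECTED no longer has its address taken by the call, so it can be
   kept in a register instead of memory.  */
#if 0 /* illustration only */
static int
bump_if_equal (int *p, int old, int new_val)
{
  int expected = old;
  return __atomic_compare_exchange_n (p, &expected, new_val, 0,
                                      __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
}
#endif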
3984
1304953e
JJ
3985/* Return true if the result of ARG0 CODE ARG1, computed in infinite signed
3986 precision, doesn't fit into TYPE. The overflow test is performed regardless
3987 of -fwrapv, and even for unsigned types. */
3988
3989bool
3990arith_overflowed_p (enum tree_code code, const_tree type,
3991 const_tree arg0, const_tree arg1)
3992{
3993 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3994 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3995 widest2_int_cst;
3996 widest2_int warg0 = widest2_int_cst (arg0);
3997 widest2_int warg1 = widest2_int_cst (arg1);
3998 widest2_int wres;
3999 switch (code)
4000 {
4001 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4002 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4003 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4004 default: gcc_unreachable ();
4005 }
4006 signop sign = TYPE_SIGN (type);
4007 if (sign == UNSIGNED && wi::neg_p (wres))
4008 return true;
4009 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4010}
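/* Editor's illustration (not part of the original file): arith_overflowed_p
   answers the question posed by the hypothetical helper below.  200 + 100 =
   300 does not fit in unsigned char, so the IFN_ADD_OVERFLOW call folds to a
   constant overflow flag of 1 with a stored result of 300 mod 256.  */
#if 0 /* illustration only */
static int
overflow_demo (void)
{
  unsigned char r;
  return __builtin_add_overflow (200, 100, &r);   /* folds to 1, r == 44 */
}
#endif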
4011
cbdd87d4
RG
4012/* Attempt to fold a call statement referenced by the statement iterator GSI.
4013 The statement may be replaced by another statement, e.g., if the call
4014 simplifies to a constant value. Return true if any changes were made.
4015 It is assumed that the operands have been previously folded. */
4016
e021c122 4017static bool
ceeffab0 4018gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 4019{
538dd0b7 4020 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 4021 tree callee;
e021c122
RG
4022 bool changed = false;
4023 unsigned i;
cbdd87d4 4024
e021c122
RG
4025 /* Fold *& in call arguments. */
4026 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4027 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4028 {
4029 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4030 if (tmp)
4031 {
4032 gimple_call_set_arg (stmt, i, tmp);
4033 changed = true;
4034 }
4035 }
3b45a007
RG
4036
4037 /* Check for virtual calls that became direct calls. */
4038 callee = gimple_call_fn (stmt);
25583c4f 4039 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 4040 {
49c471e3
MJ
4041 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4042 {
450ad0cd
JH
4043 if (dump_file && virtual_method_call_p (callee)
4044 && !possible_polymorphic_call_target_p
6f8091fc
JH
4045 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4046 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
4047 {
4048 fprintf (dump_file,
a70e9985 4049 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
4050 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4051 fprintf (dump_file, " to ");
4052 print_generic_expr (dump_file, callee, TDF_SLIM);
4053 fprintf (dump_file, "\n");
4054 }
4055
49c471e3 4056 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
4057 changed = true;
4058 }
a70e9985 4059 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 4060 {
61dd6a2e
JH
4061 bool final;
4062 vec <cgraph_node *>targets
058d0a90 4063 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 4064 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 4065 {
a70e9985 4066 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
4067 if (dump_enabled_p ())
4068 {
807b7d62 4069 location_t loc = gimple_location_safe (stmt);
2b5f0895
XDL
4070 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
4071 "folding virtual function call to %s\n",
4072 targets.length () == 1
4073 ? targets[0]->name ()
4074 : "__builtin_unreachable");
4075 }
61dd6a2e 4076 if (targets.length () == 1)
cf3e5a89 4077 {
18954840
JJ
4078 tree fndecl = targets[0]->decl;
4079 gimple_call_set_fndecl (stmt, fndecl);
cf3e5a89 4080 changed = true;
18954840
JJ
4081 /* If changing the call to __cxa_pure_virtual
4082 or similar noreturn function, adjust gimple_call_fntype
4083 too. */
865f7046 4084 if (gimple_call_noreturn_p (stmt)
18954840
JJ
4085 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4086 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4087 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4088 == void_type_node))
4089 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
a70e9985 4090 /* If the call becomes noreturn, remove the lhs. */
3cee7e4e
MP
4091 if (lhs
4092 && gimple_call_noreturn_p (stmt)
18954840 4093 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
abd3a68c 4094 || should_remove_lhs_p (lhs)))
a70e9985
JJ
4095 {
4096 if (TREE_CODE (lhs) == SSA_NAME)
4097 {
b731b390 4098 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 4099 tree def = get_or_create_ssa_default_def (cfun, var);
355fe088 4100 gimple *new_stmt = gimple_build_assign (lhs, def);
a70e9985
JJ
4101 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4102 }
4103 gimple_call_set_lhs (stmt, NULL_TREE);
4104 }
0b986c6a 4105 maybe_remove_unused_call_args (cfun, stmt);
cf3e5a89 4106 }
a70e9985 4107 else
cf3e5a89
JJ
4108 {
4109 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
355fe088 4110 gimple *new_stmt = gimple_build_call (fndecl, 0);
cf3e5a89 4111 gimple_set_location (new_stmt, gimple_location (stmt));
2da6996c
RB
4112 /* If the call had an SSA name as its lhs, morph that into
4113 an uninitialized value. */
a70e9985
JJ
4114 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4115 {
b731b390 4116 tree var = create_tmp_var (TREE_TYPE (lhs));
2da6996c
RB
4117 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4118 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4119 set_ssa_default_def (cfun, var, lhs);
42e52a51 4120 }
2da6996c
RB
4121 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4122 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4123 gsi_replace (gsi, new_stmt, false);
cf3e5a89
JJ
4124 return true;
4125 }
e021c122 4126 }
49c471e3 4127 }
e021c122 4128 }
49c471e3 4129
f2d3d07e
RH
4130 /* Check for indirect calls that became direct calls, and then
4131 no longer require a static chain. */
4132 if (gimple_call_chain (stmt))
4133 {
4134 tree fn = gimple_call_fndecl (stmt);
4135 if (fn && !DECL_STATIC_CHAIN (fn))
4136 {
4137 gimple_call_set_chain (stmt, NULL);
4138 changed = true;
4139 }
4140 else
4141 {
4142 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4143 if (tmp)
4144 {
4145 gimple_call_set_chain (stmt, tmp);
4146 changed = true;
4147 }
4148 }
4149 }
4150
e021c122
RG
4151 if (inplace)
4152 return changed;
4153
4154 /* Check for builtins that CCP can handle using information not
4155 available in the generic fold routines. */
fef5a0d9
RB
4156 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4157 {
4158 if (gimple_fold_builtin (gsi))
4159 changed = true;
4160 }
4161 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 4162 {
ea679d55 4163 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 4164 }
368b454d 4165 else if (gimple_call_internal_p (stmt))
ed9c79e1 4166 {
368b454d
JJ
4167 enum tree_code subcode = ERROR_MARK;
4168 tree result = NULL_TREE;
1304953e
JJ
4169 bool cplx_result = false;
4170 tree overflow = NULL_TREE;
368b454d
JJ
4171 switch (gimple_call_internal_fn (stmt))
4172 {
4173 case IFN_BUILTIN_EXPECT:
4174 result = fold_builtin_expect (gimple_location (stmt),
4175 gimple_call_arg (stmt, 0),
4176 gimple_call_arg (stmt, 1),
4177 gimple_call_arg (stmt, 2));
4178 break;
0e82f089 4179 case IFN_UBSAN_OBJECT_SIZE:
ca1150f0
JJ
4180 {
4181 tree offset = gimple_call_arg (stmt, 1);
4182 tree objsize = gimple_call_arg (stmt, 2);
4183 if (integer_all_onesp (objsize)
4184 || (TREE_CODE (offset) == INTEGER_CST
4185 && TREE_CODE (objsize) == INTEGER_CST
4186 && tree_int_cst_le (offset, objsize)))
4187 {
4188 replace_call_with_value (gsi, NULL_TREE);
4189 return true;
4190 }
4191 }
4192 break;
4193 case IFN_UBSAN_PTR:
4194 if (integer_zerop (gimple_call_arg (stmt, 1)))
0e82f089 4195 {
ca1150f0 4196 replace_call_with_value (gsi, NULL_TREE);
0e82f089
MP
4197 return true;
4198 }
4199 break;
ca1150f0
JJ
4200 case IFN_UBSAN_BOUNDS:
4201 {
4202 tree index = gimple_call_arg (stmt, 1);
4203 tree bound = gimple_call_arg (stmt, 2);
4204 if (TREE_CODE (index) == INTEGER_CST
4205 && TREE_CODE (bound) == INTEGER_CST)
4206 {
4207 index = fold_convert (TREE_TYPE (bound), index);
4208 if (TREE_CODE (index) == INTEGER_CST
4209 && tree_int_cst_le (index, bound))
4210 {
4211 replace_call_with_value (gsi, NULL_TREE);
4212 return true;
4213 }
4214 }
4215 }
4216 break;
451e8dae
NS
4217 case IFN_GOACC_DIM_SIZE:
4218 case IFN_GOACC_DIM_POS:
4219 result = fold_internal_goacc_dim (stmt);
4220 break;
368b454d
JJ
4221 case IFN_UBSAN_CHECK_ADD:
4222 subcode = PLUS_EXPR;
4223 break;
4224 case IFN_UBSAN_CHECK_SUB:
4225 subcode = MINUS_EXPR;
4226 break;
4227 case IFN_UBSAN_CHECK_MUL:
4228 subcode = MULT_EXPR;
4229 break;
1304953e
JJ
4230 case IFN_ADD_OVERFLOW:
4231 subcode = PLUS_EXPR;
4232 cplx_result = true;
4233 break;
4234 case IFN_SUB_OVERFLOW:
4235 subcode = MINUS_EXPR;
4236 cplx_result = true;
4237 break;
4238 case IFN_MUL_OVERFLOW:
4239 subcode = MULT_EXPR;
4240 cplx_result = true;
4241 break;
368b454d
JJ
4242 default:
4243 break;
4244 }
4245 if (subcode != ERROR_MARK)
4246 {
4247 tree arg0 = gimple_call_arg (stmt, 0);
4248 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
4249 tree type = TREE_TYPE (arg0);
4250 if (cplx_result)
4251 {
4252 tree lhs = gimple_call_lhs (stmt);
4253 if (lhs == NULL_TREE)
4254 type = NULL_TREE;
4255 else
4256 type = TREE_TYPE (TREE_TYPE (lhs));
4257 }
4258 if (type == NULL_TREE)
4259 ;
368b454d 4260 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
4261 else if (integer_zerop (arg1))
4262 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
4263 /* x = 0 + y; x = 0 * y; */
4264 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 4265 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
4266 /* x = y - y; */
4267 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 4268 result = integer_zero_node;
368b454d 4269 /* x = y * 1; x = 1 * y; */
1304953e
JJ
4270 else if (subcode == MULT_EXPR && integer_onep (arg1))
4271 result = arg0;
4272 else if (subcode == MULT_EXPR && integer_onep (arg0))
4273 result = arg1;
4274 else if (TREE_CODE (arg0) == INTEGER_CST
4275 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 4276 {
1304953e
JJ
4277 if (cplx_result)
4278 result = int_const_binop (subcode, fold_convert (type, arg0),
4279 fold_convert (type, arg1));
4280 else
4281 result = int_const_binop (subcode, arg0, arg1);
4282 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4283 {
4284 if (cplx_result)
4285 overflow = build_one_cst (type);
4286 else
4287 result = NULL_TREE;
4288 }
4289 }
4290 if (result)
4291 {
4292 if (result == integer_zero_node)
4293 result = build_zero_cst (type);
4294 else if (cplx_result && TREE_TYPE (result) != type)
4295 {
4296 if (TREE_CODE (result) == INTEGER_CST)
4297 {
4298 if (arith_overflowed_p (PLUS_EXPR, type, result,
4299 integer_zero_node))
4300 overflow = build_one_cst (type);
4301 }
4302 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4303 && TYPE_UNSIGNED (type))
4304 || (TYPE_PRECISION (type)
4305 < (TYPE_PRECISION (TREE_TYPE (result))
4306 + (TYPE_UNSIGNED (TREE_TYPE (result))
4307 && !TYPE_UNSIGNED (type)))))
4308 result = NULL_TREE;
4309 if (result)
4310 result = fold_convert (type, result);
4311 }
368b454d
JJ
4312 }
4313 }
1304953e 4314
ed9c79e1
JJ
4315 if (result)
4316 {
1304953e
JJ
4317 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4318 result = drop_tree_overflow (result);
4319 if (cplx_result)
4320 {
4321 if (overflow == NULL_TREE)
4322 overflow = build_zero_cst (TREE_TYPE (result));
4323 tree ctype = build_complex_type (TREE_TYPE (result));
4324 if (TREE_CODE (result) == INTEGER_CST
4325 && TREE_CODE (overflow) == INTEGER_CST)
4326 result = build_complex (ctype, result, overflow);
4327 else
4328 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4329 ctype, result, overflow);
4330 }
ed9c79e1
JJ
4331 if (!update_call_from_tree (gsi, result))
4332 gimplify_and_update_call_from_tree (gsi, result);
4333 changed = true;
4334 }
4335 }
3b45a007 4336
e021c122 4337 return changed;
cbdd87d4
RG
4338}
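/* Editor's illustration (not part of the original file): one of the internal
   call identities folded above.  IFN_MUL_OVERFLOW with a constant operand of 1
   simplifies to its other operand with a zero overflow flag, so the
   hypothetical helper below reduces to "*r = x; return 0;".  */
#if 0 /* illustration only */
static int
mul_by_one_demo (int x, int *r)
{
  return __builtin_mul_overflow (x, 1, r);
}
#endif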
4339
e0ee10ed 4340
89a79e96
RB
4341/* Return true if NAME has a use on STMT. */
4342
4343static bool
355fe088 4344has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
4345{
4346 imm_use_iterator iter;
4347 use_operand_p use_p;
4348 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4349 if (USE_STMT (use_p) == stmt)
4350 return true;
4351 return false;
4352}
4353
e0ee10ed
RB
4354/* Worker for fold_stmt_1 dispatch to pattern based folding with
4355 gimple_simplify.
4356
4357 Replaces *GSI with the simplification result in RES_OP
4358 and the associated statements in *SEQ. Does the replacement
4359 according to INPLACE and returns true if the operation succeeded. */
4360
4361static bool
4362replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5d75ad95 4363 gimple_match_op *res_op,
e0ee10ed
RB
4364 gimple_seq *seq, bool inplace)
4365{
355fe088 4366 gimple *stmt = gsi_stmt (*gsi);
5d75ad95
RS
4367 tree *ops = res_op->ops;
4368 unsigned int num_ops = res_op->num_ops;
e0ee10ed
RB
4369
4370 /* Play safe and do not allow abnormals to be mentioned in
89a79e96
RB
4371 newly created statements. See also maybe_push_res_to_seq.
4372 As an exception allow such uses if there was a use of the
4373 same SSA name on the old stmt. */
5d75ad95
RS
4374 for (unsigned int i = 0; i < num_ops; ++i)
4375 if (TREE_CODE (ops[i]) == SSA_NAME
4376 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4377 && !has_use_on_stmt (ops[i], stmt))
4378 return false;
4379
4380 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4381 for (unsigned int i = 0; i < 2; ++i)
4382 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4383 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4384 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4385 return false;
e0ee10ed 4386
fec40d06
RS
4387 /* Don't insert new statements when INPLACE is true, even if we could
4388 reuse STMT for the final statement. */
4389 if (inplace && !gimple_seq_empty_p (*seq))
4390 return false;
4391
538dd0b7 4392 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
e0ee10ed 4393 {
5d75ad95
RS
4394 gcc_assert (res_op->code.is_tree_code ());
4395 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
e0ee10ed
RB
4396 /* GIMPLE_CONDs condition may not throw. */
4397 && (!flag_exceptions
4398 || !cfun->can_throw_non_call_exceptions
5d75ad95 4399 || !operation_could_trap_p (res_op->code,
e0ee10ed
RB
4400 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4401 false, NULL_TREE)))
5d75ad95
RS
4402 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4403 else if (res_op->code == SSA_NAME)
538dd0b7 4404 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
e0ee10ed 4405 build_zero_cst (TREE_TYPE (ops[0])));
5d75ad95 4406 else if (res_op->code == INTEGER_CST)
e0ee10ed
RB
4407 {
4408 if (integer_zerop (ops[0]))
538dd0b7 4409 gimple_cond_make_false (cond_stmt);
e0ee10ed 4410 else
538dd0b7 4411 gimple_cond_make_true (cond_stmt);
e0ee10ed
RB
4412 }
4413 else if (!inplace)
4414 {
5d75ad95 4415 tree res = maybe_push_res_to_seq (res_op, seq);
e0ee10ed
RB
4416 if (!res)
4417 return false;
538dd0b7 4418 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
e0ee10ed
RB
4419 build_zero_cst (TREE_TYPE (res)));
4420 }
4421 else
4422 return false;
4423 if (dump_file && (dump_flags & TDF_DETAILS))
4424 {
4425 fprintf (dump_file, "gimple_simplified to ");
4426 if (!gimple_seq_empty_p (*seq))
4427 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4428 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4429 0, TDF_SLIM);
4430 }
4431 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4432 return true;
4433 }
4434 else if (is_gimple_assign (stmt)
5d75ad95 4435 && res_op->code.is_tree_code ())
e0ee10ed
RB
4436 {
4437 if (!inplace
5d75ad95 4438 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
e0ee10ed 4439 {
5d75ad95
RS
4440 maybe_build_generic_op (res_op);
4441 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4442 res_op->op_or_null (0),
4443 res_op->op_or_null (1),
4444 res_op->op_or_null (2));
e0ee10ed
RB
4445 if (dump_file && (dump_flags & TDF_DETAILS))
4446 {
4447 fprintf (dump_file, "gimple_simplified to ");
4448 if (!gimple_seq_empty_p (*seq))
4449 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4450 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4451 0, TDF_SLIM);
4452 }
4453 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4454 return true;
4455 }
4456 }
5d75ad95
RS
4457 else if (res_op->code.is_fn_code ()
4458 && gimple_call_combined_fn (stmt) == res_op->code)
37d486ab 4459 {
5d75ad95
RS
4460 gcc_assert (num_ops == gimple_call_num_args (stmt));
4461 for (unsigned int i = 0; i < num_ops; ++i)
4462 gimple_call_set_arg (stmt, i, ops[i]);
fec40d06
RS
4463 if (dump_file && (dump_flags & TDF_DETAILS))
4464 {
4465 fprintf (dump_file, "gimple_simplified to ");
4466 if (!gimple_seq_empty_p (*seq))
4467 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4468 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4469 }
4470 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
37d486ab
RB
4471 return true;
4472 }
e0ee10ed
RB
4473 else if (!inplace)
4474 {
4475 if (gimple_has_lhs (stmt))
4476 {
4477 tree lhs = gimple_get_lhs (stmt);
5d75ad95 4478 if (!maybe_push_res_to_seq (res_op, seq, lhs))
de665bbd 4479 return false;
e0ee10ed
RB
4480 if (dump_file && (dump_flags & TDF_DETAILS))
4481 {
4482 fprintf (dump_file, "gimple_simplified to ");
4483 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4484 }
4485 gsi_replace_with_seq_vops (gsi, *seq);
4486 return true;
4487 }
4488 else
4489 gcc_unreachable ();
4490 }
4491
4492 return false;
4493}
4494
040292e7
RB
4495/* Canonicalize MEM_REFs invariant address operand after propagation. */
4496
4497static bool
4498maybe_canonicalize_mem_ref_addr (tree *t)
4499{
4500 bool res = false;
4501
4502 if (TREE_CODE (*t) == ADDR_EXPR)
4503 t = &TREE_OPERAND (*t, 0);
4504
f17a223d
RB
4505 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4506 generic vector extension. The actual vector referenced is
4507 view-converted to an array type for this purpose. If the index
4508 is constant the canonical representation in the middle-end is a
4509 BIT_FIELD_REF so re-write the former to the latter here. */
4510 if (TREE_CODE (*t) == ARRAY_REF
4511 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4512 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4513 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4514 {
4515 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4516 if (VECTOR_TYPE_P (vtype))
4517 {
4518 tree low = array_ref_low_bound (*t);
4519 if (TREE_CODE (low) == INTEGER_CST)
4520 {
4521 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4522 {
4523 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4524 wi::to_widest (low));
4525 idx = wi::mul (idx, wi::to_widest
4526 (TYPE_SIZE (TREE_TYPE (*t))));
4527 widest_int ext
4528 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4529 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4530 {
4531 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4532 TREE_TYPE (*t),
4533 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4534 TYPE_SIZE (TREE_TYPE (*t)),
92e29a5e 4535 wide_int_to_tree (bitsizetype, idx));
f17a223d
RB
4536 res = true;
4537 }
4538 }
4539 }
4540 }
4541 }
4542
040292e7
RB
4543 while (handled_component_p (*t))
4544 t = &TREE_OPERAND (*t, 0);
4545
4546 /* Canonicalize MEM [&foo.bar, 0] which appears after propagation
4547 of invariant addresses into an SSA name MEM_REF address. */
4548 if (TREE_CODE (*t) == MEM_REF
4549 || TREE_CODE (*t) == TARGET_MEM_REF)
4550 {
4551 tree addr = TREE_OPERAND (*t, 0);
4552 if (TREE_CODE (addr) == ADDR_EXPR
4553 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4554 || handled_component_p (TREE_OPERAND (addr, 0))))
4555 {
4556 tree base;
a90c8804 4557 poly_int64 coffset;
040292e7
RB
4558 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4559 &coffset);
4560 if (!base)
4561 gcc_unreachable ();
4562
4563 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4564 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4565 TREE_OPERAND (*t, 1),
4566 size_int (coffset));
4567 res = true;
4568 }
4569 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4570 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4571 }
4572
4573 /* Canonicalize back MEM_REFs to plain reference trees if the object
4574 accessed is a decl that has the same access semantics as the MEM_REF. */
4575 if (TREE_CODE (*t) == MEM_REF
4576 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
f3dccf50
RB
4577 && integer_zerop (TREE_OPERAND (*t, 1))
4578 && MR_DEPENDENCE_CLIQUE (*t) == 0)
040292e7
RB
4579 {
4580 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4581 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4582 if (/* Same volatile qualification. */
4583 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4584 /* Same TBAA behavior with -fstrict-aliasing. */
4585 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4586 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4587 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4588 /* Same alignment. */
4589 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4590 /* We have to look out here to not drop a required conversion
4591 from the rhs to the lhs if *t appears on the lhs or vice-versa
4592 if it appears on the rhs. Thus require strict type
4593 compatibility. */
4594 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4595 {
4596 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4597 res = true;
4598 }
4599 }
4600
4601 /* Canonicalize TARGET_MEM_REF in particular with respect to
4602 the indexes becoming constant. */
4603 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4604 {
4605 tree tem = maybe_fold_tmr (*t);
4606 if (tem)
4607 {
4608 *t = tem;
4609 res = true;
4610 }
4611 }
4612
4613 return res;
4614}
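/* Editor's illustration (not part of the original file): the ARRAY_REF over
   VIEW_CONVERT_EXPR case handled above arises from GNU vector subscripting
   such as the hypothetical accessor below; with a constant index the access
   can be rewritten to a BIT_FIELD_REF (here BIT_FIELD_REF <*v, 32, 64>).  */
#if 0 /* illustration only */
typedef int v4si __attribute__ ((vector_size (16)));

static int
third_lane (v4si *v)
{
  return (*v)[2];
}
#endif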
4615
cbdd87d4
RG
4616/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4617 distinguishes both cases. */
4618
4619static bool
e0ee10ed 4620fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
cbdd87d4
RG
4621{
4622 bool changed = false;
355fe088 4623 gimple *stmt = gsi_stmt (*gsi);
a8b85ce9 4624 bool nowarning = gimple_no_warning_p (stmt);
cbdd87d4 4625 unsigned i;
a8b85ce9 4626 fold_defer_overflow_warnings ();
cbdd87d4 4627
040292e7
RB
4628 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4629 after propagation.
4630 ??? This shouldn't be done in generic folding but in the
4631 propagation helpers which also know whether an address was
89a79e96
RB
4632 propagated.
4633 Also canonicalize operand order. */
040292e7
RB
4634 switch (gimple_code (stmt))
4635 {
4636 case GIMPLE_ASSIGN:
4637 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4638 {
4639 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4640 if ((REFERENCE_CLASS_P (*rhs)
4641 || TREE_CODE (*rhs) == ADDR_EXPR)
4642 && maybe_canonicalize_mem_ref_addr (rhs))
4643 changed = true;
4644 tree *lhs = gimple_assign_lhs_ptr (stmt);
4645 if (REFERENCE_CLASS_P (*lhs)
4646 && maybe_canonicalize_mem_ref_addr (lhs))
4647 changed = true;
4648 }
89a79e96
RB
4649 else
4650 {
4651 /* Canonicalize operand order. */
4652 enum tree_code code = gimple_assign_rhs_code (stmt);
4653 if (TREE_CODE_CLASS (code) == tcc_comparison
4654 || commutative_tree_code (code)
4655 || commutative_ternary_tree_code (code))
4656 {
4657 tree rhs1 = gimple_assign_rhs1 (stmt);
4658 tree rhs2 = gimple_assign_rhs2 (stmt);
14e72812 4659 if (tree_swap_operands_p (rhs1, rhs2))
89a79e96
RB
4660 {
4661 gimple_assign_set_rhs1 (stmt, rhs2);
4662 gimple_assign_set_rhs2 (stmt, rhs1);
4663 if (TREE_CODE_CLASS (code) == tcc_comparison)
4664 gimple_assign_set_rhs_code (stmt,
4665 swap_tree_comparison (code));
4666 changed = true;
4667 }
4668 }
4669 }
040292e7
RB
4670 break;
4671 case GIMPLE_CALL:
4672 {
4673 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4674 {
4675 tree *arg = gimple_call_arg_ptr (stmt, i);
4676 if (REFERENCE_CLASS_P (*arg)
4677 && maybe_canonicalize_mem_ref_addr (arg))
4678 changed = true;
4679 }
4680 tree *lhs = gimple_call_lhs_ptr (stmt);
4681 if (*lhs
4682 && REFERENCE_CLASS_P (*lhs)
4683 && maybe_canonicalize_mem_ref_addr (lhs))
4684 changed = true;
4685 break;
4686 }
4687 case GIMPLE_ASM:
4688 {
538dd0b7
DM
4689 gasm *asm_stmt = as_a <gasm *> (stmt);
4690 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
040292e7 4691 {
538dd0b7 4692 tree link = gimple_asm_output_op (asm_stmt, i);
040292e7
RB
4693 tree op = TREE_VALUE (link);
4694 if (REFERENCE_CLASS_P (op)
4695 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4696 changed = true;
4697 }
538dd0b7 4698 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
040292e7 4699 {
538dd0b7 4700 tree link = gimple_asm_input_op (asm_stmt, i);
040292e7
RB
4701 tree op = TREE_VALUE (link);
4702 if ((REFERENCE_CLASS_P (op)
4703 || TREE_CODE (op) == ADDR_EXPR)
4704 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4705 changed = true;
4706 }
4707 }
4708 break;
4709 case GIMPLE_DEBUG:
4710 if (gimple_debug_bind_p (stmt))
4711 {
4712 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4713 if (*val
4714 && (REFERENCE_CLASS_P (*val)
4715 || TREE_CODE (*val) == ADDR_EXPR)
4716 && maybe_canonicalize_mem_ref_addr (val))
4717 changed = true;
4718 }
4719 break;
89a79e96
RB
4720 case GIMPLE_COND:
4721 {
4722 /* Canonicalize operand order. */
4723 tree lhs = gimple_cond_lhs (stmt);
4724 tree rhs = gimple_cond_rhs (stmt);
14e72812 4725 if (tree_swap_operands_p (lhs, rhs))
89a79e96
RB
4726 {
4727 gcond *gc = as_a <gcond *> (stmt);
4728 gimple_cond_set_lhs (gc, rhs);
4729 gimple_cond_set_rhs (gc, lhs);
4730 gimple_cond_set_code (gc,
4731 swap_tree_comparison (gimple_cond_code (gc)));
4732 changed = true;
4733 }
4734 }
040292e7
RB
4735 default:;
4736 }
4737
e0ee10ed
RB
4738 /* Dispatch to pattern-based folding. */
4739 if (!inplace
4740 || is_gimple_assign (stmt)
4741 || gimple_code (stmt) == GIMPLE_COND)
4742 {
4743 gimple_seq seq = NULL;
5d75ad95
RS
4744 gimple_match_op res_op;
4745 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
0ff093d8 4746 valueize, valueize))
e0ee10ed 4747 {
5d75ad95 4748 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
e0ee10ed
RB
4749 changed = true;
4750 else
4751 gimple_seq_discard (seq);
4752 }
4753 }
4754
4755 stmt = gsi_stmt (*gsi);
4756
cbdd87d4
RG
4757 /* Fold the main computation performed by the statement. */
4758 switch (gimple_code (stmt))
4759 {
4760 case GIMPLE_ASSIGN:
4761 {
819ec64c
RB
4762 /* Try to canonicalize for boolean-typed X the comparisons
4763 X == 0, X == 1, X != 0, and X != 1. */
4764 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4765 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5fbcc0ed 4766 {
819ec64c
RB
4767 tree lhs = gimple_assign_lhs (stmt);
4768 tree op1 = gimple_assign_rhs1 (stmt);
4769 tree op2 = gimple_assign_rhs2 (stmt);
4770 tree type = TREE_TYPE (op1);
4771
4772 /* Check whether the comparison operands are of the same boolean
4773 type as the result type is.
4774 Check that second operand is an integer-constant with value
4775 one or zero. */
4776 if (TREE_CODE (op2) == INTEGER_CST
4777 && (integer_zerop (op2) || integer_onep (op2))
4778 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4779 {
4780 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4781 bool is_logical_not = false;
4782
4783 /* X == 0 and X != 1 is a logical-not of X;
4784 X == 1 and X != 0 is X. */
4785 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4786 || (cmp_code == NE_EXPR && integer_onep (op2)))
4787 is_logical_not = true;
4788
4789 if (is_logical_not == false)
4790 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4791 /* Only for one-bit precision typed X is the transformation
4792 !X -> ~X valid. */
4793 else if (TYPE_PRECISION (type) == 1)
4794 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4795 /* Otherwise we use !X -> X ^ 1. */
4796 else
4797 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4798 build_int_cst (type, 1));
4799 changed = true;
4800 break;
4801 }
5fbcc0ed 4802 }
819ec64c
RB
4803
4804 unsigned old_num_ops = gimple_num_ops (stmt);
4805 tree lhs = gimple_assign_lhs (stmt);
4806 tree new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
4807 if (new_rhs
4808 && !useless_type_conversion_p (TREE_TYPE (lhs),
4809 TREE_TYPE (new_rhs)))
4810 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4811 if (new_rhs
4812 && (!inplace
4813 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4814 {
4815 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4816 changed = true;
4817 }
4818 break;
4819 }
4820
cbdd87d4 4821 case GIMPLE_CALL:
ceeffab0 4822 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
4823 break;
4824
4825 case GIMPLE_ASM:
4826 /* Fold *& in asm operands. */
38384150 4827 {
538dd0b7 4828 gasm *asm_stmt = as_a <gasm *> (stmt);
38384150
JJ
4829 size_t noutputs;
4830 const char **oconstraints;
4831 const char *constraint;
4832 bool allows_mem, allows_reg;
4833
538dd0b7 4834 noutputs = gimple_asm_noutputs (asm_stmt);
38384150
JJ
4835 oconstraints = XALLOCAVEC (const char *, noutputs);
4836
538dd0b7 4837 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
38384150 4838 {
538dd0b7 4839 tree link = gimple_asm_output_op (asm_stmt, i);
38384150
JJ
4840 tree op = TREE_VALUE (link);
4841 oconstraints[i]
4842 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4843 if (REFERENCE_CLASS_P (op)
4844 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4845 {
4846 TREE_VALUE (link) = op;
4847 changed = true;
4848 }
4849 }
538dd0b7 4850 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
38384150 4851 {
538dd0b7 4852 tree link = gimple_asm_input_op (asm_stmt, i);
38384150
JJ
4853 tree op = TREE_VALUE (link);
4854 constraint
4855 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4856 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4857 oconstraints, &allows_mem, &allows_reg);
4858 if (REFERENCE_CLASS_P (op)
4859 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4860 != NULL_TREE)
4861 {
4862 TREE_VALUE (link) = op;
4863 changed = true;
4864 }
4865 }
4866 }
cbdd87d4
RG
4867 break;
4868
bd422c4a
RG
4869 case GIMPLE_DEBUG:
4870 if (gimple_debug_bind_p (stmt))
4871 {
4872 tree val = gimple_debug_bind_get_value (stmt);
4873 if (val
4874 && REFERENCE_CLASS_P (val))
4875 {
4876 tree tem = maybe_fold_reference (val, false);
4877 if (tem)
4878 {
4879 gimple_debug_bind_set_value (stmt, tem);
4880 changed = true;
4881 }
4882 }
3e888a5e
RG
4883 else if (val
4884 && TREE_CODE (val) == ADDR_EXPR)
4885 {
4886 tree ref = TREE_OPERAND (val, 0);
4887 tree tem = maybe_fold_reference (ref, false);
4888 if (tem)
4889 {
4890 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4891 gimple_debug_bind_set_value (stmt, tem);
4892 changed = true;
4893 }
4894 }
bd422c4a
RG
4895 }
4896 break;
4897
cfe3d653
PK
4898 case GIMPLE_RETURN:
4899 {
4900 greturn *ret_stmt = as_a<greturn *> (stmt);
4901 tree ret = gimple_return_retval(ret_stmt);
4902
4903 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4904 {
4905 tree val = valueize (ret);
1af928db
RB
4906 if (val && val != ret
4907 && may_propagate_copy (ret, val))
cfe3d653
PK
4908 {
4909 gimple_return_set_retval (ret_stmt, val);
4910 changed = true;
4911 }
4912 }
4913 }
4914 break;
4915
cbdd87d4
RG
4916 default:;
4917 }
4918
4919 stmt = gsi_stmt (*gsi);
4920
37376165
RB
4921 /* Fold *& on the lhs. */
4922 if (gimple_has_lhs (stmt))
cbdd87d4
RG
4923 {
4924 tree lhs = gimple_get_lhs (stmt);
4925 if (lhs && REFERENCE_CLASS_P (lhs))
4926 {
4927 tree new_lhs = maybe_fold_reference (lhs, true);
4928 if (new_lhs)
4929 {
4930 gimple_set_lhs (stmt, new_lhs);
4931 changed = true;
4932 }
4933 }
4934 }
4935
a8b85ce9 4936 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
cbdd87d4
RG
4937 return changed;
4938}
4939
e0ee10ed
RB
4940/* Valueization callback that ends up not following SSA edges. */
4941
4942tree
4943no_follow_ssa_edges (tree)
4944{
4945 return NULL_TREE;
4946}
4947
45cc9f96
RB
4948/* Valueization callback that ends up following single-use SSA edges only. */
4949
4950tree
4951follow_single_use_edges (tree val)
4952{
4953 if (TREE_CODE (val) == SSA_NAME
4954 && !has_single_use (val))
4955 return NULL_TREE;
4956 return val;
4957}
4958
c566cc9f
RS
4959/* Valueization callback that follows all SSA edges. */
4960
4961tree
4962follow_all_ssa_edges (tree val)
4963{
4964 return val;
4965}
4966
cbdd87d4
RG
4967/* Fold the statement pointed to by GSI. In some cases, this function may
4968 replace the whole statement with a new one. Returns true iff folding
4969 makes any changes.
4970 The statement pointed to by GSI should be in valid gimple form but may
4971 be in unfolded state as resulting from for example constant propagation
4972 which can produce *&x = 0. */
4973
4974bool
4975fold_stmt (gimple_stmt_iterator *gsi)
4976{
e0ee10ed
RB
4977 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4978}
4979
4980bool
4981fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4982{
4983 return fold_stmt_1 (gsi, false, valueize);
cbdd87d4
RG
4984}
4985
59401b92 4986/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
4987 *&x created by constant propagation are handled. The statement cannot
4988 be replaced with a new one. Return true if the statement was
4989 changed, false otherwise.
59401b92 4990 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
4991 be in unfolded state as resulting from for example constant propagation
4992 which can produce *&x = 0. */
4993
4994bool
59401b92 4995fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 4996{
355fe088 4997 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 4998 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 4999 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
5000 return changed;
5001}
5002
e89065a1
SL
5003/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5004 if EXPR is null or we don't know how.
5005 If non-null, the result always has boolean type. */
5006
5007static tree
5008canonicalize_bool (tree expr, bool invert)
5009{
5010 if (!expr)
5011 return NULL_TREE;
5012 else if (invert)
5013 {
5014 if (integer_nonzerop (expr))
5015 return boolean_false_node;
5016 else if (integer_zerop (expr))
5017 return boolean_true_node;
5018 else if (TREE_CODE (expr) == SSA_NAME)
5019 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5020 build_int_cst (TREE_TYPE (expr), 0));
98209db3 5021 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
5022 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5023 boolean_type_node,
5024 TREE_OPERAND (expr, 0),
5025 TREE_OPERAND (expr, 1));
5026 else
5027 return NULL_TREE;
5028 }
5029 else
5030 {
5031 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5032 return expr;
5033 if (integer_nonzerop (expr))
5034 return boolean_true_node;
5035 else if (integer_zerop (expr))
5036 return boolean_false_node;
5037 else if (TREE_CODE (expr) == SSA_NAME)
5038 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5039 build_int_cst (TREE_TYPE (expr), 0));
98209db3 5040 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
5041 return fold_build2 (TREE_CODE (expr),
5042 boolean_type_node,
5043 TREE_OPERAND (expr, 0),
5044 TREE_OPERAND (expr, 1));
5045 else
5046 return NULL_TREE;
5047 }
5048}
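/* Editor's illustration (not part of the original file): canonicalize_bool
   viewed as a value mapping.  For a constant input it behaves like the helper
   below; for an SSA name X it instead builds X != 0 (or X == 0 when INVERT),
   and for a comparison it rebuilds, or inverts, the comparison in boolean
   type.  */
#if 0 /* illustration only */
static _Bool
canonicalize_bool_value (long expr, _Bool invert)
{
  return invert ? expr == 0 : expr != 0;
}
#endif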
5049
5050/* Check to see if a boolean expression EXPR is logically equivalent to the
5051 comparison (OP1 CODE OP2). Check for various identities involving
5052 SSA_NAMEs. */
5053
5054static bool
5055same_bool_comparison_p (const_tree expr, enum tree_code code,
5056 const_tree op1, const_tree op2)
5057{
355fe088 5058 gimple *s;
e89065a1
SL
5059
5060 /* The obvious case. */
5061 if (TREE_CODE (expr) == code
5062 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5063 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5064 return true;
5065
5066 /* Check for comparing (name, name != 0) and the case where expr
5067 is an SSA_NAME with a definition matching the comparison. */
5068 if (TREE_CODE (expr) == SSA_NAME
5069 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5070 {
5071 if (operand_equal_p (expr, op1, 0))
5072 return ((code == NE_EXPR && integer_zerop (op2))
5073 || (code == EQ_EXPR && integer_nonzerop (op2)));
5074 s = SSA_NAME_DEF_STMT (expr);
5075 if (is_gimple_assign (s)
5076 && gimple_assign_rhs_code (s) == code
5077 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5078 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5079 return true;
5080 }
5081
5082 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5083 of name is a comparison, recurse. */
5084 if (TREE_CODE (op1) == SSA_NAME
5085 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5086 {
5087 s = SSA_NAME_DEF_STMT (op1);
5088 if (is_gimple_assign (s)
5089 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5090 {
5091 enum tree_code c = gimple_assign_rhs_code (s);
5092 if ((c == NE_EXPR && integer_zerop (op2))
5093 || (c == EQ_EXPR && integer_nonzerop (op2)))
5094 return same_bool_comparison_p (expr, c,
5095 gimple_assign_rhs1 (s),
5096 gimple_assign_rhs2 (s));
5097 if ((c == EQ_EXPR && integer_zerop (op2))
5098 || (c == NE_EXPR && integer_nonzerop (op2)))
5099 return same_bool_comparison_p (expr,
5100 invert_tree_comparison (c, false),
5101 gimple_assign_rhs1 (s),
5102 gimple_assign_rhs2 (s));
5103 }
5104 }
5105 return false;
5106}
5107
5108/* Check to see if two boolean expressions OP1 and OP2 are logically
5109 equivalent. */
5110
5111static bool
5112same_bool_result_p (const_tree op1, const_tree op2)
5113{
5114 /* Simple cases first. */
5115 if (operand_equal_p (op1, op2, 0))
5116 return true;
5117
5118 /* Check the cases where at least one of the operands is a comparison.
5119 These are a bit smarter than operand_equal_p in that they apply some
 5120 identities on SSA_NAMEs. */
98209db3 5121 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
5122 && same_bool_comparison_p (op1, TREE_CODE (op2),
5123 TREE_OPERAND (op2, 0),
5124 TREE_OPERAND (op2, 1)))
5125 return true;
98209db3 5126 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
5127 && same_bool_comparison_p (op2, TREE_CODE (op1),
5128 TREE_OPERAND (op1, 0),
5129 TREE_OPERAND (op1, 1)))
5130 return true;
5131
5132 /* Default case. */
5133 return false;
5134}
5135
5136/* Forward declarations for some mutually recursive functions. */
5137
5138static tree
5139and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5140 enum tree_code code2, tree op2a, tree op2b);
5141static tree
5142and_var_with_comparison (tree var, bool invert,
5143 enum tree_code code2, tree op2a, tree op2b);
5144static tree
355fe088 5145and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5146 enum tree_code code2, tree op2a, tree op2b);
5147static tree
5148or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5149 enum tree_code code2, tree op2a, tree op2b);
5150static tree
5151or_var_with_comparison (tree var, bool invert,
5152 enum tree_code code2, tree op2a, tree op2b);
5153static tree
355fe088 5154or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5155 enum tree_code code2, tree op2a, tree op2b);
5156
5157/* Helper function for and_comparisons_1: try to simplify the AND of the
5158 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5159 If INVERT is true, invert the value of the VAR before doing the AND.
 5160 Return NULL_TREE if we can't simplify this to a single expression. */
5161
5162static tree
5163and_var_with_comparison (tree var, bool invert,
5164 enum tree_code code2, tree op2a, tree op2b)
5165{
5166 tree t;
355fe088 5167 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5168
5169 /* We can only deal with variables whose definitions are assignments. */
5170 if (!is_gimple_assign (stmt))
5171 return NULL_TREE;
5172
5173 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5174 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5175 Then we only have to consider the simpler non-inverted cases. */
5176 if (invert)
5177 t = or_var_with_comparison_1 (stmt,
5178 invert_tree_comparison (code2, false),
5179 op2a, op2b);
5180 else
5181 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5182 return canonicalize_bool (t, invert);
5183}
5184
5185/* Try to simplify the AND of the ssa variable defined by the assignment
5186 STMT with the comparison specified by (OP2A CODE2 OP2B).
 5187 Return NULL_TREE if we can't simplify this to a single expression. */
5188
5189static tree
355fe088 5190and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5191 enum tree_code code2, tree op2a, tree op2b)
5192{
5193 tree var = gimple_assign_lhs (stmt);
5194 tree true_test_var = NULL_TREE;
5195 tree false_test_var = NULL_TREE;
5196 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5197
5198 /* Check for identities like (var AND (var == 0)) => false. */
5199 if (TREE_CODE (op2a) == SSA_NAME
5200 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5201 {
5202 if ((code2 == NE_EXPR && integer_zerop (op2b))
5203 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5204 {
5205 true_test_var = op2a;
5206 if (var == true_test_var)
5207 return var;
5208 }
5209 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5210 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5211 {
5212 false_test_var = op2a;
5213 if (var == false_test_var)
5214 return boolean_false_node;
5215 }
5216 }
5217
5218 /* If the definition is a comparison, recurse on it. */
5219 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5220 {
5221 tree t = and_comparisons_1 (innercode,
5222 gimple_assign_rhs1 (stmt),
5223 gimple_assign_rhs2 (stmt),
5224 code2,
5225 op2a,
5226 op2b);
5227 if (t)
5228 return t;
5229 }
5230
5231 /* If the definition is an AND or OR expression, we may be able to
5232 simplify by reassociating. */
eb9820c0
KT
5233 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5234 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
5235 {
5236 tree inner1 = gimple_assign_rhs1 (stmt);
5237 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 5238 gimple *s;
e89065a1
SL
5239 tree t;
5240 tree partial = NULL_TREE;
eb9820c0 5241 bool is_and = (innercode == BIT_AND_EXPR);
e89065a1
SL
5242
5243 /* Check for boolean identities that don't require recursive examination
5244 of inner1/inner2:
5245 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5246 inner1 AND (inner1 OR inner2) => inner1
5247 !inner1 AND (inner1 AND inner2) => false
5248 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5249 Likewise for similar cases involving inner2. */
5250 if (inner1 == true_test_var)
5251 return (is_and ? var : inner1);
5252 else if (inner2 == true_test_var)
5253 return (is_and ? var : inner2);
5254 else if (inner1 == false_test_var)
5255 return (is_and
5256 ? boolean_false_node
5257 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5258 else if (inner2 == false_test_var)
5259 return (is_and
5260 ? boolean_false_node
5261 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5262
5263 /* Next, redistribute/reassociate the AND across the inner tests.
5264 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5265 if (TREE_CODE (inner1) == SSA_NAME
5266 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5267 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5268 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5269 gimple_assign_rhs1 (s),
5270 gimple_assign_rhs2 (s),
5271 code2, op2a, op2b)))
5272 {
5273 /* Handle the AND case, where we are reassociating:
5274 (inner1 AND inner2) AND (op2a code2 op2b)
5275 => (t AND inner2)
5276 If the partial result t is a constant, we win. Otherwise
5277 continue on to try reassociating with the other inner test. */
5278 if (is_and)
5279 {
5280 if (integer_onep (t))
5281 return inner2;
5282 else if (integer_zerop (t))
5283 return boolean_false_node;
5284 }
5285
5286 /* Handle the OR case, where we are redistributing:
5287 (inner1 OR inner2) AND (op2a code2 op2b)
5288 => (t OR (inner2 AND (op2a code2 op2b))) */
8236c8eb
JJ
5289 else if (integer_onep (t))
5290 return boolean_true_node;
5291
5292 /* Save partial result for later. */
5293 partial = t;
e89065a1
SL
5294 }
5295
5296 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5297 if (TREE_CODE (inner2) == SSA_NAME
5298 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5299 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5300 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5301 gimple_assign_rhs1 (s),
5302 gimple_assign_rhs2 (s),
5303 code2, op2a, op2b)))
5304 {
5305 /* Handle the AND case, where we are reassociating:
5306 (inner1 AND inner2) AND (op2a code2 op2b)
5307 => (inner1 AND t) */
5308 if (is_and)
5309 {
5310 if (integer_onep (t))
5311 return inner1;
5312 else if (integer_zerop (t))
5313 return boolean_false_node;
8236c8eb
JJ
5314 /* If both are the same, we can apply the identity
5315 (x AND x) == x. */
5316 else if (partial && same_bool_result_p (t, partial))
5317 return t;
e89065a1
SL
5318 }
5319
 5320 /* Handle the OR case, where we are redistributing:
5321 (inner1 OR inner2) AND (op2a code2 op2b)
5322 => (t OR (inner1 AND (op2a code2 op2b)))
5323 => (t OR partial) */
5324 else
5325 {
5326 if (integer_onep (t))
5327 return boolean_true_node;
5328 else if (partial)
5329 {
5330 /* We already got a simplification for the other
5331 operand to the redistributed OR expression. The
5332 interesting case is when at least one is false.
5333 Or, if both are the same, we can apply the identity
5334 (x OR x) == x. */
5335 if (integer_zerop (partial))
5336 return t;
5337 else if (integer_zerop (t))
5338 return partial;
5339 else if (same_bool_result_p (t, partial))
5340 return t;
5341 }
5342 }
5343 }
5344 }
5345 return NULL_TREE;
5346}
5347
5348/* Try to simplify the AND of two comparisons defined by
5349 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5350 If this can be done without constructing an intermediate value,
5351 return the resulting tree; otherwise NULL_TREE is returned.
5352 This function is deliberately asymmetric as it recurses on SSA_DEFs
5353 in the first comparison but not the second. */
5354
5355static tree
5356and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5357 enum tree_code code2, tree op2a, tree op2b)
5358{
ae22ac3c 5359 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 5360
e89065a1
SL
5361 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5362 if (operand_equal_p (op1a, op2a, 0)
5363 && operand_equal_p (op1b, op2b, 0))
5364 {
eb9820c0 5365 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5366 tree t = combine_comparisons (UNKNOWN_LOCATION,
5367 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 5368 truth_type, op1a, op1b);
e89065a1
SL
5369 if (t)
5370 return t;
5371 }
5372
5373 /* Likewise the swapped case of the above. */
5374 if (operand_equal_p (op1a, op2b, 0)
5375 && operand_equal_p (op1b, op2a, 0))
5376 {
eb9820c0 5377 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5378 tree t = combine_comparisons (UNKNOWN_LOCATION,
5379 TRUTH_ANDIF_EXPR, code1,
5380 swap_tree_comparison (code2),
31ed6226 5381 truth_type, op1a, op1b);
e89065a1
SL
5382 if (t)
5383 return t;
5384 }
5385
5386 /* If both comparisons are of the same value against constants, we might
5387 be able to merge them. */
5388 if (operand_equal_p (op1a, op2a, 0)
5389 && TREE_CODE (op1b) == INTEGER_CST
5390 && TREE_CODE (op2b) == INTEGER_CST)
5391 {
5392 int cmp = tree_int_cst_compare (op1b, op2b);
5393
5394 /* If we have (op1a == op1b), we should either be able to
5395 return that or FALSE, depending on whether the constant op1b
5396 also satisfies the other comparison against op2b. */
5397 if (code1 == EQ_EXPR)
5398 {
5399 bool done = true;
5400 bool val;
5401 switch (code2)
5402 {
5403 case EQ_EXPR: val = (cmp == 0); break;
5404 case NE_EXPR: val = (cmp != 0); break;
5405 case LT_EXPR: val = (cmp < 0); break;
5406 case GT_EXPR: val = (cmp > 0); break;
5407 case LE_EXPR: val = (cmp <= 0); break;
5408 case GE_EXPR: val = (cmp >= 0); break;
5409 default: done = false;
5410 }
5411 if (done)
5412 {
5413 if (val)
5414 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5415 else
5416 return boolean_false_node;
5417 }
5418 }
5419 /* Likewise if the second comparison is an == comparison. */
5420 else if (code2 == EQ_EXPR)
5421 {
5422 bool done = true;
5423 bool val;
5424 switch (code1)
5425 {
5426 case EQ_EXPR: val = (cmp == 0); break;
5427 case NE_EXPR: val = (cmp != 0); break;
5428 case LT_EXPR: val = (cmp > 0); break;
5429 case GT_EXPR: val = (cmp < 0); break;
5430 case LE_EXPR: val = (cmp >= 0); break;
5431 case GE_EXPR: val = (cmp <= 0); break;
5432 default: done = false;
5433 }
5434 if (done)
5435 {
5436 if (val)
5437 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5438 else
5439 return boolean_false_node;
5440 }
5441 }
5442
5443 /* Same business with inequality tests. */
5444 else if (code1 == NE_EXPR)
5445 {
5446 bool val;
5447 switch (code2)
5448 {
5449 case EQ_EXPR: val = (cmp != 0); break;
5450 case NE_EXPR: val = (cmp == 0); break;
5451 case LT_EXPR: val = (cmp >= 0); break;
5452 case GT_EXPR: val = (cmp <= 0); break;
5453 case LE_EXPR: val = (cmp > 0); break;
5454 case GE_EXPR: val = (cmp < 0); break;
5455 default:
5456 val = false;
5457 }
5458 if (val)
5459 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5460 }
5461 else if (code2 == NE_EXPR)
5462 {
5463 bool val;
5464 switch (code1)
5465 {
5466 case EQ_EXPR: val = (cmp == 0); break;
5467 case NE_EXPR: val = (cmp != 0); break;
5468 case LT_EXPR: val = (cmp <= 0); break;
5469 case GT_EXPR: val = (cmp >= 0); break;
5470 case LE_EXPR: val = (cmp < 0); break;
5471 case GE_EXPR: val = (cmp > 0); break;
5472 default:
5473 val = false;
5474 }
5475 if (val)
5476 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5477 }
5478
 5479 /* Choose the more restrictive of two < or <= comparisons. */
5480 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5481 && (code2 == LT_EXPR || code2 == LE_EXPR))
5482 {
5483 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5484 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5485 else
5486 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5487 }
5488
 5489 /* Likewise choose the more restrictive of two > or >= comparisons. */
5490 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5491 && (code2 == GT_EXPR || code2 == GE_EXPR))
5492 {
5493 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5494 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5495 else
5496 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5497 }
5498
5499 /* Check for singleton ranges. */
5500 else if (cmp == 0
5501 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5502 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5503 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5504
5505 /* Check for disjoint ranges. */
5506 else if (cmp <= 0
5507 && (code1 == LT_EXPR || code1 == LE_EXPR)
5508 && (code2 == GT_EXPR || code2 == GE_EXPR))
5509 return boolean_false_node;
5510 else if (cmp >= 0
5511 && (code1 == GT_EXPR || code1 == GE_EXPR)
5512 && (code2 == LT_EXPR || code2 == LE_EXPR))
5513 return boolean_false_node;
5514 }
5515
5516 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5517 NAME's definition is a truth value. See if there are any simplifications
5518 that can be done against the NAME's definition. */
5519 if (TREE_CODE (op1a) == SSA_NAME
5520 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5521 && (integer_zerop (op1b) || integer_onep (op1b)))
5522 {
5523 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5524 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 5525 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
5526 switch (gimple_code (stmt))
5527 {
5528 case GIMPLE_ASSIGN:
5529 /* Try to simplify by copy-propagating the definition. */
5530 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5531
5532 case GIMPLE_PHI:
5533 /* If every argument to the PHI produces the same result when
5534 ANDed with the second comparison, we win.
5535 Do not do this unless the type is bool since we need a bool
5536 result here anyway. */
5537 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5538 {
5539 tree result = NULL_TREE;
5540 unsigned i;
5541 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5542 {
5543 tree arg = gimple_phi_arg_def (stmt, i);
5544
5545 /* If this PHI has itself as an argument, ignore it.
5546 If all the other args produce the same result,
5547 we're still OK. */
5548 if (arg == gimple_phi_result (stmt))
5549 continue;
5550 else if (TREE_CODE (arg) == INTEGER_CST)
5551 {
5552 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5553 {
5554 if (!result)
5555 result = boolean_false_node;
5556 else if (!integer_zerop (result))
5557 return NULL_TREE;
5558 }
5559 else if (!result)
5560 result = fold_build2 (code2, boolean_type_node,
5561 op2a, op2b);
5562 else if (!same_bool_comparison_p (result,
5563 code2, op2a, op2b))
5564 return NULL_TREE;
5565 }
0e8b84ec
JJ
5566 else if (TREE_CODE (arg) == SSA_NAME
5567 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 5568 {
6c66f733 5569 tree temp;
355fe088 5570 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
5571 /* In simple cases we can look through PHI nodes,
5572 but we have to be careful with loops.
5573 See PR49073. */
5574 if (! dom_info_available_p (CDI_DOMINATORS)
5575 || gimple_bb (def_stmt) == gimple_bb (stmt)
5576 || dominated_by_p (CDI_DOMINATORS,
5577 gimple_bb (def_stmt),
5578 gimple_bb (stmt)))
5579 return NULL_TREE;
5580 temp = and_var_with_comparison (arg, invert, code2,
5581 op2a, op2b);
e89065a1
SL
5582 if (!temp)
5583 return NULL_TREE;
5584 else if (!result)
5585 result = temp;
5586 else if (!same_bool_result_p (result, temp))
5587 return NULL_TREE;
5588 }
5589 else
5590 return NULL_TREE;
5591 }
5592 return result;
5593 }
5594
5595 default:
5596 break;
5597 }
5598 }
5599 return NULL_TREE;
5600}
5601
5602/* Try to simplify the AND of two comparisons, specified by
 5603 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
 5604 If this can be simplified to a single expression (without
 5605 introducing more SSA variables to hold intermediate values),
5606 return the resulting tree. Otherwise return NULL_TREE.
5607 If the result expression is non-null, it has boolean type. */
5608
5609tree
5610maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5611 enum tree_code code2, tree op2a, tree op2b)
5612{
5613 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5614 if (t)
5615 return t;
5616 else
5617 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5618}
5619
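/* Illustrative sketch, not part of the original file: with a common operand
   and integer constants the AND of two range tests collapses to the more
   restrictive one, so "a < 10 && a < 20" folds to "a < 10".
   example_fold_range_check is a hypothetical wrapper and assumes A has an
   integral type.  */

static tree
example_fold_range_check (tree a)
{
  tree ten = build_int_cst (TREE_TYPE (a), 10);
  tree twenty = build_int_cst (TREE_TYPE (a), 20);
  return maybe_fold_and_comparisons (LT_EXPR, a, ten, LT_EXPR, a, twenty);
}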
5620/* Helper function for or_comparisons_1: try to simplify the OR of the
5621 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5622 If INVERT is true, invert the value of VAR before doing the OR.
 5623 Return NULL_TREE if we can't simplify this to a single expression. */
5624
5625static tree
5626or_var_with_comparison (tree var, bool invert,
5627 enum tree_code code2, tree op2a, tree op2b)
5628{
5629 tree t;
355fe088 5630 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5631
5632 /* We can only deal with variables whose definitions are assignments. */
5633 if (!is_gimple_assign (stmt))
5634 return NULL_TREE;
5635
5636 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5637 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5638 Then we only have to consider the simpler non-inverted cases. */
5639 if (invert)
5640 t = and_var_with_comparison_1 (stmt,
5641 invert_tree_comparison (code2, false),
5642 op2a, op2b);
5643 else
5644 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5645 return canonicalize_bool (t, invert);
5646}
5647
5648/* Try to simplify the OR of the ssa variable defined by the assignment
5649 STMT with the comparison specified by (OP2A CODE2 OP2B).
 5650 Return NULL_TREE if we can't simplify this to a single expression. */
5651
5652static tree
355fe088 5653or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5654 enum tree_code code2, tree op2a, tree op2b)
5655{
5656 tree var = gimple_assign_lhs (stmt);
5657 tree true_test_var = NULL_TREE;
5658 tree false_test_var = NULL_TREE;
5659 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5660
 5661 /* Check for identities like (var OR (var != 0)) => true. */
5662 if (TREE_CODE (op2a) == SSA_NAME
5663 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5664 {
5665 if ((code2 == NE_EXPR && integer_zerop (op2b))
5666 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5667 {
5668 true_test_var = op2a;
5669 if (var == true_test_var)
5670 return var;
5671 }
5672 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5673 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5674 {
5675 false_test_var = op2a;
5676 if (var == false_test_var)
5677 return boolean_true_node;
5678 }
5679 }
5680
5681 /* If the definition is a comparison, recurse on it. */
5682 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5683 {
5684 tree t = or_comparisons_1 (innercode,
5685 gimple_assign_rhs1 (stmt),
5686 gimple_assign_rhs2 (stmt),
5687 code2,
5688 op2a,
5689 op2b);
5690 if (t)
5691 return t;
5692 }
5693
5694 /* If the definition is an AND or OR expression, we may be able to
5695 simplify by reassociating. */
eb9820c0
KT
5696 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5697 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
5698 {
5699 tree inner1 = gimple_assign_rhs1 (stmt);
5700 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 5701 gimple *s;
e89065a1
SL
5702 tree t;
5703 tree partial = NULL_TREE;
eb9820c0 5704 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
5705
5706 /* Check for boolean identities that don't require recursive examination
5707 of inner1/inner2:
5708 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5709 inner1 OR (inner1 AND inner2) => inner1
5710 !inner1 OR (inner1 OR inner2) => true
5711 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5712 */
5713 if (inner1 == true_test_var)
5714 return (is_or ? var : inner1);
5715 else if (inner2 == true_test_var)
5716 return (is_or ? var : inner2);
5717 else if (inner1 == false_test_var)
5718 return (is_or
5719 ? boolean_true_node
5720 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5721 else if (inner2 == false_test_var)
5722 return (is_or
5723 ? boolean_true_node
5724 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5725
5726 /* Next, redistribute/reassociate the OR across the inner tests.
5727 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5728 if (TREE_CODE (inner1) == SSA_NAME
5729 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5730 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5731 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5732 gimple_assign_rhs1 (s),
5733 gimple_assign_rhs2 (s),
5734 code2, op2a, op2b)))
5735 {
5736 /* Handle the OR case, where we are reassociating:
5737 (inner1 OR inner2) OR (op2a code2 op2b)
5738 => (t OR inner2)
5739 If the partial result t is a constant, we win. Otherwise
5740 continue on to try reassociating with the other inner test. */
8236c8eb 5741 if (is_or)
e89065a1
SL
5742 {
5743 if (integer_onep (t))
5744 return boolean_true_node;
5745 else if (integer_zerop (t))
5746 return inner2;
5747 }
5748
5749 /* Handle the AND case, where we are redistributing:
5750 (inner1 AND inner2) OR (op2a code2 op2b)
5751 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
5752 else if (integer_zerop (t))
5753 return boolean_false_node;
5754
5755 /* Save partial result for later. */
5756 partial = t;
e89065a1
SL
5757 }
5758
5759 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5760 if (TREE_CODE (inner2) == SSA_NAME
5761 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5762 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5763 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5764 gimple_assign_rhs1 (s),
5765 gimple_assign_rhs2 (s),
5766 code2, op2a, op2b)))
5767 {
5768 /* Handle the OR case, where we are reassociating:
5769 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
5770 => (inner1 OR t)
5771 => (t OR partial) */
5772 if (is_or)
e89065a1
SL
5773 {
5774 if (integer_zerop (t))
5775 return inner1;
5776 else if (integer_onep (t))
5777 return boolean_true_node;
8236c8eb
JJ
5778 /* If both are the same, we can apply the identity
5779 (x OR x) == x. */
5780 else if (partial && same_bool_result_p (t, partial))
5781 return t;
e89065a1
SL
5782 }
5783
5784 /* Handle the AND case, where we are redistributing:
5785 (inner1 AND inner2) OR (op2a code2 op2b)
5786 => (t AND (inner1 OR (op2a code2 op2b)))
5787 => (t AND partial) */
5788 else
5789 {
5790 if (integer_zerop (t))
5791 return boolean_false_node;
5792 else if (partial)
5793 {
5794 /* We already got a simplification for the other
5795 operand to the redistributed AND expression. The
5796 interesting case is when at least one is true.
5797 Or, if both are the same, we can apply the identity
8236c8eb 5798 (x AND x) == x. */
e89065a1
SL
5799 if (integer_onep (partial))
5800 return t;
5801 else if (integer_onep (t))
5802 return partial;
5803 else if (same_bool_result_p (t, partial))
8236c8eb 5804 return t;
e89065a1
SL
5805 }
5806 }
5807 }
5808 }
5809 return NULL_TREE;
5810}
5811
5812/* Try to simplify the OR of two comparisons defined by
5813 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5814 If this can be done without constructing an intermediate value,
5815 return the resulting tree; otherwise NULL_TREE is returned.
5816 This function is deliberately asymmetric as it recurses on SSA_DEFs
5817 in the first comparison but not the second. */
5818
5819static tree
5820or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5821 enum tree_code code2, tree op2a, tree op2b)
5822{
ae22ac3c 5823 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 5824
e89065a1
SL
5825 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5826 if (operand_equal_p (op1a, op2a, 0)
5827 && operand_equal_p (op1b, op2b, 0))
5828 {
eb9820c0 5829 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5830 tree t = combine_comparisons (UNKNOWN_LOCATION,
5831 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 5832 truth_type, op1a, op1b);
e89065a1
SL
5833 if (t)
5834 return t;
5835 }
5836
5837 /* Likewise the swapped case of the above. */
5838 if (operand_equal_p (op1a, op2b, 0)
5839 && operand_equal_p (op1b, op2a, 0))
5840 {
eb9820c0 5841 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5842 tree t = combine_comparisons (UNKNOWN_LOCATION,
5843 TRUTH_ORIF_EXPR, code1,
5844 swap_tree_comparison (code2),
31ed6226 5845 truth_type, op1a, op1b);
e89065a1
SL
5846 if (t)
5847 return t;
5848 }
5849
5850 /* If both comparisons are of the same value against constants, we might
5851 be able to merge them. */
5852 if (operand_equal_p (op1a, op2a, 0)
5853 && TREE_CODE (op1b) == INTEGER_CST
5854 && TREE_CODE (op2b) == INTEGER_CST)
5855 {
5856 int cmp = tree_int_cst_compare (op1b, op2b);
5857
5858 /* If we have (op1a != op1b), we should either be able to
5859 return that or TRUE, depending on whether the constant op1b
5860 also satisfies the other comparison against op2b. */
5861 if (code1 == NE_EXPR)
5862 {
5863 bool done = true;
5864 bool val;
5865 switch (code2)
5866 {
5867 case EQ_EXPR: val = (cmp == 0); break;
5868 case NE_EXPR: val = (cmp != 0); break;
5869 case LT_EXPR: val = (cmp < 0); break;
5870 case GT_EXPR: val = (cmp > 0); break;
5871 case LE_EXPR: val = (cmp <= 0); break;
5872 case GE_EXPR: val = (cmp >= 0); break;
5873 default: done = false;
5874 }
5875 if (done)
5876 {
5877 if (val)
5878 return boolean_true_node;
5879 else
5880 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5881 }
5882 }
5883 /* Likewise if the second comparison is a != comparison. */
5884 else if (code2 == NE_EXPR)
5885 {
5886 bool done = true;
5887 bool val;
5888 switch (code1)
5889 {
5890 case EQ_EXPR: val = (cmp == 0); break;
5891 case NE_EXPR: val = (cmp != 0); break;
5892 case LT_EXPR: val = (cmp > 0); break;
5893 case GT_EXPR: val = (cmp < 0); break;
5894 case LE_EXPR: val = (cmp >= 0); break;
5895 case GE_EXPR: val = (cmp <= 0); break;
5896 default: done = false;
5897 }
5898 if (done)
5899 {
5900 if (val)
5901 return boolean_true_node;
5902 else
5903 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5904 }
5905 }
5906
5907 /* See if an equality test is redundant with the other comparison. */
5908 else if (code1 == EQ_EXPR)
5909 {
5910 bool val;
5911 switch (code2)
5912 {
5913 case EQ_EXPR: val = (cmp == 0); break;
5914 case NE_EXPR: val = (cmp != 0); break;
5915 case LT_EXPR: val = (cmp < 0); break;
5916 case GT_EXPR: val = (cmp > 0); break;
5917 case LE_EXPR: val = (cmp <= 0); break;
5918 case GE_EXPR: val = (cmp >= 0); break;
5919 default:
5920 val = false;
5921 }
5922 if (val)
5923 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5924 }
5925 else if (code2 == EQ_EXPR)
5926 {
5927 bool val;
5928 switch (code1)
5929 {
5930 case EQ_EXPR: val = (cmp == 0); break;
5931 case NE_EXPR: val = (cmp != 0); break;
5932 case LT_EXPR: val = (cmp > 0); break;
5933 case GT_EXPR: val = (cmp < 0); break;
5934 case LE_EXPR: val = (cmp >= 0); break;
5935 case GE_EXPR: val = (cmp <= 0); break;
5936 default:
5937 val = false;
5938 }
5939 if (val)
5940 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5941 }
5942
 5943 /* Choose the less restrictive of two < or <= comparisons. */
5944 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5945 && (code2 == LT_EXPR || code2 == LE_EXPR))
5946 {
5947 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5948 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5949 else
5950 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5951 }
5952
 5953 /* Likewise choose the less restrictive of two > or >= comparisons. */
5954 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5955 && (code2 == GT_EXPR || code2 == GE_EXPR))
5956 {
5957 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5958 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5959 else
5960 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5961 }
5962
5963 /* Check for singleton ranges. */
5964 else if (cmp == 0
5965 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5966 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5967 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5968
5969 /* Check for less/greater pairs that don't restrict the range at all. */
5970 else if (cmp >= 0
5971 && (code1 == LT_EXPR || code1 == LE_EXPR)
5972 && (code2 == GT_EXPR || code2 == GE_EXPR))
5973 return boolean_true_node;
5974 else if (cmp <= 0
5975 && (code1 == GT_EXPR || code1 == GE_EXPR)
5976 && (code2 == LT_EXPR || code2 == LE_EXPR))
5977 return boolean_true_node;
5978 }
5979
5980 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5981 NAME's definition is a truth value. See if there are any simplifications
5982 that can be done against the NAME's definition. */
5983 if (TREE_CODE (op1a) == SSA_NAME
5984 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5985 && (integer_zerop (op1b) || integer_onep (op1b)))
5986 {
5987 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5988 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 5989 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
5990 switch (gimple_code (stmt))
5991 {
5992 case GIMPLE_ASSIGN:
5993 /* Try to simplify by copy-propagating the definition. */
5994 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5995
5996 case GIMPLE_PHI:
5997 /* If every argument to the PHI produces the same result when
5998 ORed with the second comparison, we win.
5999 Do not do this unless the type is bool since we need a bool
6000 result here anyway. */
6001 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6002 {
6003 tree result = NULL_TREE;
6004 unsigned i;
6005 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6006 {
6007 tree arg = gimple_phi_arg_def (stmt, i);
6008
6009 /* If this PHI has itself as an argument, ignore it.
6010 If all the other args produce the same result,
6011 we're still OK. */
6012 if (arg == gimple_phi_result (stmt))
6013 continue;
6014 else if (TREE_CODE (arg) == INTEGER_CST)
6015 {
6016 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6017 {
6018 if (!result)
6019 result = boolean_true_node;
6020 else if (!integer_onep (result))
6021 return NULL_TREE;
6022 }
6023 else if (!result)
6024 result = fold_build2 (code2, boolean_type_node,
6025 op2a, op2b);
6026 else if (!same_bool_comparison_p (result,
6027 code2, op2a, op2b))
6028 return NULL_TREE;
6029 }
0e8b84ec
JJ
6030 else if (TREE_CODE (arg) == SSA_NAME
6031 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 6032 {
6c66f733 6033 tree temp;
355fe088 6034 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
6035 /* In simple cases we can look through PHI nodes,
6036 but we have to be careful with loops.
6037 See PR49073. */
6038 if (! dom_info_available_p (CDI_DOMINATORS)
6039 || gimple_bb (def_stmt) == gimple_bb (stmt)
6040 || dominated_by_p (CDI_DOMINATORS,
6041 gimple_bb (def_stmt),
6042 gimple_bb (stmt)))
6043 return NULL_TREE;
6044 temp = or_var_with_comparison (arg, invert, code2,
6045 op2a, op2b);
e89065a1
SL
6046 if (!temp)
6047 return NULL_TREE;
6048 else if (!result)
6049 result = temp;
6050 else if (!same_bool_result_p (result, temp))
6051 return NULL_TREE;
6052 }
6053 else
6054 return NULL_TREE;
6055 }
6056 return result;
6057 }
6058
6059 default:
6060 break;
6061 }
6062 }
6063 return NULL_TREE;
6064}
6065
6066/* Try to simplify the OR of two comparisons, specified by
 6067 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
 6068 If this can be simplified to a single expression (without
 6069 introducing more SSA variables to hold intermediate values),
6070 return the resulting tree. Otherwise return NULL_TREE.
6071 If the result expression is non-null, it has boolean type. */
6072
6073tree
6074maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6075 enum tree_code code2, tree op2a, tree op2b)
6076{
6077 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6078 if (t)
6079 return t;
6080 else
6081 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6082}
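/* Illustrative sketch, not part of the original file: when two ORed range
   tests cover every value, the "less/greater pairs that don't restrict the
   range" case above folds them to true, so "a <= 10 || a >= 5" becomes
   boolean_true_node.  example_fold_or_check is a hypothetical wrapper and
   assumes A has an integral type.  */

static tree
example_fold_or_check (tree a)
{
  tree five = build_int_cst (TREE_TYPE (a), 5);
  tree ten = build_int_cst (TREE_TYPE (a), 10);
  return maybe_fold_or_comparisons (LE_EXPR, a, ten, GE_EXPR, a, five);
}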
cfef45c8
RG
6083
6084
6085/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6086
 6087 Either NULL_TREE, a simplified but non-constant expression, or a
 6088 constant is returned.
6089
6090 ??? This should go into a gimple-fold-inline.h file to be eventually
6091 privatized with the single valueize function used in the various TUs
6092 to avoid the indirect function call overhead. */
6093
6094tree
355fe088 6095gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
d2a85801 6096 tree (*gvalueize) (tree))
cfef45c8 6097{
5d75ad95 6098 gimple_match_op res_op;
45cc9f96
RB
6099 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6100 edges if there are intermediate VARYING defs. For this reason
6101 do not follow SSA edges here even though SCCVN can technically
6102 just deal fine with that. */
5d75ad95 6103 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
45cc9f96 6104 {
34050b6b 6105 tree res = NULL_TREE;
5d75ad95
RS
6106 if (gimple_simplified_result_is_gimple_val (&res_op))
6107 res = res_op.ops[0];
34050b6b 6108 else if (mprts_hook)
5d75ad95 6109 res = mprts_hook (&res_op);
34050b6b 6110 if (res)
45cc9f96 6111 {
34050b6b
RB
6112 if (dump_file && dump_flags & TDF_DETAILS)
6113 {
6114 fprintf (dump_file, "Match-and-simplified ");
6115 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6116 fprintf (dump_file, " to ");
ef6cb4c7 6117 print_generic_expr (dump_file, res);
34050b6b
RB
6118 fprintf (dump_file, "\n");
6119 }
6120 return res;
45cc9f96 6121 }
45cc9f96
RB
6122 }
6123
cfef45c8
RG
6124 location_t loc = gimple_location (stmt);
6125 switch (gimple_code (stmt))
6126 {
6127 case GIMPLE_ASSIGN:
6128 {
6129 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6130
6131 switch (get_gimple_rhs_class (subcode))
6132 {
6133 case GIMPLE_SINGLE_RHS:
6134 {
6135 tree rhs = gimple_assign_rhs1 (stmt);
6136 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6137
6138 if (TREE_CODE (rhs) == SSA_NAME)
6139 {
6140 /* If the RHS is an SSA_NAME, return its known constant value,
6141 if any. */
6142 return (*valueize) (rhs);
6143 }
6144 /* Handle propagating invariant addresses into address
6145 operations. */
6146 else if (TREE_CODE (rhs) == ADDR_EXPR
6147 && !is_gimple_min_invariant (rhs))
6148 {
a90c8804 6149 poly_int64 offset = 0;
cfef45c8
RG
6150 tree base;
6151 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6152 &offset,
6153 valueize);
6154 if (base
6155 && (CONSTANT_CLASS_P (base)
6156 || decl_address_invariant_p (base)))
6157 return build_invariant_address (TREE_TYPE (rhs),
6158 base, offset);
6159 }
6160 else if (TREE_CODE (rhs) == CONSTRUCTOR
6161 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
928686b1
RS
6162 && known_eq (CONSTRUCTOR_NELTS (rhs),
6163 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
cfef45c8 6164 {
794e3180
RS
6165 unsigned i, nelts;
6166 tree val;
cfef45c8 6167
928686b1 6168 nelts = CONSTRUCTOR_NELTS (rhs);
5ebaa477 6169 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
cfef45c8
RG
6170 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6171 {
6172 val = (*valueize) (val);
6173 if (TREE_CODE (val) == INTEGER_CST
6174 || TREE_CODE (val) == REAL_CST
6175 || TREE_CODE (val) == FIXED_CST)
794e3180 6176 vec.quick_push (val);
cfef45c8
RG
6177 else
6178 return NULL_TREE;
6179 }
6180
5ebaa477 6181 return vec.build ();
cfef45c8 6182 }
bdf37f7a
JH
6183 if (subcode == OBJ_TYPE_REF)
6184 {
6185 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6186 /* If callee is constant, we can fold away the wrapper. */
6187 if (is_gimple_min_invariant (val))
6188 return val;
6189 }
cfef45c8
RG
6190
6191 if (kind == tcc_reference)
6192 {
6193 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6194 || TREE_CODE (rhs) == REALPART_EXPR
6195 || TREE_CODE (rhs) == IMAGPART_EXPR)
6196 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6197 {
6198 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6199 return fold_unary_loc (EXPR_LOCATION (rhs),
6200 TREE_CODE (rhs),
6201 TREE_TYPE (rhs), val);
6202 }
6203 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6204 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6205 {
6206 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6207 return fold_ternary_loc (EXPR_LOCATION (rhs),
6208 TREE_CODE (rhs),
6209 TREE_TYPE (rhs), val,
6210 TREE_OPERAND (rhs, 1),
6211 TREE_OPERAND (rhs, 2));
6212 }
6213 else if (TREE_CODE (rhs) == MEM_REF
6214 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6215 {
6216 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6217 if (TREE_CODE (val) == ADDR_EXPR
6218 && is_gimple_min_invariant (val))
6219 {
6220 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6221 unshare_expr (val),
6222 TREE_OPERAND (rhs, 1));
6223 if (tem)
6224 rhs = tem;
6225 }
6226 }
6227 return fold_const_aggregate_ref_1 (rhs, valueize);
6228 }
6229 else if (kind == tcc_declaration)
6230 return get_symbol_constant_value (rhs);
6231 return rhs;
6232 }
6233
6234 case GIMPLE_UNARY_RHS:
f3582e54 6235 return NULL_TREE;
cfef45c8
RG
6236
6237 case GIMPLE_BINARY_RHS:
4b1b9e64
RB
6238 /* Translate &x + CST into an invariant form suitable for
6239 further propagation. */
6240 if (subcode == POINTER_PLUS_EXPR)
6241 {
4b1b9e64
RB
6242 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6243 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4b1b9e64
RB
6244 if (TREE_CODE (op0) == ADDR_EXPR
6245 && TREE_CODE (op1) == INTEGER_CST)
6246 {
6247 tree off = fold_convert (ptr_type_node, op1);
6248 return build_fold_addr_expr_loc
6249 (loc,
6250 fold_build2 (MEM_REF,
6251 TREE_TYPE (TREE_TYPE (op0)),
6252 unshare_expr (op0), off));
6253 }
6254 }
59c20dc7
RB
6255 /* Canonicalize bool != 0 and bool == 0 appearing after
6256 valueization. While gimple_simplify handles this
6257 it can get confused by the ~X == 1 -> X == 0 transform
 6258 which we can't reduce to an SSA name or a constant
6259 (and we have no way to tell gimple_simplify to not
6260 consider those transforms in the first place). */
6261 else if (subcode == EQ_EXPR
6262 || subcode == NE_EXPR)
6263 {
6264 tree lhs = gimple_assign_lhs (stmt);
6265 tree op0 = gimple_assign_rhs1 (stmt);
6266 if (useless_type_conversion_p (TREE_TYPE (lhs),
6267 TREE_TYPE (op0)))
6268 {
6269 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6270 op0 = (*valueize) (op0);
8861704d
RB
6271 if (TREE_CODE (op0) == INTEGER_CST)
6272 std::swap (op0, op1);
6273 if (TREE_CODE (op1) == INTEGER_CST
6274 && ((subcode == NE_EXPR && integer_zerop (op1))
6275 || (subcode == EQ_EXPR && integer_onep (op1))))
6276 return op0;
59c20dc7
RB
6277 }
6278 }
4b1b9e64 6279 return NULL_TREE;
cfef45c8
RG
6280
6281 case GIMPLE_TERNARY_RHS:
6282 {
6283 /* Handle ternary operators that can appear in GIMPLE form. */
6284 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6285 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6286 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
cfef45c8
RG
6287 return fold_ternary_loc (loc, subcode,
6288 gimple_expr_type (stmt), op0, op1, op2);
6289 }
6290
6291 default:
6292 gcc_unreachable ();
6293 }
6294 }
6295
6296 case GIMPLE_CALL:
6297 {
25583c4f 6298 tree fn;
538dd0b7 6299 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
6300
6301 if (gimple_call_internal_p (stmt))
31e071ae
MP
6302 {
6303 enum tree_code subcode = ERROR_MARK;
6304 switch (gimple_call_internal_fn (stmt))
6305 {
6306 case IFN_UBSAN_CHECK_ADD:
6307 subcode = PLUS_EXPR;
6308 break;
6309 case IFN_UBSAN_CHECK_SUB:
6310 subcode = MINUS_EXPR;
6311 break;
6312 case IFN_UBSAN_CHECK_MUL:
6313 subcode = MULT_EXPR;
6314 break;
68fa96d6
ML
6315 case IFN_BUILTIN_EXPECT:
6316 {
6317 tree arg0 = gimple_call_arg (stmt, 0);
6318 tree op0 = (*valueize) (arg0);
6319 if (TREE_CODE (op0) == INTEGER_CST)
6320 return op0;
6321 return NULL_TREE;
6322 }
31e071ae
MP
6323 default:
6324 return NULL_TREE;
6325 }
368b454d
JJ
6326 tree arg0 = gimple_call_arg (stmt, 0);
6327 tree arg1 = gimple_call_arg (stmt, 1);
6328 tree op0 = (*valueize) (arg0);
6329 tree op1 = (*valueize) (arg1);
31e071ae
MP
6330
6331 if (TREE_CODE (op0) != INTEGER_CST
6332 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
6333 {
6334 switch (subcode)
6335 {
6336 case MULT_EXPR:
6337 /* x * 0 = 0 * x = 0 without overflow. */
6338 if (integer_zerop (op0) || integer_zerop (op1))
6339 return build_zero_cst (TREE_TYPE (arg0));
6340 break;
6341 case MINUS_EXPR:
6342 /* y - y = 0 without overflow. */
6343 if (operand_equal_p (op0, op1, 0))
6344 return build_zero_cst (TREE_TYPE (arg0));
6345 break;
6346 default:
6347 break;
6348 }
6349 }
6350 tree res
6351 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
6352 if (res
6353 && TREE_CODE (res) == INTEGER_CST
6354 && !TREE_OVERFLOW (res))
6355 return res;
6356 return NULL_TREE;
6357 }
25583c4f
RS
6358
6359 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8
RG
6360 if (TREE_CODE (fn) == ADDR_EXPR
6361 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5c944c6c
RB
6362 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6363 && gimple_builtin_call_types_compatible_p (stmt,
6364 TREE_OPERAND (fn, 0)))
cfef45c8
RG
6365 {
6366 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 6367 tree retval;
cfef45c8
RG
6368 unsigned i;
6369 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6370 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 6371 retval = fold_builtin_call_array (loc,
538dd0b7 6372 gimple_call_return_type (call_stmt),
cfef45c8 6373 fn, gimple_call_num_args (stmt), args);
cfef45c8 6374 if (retval)
5c944c6c
RB
6375 {
6376 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6377 STRIP_NOPS (retval);
538dd0b7
DM
6378 retval = fold_convert (gimple_call_return_type (call_stmt),
6379 retval);
5c944c6c 6380 }
cfef45c8
RG
6381 return retval;
6382 }
6383 return NULL_TREE;
6384 }
6385
6386 default:
6387 return NULL_TREE;
6388 }
6389}
6390
6391/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6392 Returns NULL_TREE if folding to a constant is not possible, otherwise
6393 returns a constant according to is_gimple_min_invariant. */
6394
6395tree
355fe088 6396gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
6397{
6398 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6399 if (res && is_gimple_min_invariant (res))
6400 return res;
6401 return NULL_TREE;
6402}
6403
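/* Illustrative sketch, not part of the original file: with an identity
   valueization callback only statements whose operands are already constant
   fold; a propagator would instead return its lattice value for each SSA
   name.  Both helpers below are hypothetical.  */

static tree
example_identity_valueize (tree name)
{
  return name;
}

static tree
example_fold_to_constant (gimple *stmt)
{
  return gimple_fold_stmt_to_constant (stmt, example_identity_valueize);
}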
6404
 6405/* The following set of functions is supposed to fold references using
6406 their constant initializers. */
6407
cfef45c8
RG
6408/* See if we can find constructor defining value of BASE.
 6409 When we know the constructor with a constant offset (such as when
 6410 BASE is array[40] and we do know the constructor of the array), then
6411 BIT_OFFSET is adjusted accordingly.
6412
6413 As a special case, return error_mark_node when constructor
6414 is not explicitly available, but it is known to be zero
6415 such as 'static const int a;'. */
6416static tree
588db50c 6417get_base_constructor (tree base, poly_int64_pod *bit_offset,
cfef45c8
RG
6418 tree (*valueize)(tree))
6419{
588db50c 6420 poly_int64 bit_offset2, size, max_size;
ee45a32d
EB
6421 bool reverse;
6422
cfef45c8
RG
6423 if (TREE_CODE (base) == MEM_REF)
6424 {
6a5aca53
ML
6425 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6426 if (!boff.to_shwi (bit_offset))
6427 return NULL_TREE;
cfef45c8
RG
6428
6429 if (valueize
6430 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6431 base = valueize (TREE_OPERAND (base, 0));
6432 if (!base || TREE_CODE (base) != ADDR_EXPR)
6433 return NULL_TREE;
6434 base = TREE_OPERAND (base, 0);
6435 }
13e88953
RB
6436 else if (valueize
6437 && TREE_CODE (base) == SSA_NAME)
6438 base = valueize (base);
cfef45c8
RG
6439
6440 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6441 DECL_INITIAL. If BASE is a nested reference into another
6442 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6443 the inner reference. */
6444 switch (TREE_CODE (base))
6445 {
6446 case VAR_DECL:
cfef45c8 6447 case CONST_DECL:
6a6dac52
JH
6448 {
6449 tree init = ctor_for_folding (base);
6450
688010ba 6451 /* Our semantics are the exact opposite of ctor_for_folding;
6a6dac52
JH
6452 NULL means unknown, while error_mark_node is 0. */
6453 if (init == error_mark_node)
6454 return NULL_TREE;
6455 if (!init)
6456 return error_mark_node;
6457 return init;
6458 }
cfef45c8 6459
13e88953
RB
6460 case VIEW_CONVERT_EXPR:
6461 return get_base_constructor (TREE_OPERAND (base, 0),
6462 bit_offset, valueize);
6463
cfef45c8
RG
6464 case ARRAY_REF:
6465 case COMPONENT_REF:
ee45a32d
EB
6466 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6467 &reverse);
588db50c 6468 if (!known_size_p (max_size) || maybe_ne (size, max_size))
cfef45c8
RG
6469 return NULL_TREE;
6470 *bit_offset += bit_offset2;
6471 return get_base_constructor (base, bit_offset, valueize);
6472
cfef45c8
RG
6473 case CONSTRUCTOR:
6474 return base;
6475
6476 default:
13e88953
RB
6477 if (CONSTANT_CLASS_P (base))
6478 return base;
6479
cfef45c8
RG
6480 return NULL_TREE;
6481 }
6482}
6483
cfef45c8
RG
6484/* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
6485 SIZE to the memory at bit OFFSET. */
6486
6487static tree
6488fold_array_ctor_reference (tree type, tree ctor,
6489 unsigned HOST_WIDE_INT offset,
c44c2088
JH
6490 unsigned HOST_WIDE_INT size,
6491 tree from_decl)
cfef45c8 6492{
807e902e
KZ
6493 offset_int low_bound;
6494 offset_int elt_size;
807e902e 6495 offset_int access_index;
6a636014 6496 tree domain_type = NULL_TREE;
cfef45c8
RG
6497 HOST_WIDE_INT inner_offset;
6498
6499 /* Compute low bound and elt size. */
eb8f1123
RG
6500 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6501 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
6502 if (domain_type && TYPE_MIN_VALUE (domain_type))
6503 {
 6504 /* Static constructors for variably sized objects make no sense. */
9ef2eff0
RB
6505 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6506 return NULL_TREE;
807e902e 6507 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
6508 }
6509 else
807e902e 6510 low_bound = 0;
cfef45c8 6511 /* Static constructors for variably sized objects make no sense. */
9ef2eff0
RB
6512 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6513 return NULL_TREE;
807e902e 6514 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8
RG
6515
6516 /* We can handle only constantly sized accesses that are known to not
 6517 be larger than the size of an array element. */
6518 if (!TYPE_SIZE_UNIT (type)
6519 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
032c80e9 6520 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
807e902e 6521 || elt_size == 0)
cfef45c8
RG
6522 return NULL_TREE;
6523
6524 /* Compute the array index we look for. */
807e902e
KZ
6525 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6526 elt_size);
27bcd47c 6527 access_index += low_bound;
cfef45c8
RG
6528
6529 /* And offset within the access. */
27bcd47c 6530 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8
RG
6531
 6532 /* See if the array field is large enough to span the whole access. We do
 6533 not care to fold accesses spanning multiple array indexes. */
27bcd47c 6534 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
cfef45c8 6535 return NULL_TREE;
6a636014
AL
6536 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6537 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
cfef45c8 6538
cfef45c8
RG
 6539 /* When memory is not explicitly mentioned in the constructor,
6540 it is 0 (or out of range). */
6541 return build_zero_cst (type);
6542}
6543
6544/* CTOR is CONSTRUCTOR of an aggregate or vector.
6545 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
6546
6547static tree
6548fold_nonarray_ctor_reference (tree type, tree ctor,
6549 unsigned HOST_WIDE_INT offset,
c44c2088
JH
6550 unsigned HOST_WIDE_INT size,
6551 tree from_decl)
cfef45c8
RG
6552{
6553 unsigned HOST_WIDE_INT cnt;
6554 tree cfield, cval;
6555
6556 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6557 cval)
6558 {
6559 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6560 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6561 tree field_size = DECL_SIZE (cfield);
807e902e
KZ
6562 offset_int bitoffset;
6563 offset_int bitoffset_end, access_end;
cfef45c8
RG
6564
 6565 /* Variable-sized objects in static constructors make no sense,
6566 but field_size can be NULL for flexible array members. */
6567 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6568 && TREE_CODE (byte_offset) == INTEGER_CST
6569 && (field_size != NULL_TREE
6570 ? TREE_CODE (field_size) == INTEGER_CST
6571 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6572
6573 /* Compute bit offset of the field. */
807e902e 6574 bitoffset = (wi::to_offset (field_offset)
8de73453 6575 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
cfef45c8
RG
6576 /* Compute bit offset where the field ends. */
6577 if (field_size != NULL_TREE)
807e902e 6578 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 6579 else
807e902e 6580 bitoffset_end = 0;
cfef45c8 6581
807e902e 6582 access_end = offset_int (offset) + size;
b8b2b009
JJ
6583
6584 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
6585 [BITOFFSET, BITOFFSET_END)? */
807e902e 6586 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 6587 && (field_size == NULL_TREE
807e902e 6588 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 6589 {
807e902e 6590 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8
RG
6591 /* We do have overlap. Now see if field is large enough to
6592 cover the access. Give up for accesses spanning multiple
6593 fields. */
807e902e 6594 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 6595 return NULL_TREE;
032c80e9 6596 if (offset < bitoffset)
b8b2b009 6597 return NULL_TREE;
cfef45c8 6598 return fold_ctor_reference (type, cval,
27bcd47c 6599 inner_offset.to_uhwi (), size,
c44c2088 6600 from_decl);
cfef45c8
RG
6601 }
6602 }
 6603 /* When memory is not explicitly mentioned in the constructor, it is 0. */
6604 return build_zero_cst (type);
6605}
6606
30acf282
RS
 6607/* CTOR is the value initializing memory; fold a reference of type TYPE and
 6608 size POLY_SIZE to the memory at bit offset POLY_OFFSET. */
cfef45c8 6609
8403c2cf 6610tree
30acf282
RS
6611fold_ctor_reference (tree type, tree ctor, poly_uint64 poly_offset,
6612 poly_uint64 poly_size, tree from_decl)
cfef45c8
RG
6613{
6614 tree ret;
6615
6616 /* We found the field with exact match. */
6617 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
30acf282 6618 && known_eq (poly_offset, 0U))
9d60be38 6619 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 6620
30acf282
RS
6621 /* The remaining optimizations need a constant size and offset. */
6622 unsigned HOST_WIDE_INT size, offset;
6623 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6624 return NULL_TREE;
6625
cfef45c8
RG
6626 /* We are at the end of walk, see if we can view convert the
6627 result. */
6628 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6629 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3d8208ce
TP
6630 && !compare_tree_int (TYPE_SIZE (type), size)
6631 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
cfef45c8 6632 {
9d60be38 6633 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 6634 if (ret)
672d9f8e
RB
6635 {
6636 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6637 if (ret)
6638 STRIP_USELESS_TYPE_CONVERSION (ret);
6639 }
cfef45c8
RG
6640 return ret;
6641 }
b2505143
RB
6642 /* For constants and byte-aligned/sized reads try to go through
6643 native_encode/interpret. */
6644 if (CONSTANT_CLASS_P (ctor)
6645 && BITS_PER_UNIT == 8
6646 && offset % BITS_PER_UNIT == 0
6647 && size % BITS_PER_UNIT == 0
6648 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6649 {
6650 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1ff0a84c
JJ
6651 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6652 offset / BITS_PER_UNIT);
6653 if (len > 0)
6654 return native_interpret_expr (type, buf, len);
b2505143 6655 }
cfef45c8
RG
6656 if (TREE_CODE (ctor) == CONSTRUCTOR)
6657 {
6658
eb8f1123
RG
6659 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6660 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
c44c2088
JH
6661 return fold_array_ctor_reference (type, ctor, offset, size,
6662 from_decl);
cfef45c8 6663 else
c44c2088
JH
6664 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6665 from_decl);
cfef45c8
RG
6666 }
6667
6668 return NULL_TREE;
6669}
6670
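/* Illustrative sketch, not part of the original file: read an int-sized
   value that lives BIT_OFFSET bits into the constant initializer of a
   static variable DECL.  example_read_from_initializer is a hypothetical
   helper; it conservatively gives up when ctor_for_folding cannot provide
   an explicit initializer (including the error_mark_node "all zeros"
   case).  */

static tree
example_read_from_initializer (tree decl, unsigned HOST_WIDE_INT bit_offset)
{
  tree init = ctor_for_folding (decl);
  if (!init || init == error_mark_node)
    return NULL_TREE;
  return fold_ctor_reference (integer_type_node, init, bit_offset,
			      tree_to_uhwi (TYPE_SIZE (integer_type_node)),
			      decl);
}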
6671/* Return the tree representing the element referenced by T if T is an
 6672 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
6673 names using VALUEIZE. Return NULL_TREE otherwise. */
6674
6675tree
6676fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6677{
6678 tree ctor, idx, base;
588db50c 6679 poly_int64 offset, size, max_size;
cfef45c8 6680 tree tem;
ee45a32d 6681 bool reverse;
cfef45c8 6682
f8a7df45
RG
6683 if (TREE_THIS_VOLATILE (t))
6684 return NULL_TREE;
6685
3a65ee74 6686 if (DECL_P (t))
cfef45c8
RG
6687 return get_symbol_constant_value (t);
6688
6689 tem = fold_read_from_constant_string (t);
6690 if (tem)
6691 return tem;
6692
6693 switch (TREE_CODE (t))
6694 {
6695 case ARRAY_REF:
6696 case ARRAY_RANGE_REF:
6697 /* Constant indexes are handled well by get_base_constructor.
6698 Only special case variable offsets.
6699 FIXME: This code can't handle nested references with variable indexes
6700 (they will be handled only by iteration of ccp). Perhaps we can bring
6701 get_ref_base_and_extent here and make it use a valueize callback. */
6702 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6703 && valueize
6704 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
588db50c 6705 && poly_int_tree_p (idx))
cfef45c8
RG
6706 {
6707 tree low_bound, unit_size;
6708
6709 /* If the resulting bit-offset is constant, track it. */
6710 if ((low_bound = array_ref_low_bound (t),
588db50c 6711 poly_int_tree_p (low_bound))
cfef45c8 6712 && (unit_size = array_ref_element_size (t),
807e902e 6713 tree_fits_uhwi_p (unit_size)))
cfef45c8 6714 {
588db50c
RS
6715 poly_offset_int woffset
6716 = wi::sext (wi::to_poly_offset (idx)
6717 - wi::to_poly_offset (low_bound),
807e902e
KZ
6718 TYPE_PRECISION (TREE_TYPE (idx)));
6719
588db50c 6720 if (woffset.to_shwi (&offset))
807e902e 6721 {
807e902e
KZ
6722 /* TODO: This code seems wrong; we should multiply first and then check
6723 whether the result fits. */
6724 offset *= tree_to_uhwi (unit_size);
6725 offset *= BITS_PER_UNIT;
6726
6727 base = TREE_OPERAND (t, 0);
6728 ctor = get_base_constructor (base, &offset, valueize);
6729 /* Empty constructor. Always fold to 0. */
6730 if (ctor == error_mark_node)
6731 return build_zero_cst (TREE_TYPE (t));
6732 /* Out-of-bounds array access. Value is undefined,
6733 but don't fold. */
588db50c 6734 if (maybe_lt (offset, 0))
807e902e
KZ
6735 return NULL_TREE;
6736 /* We cannot determine the ctor. */
6737 if (!ctor)
6738 return NULL_TREE;
6739 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6740 tree_to_uhwi (unit_size)
6741 * BITS_PER_UNIT,
6742 base);
6743 }
cfef45c8
RG
6744 }
6745 }
6746 /* Fallthru. */
6747
6748 case COMPONENT_REF:
6749 case BIT_FIELD_REF:
6750 case TARGET_MEM_REF:
6751 case MEM_REF:
ee45a32d 6752 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
cfef45c8
RG
6753 ctor = get_base_constructor (base, &offset, valueize);
6754
6755 /* Empty constructor. Always fold to 0. */
6756 if (ctor == error_mark_node)
6757 return build_zero_cst (TREE_TYPE (t));
6758 /* We do not know the precise address. */
588db50c 6759 if (!known_size_p (max_size) || maybe_ne (max_size, size))
cfef45c8
RG
6760 return NULL_TREE;
6762 /* We cannot determine the ctor. */
6762 if (!ctor)
6763 return NULL_TREE;
6764
6765 /* Out-of-bounds array access. Value is undefined, but don't fold. */
588db50c 6766 if (maybe_lt (offset, 0))
cfef45c8
RG
6767 return NULL_TREE;
6768
c44c2088
JH
6769 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6770 base);
cfef45c8
RG
6771
6772 case REALPART_EXPR:
6773 case IMAGPART_EXPR:
6774 {
6775 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6776 if (c && TREE_CODE (c) == COMPLEX_CST)
6777 return fold_build1_loc (EXPR_LOCATION (t),
6778 TREE_CODE (t), TREE_TYPE (t), c);
6779 break;
6780 }
6781
6782 default:
6783 break;
6784 }
6785
6786 return NULL_TREE;
6787}
6788
6789tree
6790fold_const_aggregate_ref (tree t)
6791{
6792 return fold_const_aggregate_ref_1 (t, NULL);
6793}
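
/* A minimal usage sketch (illustrative only; a_decl is a hypothetical
   VAR_DECL for A):

     static const int a[4] = { 10, 20, 30, 40 };

     tree ref = build4 (ARRAY_REF, integer_type_node, a_decl,
			size_int (2), NULL_TREE, NULL_TREE);
     tree val = fold_const_aggregate_ref (ref);

   Here get_ref_base_and_extent computes the constant bit offset,
   get_base_constructor finds A's initializer and fold_ctor_reference
   extracts the element, so VAL is the INTEGER_CST 30 (assuming 32-bit
   int).  With an SSA index known to be constant, the same walk is done
   by fold_const_aggregate_ref_1 with a valueize callback.  */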
06bc3ec7 6794
85942f45 6795 /* Look up the virtual method with index TOKEN in the virtual table V
ec77d61f
JH
6796 at OFFSET.
6797 Set CAN_REFER, if non-NULL, to false if the method
6798 is not referable or if the virtual table is ill-formed (such as one rewritten
6799 by a non-C++-produced symbol); otherwise just return NULL in that case. */
81fa35bd
MJ
6800
6801tree
85942f45
JH
6802gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6803 tree v,
ec77d61f
JH
6804 unsigned HOST_WIDE_INT offset,
6805 bool *can_refer)
81fa35bd 6806{
85942f45
JH
6807 tree vtable = v, init, fn;
6808 unsigned HOST_WIDE_INT size;
8c311b50
JH
6809 unsigned HOST_WIDE_INT elt_size, access_index;
6810 tree domain_type;
81fa35bd 6811
ec77d61f
JH
6812 if (can_refer)
6813 *can_refer = true;
6814
9de2f554 6815 /* First of all double-check we have a virtual table. */
8813a647 6816 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
ec77d61f 6817 {
ec77d61f
JH
6818 /* Pass down that we lost track of the target. */
6819 if (can_refer)
6820 *can_refer = false;
6821 return NULL_TREE;
6822 }
9de2f554 6823
2aa3da06
JH
6824 init = ctor_for_folding (v);
6825
9de2f554 6826 /* The virtual tables should always be born with constructors
2aa3da06
JH
6827 and we should always assume that they are available for
6828 folding. At the moment we do not stream them in all cases,
6829 but it should never happen that the ctor seems unreachable. */
6830 gcc_assert (init);
6831 if (init == error_mark_node)
6832 {
ec77d61f
JH
6833 /* Pass down that we lost track of the target. */
6834 if (can_refer)
6835 *can_refer = false;
2aa3da06
JH
6836 return NULL_TREE;
6837 }
81fa35bd 6838 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 6839 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 6840 offset *= BITS_PER_UNIT;
81fa35bd 6841 offset += token * size;
9de2f554 6842
8c311b50
JH
6843 /* Look up the value in the constructor that is assumed to be an array.
6844 This is equivalent to
6845 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6846 offset, size, NULL);
6847 but in constant time. We expect that the frontend produced a simple
6848 array without indexed initializers. */
6849
6850 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6851 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6852 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6853 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6854
6855 access_index = offset / BITS_PER_UNIT / elt_size;
6856 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6857
6858 /* This code makes an assumption that there are no
6859 indexed fields produced by the C++ FE, so we can directly index the array. */
6860 if (access_index < CONSTRUCTOR_NELTS (init))
6861 {
6862 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6863 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6864 STRIP_NOPS (fn);
6865 }
6866 else
6867 fn = NULL;
9de2f554
JH
6868
6869 /* For a type-inconsistent program we may end up looking up a virtual method
6870 in a virtual table that does not contain TOKEN entries. We may overrun
6871 the virtual table and pick up a constant or RTTI info pointer.
6872 In any case the call is undefined. */
6873 if (!fn
6874 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6875 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6876 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6877 else
6878 {
6879 fn = TREE_OPERAND (fn, 0);
6880
6881 /* When the cgraph node is missing and the function is not public, we cannot
6882 devirtualize. This can happen in WHOPR when the actual method
6883 ends up in another partition, because we found the devirtualization
6884 possibility too late. */
6885 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
6886 {
6887 if (can_refer)
6888 {
6889 *can_refer = false;
6890 return fn;
6891 }
6892 return NULL_TREE;
6893 }
9de2f554 6894 }
81fa35bd 6895
7501ca28
RG
6896 /* Make sure we create a cgraph node for functions we'll reference.
6897 They can be non-existent if the reference comes from an entry
6898 of an external vtable for example. */
d52f5295 6899 cgraph_node::get_create (fn);
7501ca28 6900
81fa35bd
MJ
6901 return fn;
6902}
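
/* For example (illustrative only), with 64-bit vtable slots
   (elt_size == 8), TOKEN == 2 and OFFSET == 16 the lookup above
   computes

     access_index = (16 * BITS_PER_UNIT + 2 * 64) / BITS_PER_UNIT / 8 == 4

   i.e. the fifth element of the vtable initializer, and returns the
   FUNCTION_DECL behind its ADDR_EXPR, or the BUILT_IN_UNREACHABLE decl
   when the slot does not hold a function address.  */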
6903
85942f45
JH
6904/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6905 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6906 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
6907 OBJ_TYPE_REF_OBJECT(REF).
6908 Set CAN_REFER, if non-NULL, to false if the method
6909 is not referable or if the virtual table is ill-formed (such as one rewritten
6910 by a non-C++-produced symbol); otherwise just return NULL in that case. */
85942f45
JH
6911
6912tree
ec77d61f
JH
6913gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6914 bool *can_refer)
85942f45
JH
6915{
6916 unsigned HOST_WIDE_INT offset;
6917 tree v;
6918
6919 v = BINFO_VTABLE (known_binfo);
6920 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6921 if (!v)
6922 return NULL_TREE;
6923
6924 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
6925 {
6926 if (can_refer)
6927 *can_refer = false;
6928 return NULL_TREE;
6929 }
6930 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
6931}
6932
737f500a
RB
6933/* Given a pointer value T, return a simplified version of an
6934 indirection through T, or NULL_TREE if no simplification is
b184c8f1
AM
6935 possible. Note that the resulting type may be different from
6936 the type pointed to in the sense that it is still compatible
6937 from the langhooks point of view. */
6938
6939tree
6940gimple_fold_indirect_ref (tree t)
6941{
6942 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6943 tree sub = t;
6944 tree subtype;
6945
6946 STRIP_NOPS (sub);
6947 subtype = TREE_TYPE (sub);
737f500a
RB
6948 if (!POINTER_TYPE_P (subtype)
6949 || TYPE_REF_CAN_ALIAS_ALL (ptype))
b184c8f1
AM
6950 return NULL_TREE;
6951
6952 if (TREE_CODE (sub) == ADDR_EXPR)
6953 {
6954 tree op = TREE_OPERAND (sub, 0);
6955 tree optype = TREE_TYPE (op);
6956 /* *&p => p */
6957 if (useless_type_conversion_p (type, optype))
6958 return op;
6959
6960 /* *(foo *)&fooarray => fooarray[0] */
6961 if (TREE_CODE (optype) == ARRAY_TYPE
6962 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
6963 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6964 {
6965 tree type_domain = TYPE_DOMAIN (optype);
6966 tree min_val = size_zero_node;
6967 if (type_domain && TYPE_MIN_VALUE (type_domain))
6968 min_val = TYPE_MIN_VALUE (type_domain);
6969 if (TREE_CODE (min_val) == INTEGER_CST)
6970 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
6971 }
6972 /* *(foo *)&complexfoo => __real__ complexfoo */
6973 else if (TREE_CODE (optype) == COMPLEX_TYPE
6974 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6975 return fold_build1 (REALPART_EXPR, type, op);
6976 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6977 else if (TREE_CODE (optype) == VECTOR_TYPE
6978 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6979 {
6980 tree part_width = TYPE_SIZE (type);
6981 tree index = bitsize_int (0);
6982 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
6983 }
6984 }
6985
6986 /* *(p + CST) -> ... */
6987 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
6988 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
6989 {
6990 tree addr = TREE_OPERAND (sub, 0);
6991 tree off = TREE_OPERAND (sub, 1);
6992 tree addrtype;
6993
6994 STRIP_NOPS (addr);
6995 addrtype = TREE_TYPE (addr);
6996
6997 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
6998 if (TREE_CODE (addr) == ADDR_EXPR
6999 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7000 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
cc269bb6 7001 && tree_fits_uhwi_p (off))
b184c8f1 7002 {
ae7e9ddd 7003 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
b184c8f1
AM
7004 tree part_width = TYPE_SIZE (type);
7005 unsigned HOST_WIDE_INT part_widthi
9439e9a1 7006 = tree_to_shwi (part_width) / BITS_PER_UNIT;
b184c8f1
AM
7007 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7008 tree index = bitsize_int (indexi);
928686b1
RS
7009 if (known_lt (offset / part_widthi,
7010 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
b184c8f1
AM
7011 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7012 part_width, index);
7013 }
7014
7015 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7016 if (TREE_CODE (addr) == ADDR_EXPR
7017 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7018 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7019 {
7020 tree size = TYPE_SIZE_UNIT (type);
7021 if (tree_int_cst_equal (size, off))
7022 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7023 }
7024
7025 /* *(p + CST) -> MEM_REF <p, CST>. */
7026 if (TREE_CODE (addr) != ADDR_EXPR
7027 || DECL_P (TREE_OPERAND (addr, 0)))
7028 return fold_build2 (MEM_REF, type,
7029 addr,
8e6cdc90 7030 wide_int_to_tree (ptype, wi::to_wide (off)));
b184c8f1
AM
7031 }
7032
7033 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7034 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7035 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7036 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7037 {
7038 tree type_domain;
7039 tree min_val = size_zero_node;
7040 tree osub = sub;
7041 sub = gimple_fold_indirect_ref (sub);
7042 if (! sub)
7043 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7044 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7045 if (type_domain && TYPE_MIN_VALUE (type_domain))
7046 min_val = TYPE_MIN_VALUE (type_domain);
7047 if (TREE_CODE (min_val) == INTEGER_CST)
7048 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7049 }
7050
7051 return NULL_TREE;
7052}
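
/* A minimal usage sketch (illustrative only), assuming an 8-byte
   double:

     _Complex double c;

   Folding the dereference of (double *) &c through
   gimple_fold_indirect_ref yields __real__ c via the ADDR_EXPR case
   above, and folding the dereference of
   POINTER_PLUS_EXPR <(double *) &c, 8> yields __imag__ c via the
   complex case for *(p + CST).  */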
19e51b40
JJ
7053
7054/* Return true if CODE is an operation that when operating on signed
7055 integer types involves undefined behavior on overflow and the
7056 operation can be expressed with unsigned arithmetic. */
7057
7058bool
7059arith_code_with_undefined_signed_overflow (tree_code code)
7060{
7061 switch (code)
7062 {
7063 case PLUS_EXPR:
7064 case MINUS_EXPR:
7065 case MULT_EXPR:
7066 case NEGATE_EXPR:
7067 case POINTER_PLUS_EXPR:
7068 return true;
7069 default:
7070 return false;
7071 }
7072}
7073
7074/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7075 operation that can be transformed to unsigned arithmetic by converting
7076 its operands, carrying out the operation in the corresponding unsigned
7077 type and converting the result back to the original type.
7078
7079 Returns a sequence of statements that replace STMT and also contain
7080 a modified form of STMT itself. */
7081
7082gimple_seq
355fe088 7083rewrite_to_defined_overflow (gimple *stmt)
19e51b40
JJ
7084{
7085 if (dump_file && (dump_flags & TDF_DETAILS))
7086 {
7087 fprintf (dump_file, "rewriting stmt with undefined signed "
7088 "overflow ");
7089 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7090 }
7091
7092 tree lhs = gimple_assign_lhs (stmt);
7093 tree type = unsigned_type_for (TREE_TYPE (lhs));
7094 gimple_seq stmts = NULL;
7095 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7096 {
74e3c262
RB
7097 tree op = gimple_op (stmt, i);
7098 op = gimple_convert (&stmts, type, op);
7099 gimple_set_op (stmt, i, op);
19e51b40
JJ
7100 }
7101 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7102 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7103 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7104 gimple_seq_add_stmt (&stmts, stmt);
355fe088 7105 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
19e51b40
JJ
7106 gimple_seq_add_stmt (&stmts, cvt);
7107
7108 return stmts;
7109}
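
/* A minimal usage sketch (illustrative only; the SSA names are made
   up): given the assignment

     x_3 = a_1 + b_2;

   with signed int operands, rewrite_to_defined_overflow produces the
   sequence

     _4 = (unsigned int) a_1;
     _5 = (unsigned int) b_2;
     _6 = _4 + _5;
     x_3 = (int) _6;

   which a caller would typically insert in place of the original
   statement, e.g. via gsi_replace_with_seq.  */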
d4f5cd5e 7110
3d2cf79f 7111
c26de36d
RB
7112/* The valueization hook we use for the gimple_build API simplification.
7113 This makes us match fold_buildN behavior by only combining with
7114 statements in the sequence(s) we are currently building. */
7115
7116static tree
7117gimple_build_valueize (tree op)
7118{
7119 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7120 return op;
7121 return NULL_TREE;
7122}
7123
3d2cf79f 7124/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 7125 simplifying it first if possible. Returns the built
3d2cf79f
RB
7126 expression value and appends statements possibly defining it
7127 to SEQ. */
7128
7129tree
7130gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7131 enum tree_code code, tree type, tree op0)
3d2cf79f 7132{
c26de36d 7133 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
7134 if (!res)
7135 {
a15ebbcd 7136 res = create_tmp_reg_or_ssa_name (type);
355fe088 7137 gimple *stmt;
3d2cf79f
RB
7138 if (code == REALPART_EXPR
7139 || code == IMAGPART_EXPR
7140 || code == VIEW_CONVERT_EXPR)
0d0e4a03 7141 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 7142 else
0d0e4a03 7143 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
7144 gimple_set_location (stmt, loc);
7145 gimple_seq_add_stmt_without_update (seq, stmt);
7146 }
7147 return res;
7148}
7149
7150/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 7151 simplifying it first if possible. Returns the built
3d2cf79f
RB
7152 expression value and appends statements possibly defining it
7153 to SEQ. */
7154
7155tree
7156gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7157 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 7158{
c26de36d 7159 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
7160 if (!res)
7161 {
a15ebbcd 7162 res = create_tmp_reg_or_ssa_name (type);
355fe088 7163 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
7164 gimple_set_location (stmt, loc);
7165 gimple_seq_add_stmt_without_update (seq, stmt);
7166 }
7167 return res;
7168}
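
/* A minimal usage sketch of the gimple_build overloads above
   (illustrative only; LOC, A and B are assumed to exist in the
   caller):

     gimple_seq seq = NULL;
     tree sum = gimple_build (&seq, loc, PLUS_EXPR, integer_type_node,
			      a, b);
     tree neg = gimple_build (&seq, loc, NEGATE_EXPR, integer_type_node,
			      sum);

   When gimple_simplify can fold the expression (for instance when A
   and B are INTEGER_CSTs), no statement is emitted and the simplified
   value is returned; otherwise a new SSA name is created and its
   defining statement is appended to SEQ.  */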
7169
7170/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 7171 simplifying it first if possible. Returns the built
3d2cf79f
RB
7172 expression value and appends statements possibly defining it
7173 to SEQ. */
7174
7175tree
7176gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7177 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
7178{
7179 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 7180 seq, gimple_build_valueize);
3d2cf79f
RB
7181 if (!res)
7182 {
a15ebbcd 7183 res = create_tmp_reg_or_ssa_name (type);
355fe088 7184 gimple *stmt;
3d2cf79f 7185 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
7186 stmt = gimple_build_assign (res, code,
7187 build3 (code, type, op0, op1, op2));
3d2cf79f 7188 else
0d0e4a03 7189 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
7190 gimple_set_location (stmt, loc);
7191 gimple_seq_add_stmt_without_update (seq, stmt);
7192 }
7193 return res;
7194}
7195
7196/* Build the call FN (ARG0) with a result of type TYPE
7197 (or no result if TYPE is void) with location LOC,
c26de36d 7198 simplifying it first if possible. Returns the built
3d2cf79f
RB
7199 expression value (or NULL_TREE if TYPE is void) and appends
7200 statements possibly defining it to SEQ. */
7201
7202tree
eb69361d
RS
7203gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7204 tree type, tree arg0)
3d2cf79f 7205{
c26de36d 7206 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
7207 if (!res)
7208 {
eb69361d
RS
7209 gcall *stmt;
7210 if (internal_fn_p (fn))
7211 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7212 else
7213 {
7214 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7215 stmt = gimple_build_call (decl, 1, arg0);
7216 }
3d2cf79f
RB
7217 if (!VOID_TYPE_P (type))
7218 {
a15ebbcd 7219 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7220 gimple_call_set_lhs (stmt, res);
7221 }
7222 gimple_set_location (stmt, loc);
7223 gimple_seq_add_stmt_without_update (seq, stmt);
7224 }
7225 return res;
7226}
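
/* For example (illustrative only), building sqrt (x) in a sequence:

     tree r = gimple_build (&seq, loc, CFN_BUILT_IN_SQRT,
			    double_type_node, x);

   emits a call to __builtin_sqrt unless gimple_simplify folds it
   (e.g. when X is a REAL_CST).  An internal function such as CFN_SQRT
   would instead be emitted as an internal call.  */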
7227
7228/* Build the call FN (ARG0, ARG1) with a result of type TYPE
7229 (or no result if TYPE is void) with location LOC,
c26de36d 7230 simplifying it first if possible. Returns the built
3d2cf79f
RB
7231 expression value (or NULL_TREE if TYPE is void) and appends
7232 statements possibly defining it to SEQ. */
7233
7234tree
eb69361d
RS
7235gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7236 tree type, tree arg0, tree arg1)
3d2cf79f 7237{
c26de36d 7238 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
7239 if (!res)
7240 {
eb69361d
RS
7241 gcall *stmt;
7242 if (internal_fn_p (fn))
7243 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7244 else
7245 {
7246 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7247 stmt = gimple_build_call (decl, 2, arg0, arg1);
7248 }
3d2cf79f
RB
7249 if (!VOID_TYPE_P (type))
7250 {
a15ebbcd 7251 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7252 gimple_call_set_lhs (stmt, res);
7253 }
7254 gimple_set_location (stmt, loc);
7255 gimple_seq_add_stmt_without_update (seq, stmt);
7256 }
7257 return res;
7258}
7259
7260/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7261 (or no result if TYPE is void) with location LOC,
c26de36d 7262 simplifying it first if possible. Returns the built
3d2cf79f
RB
7263 expression value (or NULL_TREE if TYPE is void) and appends
7264 statements possibly defining it to SEQ. */
7265
7266tree
eb69361d
RS
7267gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7268 tree type, tree arg0, tree arg1, tree arg2)
3d2cf79f 7269{
c26de36d
RB
7270 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7271 seq, gimple_build_valueize);
3d2cf79f
RB
7272 if (!res)
7273 {
eb69361d
RS
7274 gcall *stmt;
7275 if (internal_fn_p (fn))
7276 stmt = gimple_build_call_internal (as_internal_fn (fn),
7277 3, arg0, arg1, arg2);
7278 else
7279 {
7280 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7281 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7282 }
3d2cf79f
RB
7283 if (!VOID_TYPE_P (type))
7284 {
a15ebbcd 7285 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7286 gimple_call_set_lhs (stmt, res);
7287 }
7288 gimple_set_location (stmt, loc);
7289 gimple_seq_add_stmt_without_update (seq, stmt);
7290 }
7291 return res;
7292}
7293
7294/* Build the conversion (TYPE) OP with a result of type TYPE
7295 with location LOC if such conversion is necessary in GIMPLE,
7296 simplifying it first.
7297 Returns the built expression value and appends
7298 statements possibly defining it to SEQ. */
d4f5cd5e
RB
7299
7300tree
7301gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7302{
7303 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7304 return op;
3d2cf79f 7305 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 7306}
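
/* For example (illustrative only), converting an int value V to
   sizetype:

     tree off = gimple_convert (&seq, loc, sizetype, v);

   returns V unchanged when its type is already compatible with
   sizetype and otherwise appends a NOP_EXPR assignment to SEQ (or
   folds the conversion when V is a constant).  */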
68e57f04 7307
74e3c262
RB
7308/* Build the conversion (ptrofftype) OP with a result of a type
7309 compatible with ptrofftype with location LOC if such conversion
7310 is necessary in GIMPLE, simplifying it first.
7311 Returns the built expression value and appends
7312 statements possibly defining it to SEQ. */
7313
7314tree
7315gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7316{
7317 if (ptrofftype_p (TREE_TYPE (op)))
7318 return op;
7319 return gimple_convert (seq, loc, sizetype, op);
7320}
7321
e7c45b66
RS
7322/* Build a vector of type TYPE in which each element has the value OP.
7323 Return a gimple value for the result, appending any new statements
7324 to SEQ. */
7325
7326tree
7327gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7328 tree op)
7329{
928686b1
RS
7330 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7331 && !CONSTANT_CLASS_P (op))
7332 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7333
e7c45b66
RS
7334 tree res, vec = build_vector_from_val (type, op);
7335 if (is_gimple_val (vec))
7336 return vec;
7337 if (gimple_in_ssa_p (cfun))
7338 res = make_ssa_name (type);
7339 else
7340 res = create_tmp_reg (type);
7341 gimple *stmt = gimple_build_assign (res, vec);
7342 gimple_set_location (stmt, loc);
7343 gimple_seq_add_stmt_without_update (seq, stmt);
7344 return res;
7345}
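
/* For example (illustrative only), splatting a scalar S into a vector
   type VECTYPE:

     tree vec = gimple_build_vector_from_val (&seq, loc, vectype, s);

   yields a VECTOR_CST directly when S is constant, and otherwise
   emits a CONSTRUCTOR assignment (or, for variable-length vector
   types, a VEC_DUPLICATE_EXPR) defining a new SSA name.  */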
7346
abe73c3d
RS
7347/* Build a vector from BUILDER, handling the case in which some elements
7348 are non-constant. Return a gimple value for the result, appending any
7349 new instructions to SEQ.
7350
7351 BUILDER must not have a stepped encoding on entry. This is because
7352 the function is not geared up to handle the arithmetic that would
7353 be needed in the variable case, and any code building a vector that
7354 is known to be constant should use BUILDER->build () directly. */
e7c45b66
RS
7355
7356tree
abe73c3d
RS
7357gimple_build_vector (gimple_seq *seq, location_t loc,
7358 tree_vector_builder *builder)
e7c45b66 7359{
abe73c3d
RS
7360 gcc_assert (builder->nelts_per_pattern () <= 2);
7361 unsigned int encoded_nelts = builder->encoded_nelts ();
7362 for (unsigned int i = 0; i < encoded_nelts; ++i)
7363 if (!TREE_CONSTANT ((*builder)[i]))
e7c45b66 7364 {
abe73c3d 7365 tree type = builder->type ();
928686b1 7366 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
e7c45b66
RS
7367 vec<constructor_elt, va_gc> *v;
7368 vec_alloc (v, nelts);
7369 for (i = 0; i < nelts; ++i)
abe73c3d 7370 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
e7c45b66
RS
7371
7372 tree res;
7373 if (gimple_in_ssa_p (cfun))
7374 res = make_ssa_name (type);
7375 else
7376 res = create_tmp_reg (type);
7377 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7378 gimple_set_location (stmt, loc);
7379 gimple_seq_add_stmt_without_update (seq, stmt);
7380 return res;
7381 }
abe73c3d 7382 return builder->build ();
e7c45b66
RS
7383}
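
/* A minimal usage sketch (illustrative only; VECTYPE, ELT_TYPE and
   x_1 are assumptions): building the two-element vector { x_1, 0 }:

     tree_vector_builder builder (vectype, 2, 1);
     builder.quick_push (x_1);
     builder.quick_push (build_zero_cst (elt_type));
     tree vec = gimple_build_vector (&seq, loc, &builder);

   Because one element is an SSA name, a CONSTRUCTOR assignment is
   emitted; with all-constant elements the result of builder.build ()
   is returned directly.  */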
7384
68e57f04
RS
7385/* Return true if the result of assignment STMT is known to be non-negative.
7386 If the return value is based on the assumption that signed overflow is
7387 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7388 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7389
7390static bool
7391gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7392 int depth)
7393{
7394 enum tree_code code = gimple_assign_rhs_code (stmt);
7395 switch (get_gimple_rhs_class (code))
7396 {
7397 case GIMPLE_UNARY_RHS:
7398 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7399 gimple_expr_type (stmt),
7400 gimple_assign_rhs1 (stmt),
7401 strict_overflow_p, depth);
7402 case GIMPLE_BINARY_RHS:
7403 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7404 gimple_expr_type (stmt),
7405 gimple_assign_rhs1 (stmt),
7406 gimple_assign_rhs2 (stmt),
7407 strict_overflow_p, depth);
7408 case GIMPLE_TERNARY_RHS:
7409 return false;
7410 case GIMPLE_SINGLE_RHS:
7411 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7412 strict_overflow_p, depth);
7413 case GIMPLE_INVALID_RHS:
7414 break;
7415 }
7416 gcc_unreachable ();
7417}
7418
7419/* Return true if return value of call STMT is known to be non-negative.
7420 If the return value is based on the assumption that signed overflow is
7421 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7422 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7423
7424static bool
7425gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7426 int depth)
7427{
7428 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7429 gimple_call_arg (stmt, 0) : NULL_TREE;
7430 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7431 gimple_call_arg (stmt, 1) : NULL_TREE;
7432
7433 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
1d9da71f 7434 gimple_call_combined_fn (stmt),
68e57f04
RS
7435 arg0,
7436 arg1,
7437 strict_overflow_p, depth);
7438}
7439
4534c203
RB
7440/* Return true if return value of call STMT is known to be non-negative.
7441 If the return value is based on the assumption that signed overflow is
7442 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7443 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7444
7445static bool
7446gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7447 int depth)
7448{
7449 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7450 {
7451 tree arg = gimple_phi_arg_def (stmt, i);
7452 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7453 return false;
7454 }
7455 return true;
7456}
7457
68e57f04
RS
7458/* Return true if STMT is known to compute a non-negative value.
7459 If the return value is based on the assumption that signed overflow is
7460 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7461 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7462
7463bool
7464gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7465 int depth)
7466{
7467 switch (gimple_code (stmt))
7468 {
7469 case GIMPLE_ASSIGN:
7470 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7471 depth);
7472 case GIMPLE_CALL:
7473 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7474 depth);
4534c203
RB
7475 case GIMPLE_PHI:
7476 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7477 depth);
68e57f04
RS
7478 default:
7479 return false;
7480 }
7481}
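
/* For example (illustrative only), for the assignment

     x_2 = y_1 * y_1;

   with a signed integer type, gimple_stmt_nonnegative_warnv_p returns
   true via tree_binary_nonnegative_warnv_p and sets *STRICT_OVERFLOW_P,
   since the result is non-negative only if the multiplication does not
   overflow.  */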
67dbe582
RS
7482
7483/* Return true if the floating-point value computed by assignment STMT
7484 is known to have an integer value. We also allow +Inf, -Inf and NaN
5a00b0aa 7485 to be considered integer values. Return false for signaling NaN.
67dbe582
RS
7486
7487 DEPTH is the current nesting depth of the query. */
7488
7489static bool
7490gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7491{
7492 enum tree_code code = gimple_assign_rhs_code (stmt);
7493 switch (get_gimple_rhs_class (code))
7494 {
7495 case GIMPLE_UNARY_RHS:
7496 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7497 gimple_assign_rhs1 (stmt), depth);
7498 case GIMPLE_BINARY_RHS:
7499 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7500 gimple_assign_rhs1 (stmt),
7501 gimple_assign_rhs2 (stmt), depth);
7502 case GIMPLE_TERNARY_RHS:
7503 return false;
7504 case GIMPLE_SINGLE_RHS:
7505 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7506 case GIMPLE_INVALID_RHS:
7507 break;
7508 }
7509 gcc_unreachable ();
7510}
7511
7512/* Return true if the floating-point value computed by call STMT is known
7513 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7514 considered integer values. Return false for signaling NaN.
67dbe582
RS
7515
7516 DEPTH is the current nesting depth of the query. */
7517
7518static bool
7519gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7520{
7521 tree arg0 = (gimple_call_num_args (stmt) > 0
7522 ? gimple_call_arg (stmt, 0)
7523 : NULL_TREE);
7524 tree arg1 = (gimple_call_num_args (stmt) > 1
7525 ? gimple_call_arg (stmt, 1)
7526 : NULL_TREE);
1d9da71f 7527 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
7528 arg0, arg1, depth);
7529}
7530
7531/* Return true if the floating-point result of phi STMT is known to have
7532 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 7533 integer values. Return false for signaling NaN.
67dbe582
RS
7534
7535 DEPTH is the current nesting depth of the query. */
7536
7537static bool
7538gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7539{
7540 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7541 {
7542 tree arg = gimple_phi_arg_def (stmt, i);
7543 if (!integer_valued_real_single_p (arg, depth + 1))
7544 return false;
7545 }
7546 return true;
7547}
7548
7549/* Return true if the floating-point value computed by STMT is known
7550 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7551 considered integer values. Return false for signaling NaN.
67dbe582
RS
7552
7553 DEPTH is the current nesting depth of the query. */
7554
7555bool
7556gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7557{
7558 switch (gimple_code (stmt))
7559 {
7560 case GIMPLE_ASSIGN:
7561 return gimple_assign_integer_valued_real_p (stmt, depth);
7562 case GIMPLE_CALL:
7563 return gimple_call_integer_valued_real_p (stmt, depth);
7564 case GIMPLE_PHI:
7565 return gimple_phi_integer_valued_real_p (stmt, depth);
7566 default:
7567 return false;
7568 }
7569}
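
/* For example (illustrative only), for the call

     x_2 = __builtin_trunc (y_1);

   gimple_stmt_integer_valued_real_p returns true via
   integer_valued_real_call_p, since trunc always produces an integral
   value (or +-Inf/NaN).  */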