/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2018 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5
AM
25#include "target.h"
26#include "rtl.h"
cbdd87d4 27#include "tree.h"
c7131fb2 28#include "gimple.h"
957060b5 29#include "predict.h"
c7131fb2 30#include "ssa.h"
957060b5
AM
31#include "cgraph.h"
32#include "gimple-pretty-print.h"
cc8bea0a 33#include "gimple-ssa-warn-restrict.h"
c7131fb2 34#include "fold-const.h"
36566b39
PK
35#include "stmt.h"
36#include "expr.h"
37#include "stor-layout.h"
7ee2468b 38#include "dumpfile.h"
2fb9a547 39#include "gimple-fold.h"
45b0be94 40#include "gimplify.h"
5be5c238 41#include "gimple-iterator.h"
442b4905
AM
42#include "tree-into-ssa.h"
43#include "tree-dfa.h"
025d57f0 44#include "tree-object-size.h"
7a300452 45#include "tree-ssa.h"
cbdd87d4 46#include "tree-ssa-propagate.h"
450ad0cd 47#include "ipa-utils.h"
4484a35a 48#include "tree-ssa-address.h"
862d0b35 49#include "langhooks.h"
19e51b40 50#include "gimplify-me.h"
2b5f0895 51#include "dbgcnt.h"
9b2b7279 52#include "builtins.h"
e0ee10ed
RB
53#include "tree-eh.h"
54#include "gimple-match.h"
48126138 55#include "gomp-constants.h"
f869c12f 56#include "optabs-query.h"
629b3d75 57#include "omp-general.h"
abd3a68c 58#include "tree-cfg.h"
a918bfbf 59#include "fold-const-call.h"
314e6352
ML
60#include "stringpool.h"
61#include "attribs.h"
45b2222a 62#include "asan.h"
025d57f0
MS
63#include "diagnostic-core.h"
64#include "intl.h"
6a33d0ff 65#include "calls.h"
5ebaa477 66#include "tree-vector-builder.h"
5d0d5d68 67#include "tree-ssa-strlen.h"
cbdd87d4 68
/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to other compilation units.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to a reference
        to a method that was partitioned elsewhere.
        In this case we have a static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during the final compilation stage.
        At this time we have already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they have not been
     optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
         static objects are defined.  */
      if (symtab->function_flags_ready)
        return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
        return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->definition)
      || (flag_ltrans
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When the function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
          && (!snode->in_other_partition
              || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, an SSA name is created.  Otherwise a temporary register
   is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
        return NULL_TREE;
      if (VAR_P (base))
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_node::get_create (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && is_gimple_reg_type (TREE_TYPE (sym)))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}


/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}


/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (TREE_CLOBBER_P (rhs))
          return NULL_TREE;

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (rhs))
              {
                bool final;
                vec <cgraph_node *>targets
                  = possible_polymorphic_call_targets (rhs, stmt, &final);
                if (final && targets.length () <= 1 && dbg_cnt (devirt))
                  {
                    if (dump_enabled_p ())
                      {
                        dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
                                         "resolving virtual function address "
                                         "reference to function %s\n",
                                         targets.length () == 1
                                         ? targets[0]->name ()
                                         : "NULL");
                      }
                    if (targets.length () == 1)
                      {
                        val = fold_convert (TREE_TYPE (val),
                                            build_fold_addr_expr_loc
                                              (loc, targets[0]->decl));
                        STRIP_USELESS_TYPE_CONVERSION (val);
                      }
                    else
                      /* We cannot use __builtin_unreachable here because it
                         cannot have its address taken.  */
                      val = build_int_cst (TREE_TYPE (val), 0);
                    return val;
                  }
              }
          }

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

            if (result)
              {
                /* Strip away useless type conversions.  Both the
                   NON_LVALUE_EXPR that may have been added by fold, and
                   "useless" type conversions that might now be apparent
                   due to propagation.  */
                STRIP_USELESS_TYPE_CONVERSION (result);

                if (result != rhs && valid_gimple_rhs_p (result))
                  return result;
              }
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (! CONSTANT_CLASS_P (val))
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
                                 TREE_TYPE (gimple_assign_lhs (stmt)),
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}


/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First, iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second, iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
         SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), false);
          return;
        }
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}

/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
        val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
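  /* Carry the virtual operands over from the old call so the replacement
     keeps the same position in the VDEF/VUSE chain.  */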
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  if (gimple_vuse (stmt))
    gimple_set_vuse (repl, gimple_vuse (stmt));
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
          || (TREE_CODE (inner) == MEM_REF
              && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}

MS
638/* If the SIZE argument representing the size of an object is in a range
639 of values of which exactly one is valid (and that is zero), return
640 true, otherwise false. */
641
642static bool
643size_must_be_zero_p (tree size)
644{
645 if (integer_zerop (size))
646 return true;
647
3f27391f 648 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
6512c0f1
MS
649 return false;
650
651 wide_int min, max;
54994253 652 enum value_range_kind rtype = get_range_info (size, &min, &max);
6512c0f1
MS
653 if (rtype != VR_ANTI_RANGE)
654 return false;
655
656 tree type = TREE_TYPE (size);
657 int prec = TYPE_PRECISION (type);
658
659 wide_int wone = wi::one (prec);
660
661 /* Compute the value of SSIZE_MAX, the largest positive value that
662 can be stored in ssize_t, the signed counterpart of size_t. */
663 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
664
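  /* SIZE must be zero when its anti-range excludes every value from 1 up
     to at least SSIZE_MAX, i.e. every nonzero value that could be a valid
     object size.  */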
  return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
}

/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

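/* ENDP selects the value the folded call produces (see the handling at
   the "done" label below): 0 and 3 return DEST, 1 returns DEST + LEN,
   and 2 returns DEST + LEN - 1; 3 additionally requests memmove
   semantics for possibly overlapping copies.  */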
static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
                               tree dest, tree src, int endp)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  bool nowarn = gimple_no_warning_p (stmt);

  /* If the LEN parameter is a constant zero or in a range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
        repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
        repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
         It's safe and may even be emitted by GCC itself (see bug
         32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
        {
          gsi_replace (gsi, gimple_build_nop (), false);
          return true;
        }
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;
      const char *tmp_str;
      unsigned HOST_WIDE_INT tmp_len;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
                                                         ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
         and then all stores, inline it that way.  Currently efficiently
         means that we can load all the memory into a single integer
         register, which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
          && compare_tree_int (len, MOVE_MAX) <= 0
          /* ??? Don't transform copies from strings with known length, as
             this confuses tree-ssa-strlen.c.  This doesn't handle
             the case in gcc.dg/strlenopt-8.c which is XFAILed for that
             reason.  */
          && !c_strlen (src, 2)
          && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
               && memchr (tmp_str, 0, tmp_len) == NULL))
        {
          unsigned ilen = tree_to_uhwi (len);
          if (pow2p_hwi (ilen))
            {
              /* Detect invalid bounds and overlapping copies and issue
                 either -Warray-bounds or -Wrestrict.  */
              if (!nowarn
                  && check_bounds_or_overlap (as_a <gcall *>(stmt),
                                              dest, src, len, len))
                gimple_set_no_warning (stmt, true);

              scalar_int_mode mode;
              tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
              if (type
                  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
                  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
                  /* If the destination pointer is not aligned we must be able
                     to emit an unaligned store.  */
                  && (dest_align >= GET_MODE_ALIGNMENT (mode)
                      || !targetm.slow_unaligned_access (mode, dest_align)
                      || (optab_handler (movmisalign_optab, mode)
                          != CODE_FOR_nothing)))
                {
                  tree srctype = type;
                  tree desttype = type;
                  if (src_align < GET_MODE_ALIGNMENT (mode))
                    srctype = build_aligned_type (type, src_align);
                  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
                  tree tem = fold_const_aggregate_ref (srcmem);
                  if (tem)
                    srcmem = tem;
                  else if (src_align < GET_MODE_ALIGNMENT (mode)
                           && targetm.slow_unaligned_access (mode, src_align)
                           && (optab_handler (movmisalign_optab, mode)
                               == CODE_FOR_nothing))
                    srcmem = NULL_TREE;
                  if (srcmem)
                    {
                      gimple *new_stmt;
                      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
                        {
                          new_stmt = gimple_build_assign (NULL_TREE, srcmem);
                          srcmem
                            = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
                                                          new_stmt);
                          gimple_assign_set_lhs (new_stmt, srcmem);
                          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      if (dest_align < GET_MODE_ALIGNMENT (mode))
                        desttype = build_aligned_type (type, dest_align);
                      new_stmt
                        = gimple_build_assign (fold_build2 (MEM_REF, desttype,
                                                            dest, off0),
                                               srcmem);
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      if (gimple_vdef (new_stmt)
                          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
                        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                      if (!lhs)
                        {
                          gsi_replace (gsi, new_stmt, false);
                          return true;
                        }
                      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                      goto done;
                    }
                }
            }
        }

      if (endp == 3)
        {
          /* Both DEST and SRC must be pointer types.
             ??? This is what old code did.  Is the testing for pointer types
             really mandatory?

             If either SRC is readonly or length is 1, we can use memcpy.  */
          if (!dest_align || !src_align)
            return false;
          if (readonly_data_expr (src)
              || (tree_fits_uhwi_p (len)
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
                      >= tree_to_uhwi (len))))
            {
              tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
          if (TREE_CODE (src) == ADDR_EXPR
              && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              poly_int64 src_offset = 0, dest_offset = 0;
              poly_uint64 maxsize;

              srcvar = TREE_OPERAND (src, 0);
              src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
              if (src_base == NULL)
                src_base = srcvar;
              destvar = TREE_OPERAND (dest, 0);
              dest_base = get_addr_base_and_unit_offset (destvar,
                                                         &dest_offset);
              if (dest_base == NULL)
                dest_base = destvar;
              if (!poly_int_tree_p (len, &maxsize))
                maxsize = -1;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
                  if (operand_equal_p (src_base, dest_base, 0)
                      && ranges_maybe_overlap_p (src_offset, maxsize,
                                                 dest_offset, maxsize))
                    return false;
                }
              else if (TREE_CODE (src_base) == MEM_REF
                       && TREE_CODE (dest_base) == MEM_REF)
                {
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
                                         TREE_OPERAND (dest_base, 0), 0))
                    return false;
                  poly_offset_int full_src_offset
                    = mem_ref_offset (src_base) + src_offset;
                  poly_offset_int full_dest_offset
                    = mem_ref_offset (dest_base) + dest_offset;
                  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
                                              full_dest_offset, maxsize))
                    return false;
                }
              else
                return false;

              fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If the destination and source do not alias, optimize into
             memcpy as well.  */
          if ((is_gimple_min_invariant (dest)
               || TREE_CODE (dest) == SSA_NAME)
              && (is_gimple_min_invariant (src)
                  || TREE_CODE (src) == SSA_NAME))
            {
              ao_ref destr, srcr;
              ao_ref_init_from_ptr_and_size (&destr, dest, len);
              ao_ref_init_from_ptr_and_size (&srcr, src, len);
              if (!refs_may_alias_p_1 (&destr, &srcr, false))
                {
                  tree fn;
                  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
                  if (!fn)
                    return false;
                  gimple_call_set_fndecl (stmt, fn);
                  gimple_call_set_arg (stmt, 0, dest);
                  gimple_call_set_arg (stmt, 1, src);
                  fold_stmt (gsi);
                  return true;
                }
            }

          return false;
        }

928 return false;
fef5a0d9
RB
929 if (!POINTER_TYPE_P (TREE_TYPE (src))
930 || !POINTER_TYPE_P (TREE_TYPE (dest)))
931 return false;
932 /* In the following try to find a type that is most natural to be
933 used for the memcpy source and destination and that allows
934 the most optimization when memcpy is turned into a plain assignment
935 using that type. In theory we could always use a char[len] type
936 but that only gains us that the destination and source possibly
937 no longer will have their address taken. */
fef5a0d9
RB
938 srctype = TREE_TYPE (TREE_TYPE (src));
939 if (TREE_CODE (srctype) == ARRAY_TYPE
940 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
42f74245 941 srctype = TREE_TYPE (srctype);
fef5a0d9
RB
942 desttype = TREE_TYPE (TREE_TYPE (dest));
943 if (TREE_CODE (desttype) == ARRAY_TYPE
944 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
42f74245 945 desttype = TREE_TYPE (desttype);
fef5a0d9
RB
946 if (TREE_ADDRESSABLE (srctype)
947 || TREE_ADDRESSABLE (desttype))
948 return false;
949
950 /* Make sure we are not copying using a floating-point mode or
951 a type whose size possibly does not match its precision. */
952 if (FLOAT_MODE_P (TYPE_MODE (desttype))
953 || TREE_CODE (desttype) == BOOLEAN_TYPE
954 || TREE_CODE (desttype) == ENUMERAL_TYPE)
955 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
956 if (FLOAT_MODE_P (TYPE_MODE (srctype))
957 || TREE_CODE (srctype) == BOOLEAN_TYPE
958 || TREE_CODE (srctype) == ENUMERAL_TYPE)
959 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
960 if (!srctype)
961 srctype = desttype;
962 if (!desttype)
963 desttype = srctype;
964 if (!srctype)
965 return false;
966
967 src_align = get_pointer_alignment (src);
968 dest_align = get_pointer_alignment (dest);
969 if (dest_align < TYPE_ALIGN (desttype)
970 || src_align < TYPE_ALIGN (srctype))
971 return false;
972
42f74245
RB
973 destvar = NULL_TREE;
974 if (TREE_CODE (dest) == ADDR_EXPR
975 && var_decl_component_p (TREE_OPERAND (dest, 0))
fef5a0d9 976 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
42f74245 977 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
fef5a0d9 978
42f74245
RB
979 srcvar = NULL_TREE;
980 if (TREE_CODE (src) == ADDR_EXPR
981 && var_decl_component_p (TREE_OPERAND (src, 0))
fef5a0d9
RB
982 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
983 {
984 if (!destvar
985 || src_align >= TYPE_ALIGN (desttype))
986 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
42f74245 987 src, off0);
fef5a0d9
RB
988 else if (!STRICT_ALIGNMENT)
989 {
990 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
991 src_align);
42f74245 992 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
fef5a0d9 993 }
fef5a0d9 994 }
fef5a0d9
RB
995
996 if (srcvar == NULL_TREE && destvar == NULL_TREE)
997 return false;
998
999 if (srcvar == NULL_TREE)
1000 {
fef5a0d9
RB
1001 if (src_align >= TYPE_ALIGN (desttype))
1002 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1003 else
1004 {
1005 if (STRICT_ALIGNMENT)
1006 return false;
1007 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1008 src_align);
1009 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1010 }
1011 }
1012 else if (destvar == NULL_TREE)
1013 {
fef5a0d9
RB
1014 if (dest_align >= TYPE_ALIGN (srctype))
1015 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1016 else
1017 {
1018 if (STRICT_ALIGNMENT)
1019 return false;
1020 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1021 dest_align);
1022 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1023 }
1024 }
1025
cc8bea0a
MS
1026 /* Detect invalid bounds and overlapping copies and issue either
1027 -Warray-bounds or -Wrestrict. */
1028 if (!nowarn)
1029 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1030
355fe088 1031 gimple *new_stmt;
fef5a0d9
RB
1032 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1033 {
921b13d0
RB
1034 tree tem = fold_const_aggregate_ref (srcvar);
1035 if (tem)
1036 srcvar = tem;
1037 if (! is_gimple_min_invariant (srcvar))
1038 {
1039 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
a15ebbcd
ML
1040 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1041 new_stmt);
921b13d0
RB
1042 gimple_assign_set_lhs (new_stmt, srcvar);
1043 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1044 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1045 }
d7257171
RB
1046 new_stmt = gimple_build_assign (destvar, srcvar);
1047 goto set_vop_and_replace;
fef5a0d9 1048 }
d7257171
RB
1049
1050 /* We get an aggregate copy. Use an unsigned char[] type to
1051 perform the copying to preserve padding and to avoid any issues
1052 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1053 desttype = build_array_type_nelts (unsigned_char_type_node,
1054 tree_to_uhwi (len));
1055 srctype = desttype;
1056 if (src_align > TYPE_ALIGN (srctype))
1057 srctype = build_aligned_type (srctype, src_align);
1058 if (dest_align > TYPE_ALIGN (desttype))
1059 desttype = build_aligned_type (desttype, dest_align);
1060 new_stmt
1061 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1062 fold_build2 (MEM_REF, srctype, src, off0));
1063set_vop_and_replace:
fef5a0d9
RB
1064 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1065 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1066 if (gimple_vdef (new_stmt)
1067 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1068 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1069 if (!lhs)
1070 {
f6b4dc28 1071 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
1072 return true;
1073 }
1074 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1075 }
1076
1077done:
74e3c262 1078 gimple_seq stmts = NULL;
fef5a0d9
RB
1079 if (endp == 0 || endp == 3)
1080 len = NULL_TREE;
1081 else if (endp == 2)
74e3c262
RB
1082 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1083 ssize_int (1));
fef5a0d9 1084 if (endp == 2 || endp == 1)
74e3c262
RB
1085 {
1086 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1087 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1088 TREE_TYPE (dest), dest, len);
1089 }
fef5a0d9 1090
74e3c262 1091 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 1092 gimple *repl = gimple_build_assign (lhs, dest);
f6b4dc28 1093 gsi_replace (gsi, repl, false);
fef5a0d9
RB
1094 return true;
1095}
1096
/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}

/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to the value C.  Return whether a simplification
   was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return NULL_TREE;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return NULL_TREE;

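      /* Replicate the low byte of C into every byte of CVAL so that a
         single word-sized store produces the same memory image memset
         would.  */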
      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}


/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is non-zero and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.  If FUZZY is 2, it handles
   PHIs and COND_EXPRs optimistically: if we can determine the string
   length minimum and maximum, it uses the minimum from the ones where it
   can be determined.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.
   Pass NONSTR through to children.
   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
                  int fuzzy, bool *flexp, unsigned eltsize, tree *nonstr)
{
  tree var, val = NULL_TREE;
  gimple *def_stmt;

  /* The minimum and maximum length.  */
  tree *const minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
        {
          tree op = TREE_OPERAND (arg, 0);
          if (integer_zerop (TREE_OPERAND (op, 1)))
            {
              tree aop0 = TREE_OPERAND (op, 0);
              if (TREE_CODE (aop0) == INDIRECT_REF
                  && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
                return get_range_strlen (TREE_OPERAND (aop0, 0), length,
                                         visited, type, fuzzy, flexp,
                                         eltsize, nonstr);
            }
          else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
            {
              /* Fail if an array is the last member of a struct object
                 since it could be treated as a (fake) flexible array
                 member.  */
              tree idx = TREE_OPERAND (op, 1);

              arg = TREE_OPERAND (op, 0);
              tree optype = TREE_TYPE (arg);
              if (tree dom = TYPE_DOMAIN (optype))
                if (tree bound = TYPE_MAX_VALUE (dom))
                  if (TREE_CODE (bound) == INTEGER_CST
                      && TREE_CODE (idx) == INTEGER_CST
                      && tree_int_cst_lt (bound, idx))
                    return false;
            }
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        {
          c_strlen_data data;
          memset (&data, 0, sizeof (c_strlen_data));
          val = c_strlen (arg, 1, &data, eltsize);

          /* If we potentially had a non-terminated string, then
             bubble that information up to the caller.  */
          if (!val && data.decl)
            {
              *nonstr = data.decl;
              *minlen = data.len;
              *maxlen = data.len;
              return type == 0 ? false : true;
            }
        }

      if (!val && fuzzy)
        {
          if (TREE_CODE (arg) == ADDR_EXPR)
            return get_range_strlen (TREE_OPERAND (arg, 0), length,
                                     visited, type, fuzzy, flexp,
                                     eltsize, nonstr);

          if (TREE_CODE (arg) == ARRAY_REF)
            {
              tree type = TREE_TYPE (TREE_OPERAND (arg, 0));

              /* Determine the "innermost" array type.  */
              while (TREE_CODE (type) == ARRAY_TYPE
                     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
                type = TREE_TYPE (type);

              /* Avoid arrays of pointers.  */
              tree eltype = TREE_TYPE (type);
              if (TREE_CODE (type) != ARRAY_TYPE
                  || !INTEGRAL_TYPE_P (eltype))
                return false;

              val = TYPE_SIZE_UNIT (type);
              if (!val || integer_zerop (val))
                return false;

              val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                                 integer_one_node);
              /* Set the minimum size to zero since the string in
                 the array could have zero length.  */
              *minlen = ssize_int (0);

              if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
                  && type == TREE_TYPE (TREE_OPERAND (arg, 0))
                  && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
                *flexp = true;
            }
          else if (TREE_CODE (arg) == COMPONENT_REF
                   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
                       == ARRAY_TYPE))
            {
              /* Use the type of the member array to determine the upper
                 bound on the length of the array.  This may be overly
                 optimistic if the array itself isn't NUL-terminated and
                 the caller relies on the subsequent member to contain
                 the NUL but that would only be considered valid if
                 the array were the last member of a struct.
                 Set *FLEXP to true if the array whose bound is being
                 used is at the end of a struct.  */
              if (array_at_struct_end_p (arg))
                *flexp = true;

              arg = TREE_OPERAND (arg, 1);

              tree type = TREE_TYPE (arg);

              while (TREE_CODE (type) == ARRAY_TYPE
                     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
                type = TREE_TYPE (type);

              /* Fail when the array bound is unknown or zero.  */
              val = TYPE_SIZE_UNIT (type);
              if (!val || integer_zerop (val))
                return false;
              val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                                 integer_one_node);
              /* Set the minimum size to zero since the string in
                 the array could have zero length.  */
              *minlen = ssize_int (0);
            }

          if (VAR_P (arg))
            {
              tree type = TREE_TYPE (arg);
              if (POINTER_TYPE_P (type))
                type = TREE_TYPE (type);

              if (TREE_CODE (type) == ARRAY_TYPE)
                {
                  val = TYPE_SIZE_UNIT (type);
                  if (!val
                      || TREE_CODE (val) != INTEGER_CST
                      || integer_zerop (val))
                    return false;
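                  /* The longest string the array can hold is one byte
                     shorter than its size, leaving room for the
                     terminating NUL.  */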
                  val = wide_int_to_tree (TREE_TYPE (val),
                                          wi::sub (wi::to_wide (val), 1));
                  /* Set the minimum size to zero since the string in
                     the array could have zero length.  */
                  *minlen = ssize_int (0);
                }
            }
        }

      if (!val)
        return false;

      if (!*minlen
          || (type > 0
              && TREE_CODE (*minlen) == INTEGER_CST
              && TREE_CODE (val) == INTEGER_CST
              && tree_int_cst_lt (val, *minlen)))
        *minlen = val;

      if (*maxlen)
        {
          if (type > 0)
            {
              if (TREE_CODE (*maxlen) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*maxlen, val))
                *maxlen = val;
              return true;
            }
          else if (simple_cst_equal (val, *maxlen) != 1)
            return false;
        }

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_range_strlen (rhs, length, visited, type, fuzzy, flexp,
                                   eltsize, nonstr);
        }
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
        {
          tree ops[2] = { gimple_assign_rhs2 (def_stmt),
                          gimple_assign_rhs3 (def_stmt) };

          for (unsigned int i = 0; i < 2; i++)
            if (!get_range_strlen (ops[i], length, visited, type, fuzzy,
                                   flexp, eltsize, nonstr))
              {
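                /* FUZZY == 2 means be optimistic: when the length of one
                   arm cannot be determined, treat its maximum as unbounded
                   instead of giving up.  */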
                if (fuzzy == 2)
                  *maxlen = build_all_ones_cst (size_type_node);
                else
                  return false;
              }
          return true;
        }
      return false;

    case GIMPLE_PHI:
      /* All the arguments of the PHI node must have the same constant
         length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
        {
          tree arg = gimple_phi_arg (def_stmt, i)->def;

          /* If this PHI has itself as an argument, we cannot
             determine the string length of this argument.  However,
             if we can find a constant string length for the other
             PHI args then we can still be sure that this is a
             constant string length.  So be optimistic and just
             continue with the next argument.  */
          if (arg == gimple_phi_result (def_stmt))
            continue;

          if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp,
                                 eltsize, nonstr))
            {
              if (fuzzy == 2)
                *maxlen = build_all_ones_cst (size_type_node);
              else
                return false;
            }
        }
      return true;

    default:
      return false;
    }
}

/* Determine the minimum and maximum value or string length that ARG
   refers to and store each in the first two elements of MINMAXLEN.
   For expressions that point to strings of unknown lengths that are
   character arrays, use the upper bound of the array as the maximum
   length.  For example, given an expression like 'x ? array : "xyz"'
   and array declared as 'char array[8]', MINMAXLEN[0] will be set
   to 0 and MINMAXLEN[1] to 7, the longest string that could be
   stored in array.
   Return true if the range of the string lengths has been obtained
   from the upper bound of an array at the end of a struct.  Such
   an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.

   STRICT is true if it will handle PHIs and COND_EXPRs conservatively
   and false if PHIs and COND_EXPRs are to be handled optimistically,
   if we can determine string length minimum and maximum; it will use
   the minimum from the ones where it can be determined.
   STRICT false should only be used for warning code.
   When non-null, clear *NONSTR if ARG refers to a constant array
   that is known not to be nul-terminated.  Otherwise set it to
   the declaration of the constant non-terminated array.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.  */

bool
get_range_strlen (tree arg, tree minmaxlen[2], unsigned eltsize,
                  bool strict, tree *nonstr /* = NULL */)
{
  bitmap visited = NULL;

  minmaxlen[0] = NULL_TREE;
  minmaxlen[1] = NULL_TREE;

  tree nonstrbuf;
  if (!nonstr)
    nonstr = &nonstrbuf;
  *nonstr = NULL_TREE;

  bool flexarray = false;
  if (!get_range_strlen (arg, minmaxlen, &visited, 1, strict ? 1 : 2,
                         &flexarray, eltsize, nonstr))
    {
      minmaxlen[0] = NULL_TREE;
      minmaxlen[1] = NULL_TREE;
    }

  if (visited)
    BITMAP_FREE (visited);

  return flexarray;
}

e08341bb
MS
1609/* Return the maximum string length for ARG, counting by TYPE
1610 (1, 2 or 4 for normal or wide chars). NONSTR indicates
1611 if the caller is prepared to handle unterminated strings.
1612
1613 If an unterminated string is discovered and our caller handles
1614 unterminated strings, then bubble up the offending DECL and
1615 return the maximum size. Otherwise return NULL. */
1616
dcb7fae2 1617tree
e08341bb 1618get_maxval_strlen (tree arg, int type, tree *nonstr /* = NULL */)
dcb7fae2
RB
1619{
1620 bitmap visited = NULL;
88d0c3f0 1621 tree len[2] = { NULL_TREE, NULL_TREE };
3f343040
MS
1622
1623 bool dummy;
e08341bb
MS
 1624   /* Set to non-null if ARG refers to an unterminated array.  */
1625 tree mynonstr = NULL_TREE;
1626 if (!get_range_strlen (arg, len, &visited, type, 0, &dummy, 1, &mynonstr))
88d0c3f0 1627 len[1] = NULL_TREE;
dcb7fae2
RB
1628 if (visited)
1629 BITMAP_FREE (visited);
1630
e08341bb
MS
1631 if (nonstr)
1632 {
1633 /* For callers prepared to handle unterminated arrays set
1634 *NONSTR to point to the declaration of the array and return
1635 the maximum length/size. */
1636 *nonstr = mynonstr;
1637 return len[1];
1638 }
1639
1640 /* Fail if the constant array isn't nul-terminated. */
1641 return mynonstr ? NULL_TREE : len[1];
dcb7fae2
RB
1642}
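/* A usage sketch for get_maxval_strlen (hypothetical caller and names;
   not part of the original sources):

     tree nonstr = NULL_TREE;
     tree max = get_maxval_strlen (src, 0, &nonstr);

   MAX is the string length of SRC, or NULL_TREE when it is unknown.
   When SRC refers to a known non-terminated constant array, NONSTR is
   set to its declaration and MAX is still returned, because the caller
   passed a non-null NONSTR and so claims to handle that case.  */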
1643
fef5a0d9
RB
1644
 1645/* Fold a call to the strcpy builtin with arguments DEST and SRC.
 1646   Return false if no simplification can be made, true if the call
 1647   was replaced (e.g. with an equivalent memcpy call). */
1648
1649static bool
1650gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 1651 tree dest, tree src)
fef5a0d9 1652{
cc8bea0a
MS
1653 gimple *stmt = gsi_stmt (*gsi);
1654 location_t loc = gimple_location (stmt);
fef5a0d9
RB
1655 tree fn;
1656
1657 /* If SRC and DEST are the same (and not volatile), return DEST. */
1658 if (operand_equal_p (src, dest, 0))
1659 {
8cd95cec
MS
1660 /* Issue -Wrestrict unless the pointers are null (those do
1661 not point to objects and so do not indicate an overlap;
1662 such calls could be the result of sanitization and jump
1663 threading). */
1664 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
e9b9fa4c
MS
1665 {
1666 tree func = gimple_call_fndecl (stmt);
cc8bea0a 1667
e9b9fa4c
MS
1668 warning_at (loc, OPT_Wrestrict,
1669 "%qD source argument is the same as destination",
1670 func);
1671 }
cc8bea0a 1672
fef5a0d9
RB
1673 replace_call_with_value (gsi, dest);
1674 return true;
1675 }
1676
1677 if (optimize_function_for_size_p (cfun))
1678 return false;
1679
1680 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1681 if (!fn)
1682 return false;
1683
e08341bb
MS
1684 /* Set to non-null if ARG refers to an unterminated array. */
1685 tree nonstr = NULL;
1686 tree len = get_maxval_strlen (src, 0, &nonstr);
1687
1688 if (nonstr)
1689 {
1690 /* Avoid folding calls with unterminated arrays. */
1691 if (!gimple_no_warning_p (stmt))
1692 warn_string_no_nul (loc, "strcpy", src, nonstr);
1693 gimple_set_no_warning (stmt, true);
1694 return false;
1695 }
1696
fef5a0d9 1697 if (!len)
dcb7fae2 1698 return false;
fef5a0d9
RB
1699
1700 len = fold_convert_loc (loc, size_type_node, len);
1701 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1702 len = force_gimple_operand_gsi (gsi, len, true,
1703 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1704 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1705 replace_call_with_call_and_fold (gsi, repl);
1706 return true;
1707}
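/* Illustrative source-level sketch of the fold above (made-up names;
   not from the original sources):

     char buf[8];
     strcpy (buf, "abc");

   is replaced, because strlen ("abc") == 3 is known and the source is
   nul-terminated, by

     memcpy (buf, "abc", 4);

   i.e. the known length plus one byte for the terminating nul.  */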
1708
 1709/* Fold a call to the strncpy builtin with arguments DEST, SRC, and LEN.
 1710   Return false if no simplification can be made, true if the call was
 1711   replaced (e.g. with an equivalent memcpy call). */
1712
1713static bool
dcb7fae2
RB
1714gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1715 tree dest, tree src, tree len)
fef5a0d9 1716{
025d57f0
MS
1717 gimple *stmt = gsi_stmt (*gsi);
1718 location_t loc = gimple_location (stmt);
6a33d0ff 1719 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
fef5a0d9
RB
1720
1721 /* If the LEN parameter is zero, return DEST. */
1722 if (integer_zerop (len))
1723 {
6a33d0ff
MS
 1724       /* Avoid warning if the destination refers to an array/pointer
 1725	  declared with attribute nonstring.  */
1726 if (!nonstring)
1727 {
1728 tree fndecl = gimple_call_fndecl (stmt);
6a33d0ff
MS
1729
1730 /* Warn about the lack of nul termination: the result is not
1731 a (nul-terminated) string. */
1732 tree slen = get_maxval_strlen (src, 0);
1733 if (slen && !integer_zerop (slen))
1734 warning_at (loc, OPT_Wstringop_truncation,
1735 "%G%qD destination unchanged after copying no bytes "
1736 "from a string of length %E",
8a45b051 1737 stmt, fndecl, slen);
6a33d0ff
MS
1738 else
1739 warning_at (loc, OPT_Wstringop_truncation,
1740 "%G%qD destination unchanged after copying no bytes",
8a45b051 1741 stmt, fndecl);
6a33d0ff 1742 }
025d57f0 1743
fef5a0d9
RB
1744 replace_call_with_value (gsi, dest);
1745 return true;
1746 }
1747
1748 /* We can't compare slen with len as constants below if len is not a
1749 constant. */
dcb7fae2 1750 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
1751 return false;
1752
fef5a0d9 1753 /* Now, we must be passed a constant src ptr parameter. */
1579e1f8 1754 tree slen = get_maxval_strlen (src, 0);
dcb7fae2 1755 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
1756 return false;
1757
025d57f0
MS
1758 /* The size of the source string including the terminating nul. */
1759 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
fef5a0d9
RB
1760
1761 /* We do not support simplification of this case, though we do
1762 support it when expanding trees into RTL. */
1763 /* FIXME: generate a call to __builtin_memset. */
025d57f0 1764 if (tree_int_cst_lt (ssize, len))
fef5a0d9
RB
1765 return false;
1766
5d0d5d68
MS
1767 /* Diagnose truncation that leaves the copy unterminated. */
1768 maybe_diag_stxncpy_trunc (*gsi, src, len);
025d57f0 1769
fef5a0d9 1770 /* OK transform into builtin memcpy. */
025d57f0 1771 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
fef5a0d9
RB
1772 if (!fn)
1773 return false;
1774
1775 len = fold_convert_loc (loc, size_type_node, len);
1776 len = force_gimple_operand_gsi (gsi, len, true,
1777 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1778 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9 1779 replace_call_with_call_and_fold (gsi, repl);
025d57f0 1780
fef5a0d9
RB
1781 return true;
1782}
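/* Illustrative sketch of the strncpy fold above (made-up names; not
   from the original sources): with a constant bound that is not larger
   than strlen (SRC) + 1,

     strncpy (buf, "ab", 3);

   becomes

     memcpy (buf, "ab", 3);

   since exactly the two characters and the terminating nul are copied
   and no zero padding is needed.  */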
1783
71dea1dd
WD
1784/* Fold function call to builtin strchr or strrchr.
1785 If both arguments are constant, evaluate and fold the result,
1786 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
912d9ec3
WD
1787 In general strlen is significantly faster than strchr
1788 due to being a simpler operation. */
1789static bool
71dea1dd 1790gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
912d9ec3
WD
1791{
1792 gimple *stmt = gsi_stmt (*gsi);
1793 tree str = gimple_call_arg (stmt, 0);
1794 tree c = gimple_call_arg (stmt, 1);
1795 location_t loc = gimple_location (stmt);
71dea1dd
WD
1796 const char *p;
1797 char ch;
912d9ec3 1798
71dea1dd 1799 if (!gimple_call_lhs (stmt))
912d9ec3
WD
1800 return false;
1801
71dea1dd
WD
1802 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1803 {
1804 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1805
1806 if (p1 == NULL)
1807 {
1808 replace_call_with_value (gsi, integer_zero_node);
1809 return true;
1810 }
1811
1812 tree len = build_int_cst (size_type_node, p1 - p);
1813 gimple_seq stmts = NULL;
1814 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1815 POINTER_PLUS_EXPR, str, len);
1816 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1817 gsi_replace_with_seq_vops (gsi, stmts);
1818 return true;
1819 }
1820
1821 if (!integer_zerop (c))
912d9ec3
WD
1822 return false;
1823
71dea1dd 1824 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
c8952930 1825 if (is_strrchr && optimize_function_for_size_p (cfun))
71dea1dd
WD
1826 {
1827 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1828
c8952930 1829 if (strchr_fn)
71dea1dd
WD
1830 {
1831 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1832 replace_call_with_call_and_fold (gsi, repl);
1833 return true;
1834 }
1835
1836 return false;
1837 }
1838
912d9ec3
WD
1839 tree len;
1840 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1841
1842 if (!strlen_fn)
1843 return false;
1844
1845 /* Create newstr = strlen (str). */
1846 gimple_seq stmts = NULL;
1847 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1848 gimple_set_location (new_stmt, loc);
a15ebbcd 1849 len = create_tmp_reg_or_ssa_name (size_type_node);
912d9ec3
WD
1850 gimple_call_set_lhs (new_stmt, len);
1851 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1852
1853 /* Create (str p+ strlen (str)). */
1854 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1855 POINTER_PLUS_EXPR, str, len);
1856 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1857 gsi_replace_with_seq_vops (gsi, stmts);
1858 /* gsi now points at the assignment to the lhs, get a
1859 stmt iterator to the strlen.
1860 ??? We can't use gsi_for_stmt as that doesn't work when the
1861 CFG isn't built yet. */
1862 gimple_stmt_iterator gsi2 = *gsi;
1863 gsi_prev (&gsi2);
1864 fold_stmt (&gsi2);
1865 return true;
1866}
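/* Illustrative sketches of the str(r)chr folds above (made-up names;
   not from the original sources):

     strrchr ("abcb", 'b')   is folded to   "abcb" + 3
     strchr (s, 0)           is folded to   s + strlen (s)

   where the second form is emitted as an explicit strlen call followed
   by a POINTER_PLUS_EXPR.  */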
1867
c8952930
JJ
1868/* Fold function call to builtin strstr.
1869 If both arguments are constant, evaluate and fold the result,
1870 additionally fold strstr (x, "") into x and strstr (x, "c")
1871 into strchr (x, 'c'). */
1872static bool
1873gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1874{
1875 gimple *stmt = gsi_stmt (*gsi);
1876 tree haystack = gimple_call_arg (stmt, 0);
1877 tree needle = gimple_call_arg (stmt, 1);
1878 const char *p, *q;
1879
1880 if (!gimple_call_lhs (stmt))
1881 return false;
1882
1883 q = c_getstr (needle);
1884 if (q == NULL)
1885 return false;
1886
1887 if ((p = c_getstr (haystack)))
1888 {
1889 const char *r = strstr (p, q);
1890
1891 if (r == NULL)
1892 {
1893 replace_call_with_value (gsi, integer_zero_node);
1894 return true;
1895 }
1896
1897 tree len = build_int_cst (size_type_node, r - p);
1898 gimple_seq stmts = NULL;
1899 gimple *new_stmt
1900 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1901 haystack, len);
1902 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1903 gsi_replace_with_seq_vops (gsi, stmts);
1904 return true;
1905 }
1906
1907 /* For strstr (x, "") return x. */
1908 if (q[0] == '\0')
1909 {
1910 replace_call_with_value (gsi, haystack);
1911 return true;
1912 }
1913
1914 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1915 if (q[1] == '\0')
1916 {
1917 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1918 if (strchr_fn)
1919 {
1920 tree c = build_int_cst (integer_type_node, q[0]);
1921 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1922 replace_call_with_call_and_fold (gsi, repl);
1923 return true;
1924 }
1925 }
1926
1927 return false;
1928}
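/* Illustrative sketches of the strstr folds above (made-up names; not
   from the original sources):

     strstr ("hello world", "world")   ->   "hello world" + 6
     strstr (x, "")                    ->   x
     strstr (x, "c")                   ->   strchr (x, 'c')  */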
1929
fef5a0d9
RB
 1930/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
 1931   to the call.
 1932
 1933   Return false if no simplification was possible.  Otherwise replace
 1934   the call in place (when the length of SRC is known, with a
 1935   strlen/memcpy sequence that appends SRC, including its terminating
 1936   nul, at DST + strlen (DST)) and return true. */
1947
1948static bool
dcb7fae2 1949gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
fef5a0d9 1950{
355fe088 1951 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 1952 location_t loc = gimple_location (stmt);
fef5a0d9
RB
1953
1954 const char *p = c_getstr (src);
1955
1956 /* If the string length is zero, return the dst parameter. */
1957 if (p && *p == '\0')
1958 {
1959 replace_call_with_value (gsi, dst);
1960 return true;
1961 }
1962
1963 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1964 return false;
1965
1966 /* See if we can store by pieces into (dst + strlen(dst)). */
1967 tree newdst;
1968 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1969 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1970
1971 if (!strlen_fn || !memcpy_fn)
1972 return false;
1973
1974 /* If the length of the source string isn't computable don't
1975 split strcat into strlen and memcpy. */
dcb7fae2 1976 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1977 if (! len)
fef5a0d9
RB
1978 return false;
1979
1980 /* Create strlen (dst). */
1981 gimple_seq stmts = NULL, stmts2;
355fe088 1982 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
fef5a0d9 1983 gimple_set_location (repl, loc);
a15ebbcd 1984 newdst = create_tmp_reg_or_ssa_name (size_type_node);
fef5a0d9
RB
1985 gimple_call_set_lhs (repl, newdst);
1986 gimple_seq_add_stmt_without_update (&stmts, repl);
1987
1988 /* Create (dst p+ strlen (dst)). */
1989 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1990 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1991 gimple_seq_add_seq_without_update (&stmts, stmts2);
1992
1993 len = fold_convert_loc (loc, size_type_node, len);
1994 len = size_binop_loc (loc, PLUS_EXPR, len,
1995 build_int_cst (size_type_node, 1));
1996 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1997 gimple_seq_add_seq_without_update (&stmts, stmts2);
1998
1999 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2000 gimple_seq_add_stmt_without_update (&stmts, repl);
2001 if (gimple_call_lhs (stmt))
2002 {
2003 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2004 gimple_seq_add_stmt_without_update (&stmts, repl);
2005 gsi_replace_with_seq_vops (gsi, stmts);
2006 /* gsi now points at the assignment to the lhs, get a
2007 stmt iterator to the memcpy call.
2008 ??? We can't use gsi_for_stmt as that doesn't work when the
2009 CFG isn't built yet. */
2010 gimple_stmt_iterator gsi2 = *gsi;
2011 gsi_prev (&gsi2);
2012 fold_stmt (&gsi2);
2013 }
2014 else
2015 {
2016 gsi_replace_with_seq_vops (gsi, stmts);
2017 fold_stmt (gsi);
2018 }
2019 return true;
2020}
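/* Illustrative sketch of the strcat fold above (made-up names; not
   from the original sources): when strlen (SRC) is known, e.g.

     strcat (dst, "abc");

   it is expanded (when optimizing for speed) into

     tmp = strlen (dst);
     memcpy (dst + tmp, "abc", 4);

   copying the three characters plus the terminating nul.  */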
2021
07f1cf56
RB
 2022/* Fold a call to the __strcat_chk builtin.  DEST, SRC, and SIZE
 2023   are the arguments to the call. */
2024
2025static bool
2026gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2027{
355fe088 2028 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
2029 tree dest = gimple_call_arg (stmt, 0);
2030 tree src = gimple_call_arg (stmt, 1);
2031 tree size = gimple_call_arg (stmt, 2);
2032 tree fn;
2033 const char *p;
2034
2035
2036 p = c_getstr (src);
2037 /* If the SRC parameter is "", return DEST. */
2038 if (p && *p == '\0')
2039 {
2040 replace_call_with_value (gsi, dest);
2041 return true;
2042 }
2043
2044 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2045 return false;
2046
2047 /* If __builtin_strcat_chk is used, assume strcat is available. */
2048 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2049 if (!fn)
2050 return false;
2051
355fe088 2052 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
2053 replace_call_with_call_and_fold (gsi, repl);
2054 return true;
2055}
2056
ad03a744
RB
2057/* Simplify a call to the strncat builtin. */
2058
2059static bool
2060gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2061{
8a45b051 2062 gimple *stmt = gsi_stmt (*gsi);
ad03a744
RB
2063 tree dst = gimple_call_arg (stmt, 0);
2064 tree src = gimple_call_arg (stmt, 1);
2065 tree len = gimple_call_arg (stmt, 2);
2066
2067 const char *p = c_getstr (src);
2068
2069 /* If the requested length is zero, or the src parameter string
2070 length is zero, return the dst parameter. */
2071 if (integer_zerop (len) || (p && *p == '\0'))
2072 {
2073 replace_call_with_value (gsi, dst);
2074 return true;
2075 }
2076
025d57f0
MS
2077 if (TREE_CODE (len) != INTEGER_CST || !p)
2078 return false;
2079
2080 unsigned srclen = strlen (p);
2081
2082 int cmpsrc = compare_tree_int (len, srclen);
2083
2084 /* Return early if the requested len is less than the string length.
2085 Warnings will be issued elsewhere later. */
2086 if (cmpsrc < 0)
2087 return false;
2088
2089 unsigned HOST_WIDE_INT dstsize;
2090
2091 bool nowarn = gimple_no_warning_p (stmt);
2092
2093 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
ad03a744 2094 {
025d57f0 2095 int cmpdst = compare_tree_int (len, dstsize);
ad03a744 2096
025d57f0
MS
2097 if (cmpdst >= 0)
2098 {
2099 tree fndecl = gimple_call_fndecl (stmt);
2100
2101 /* Strncat copies (at most) LEN bytes and always appends
2102 the terminating NUL so the specified bound should never
2103 be equal to (or greater than) the size of the destination.
2104 If it is, the copy could overflow. */
2105 location_t loc = gimple_location (stmt);
2106 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2107 cmpdst == 0
2108 ? G_("%G%qD specified bound %E equals "
2109 "destination size")
2110 : G_("%G%qD specified bound %E exceeds "
2111 "destination size %wu"),
2112 stmt, fndecl, len, dstsize);
2113 if (nowarn)
2114 gimple_set_no_warning (stmt, true);
2115 }
2116 }
ad03a744 2117
025d57f0
MS
2118 if (!nowarn && cmpsrc == 0)
2119 {
2120 tree fndecl = gimple_call_fndecl (stmt);
025d57f0 2121 location_t loc = gimple_location (stmt);
eec5f615
MS
2122
2123 /* To avoid possible overflow the specified bound should also
2124 not be equal to the length of the source, even when the size
2125 of the destination is unknown (it's not an uncommon mistake
 2126	 to specify as the bound to strncat the length of the source).  */
025d57f0
MS
2127 if (warning_at (loc, OPT_Wstringop_overflow_,
2128 "%G%qD specified bound %E equals source length",
2129 stmt, fndecl, len))
2130 gimple_set_no_warning (stmt, true);
ad03a744
RB
2131 }
2132
025d57f0
MS
2133 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2134
2135 /* If the replacement _DECL isn't initialized, don't do the
2136 transformation. */
2137 if (!fn)
2138 return false;
2139
2140 /* Otherwise, emit a call to strcat. */
2141 gcall *repl = gimple_build_call (fn, 2, dst, src);
2142 replace_call_with_call_and_fold (gsi, repl);
2143 return true;
ad03a744
RB
2144}
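/* Illustrative sketch of the strncat fold above (made-up names; not
   from the original sources): with a constant source and a constant
   bound strictly greater than its length,

     strncat (dst, "ab", 5);

   is transformed into

     strcat (dst, "ab");

   after the overflow/truncation diagnostics above have been given a
   chance to trigger.  */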
2145
745583f9
RB
2146/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2147 LEN, and SIZE. */
2148
2149static bool
2150gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2151{
355fe088 2152 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
2153 tree dest = gimple_call_arg (stmt, 0);
2154 tree src = gimple_call_arg (stmt, 1);
2155 tree len = gimple_call_arg (stmt, 2);
2156 tree size = gimple_call_arg (stmt, 3);
2157 tree fn;
2158 const char *p;
2159
2160 p = c_getstr (src);
2161 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2162 if ((p && *p == '\0')
2163 || integer_zerop (len))
2164 {
2165 replace_call_with_value (gsi, dest);
2166 return true;
2167 }
2168
2169 if (! tree_fits_uhwi_p (size))
2170 return false;
2171
2172 if (! integer_all_onesp (size))
2173 {
2174 tree src_len = c_strlen (src, 1);
2175 if (src_len
2176 && tree_fits_uhwi_p (src_len)
2177 && tree_fits_uhwi_p (len)
2178 && ! tree_int_cst_lt (len, src_len))
2179 {
2180 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2181 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2182 if (!fn)
2183 return false;
2184
355fe088 2185 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
2186 replace_call_with_call_and_fold (gsi, repl);
2187 return true;
2188 }
2189 return false;
2190 }
2191
2192 /* If __builtin_strncat_chk is used, assume strncat is available. */
2193 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2194 if (!fn)
2195 return false;
2196
355fe088 2197 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
2198 replace_call_with_call_and_fold (gsi, repl);
2199 return true;
2200}
2201
a918bfbf
ML
2202/* Build and append gimple statements to STMTS that would load a first
2203 character of a memory location identified by STR. LOC is location
2204 of the statement. */
2205
2206static tree
2207gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2208{
2209 tree var;
2210
2211 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2212 tree cst_uchar_ptr_node
2213 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2214 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2215
2216 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2217 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2218 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2219
2220 gimple_assign_set_lhs (stmt, var);
2221 gimple_seq_add_stmt_without_update (stmts, stmt);
2222
2223 return var;
2224}
2225
2226/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2227 FCODE is the name of the builtin. */
2228
2229static bool
2230gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2231{
2232 gimple *stmt = gsi_stmt (*gsi);
2233 tree callee = gimple_call_fndecl (stmt);
2234 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2235
2236 tree type = integer_type_node;
2237 tree str1 = gimple_call_arg (stmt, 0);
2238 tree str2 = gimple_call_arg (stmt, 1);
2239 tree lhs = gimple_call_lhs (stmt);
2240 HOST_WIDE_INT length = -1;
2241
2242 /* Handle strncmp and strncasecmp functions. */
2243 if (gimple_call_num_args (stmt) == 3)
2244 {
2245 tree len = gimple_call_arg (stmt, 2);
2246 if (tree_fits_uhwi_p (len))
2247 length = tree_to_uhwi (len);
2248 }
2249
2250 /* If the LEN parameter is zero, return zero. */
2251 if (length == 0)
2252 {
2253 replace_call_with_value (gsi, integer_zero_node);
2254 return true;
2255 }
2256
2257 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2258 if (operand_equal_p (str1, str2, 0))
2259 {
2260 replace_call_with_value (gsi, integer_zero_node);
2261 return true;
2262 }
2263
2264 const char *p1 = c_getstr (str1);
2265 const char *p2 = c_getstr (str2);
2266
2267 /* For known strings, return an immediate value. */
2268 if (p1 && p2)
2269 {
2270 int r = 0;
2271 bool known_result = false;
2272
2273 switch (fcode)
2274 {
2275 case BUILT_IN_STRCMP:
8b0b334a 2276 case BUILT_IN_STRCMP_EQ:
a918bfbf
ML
2277 {
2278 r = strcmp (p1, p2);
2279 known_result = true;
2280 break;
2281 }
2282 case BUILT_IN_STRNCMP:
8b0b334a 2283 case BUILT_IN_STRNCMP_EQ:
a918bfbf
ML
2284 {
2285 if (length == -1)
2286 break;
2287 r = strncmp (p1, p2, length);
2288 known_result = true;
2289 break;
2290 }
 2291	/* The only case we can handle is when the strings are equal (result 0),
 2292	   which is already handled by the operand_equal_p case above.  */
2293 case BUILT_IN_STRCASECMP:
2294 break;
2295 case BUILT_IN_STRNCASECMP:
2296 {
2297 if (length == -1)
2298 break;
2299 r = strncmp (p1, p2, length);
2300 if (r == 0)
2301 known_result = true;
5de73c05 2302 break;
a918bfbf
ML
2303 }
2304 default:
2305 gcc_unreachable ();
2306 }
2307
2308 if (known_result)
2309 {
2310 replace_call_with_value (gsi, build_cmp_result (type, r));
2311 return true;
2312 }
2313 }
2314
2315 bool nonzero_length = length >= 1
2316 || fcode == BUILT_IN_STRCMP
8b0b334a 2317 || fcode == BUILT_IN_STRCMP_EQ
a918bfbf
ML
2318 || fcode == BUILT_IN_STRCASECMP;
2319
2320 location_t loc = gimple_location (stmt);
2321
2322 /* If the second arg is "", return *(const unsigned char*)arg1. */
2323 if (p2 && *p2 == '\0' && nonzero_length)
2324 {
2325 gimple_seq stmts = NULL;
2326 tree var = gimple_load_first_char (loc, str1, &stmts);
2327 if (lhs)
2328 {
2329 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2330 gimple_seq_add_stmt_without_update (&stmts, stmt);
2331 }
2332
2333 gsi_replace_with_seq_vops (gsi, stmts);
2334 return true;
2335 }
2336
2337 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2338 if (p1 && *p1 == '\0' && nonzero_length)
2339 {
2340 gimple_seq stmts = NULL;
2341 tree var = gimple_load_first_char (loc, str2, &stmts);
2342
2343 if (lhs)
2344 {
2345 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2346 stmt = gimple_build_assign (c, NOP_EXPR, var);
2347 gimple_seq_add_stmt_without_update (&stmts, stmt);
2348
2349 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2350 gimple_seq_add_stmt_without_update (&stmts, stmt);
2351 }
2352
2353 gsi_replace_with_seq_vops (gsi, stmts);
2354 return true;
2355 }
2356
2357 /* If len parameter is one, return an expression corresponding to
 2358      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
2359 if (fcode == BUILT_IN_STRNCMP && length == 1)
2360 {
2361 gimple_seq stmts = NULL;
2362 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2363 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2364
2365 if (lhs)
2366 {
2367 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2368 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2369 gimple_seq_add_stmt_without_update (&stmts, convert1);
2370
2371 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2372 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2373 gimple_seq_add_stmt_without_update (&stmts, convert2);
2374
2375 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2376 gimple_seq_add_stmt_without_update (&stmts, stmt);
2377 }
2378
2379 gsi_replace_with_seq_vops (gsi, stmts);
2380 return true;
2381 }
2382
caed5c92
QZ
 2383   /* If the bound is larger than the length of one of the constant
 2384      strings, replace strncmp with the corresponding strcmp call.  */
2385 if (fcode == BUILT_IN_STRNCMP
2386 && length > 0
2387 && ((p2 && (size_t) length > strlen (p2))
2388 || (p1 && (size_t) length > strlen (p1))))
2389 {
2390 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2391 if (!fn)
2392 return false;
2393 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2394 replace_call_with_call_and_fold (gsi, repl);
2395 return true;
2396 }
2397
a918bfbf
ML
2398 return false;
2399}
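/* Illustrative sketches of the string comparison folds above (made-up
   names; not from the original sources):

     strcmp (x, "")        ->   *(const unsigned char *) x
     strncmp (x, y, 1)     ->   (int) *(const unsigned char *) x
                                - (int) *(const unsigned char *) y
     strncmp ("ab", y, 5)  ->   strcmp ("ab", y)

   and fully constant operands are folded to a constant result.  */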
2400
488c6247
ML
2401/* Fold a call to the memchr pointed by GSI iterator. */
2402
2403static bool
2404gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2405{
2406 gimple *stmt = gsi_stmt (*gsi);
2407 tree lhs = gimple_call_lhs (stmt);
2408 tree arg1 = gimple_call_arg (stmt, 0);
2409 tree arg2 = gimple_call_arg (stmt, 1);
2410 tree len = gimple_call_arg (stmt, 2);
2411
2412 /* If the LEN parameter is zero, return zero. */
2413 if (integer_zerop (len))
2414 {
2415 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2416 return true;
2417 }
2418
2419 char c;
2420 if (TREE_CODE (arg2) != INTEGER_CST
2421 || !tree_fits_uhwi_p (len)
2422 || !target_char_cst_p (arg2, &c))
2423 return false;
2424
2425 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2426 unsigned HOST_WIDE_INT string_length;
2427 const char *p1 = c_getstr (arg1, &string_length);
2428
2429 if (p1)
2430 {
2431 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2432 if (r == NULL)
2433 {
2434 if (length <= string_length)
2435 {
2436 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2437 return true;
2438 }
2439 }
2440 else
2441 {
2442 unsigned HOST_WIDE_INT offset = r - p1;
2443 gimple_seq stmts = NULL;
2444 if (lhs != NULL_TREE)
2445 {
2446 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2447 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2448 arg1, offset_cst);
2449 gimple_seq_add_stmt_without_update (&stmts, stmt);
2450 }
2451 else
2452 gimple_seq_add_stmt_without_update (&stmts,
2453 gimple_build_nop ());
2454
2455 gsi_replace_with_seq_vops (gsi, stmts);
2456 return true;
2457 }
2458 }
2459
2460 return false;
2461}
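/* Illustrative sketches of the memchr folds above (made-up names; not
   from the original sources):

     memchr ("hello", 'l', 3)   ->   "hello" + 2
     memchr ("hello", 'z', 2)   ->   (void *) 0

   the latter because the character is known not to occur within the
   searched prefix.  */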
a918bfbf 2462
fef5a0d9
RB
 2463/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
 2464   to the call.  UNLOCKED is true if this is actually a call to
 2465   fputs_unlocked.  Return false if no simplification was possible,
 2466   true if the call was replaced. */
2469
2470static bool
2471gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
fef5a0d9 2472 tree arg0, tree arg1,
dcb7fae2 2473 bool unlocked)
fef5a0d9 2474{
355fe088 2475 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2476
fef5a0d9
RB
2477 /* If we're using an unlocked function, assume the other unlocked
2478 functions exist explicitly. */
2479 tree const fn_fputc = (unlocked
2480 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2481 : builtin_decl_implicit (BUILT_IN_FPUTC));
2482 tree const fn_fwrite = (unlocked
2483 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2484 : builtin_decl_implicit (BUILT_IN_FWRITE));
2485
2486 /* If the return value is used, don't do the transformation. */
dcb7fae2 2487 if (gimple_call_lhs (stmt))
fef5a0d9
RB
2488 return false;
2489
fef5a0d9
RB
2490 /* Get the length of the string passed to fputs. If the length
2491 can't be determined, punt. */
dcb7fae2 2492 tree len = get_maxval_strlen (arg0, 0);
fef5a0d9
RB
2493 if (!len
2494 || TREE_CODE (len) != INTEGER_CST)
2495 return false;
2496
2497 switch (compare_tree_int (len, 1))
2498 {
 2499    case -1: /* length is 0, delete the call entirely.  */
2500 replace_call_with_value (gsi, integer_zero_node);
2501 return true;
2502
2503 case 0: /* length is 1, call fputc. */
2504 {
2505 const char *p = c_getstr (arg0);
2506 if (p != NULL)
2507 {
2508 if (!fn_fputc)
2509 return false;
2510
355fe088 2511 gimple *repl = gimple_build_call (fn_fputc, 2,
fef5a0d9
RB
2512 build_int_cst
2513 (integer_type_node, p[0]), arg1);
2514 replace_call_with_call_and_fold (gsi, repl);
2515 return true;
2516 }
2517 }
2518 /* FALLTHROUGH */
2519 case 1: /* length is greater than 1, call fwrite. */
2520 {
2521 /* If optimizing for size keep fputs. */
2522 if (optimize_function_for_size_p (cfun))
2523 return false;
2524 /* New argument list transforming fputs(string, stream) to
2525 fwrite(string, 1, len, stream). */
2526 if (!fn_fwrite)
2527 return false;
2528
355fe088 2529 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
fef5a0d9
RB
2530 size_one_node, len, arg1);
2531 replace_call_with_call_and_fold (gsi, repl);
2532 return true;
2533 }
2534 default:
2535 gcc_unreachable ();
2536 }
2537 return false;
2538}
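/* Illustrative sketches of the fputs folds above (made-up names; not
   from the original sources), valid when the return value is unused:

     fputs ("", fp)      ->   (the call is removed)
     fputs ("x", fp)     ->   fputc ('x', fp)
     fputs ("abc", fp)   ->   fwrite ("abc", 1, 3, fp)

   the last form only when not optimizing for size.  */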
2539
2540/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2541 DEST, SRC, LEN, and SIZE are the arguments to the call.
 2542   IGNORE is true if the return value can be ignored.  FCODE is the BUILT_IN_*
2543 code of the builtin. If MAXLEN is not NULL, it is maximum length
2544 passed as third argument. */
2545
2546static bool
2547gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
fef5a0d9 2548 tree dest, tree src, tree len, tree size,
fef5a0d9
RB
2549 enum built_in_function fcode)
2550{
355fe088 2551 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
2552 location_t loc = gimple_location (stmt);
2553 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2554 tree fn;
2555
2556 /* If SRC and DEST are the same (and not volatile), return DEST
2557 (resp. DEST+LEN for __mempcpy_chk). */
2558 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2559 {
2560 if (fcode != BUILT_IN_MEMPCPY_CHK)
2561 {
2562 replace_call_with_value (gsi, dest);
2563 return true;
2564 }
2565 else
2566 {
74e3c262
RB
2567 gimple_seq stmts = NULL;
2568 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
22518428
JJ
2569 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2570 TREE_TYPE (dest), dest, len);
74e3c262 2571 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
fef5a0d9
RB
2572 replace_call_with_value (gsi, temp);
2573 return true;
2574 }
2575 }
2576
2577 if (! tree_fits_uhwi_p (size))
2578 return false;
2579
dcb7fae2 2580 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9
RB
2581 if (! integer_all_onesp (size))
2582 {
2583 if (! tree_fits_uhwi_p (len))
2584 {
2585 /* If LEN is not constant, try MAXLEN too.
2586 For MAXLEN only allow optimizing into non-_ocs function
2587 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2588 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2589 {
2590 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2591 {
2592 /* (void) __mempcpy_chk () can be optimized into
2593 (void) __memcpy_chk (). */
2594 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2595 if (!fn)
2596 return false;
2597
355fe088 2598 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2599 replace_call_with_call_and_fold (gsi, repl);
2600 return true;
2601 }
2602 return false;
2603 }
2604 }
2605 else
2606 maxlen = len;
2607
2608 if (tree_int_cst_lt (size, maxlen))
2609 return false;
2610 }
2611
2612 fn = NULL_TREE;
2613 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2614 mem{cpy,pcpy,move,set} is available. */
2615 switch (fcode)
2616 {
2617 case BUILT_IN_MEMCPY_CHK:
2618 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2619 break;
2620 case BUILT_IN_MEMPCPY_CHK:
2621 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2622 break;
2623 case BUILT_IN_MEMMOVE_CHK:
2624 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2625 break;
2626 case BUILT_IN_MEMSET_CHK:
2627 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2628 break;
2629 default:
2630 break;
2631 }
2632
2633 if (!fn)
2634 return false;
2635
355fe088 2636 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
2637 replace_call_with_call_and_fold (gsi, repl);
2638 return true;
2639}
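/* Illustrative sketch of the _chk folds above (made-up names; not from
   the original sources): when the object size is unknown (all ones) or
   provably not smaller than the maximum length,

     __memcpy_chk (d, s, n, (size_t) -1);

   is replaced by the unchecked

     memcpy (d, s, n);

   and similarly for __mempcpy_chk, __memmove_chk and __memset_chk.  */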
2640
2641/* Fold a call to the __st[rp]cpy_chk builtin.
2642 DEST, SRC, and SIZE are the arguments to the call.
2643 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2644 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2645 strings passed as second argument. */
2646
2647static bool
2648gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
dcb7fae2 2649 tree dest,
fef5a0d9 2650 tree src, tree size,
fef5a0d9
RB
2651 enum built_in_function fcode)
2652{
355fe088 2653 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
2654 location_t loc = gimple_location (stmt);
2655 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2656 tree len, fn;
2657
2658 /* If SRC and DEST are the same (and not volatile), return DEST. */
2659 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2660 {
8cd95cec
MS
2661 /* Issue -Wrestrict unless the pointers are null (those do
2662 not point to objects and so do not indicate an overlap;
2663 such calls could be the result of sanitization and jump
2664 threading). */
2665 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
e9b9fa4c
MS
2666 {
2667 tree func = gimple_call_fndecl (stmt);
cc8bea0a 2668
e9b9fa4c
MS
2669 warning_at (loc, OPT_Wrestrict,
2670 "%qD source argument is the same as destination",
2671 func);
2672 }
cc8bea0a 2673
fef5a0d9
RB
2674 replace_call_with_value (gsi, dest);
2675 return true;
2676 }
2677
2678 if (! tree_fits_uhwi_p (size))
2679 return false;
2680
dcb7fae2 2681 tree maxlen = get_maxval_strlen (src, 1);
fef5a0d9
RB
2682 if (! integer_all_onesp (size))
2683 {
2684 len = c_strlen (src, 1);
2685 if (! len || ! tree_fits_uhwi_p (len))
2686 {
2687 /* If LEN is not constant, try MAXLEN too.
2688 For MAXLEN only allow optimizing into non-_ocs function
2689 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2690 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2691 {
2692 if (fcode == BUILT_IN_STPCPY_CHK)
2693 {
2694 if (! ignore)
2695 return false;
2696
2697 /* If return value of __stpcpy_chk is ignored,
2698 optimize into __strcpy_chk. */
2699 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2700 if (!fn)
2701 return false;
2702
355fe088 2703 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
fef5a0d9
RB
2704 replace_call_with_call_and_fold (gsi, repl);
2705 return true;
2706 }
2707
2708 if (! len || TREE_SIDE_EFFECTS (len))
2709 return false;
2710
2711 /* If c_strlen returned something, but not a constant,
2712 transform __strcpy_chk into __memcpy_chk. */
2713 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2714 if (!fn)
2715 return false;
2716
74e3c262
RB
2717 gimple_seq stmts = NULL;
2718 len = gimple_convert (&stmts, loc, size_type_node, len);
2719 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2720 build_int_cst (size_type_node, 1));
2721 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 2722 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2723 replace_call_with_call_and_fold (gsi, repl);
2724 return true;
2725 }
e256dfce 2726 }
fef5a0d9
RB
2727 else
2728 maxlen = len;
2729
2730 if (! tree_int_cst_lt (maxlen, size))
2731 return false;
e256dfce
RG
2732 }
2733
fef5a0d9
RB
2734 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2735 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2736 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2737 if (!fn)
2738 return false;
2739
355fe088 2740 gimple *repl = gimple_build_call (fn, 2, dest, src);
fef5a0d9
RB
2741 replace_call_with_call_and_fold (gsi, repl);
2742 return true;
2743}
2744
2745/* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2746 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2747 length passed as third argument. IGNORE is true if return value can be
2748 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2749
2750static bool
2751gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2752 tree dest, tree src,
dcb7fae2 2753 tree len, tree size,
fef5a0d9
RB
2754 enum built_in_function fcode)
2755{
355fe088 2756 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2757 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2758 tree fn;
2759
2760 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
cbdd87d4 2761 {
fef5a0d9
RB
2762 /* If return value of __stpncpy_chk is ignored,
2763 optimize into __strncpy_chk. */
2764 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2765 if (fn)
2766 {
355fe088 2767 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2768 replace_call_with_call_and_fold (gsi, repl);
2769 return true;
2770 }
cbdd87d4
RG
2771 }
2772
fef5a0d9
RB
2773 if (! tree_fits_uhwi_p (size))
2774 return false;
2775
dcb7fae2 2776 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9 2777 if (! integer_all_onesp (size))
cbdd87d4 2778 {
fef5a0d9 2779 if (! tree_fits_uhwi_p (len))
fe2ef088 2780 {
fef5a0d9
RB
2781 /* If LEN is not constant, try MAXLEN too.
2782 For MAXLEN only allow optimizing into non-_ocs function
2783 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2784 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2785 return false;
8a1561bc 2786 }
fef5a0d9
RB
2787 else
2788 maxlen = len;
2789
2790 if (tree_int_cst_lt (size, maxlen))
2791 return false;
cbdd87d4
RG
2792 }
2793
fef5a0d9
RB
2794 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2795 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2796 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2797 if (!fn)
2798 return false;
2799
355fe088 2800 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
2801 replace_call_with_call_and_fold (gsi, repl);
2802 return true;
cbdd87d4
RG
2803}
2804
2625bb5d
RB
 2805/* Fold a call to the stpcpy builtin with arguments DEST and SRC.
 2806   Return false if no simplification can be made, true otherwise. */
2807
2808static bool
2809gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2810{
2811 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2812 location_t loc = gimple_location (stmt);
2813 tree dest = gimple_call_arg (stmt, 0);
2814 tree src = gimple_call_arg (stmt, 1);
01b0acb7 2815 tree fn, lenp1;
2625bb5d
RB
2816
2817 /* If the result is unused, replace stpcpy with strcpy. */
2818 if (gimple_call_lhs (stmt) == NULL_TREE)
2819 {
2820 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2821 if (!fn)
2822 return false;
2823 gimple_call_set_fndecl (stmt, fn);
2824 fold_stmt (gsi);
2825 return true;
2826 }
2827
01b0acb7 2828 /* Set to non-null if ARG refers to an unterminated array. */
7d583f42
JL
2829 c_strlen_data data;
2830 memset (&data, 0, sizeof (c_strlen_data));
2831 tree len = c_strlen (src, 1, &data, 1);
2625bb5d
RB
2832 if (!len
2833 || TREE_CODE (len) != INTEGER_CST)
01b0acb7 2834 {
7d583f42
JL
2835 data.decl = unterminated_array (src);
2836 if (!data.decl)
01b0acb7
MS
2837 return false;
2838 }
2839
7d583f42 2840 if (data.decl)
01b0acb7
MS
2841 {
2842 /* Avoid folding calls with unterminated arrays. */
2843 if (!gimple_no_warning_p (stmt))
7d583f42 2844 warn_string_no_nul (loc, "stpcpy", src, data.decl);
01b0acb7
MS
2845 gimple_set_no_warning (stmt, true);
2846 return false;
2847 }
2625bb5d
RB
2848
2849 if (optimize_function_for_size_p (cfun)
2850 /* If length is zero it's small enough. */
2851 && !integer_zerop (len))
2852 return false;
2853
2854 /* If the source has a known length replace stpcpy with memcpy. */
2855 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2856 if (!fn)
2857 return false;
2858
2859 gimple_seq stmts = NULL;
2860 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2861 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2862 tem, build_int_cst (size_type_node, 1));
2863 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2864 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2865 gimple_set_vuse (repl, gimple_vuse (stmt));
2866 gimple_set_vdef (repl, gimple_vdef (stmt));
2867 if (gimple_vdef (repl)
2868 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2869 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2870 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2871 /* Replace the result with dest + len. */
2872 stmts = NULL;
2873 tem = gimple_convert (&stmts, loc, sizetype, len);
2874 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2875 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2876 POINTER_PLUS_EXPR, dest, tem);
f6b4dc28 2877 gsi_replace (gsi, ret, false);
2625bb5d
RB
2878 /* Finally fold the memcpy call. */
2879 gimple_stmt_iterator gsi2 = *gsi;
2880 gsi_prev (&gsi2);
2881 fold_stmt (&gsi2);
2882 return true;
2883}
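/* Illustrative sketch of the stpcpy fold above (made-up names; not
   from the original sources): with a known, nul-terminated source,

     p = stpcpy (d, "abc");

   becomes

     memcpy (d, "abc", 4);
     p = d + 3;

   and when the result is unused the call is simply turned into
   strcpy (d, "abc").  */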
2884
fef5a0d9
RB
 2885/* Fold a call to the __{,v}snprintf_chk builtin pointed to by GSI.
 2886   Return false if a normal call should be emitted rather than
 2887   simplifying the call inline.  FCODE is either BUILT_IN_SNPRINTF_CHK
 2888   or BUILT_IN_VSNPRINTF_CHK. */
cbdd87d4
RG
2890
2891static bool
fef5a0d9 2892gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
dcb7fae2 2893 enum built_in_function fcode)
cbdd87d4 2894{
538dd0b7 2895 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2896 tree dest, size, len, fn, fmt, flag;
2897 const char *fmt_str;
cbdd87d4 2898
fef5a0d9
RB
2899 /* Verify the required arguments in the original call. */
2900 if (gimple_call_num_args (stmt) < 5)
2901 return false;
cbdd87d4 2902
fef5a0d9
RB
2903 dest = gimple_call_arg (stmt, 0);
2904 len = gimple_call_arg (stmt, 1);
2905 flag = gimple_call_arg (stmt, 2);
2906 size = gimple_call_arg (stmt, 3);
2907 fmt = gimple_call_arg (stmt, 4);
2908
2909 if (! tree_fits_uhwi_p (size))
2910 return false;
2911
2912 if (! integer_all_onesp (size))
2913 {
dcb7fae2 2914 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9 2915 if (! tree_fits_uhwi_p (len))
cbdd87d4 2916 {
fef5a0d9
RB
2917 /* If LEN is not constant, try MAXLEN too.
2918 For MAXLEN only allow optimizing into non-_ocs function
2919 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2920 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
cbdd87d4
RG
2921 return false;
2922 }
2923 else
fef5a0d9 2924 maxlen = len;
cbdd87d4 2925
fef5a0d9
RB
2926 if (tree_int_cst_lt (size, maxlen))
2927 return false;
2928 }
cbdd87d4 2929
fef5a0d9
RB
2930 if (!init_target_chars ())
2931 return false;
cbdd87d4 2932
fef5a0d9
RB
2933 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2934 or if format doesn't contain % chars or is "%s". */
2935 if (! integer_zerop (flag))
2936 {
2937 fmt_str = c_getstr (fmt);
2938 if (fmt_str == NULL)
2939 return false;
2940 if (strchr (fmt_str, target_percent) != NULL
2941 && strcmp (fmt_str, target_percent_s))
2942 return false;
cbdd87d4
RG
2943 }
2944
fef5a0d9
RB
2945 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2946 available. */
2947 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2948 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2949 if (!fn)
491e0b9b
RG
2950 return false;
2951
fef5a0d9
RB
 2952  /* Replace the called function and the first 5 arguments by 3,
 2953     retaining the trailing varargs.  */
2954 gimple_call_set_fndecl (stmt, fn);
2955 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2956 gimple_call_set_arg (stmt, 0, dest);
2957 gimple_call_set_arg (stmt, 1, len);
2958 gimple_call_set_arg (stmt, 2, fmt);
2959 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2960 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2961 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2962 fold_stmt (gsi);
2963 return true;
2964}
cbdd87d4 2965
fef5a0d9
RB
 2966/* Fold a call to the __{,v}sprintf_chk builtin pointed to by GSI.
 2967   Return false if a normal call should be emitted rather than
 2968   simplifying the call inline.  FCODE is either BUILT_IN_SPRINTF_CHK
 2969   or BUILT_IN_VSPRINTF_CHK. */
cbdd87d4 2970
fef5a0d9
RB
2971static bool
2972gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2973 enum built_in_function fcode)
2974{
538dd0b7 2975 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2976 tree dest, size, len, fn, fmt, flag;
2977 const char *fmt_str;
2978 unsigned nargs = gimple_call_num_args (stmt);
cbdd87d4 2979
fef5a0d9
RB
2980 /* Verify the required arguments in the original call. */
2981 if (nargs < 4)
2982 return false;
2983 dest = gimple_call_arg (stmt, 0);
2984 flag = gimple_call_arg (stmt, 1);
2985 size = gimple_call_arg (stmt, 2);
2986 fmt = gimple_call_arg (stmt, 3);
2987
2988 if (! tree_fits_uhwi_p (size))
2989 return false;
2990
2991 len = NULL_TREE;
2992
2993 if (!init_target_chars ())
2994 return false;
2995
2996 /* Check whether the format is a literal string constant. */
2997 fmt_str = c_getstr (fmt);
2998 if (fmt_str != NULL)
2999 {
3000 /* If the format doesn't contain % args or %%, we know the size. */
3001 if (strchr (fmt_str, target_percent) == 0)
cbdd87d4 3002 {
fef5a0d9
RB
3003 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3004 len = build_int_cstu (size_type_node, strlen (fmt_str));
3005 }
3006 /* If the format is "%s" and first ... argument is a string literal,
3007 we know the size too. */
3008 else if (fcode == BUILT_IN_SPRINTF_CHK
3009 && strcmp (fmt_str, target_percent_s) == 0)
3010 {
3011 tree arg;
cbdd87d4 3012
fef5a0d9
RB
3013 if (nargs == 5)
3014 {
3015 arg = gimple_call_arg (stmt, 4);
3016 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3017 {
3018 len = c_strlen (arg, 1);
3019 if (! len || ! tree_fits_uhwi_p (len))
3020 len = NULL_TREE;
3021 }
3022 }
3023 }
3024 }
cbdd87d4 3025
fef5a0d9
RB
3026 if (! integer_all_onesp (size))
3027 {
3028 if (! len || ! tree_int_cst_lt (len, size))
3029 return false;
3030 }
cbdd87d4 3031
fef5a0d9
RB
3032 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3033 or if format doesn't contain % chars or is "%s". */
3034 if (! integer_zerop (flag))
3035 {
3036 if (fmt_str == NULL)
3037 return false;
3038 if (strchr (fmt_str, target_percent) != NULL
3039 && strcmp (fmt_str, target_percent_s))
3040 return false;
3041 }
cbdd87d4 3042
fef5a0d9
RB
3043 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3044 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3045 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3046 if (!fn)
3047 return false;
3048
 3049  /* Replace the called function and the first 4 arguments by 2,
 3050     retaining the trailing varargs.  */
3051 gimple_call_set_fndecl (stmt, fn);
3052 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3053 gimple_call_set_arg (stmt, 0, dest);
3054 gimple_call_set_arg (stmt, 1, fmt);
3055 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3056 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3057 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3058 fold_stmt (gsi);
3059 return true;
3060}
3061
35770bb2
RB
3062/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3063 ORIG may be null if this is a 2-argument call. We don't attempt to
3064 simplify calls with more than 3 arguments.
3065
a104bd88 3066 Return true if simplification was possible, otherwise false. */
35770bb2 3067
a104bd88 3068bool
dcb7fae2 3069gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
35770bb2 3070{
355fe088 3071 gimple *stmt = gsi_stmt (*gsi);
35770bb2
RB
3072 tree dest = gimple_call_arg (stmt, 0);
3073 tree fmt = gimple_call_arg (stmt, 1);
3074 tree orig = NULL_TREE;
3075 const char *fmt_str = NULL;
3076
3077 /* Verify the required arguments in the original call. We deal with two
3078 types of sprintf() calls: 'sprintf (str, fmt)' and
3079 'sprintf (dest, "%s", orig)'. */
3080 if (gimple_call_num_args (stmt) > 3)
3081 return false;
3082
3083 if (gimple_call_num_args (stmt) == 3)
3084 orig = gimple_call_arg (stmt, 2);
3085
3086 /* Check whether the format is a literal string constant. */
3087 fmt_str = c_getstr (fmt);
3088 if (fmt_str == NULL)
3089 return false;
3090
3091 if (!init_target_chars ())
3092 return false;
3093
3094 /* If the format doesn't contain % args or %%, use strcpy. */
3095 if (strchr (fmt_str, target_percent) == NULL)
3096 {
3097 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3098
3099 if (!fn)
3100 return false;
3101
3102 /* Don't optimize sprintf (buf, "abc", ptr++). */
3103 if (orig)
3104 return false;
3105
3106 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3107 'format' is known to contain no % formats. */
3108 gimple_seq stmts = NULL;
355fe088 3109 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
01b0acb7
MS
3110
3111 /* Propagate the NO_WARNING bit to avoid issuing the same
3112 warning more than once. */
3113 if (gimple_no_warning_p (stmt))
3114 gimple_set_no_warning (repl, true);
3115
35770bb2
RB
3116 gimple_seq_add_stmt_without_update (&stmts, repl);
3117 if (gimple_call_lhs (stmt))
3118 {
3119 repl = gimple_build_assign (gimple_call_lhs (stmt),
3120 build_int_cst (integer_type_node,
3121 strlen (fmt_str)));
3122 gimple_seq_add_stmt_without_update (&stmts, repl);
3123 gsi_replace_with_seq_vops (gsi, stmts);
3124 /* gsi now points at the assignment to the lhs, get a
3125 stmt iterator to the memcpy call.
3126 ??? We can't use gsi_for_stmt as that doesn't work when the
3127 CFG isn't built yet. */
3128 gimple_stmt_iterator gsi2 = *gsi;
3129 gsi_prev (&gsi2);
3130 fold_stmt (&gsi2);
3131 }
3132 else
3133 {
3134 gsi_replace_with_seq_vops (gsi, stmts);
3135 fold_stmt (gsi);
3136 }
3137 return true;
3138 }
3139
3140 /* If the format is "%s", use strcpy if the result isn't used. */
3141 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3142 {
3143 tree fn;
3144 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3145
3146 if (!fn)
3147 return false;
3148
3149 /* Don't crash on sprintf (str1, "%s"). */
3150 if (!orig)
3151 return false;
3152
dcb7fae2
RB
3153 tree orig_len = NULL_TREE;
3154 if (gimple_call_lhs (stmt))
35770bb2 3155 {
dcb7fae2 3156 orig_len = get_maxval_strlen (orig, 0);
d7e78447 3157 if (!orig_len)
35770bb2
RB
3158 return false;
3159 }
3160
3161 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3162 gimple_seq stmts = NULL;
355fe088 3163 gimple *repl = gimple_build_call (fn, 2, dest, orig);
01b0acb7
MS
3164
3165 /* Propagate the NO_WARNING bit to avoid issuing the same
3166 warning more than once. */
3167 if (gimple_no_warning_p (stmt))
3168 gimple_set_no_warning (repl, true);
3169
35770bb2
RB
3170 gimple_seq_add_stmt_without_update (&stmts, repl);
3171 if (gimple_call_lhs (stmt))
3172 {
d7e78447
RB
3173 if (!useless_type_conversion_p (integer_type_node,
3174 TREE_TYPE (orig_len)))
3175 orig_len = fold_convert (integer_type_node, orig_len);
3176 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
35770bb2
RB
3177 gimple_seq_add_stmt_without_update (&stmts, repl);
3178 gsi_replace_with_seq_vops (gsi, stmts);
3179 /* gsi now points at the assignment to the lhs, get a
3180 stmt iterator to the memcpy call.
3181 ??? We can't use gsi_for_stmt as that doesn't work when the
3182 CFG isn't built yet. */
3183 gimple_stmt_iterator gsi2 = *gsi;
3184 gsi_prev (&gsi2);
3185 fold_stmt (&gsi2);
3186 }
3187 else
3188 {
3189 gsi_replace_with_seq_vops (gsi, stmts);
3190 fold_stmt (gsi);
3191 }
3192 return true;
3193 }
3194 return false;
3195}
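/* Illustrative sketches of the sprintf folds above (made-up names; not
   from the original sources):

     n = sprintf (d, "abc");       ->   strcpy (d, "abc");  n = 3;
     n = sprintf (d, "%s", src);   ->   strcpy (d, src);    n = <length of src>;

   where the second form requires the length of SRC to be determinable
   when the return value is used.  */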
3196
d7e78447
RB
3197/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3198 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3199 attempt to simplify calls with more than 4 arguments.
35770bb2 3200
a104bd88 3201 Return true if simplification was possible, otherwise false. */
d7e78447 3202
a104bd88 3203bool
dcb7fae2 3204gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
d7e78447 3205{
538dd0b7 3206 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
d7e78447
RB
3207 tree dest = gimple_call_arg (stmt, 0);
3208 tree destsize = gimple_call_arg (stmt, 1);
3209 tree fmt = gimple_call_arg (stmt, 2);
3210 tree orig = NULL_TREE;
3211 const char *fmt_str = NULL;
3212
3213 if (gimple_call_num_args (stmt) > 4)
3214 return false;
3215
3216 if (gimple_call_num_args (stmt) == 4)
3217 orig = gimple_call_arg (stmt, 3);
3218
3219 if (!tree_fits_uhwi_p (destsize))
3220 return false;
3221 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3222
3223 /* Check whether the format is a literal string constant. */
3224 fmt_str = c_getstr (fmt);
3225 if (fmt_str == NULL)
3226 return false;
3227
3228 if (!init_target_chars ())
3229 return false;
3230
3231 /* If the format doesn't contain % args or %%, use strcpy. */
3232 if (strchr (fmt_str, target_percent) == NULL)
3233 {
3234 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3235 if (!fn)
3236 return false;
3237
3238 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3239 if (orig)
3240 return false;
3241
3242 /* We could expand this as
3243 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3244 or to
3245 memcpy (str, fmt_with_nul_at_cstm1, cst);
3246 but in the former case that might increase code size
3247 and in the latter case grow .rodata section too much.
3248 So punt for now. */
3249 size_t len = strlen (fmt_str);
3250 if (len >= destlen)
3251 return false;
3252
3253 gimple_seq stmts = NULL;
355fe088 3254 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
d7e78447
RB
3255 gimple_seq_add_stmt_without_update (&stmts, repl);
3256 if (gimple_call_lhs (stmt))
3257 {
3258 repl = gimple_build_assign (gimple_call_lhs (stmt),
3259 build_int_cst (integer_type_node, len));
3260 gimple_seq_add_stmt_without_update (&stmts, repl);
3261 gsi_replace_with_seq_vops (gsi, stmts);
3262 /* gsi now points at the assignment to the lhs, get a
 3263	     stmt iterator to the strcpy call.
3264 ??? We can't use gsi_for_stmt as that doesn't work when the
3265 CFG isn't built yet. */
3266 gimple_stmt_iterator gsi2 = *gsi;
3267 gsi_prev (&gsi2);
3268 fold_stmt (&gsi2);
3269 }
3270 else
3271 {
3272 gsi_replace_with_seq_vops (gsi, stmts);
3273 fold_stmt (gsi);
3274 }
3275 return true;
3276 }
3277
3278 /* If the format is "%s", use strcpy if the result isn't used. */
3279 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3280 {
3281 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3282 if (!fn)
3283 return false;
3284
3285 /* Don't crash on snprintf (str1, cst, "%s"). */
3286 if (!orig)
3287 return false;
3288
dcb7fae2 3289 tree orig_len = get_maxval_strlen (orig, 0);
af9db3a7 3290 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
dcb7fae2 3291 return false;
d7e78447
RB
3292
3293 /* We could expand this as
3294 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3295 or to
3296 memcpy (str1, str2_with_nul_at_cstm1, cst);
3297 but in the former case that might increase code size
3298 and in the latter case grow .rodata section too much.
3299 So punt for now. */
3300 if (compare_tree_int (orig_len, destlen) >= 0)
3301 return false;
3302
3303 /* Convert snprintf (str1, cst, "%s", str2) into
3304 strcpy (str1, str2) if strlen (str2) < cst. */
3305 gimple_seq stmts = NULL;
355fe088 3306 gimple *repl = gimple_build_call (fn, 2, dest, orig);
d7e78447
RB
3307 gimple_seq_add_stmt_without_update (&stmts, repl);
3308 if (gimple_call_lhs (stmt))
3309 {
3310 if (!useless_type_conversion_p (integer_type_node,
3311 TREE_TYPE (orig_len)))
3312 orig_len = fold_convert (integer_type_node, orig_len);
3313 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3314 gimple_seq_add_stmt_without_update (&stmts, repl);
3315 gsi_replace_with_seq_vops (gsi, stmts);
3316 /* gsi now points at the assignment to the lhs, get a
 3317	     stmt iterator to the strcpy call.
3318 ??? We can't use gsi_for_stmt as that doesn't work when the
3319 CFG isn't built yet. */
3320 gimple_stmt_iterator gsi2 = *gsi;
3321 gsi_prev (&gsi2);
3322 fold_stmt (&gsi2);
3323 }
3324 else
3325 {
3326 gsi_replace_with_seq_vops (gsi, stmts);
3327 fold_stmt (gsi);
3328 }
3329 return true;
3330 }
3331 return false;
3332}
35770bb2 3333
edd7ae68
RB
3334/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3335 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3336 more than 3 arguments, and ARG may be null in the 2-argument case.
3337
3338 Return true if a simplification was made and the call was replaced,
3339 false otherwise. FCODE is the BUILT_IN_* code of the function
3340 to be simplified. */
3341
3342static bool
3343gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3344 tree fp, tree fmt, tree arg,
3345 enum built_in_function fcode)
3346{
3347 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3348 tree fn_fputc, fn_fputs;
3349 const char *fmt_str = NULL;
3350
3351 /* If the return value is used, don't do the transformation. */
3352 if (gimple_call_lhs (stmt) != NULL_TREE)
3353 return false;
3354
3355 /* Check whether the format is a literal string constant. */
3356 fmt_str = c_getstr (fmt);
3357 if (fmt_str == NULL)
3358 return false;
3359
3360 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3361 {
3362 /* If we're using an unlocked function, assume the other
3363 unlocked functions exist explicitly. */
3364 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3365 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3366 }
3367 else
3368 {
3369 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3370 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3371 }
3372
3373 if (!init_target_chars ())
3374 return false;
3375
3376 /* If the format doesn't contain % args or %%, use fputs. */
3377 if (strchr (fmt_str, target_percent) == NULL)
3378 {
3379 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3380 && arg)
3381 return false;
3382
3383 /* If the format string was "", fprintf does nothing. */
3384 if (fmt_str[0] == '\0')
3385 {
3386 replace_call_with_value (gsi, NULL_TREE);
3387 return true;
3388 }
3389
3390 /* When "string" doesn't contain %, replace all cases of
3391 fprintf (fp, string) with fputs (string, fp). The fputs
3392 builtin will take care of special cases like length == 1. */
3393 if (fn_fputs)
3394 {
3395 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3396 replace_call_with_call_and_fold (gsi, repl);
3397 return true;
3398 }
3399 }
3400
3401 /* The other optimizations can be done only on the non-va_list variants. */
3402 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3403 return false;
3404
3405 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3406 else if (strcmp (fmt_str, target_percent_s) == 0)
3407 {
3408 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3409 return false;
3410 if (fn_fputs)
3411 {
3412 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3413 replace_call_with_call_and_fold (gsi, repl);
3414 return true;
3415 }
3416 }
3417
3418 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3419 else if (strcmp (fmt_str, target_percent_c) == 0)
3420 {
3421 if (!arg
3422 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3423 return false;
3424 if (fn_fputc)
3425 {
3426 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3427 replace_call_with_call_and_fold (gsi, repl);
3428 return true;
3429 }
3430 }
3431
3432 return false;
3433}
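
/* Illustrative examples of the fprintf folds above (hypothetical calls,
   return value unused):

     fprintf (fp, "hello");     becomes   fputs ("hello", fp);
     fprintf (fp, "%s", str);   becomes   fputs (str, fp);
     fprintf (fp, "%c", c);     becomes   fputc (c, fp);
     fprintf (fp, "");          is removed entirely.  */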
3434
ad03a744
RB
3435/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3436 FMT and ARG are the arguments to the call; we don't fold cases with
3437 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3438
3439 Return true if a simplification was made and the call was replaced,
3440 false otherwise. FCODE is the BUILT_IN_* code of the function
3441 to be simplified. */
3442
3443static bool
3444gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3445 tree arg, enum built_in_function fcode)
3446{
3447 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3448 tree fn_putchar, fn_puts, newarg;
3449 const char *fmt_str = NULL;
3450
3451 /* If the return value is used, don't do the transformation. */
3452 if (gimple_call_lhs (stmt) != NULL_TREE)
3453 return false;
3454
3455 /* Check whether the format is a literal string constant. */
3456 fmt_str = c_getstr (fmt);
3457 if (fmt_str == NULL)
3458 return false;
3459
3460 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3461 {
3462 /* If we're using an unlocked function, assume the other
3463 unlocked functions exist explicitly. */
3464 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3465 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3466 }
3467 else
3468 {
3469 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3470 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3471 }
3472
3473 if (!init_target_chars ())
3474 return false;
3475
3476 if (strcmp (fmt_str, target_percent_s) == 0
3477 || strchr (fmt_str, target_percent) == NULL)
3478 {
3479 const char *str;
3480
3481 if (strcmp (fmt_str, target_percent_s) == 0)
3482 {
3483 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3484 return false;
3485
3486 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3487 return false;
3488
3489 str = c_getstr (arg);
3490 if (str == NULL)
3491 return false;
3492 }
3493 else
3494 {
3495 /* The format specifier doesn't contain any '%' characters. */
3496 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3497 && arg)
3498 return false;
3499 str = fmt_str;
3500 }
3501
3502 /* If the string was "", printf does nothing. */
3503 if (str[0] == '\0')
3504 {
3505 replace_call_with_value (gsi, NULL_TREE);
3506 return true;
3507 }
3508
3509 /* If the string has length 1, call putchar. */
3510 if (str[1] == '\0')
3511 {
3512 /* Given printf ("c"), where c is any single character,
3513 convert "c"[0] to an int and pass that to the replacement
3514 function. */
3515 newarg = build_int_cst (integer_type_node, str[0]);
3516 if (fn_putchar)
3517 {
3518 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3519 replace_call_with_call_and_fold (gsi, repl);
3520 return true;
3521 }
3522 }
3523 else
3524 {
3525 /* If the string was "string\n", call puts("string"). */
3526 size_t len = strlen (str);
3527 if ((unsigned char)str[len - 1] == target_newline
3528 && (size_t) (int) len == len
3529 && (int) len > 0)
3530 {
3531 char *newstr;
ad03a744
RB
3532
3533 /* Create a NUL-terminated string that's one char shorter
3534 than the original, stripping off the trailing '\n'. */
a353fec4 3535 newstr = xstrdup (str);
ad03a744 3536 newstr[len - 1] = '\0';
a353fec4
BE
3537 newarg = build_string_literal (len, newstr);
3538 free (newstr);
ad03a744
RB
3539 if (fn_puts)
3540 {
3541 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3542 replace_call_with_call_and_fold (gsi, repl);
3543 return true;
3544 }
3545 }
3546 else
3547 /* We'd like to arrange to call fputs(string,stdout) here,
3548 but we need stdout and don't have a way to get it yet. */
3549 return false;
3550 }
3551 }
3552
3553 /* The other optimizations can be done only on the non-va_list variants. */
3554 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3555 return false;
3556
3557 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3558 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3559 {
3560 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3561 return false;
3562 if (fn_puts)
3563 {
3564 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3565 replace_call_with_call_and_fold (gsi, repl);
3566 return true;
3567 }
3568 }
3569
3570 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3571 else if (strcmp (fmt_str, target_percent_c) == 0)
3572 {
3573 if (!arg || ! useless_type_conversion_p (integer_type_node,
3574 TREE_TYPE (arg)))
3575 return false;
3576 if (fn_putchar)
3577 {
3578 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3579 replace_call_with_call_and_fold (gsi, repl);
3580 return true;
3581 }
3582 }
3583
3584 return false;
3585}
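
/* Illustrative examples of the printf folds above (hypothetical calls,
   return value unused):

     printf ("x");           becomes   putchar ('x');
     printf ("hello\n");     becomes   puts ("hello");
     printf ("%s\n", str);   becomes   puts (str);
     printf ("%c", c);       becomes   putchar (c);
     printf ("");            is removed entirely.  */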
3586
edd7ae68 3587
fef5a0d9
RB
3588
3589/* Fold a call to __builtin_strlen when the length of its argument is constant or can be bounded. */
3590
3591static bool
dcb7fae2 3592gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 3593{
355fe088 3594 gimple *stmt = gsi_stmt (*gsi);
e08341bb 3595 tree arg = gimple_call_arg (stmt, 0);
c42d0aa0
MS
3596
3597 wide_int minlen;
3598 wide_int maxlen;
3599
e08341bb
MS
3600 /* Set to non-null if ARG refers to an unterminated array. */
3601 tree nonstr;
c42d0aa0 3602 tree lenrange[2];
e08341bb 3603 if (!get_range_strlen (arg, lenrange, 1, true, &nonstr)
78125561 3604 && !nonstr
c42d0aa0
MS
3605 && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
3606 && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
3607 {
3608 /* The range of lengths refers to either a single constant
3609 string or to the longest and shortest constant string
3610 referenced by the argument of the strlen() call, or to
3611 the strings that can possibly be stored in the arrays
3612 the argument refers to. */
3613 minlen = wi::to_wide (lenrange[0]);
3614 maxlen = wi::to_wide (lenrange[1]);
3615 }
3616 else
3617 {
3618 unsigned prec = TYPE_PRECISION (sizetype);
3619
3620 minlen = wi::shwi (0, prec);
3621 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3622 }
3623
3624 if (minlen == maxlen)
3625 {
3626 lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
3627 true, GSI_SAME_STMT);
3628 replace_call_with_value (gsi, lenrange[0]);
3629 return true;
3630 }
3631
a7bf6c08
MS
3632 if (tree lhs = gimple_call_lhs (stmt))
3633 if (TREE_CODE (lhs) == SSA_NAME
3634 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
3635 set_range_info (lhs, VR_RANGE, minlen, maxlen);
c42d0aa0
MS
3636
3637 return false;
cbdd87d4
RG
3638}
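
/* For illustration (hypothetical GIMPLE): the call

     _1 = __builtin_strlen ("abcd");

   has the length range [4, 4], so it is replaced by the constant 4.
   When only a wider range [MIN, MAX] is known, the call is kept and
   the range is recorded on the SSA result via set_range_info.  */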
3639
48126138
NS
3640/* Fold a call to __builtin_acc_on_device. */
3641
3642static bool
3643gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3644{
3645 /* Defer folding until we know which compiler we're in. */
3646 if (symtab->state != EXPANSION)
3647 return false;
3648
3649 unsigned val_host = GOMP_DEVICE_HOST;
3650 unsigned val_dev = GOMP_DEVICE_NONE;
3651
3652#ifdef ACCEL_COMPILER
3653 val_host = GOMP_DEVICE_NOT_HOST;
3654 val_dev = ACCEL_COMPILER_acc_device;
3655#endif
3656
3657 location_t loc = gimple_location (gsi_stmt (*gsi));
3658
3659 tree host_eq = make_ssa_name (boolean_type_node);
3660 gimple *host_ass = gimple_build_assign
3661 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3662 gimple_set_location (host_ass, loc);
3663 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3664
3665 tree dev_eq = make_ssa_name (boolean_type_node);
3666 gimple *dev_ass = gimple_build_assign
3667 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3668 gimple_set_location (dev_ass, loc);
3669 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3670
3671 tree result = make_ssa_name (boolean_type_node);
3672 gimple *result_ass = gimple_build_assign
3673 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3674 gimple_set_location (result_ass, loc);
3675 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3676
3677 replace_call_with_value (gsi, result);
3678
3679 return true;
3680}
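
/* For illustration (hypothetical argument _1): once we know whether
   this is the host or the accelerator compiler, the call

     _2 = __builtin_acc_on_device (_1);

   is expanded into

     host_eq = _1 == <host device value>;
     dev_eq  = _1 == <this compiler's device value>;
     _2      = host_eq | dev_eq;

   and the original call is replaced by _2.  */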
cbdd87d4 3681
fe75f732
PK
3682/* Fold realloc (0, n) -> malloc (n). */
3683
3684static bool
3685gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3686{
3687 gimple *stmt = gsi_stmt (*gsi);
3688 tree arg = gimple_call_arg (stmt, 0);
3689 tree size = gimple_call_arg (stmt, 1);
3690
3691 if (operand_equal_p (arg, null_pointer_node, 0))
3692 {
3693 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3694 if (fn_malloc)
3695 {
3696 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3697 replace_call_with_call_and_fold (gsi, repl);
3698 return true;
3699 }
3700 }
3701 return false;
3702}
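
/* For illustration (hypothetical call):

     p = __builtin_realloc (0, n);

   becomes

     p = __builtin_malloc (n);

   since realloc with a null pointer behaves like malloc.  */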
3703
dcb7fae2
RB
3704/* Fold the non-target builtin at *GSI and return whether any simplification
3705 was made. */
cbdd87d4 3706
fef5a0d9 3707static bool
dcb7fae2 3708gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 3709{
538dd0b7 3710 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 3711 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 3712
dcb7fae2
RB
3713 /* Give up for always_inline inline builtins until they are
3714 inlined. */
3715 if (avoid_folding_inline_builtin (callee))
3716 return false;
cbdd87d4 3717
edd7ae68
RB
3718 unsigned n = gimple_call_num_args (stmt);
3719 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3720 switch (fcode)
cbdd87d4 3721 {
b3d8d88e
MS
3722 case BUILT_IN_BCMP:
3723 return gimple_fold_builtin_bcmp (gsi);
3724 case BUILT_IN_BCOPY:
3725 return gimple_fold_builtin_bcopy (gsi);
dcb7fae2 3726 case BUILT_IN_BZERO:
b3d8d88e
MS
3727 return gimple_fold_builtin_bzero (gsi);
3728
dcb7fae2
RB
3729 case BUILT_IN_MEMSET:
3730 return gimple_fold_builtin_memset (gsi,
3731 gimple_call_arg (stmt, 1),
3732 gimple_call_arg (stmt, 2));
dcb7fae2
RB
3733 case BUILT_IN_MEMCPY:
3734 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3735 gimple_call_arg (stmt, 1), 0);
3736 case BUILT_IN_MEMPCPY:
3737 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3738 gimple_call_arg (stmt, 1), 1);
3739 case BUILT_IN_MEMMOVE:
3740 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3741 gimple_call_arg (stmt, 1), 3);
3742 case BUILT_IN_SPRINTF_CHK:
3743 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 3744 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
3745 case BUILT_IN_STRCAT_CHK:
3746 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
3747 case BUILT_IN_STRNCAT_CHK:
3748 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 3749 case BUILT_IN_STRLEN:
dcb7fae2 3750 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 3751 case BUILT_IN_STRCPY:
dcb7fae2 3752 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 3753 gimple_call_arg (stmt, 0),
dcb7fae2 3754 gimple_call_arg (stmt, 1));
cbdd87d4 3755 case BUILT_IN_STRNCPY:
dcb7fae2 3756 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
3757 gimple_call_arg (stmt, 0),
3758 gimple_call_arg (stmt, 1),
dcb7fae2 3759 gimple_call_arg (stmt, 2));
9a7eefec 3760 case BUILT_IN_STRCAT:
dcb7fae2
RB
3761 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3762 gimple_call_arg (stmt, 1));
ad03a744
RB
3763 case BUILT_IN_STRNCAT:
3764 return gimple_fold_builtin_strncat (gsi);
71dea1dd 3765 case BUILT_IN_INDEX:
912d9ec3 3766 case BUILT_IN_STRCHR:
71dea1dd
WD
3767 return gimple_fold_builtin_strchr (gsi, false);
3768 case BUILT_IN_RINDEX:
3769 case BUILT_IN_STRRCHR:
3770 return gimple_fold_builtin_strchr (gsi, true);
c8952930
JJ
3771 case BUILT_IN_STRSTR:
3772 return gimple_fold_builtin_strstr (gsi);
a918bfbf 3773 case BUILT_IN_STRCMP:
8b0b334a 3774 case BUILT_IN_STRCMP_EQ:
a918bfbf
ML
3775 case BUILT_IN_STRCASECMP:
3776 case BUILT_IN_STRNCMP:
8b0b334a 3777 case BUILT_IN_STRNCMP_EQ:
a918bfbf
ML
3778 case BUILT_IN_STRNCASECMP:
3779 return gimple_fold_builtin_string_compare (gsi);
488c6247
ML
3780 case BUILT_IN_MEMCHR:
3781 return gimple_fold_builtin_memchr (gsi);
cbdd87d4 3782 case BUILT_IN_FPUTS:
dcb7fae2
RB
3783 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3784 gimple_call_arg (stmt, 1), false);
cbdd87d4 3785 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
3786 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3787 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
3788 case BUILT_IN_MEMCPY_CHK:
3789 case BUILT_IN_MEMPCPY_CHK:
3790 case BUILT_IN_MEMMOVE_CHK:
3791 case BUILT_IN_MEMSET_CHK:
dcb7fae2 3792 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
3793 gimple_call_arg (stmt, 0),
3794 gimple_call_arg (stmt, 1),
3795 gimple_call_arg (stmt, 2),
3796 gimple_call_arg (stmt, 3),
edd7ae68 3797 fcode);
2625bb5d
RB
3798 case BUILT_IN_STPCPY:
3799 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
3800 case BUILT_IN_STRCPY_CHK:
3801 case BUILT_IN_STPCPY_CHK:
dcb7fae2 3802 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
3803 gimple_call_arg (stmt, 0),
3804 gimple_call_arg (stmt, 1),
3805 gimple_call_arg (stmt, 2),
edd7ae68 3806 fcode);
cbdd87d4 3807 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 3808 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
3809 return gimple_fold_builtin_stxncpy_chk (gsi,
3810 gimple_call_arg (stmt, 0),
3811 gimple_call_arg (stmt, 1),
3812 gimple_call_arg (stmt, 2),
3813 gimple_call_arg (stmt, 3),
edd7ae68 3814 fcode);
cbdd87d4
RG
3815 case BUILT_IN_SNPRINTF_CHK:
3816 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 3817 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
a104bd88 3818
edd7ae68
RB
3819 case BUILT_IN_FPRINTF:
3820 case BUILT_IN_FPRINTF_UNLOCKED:
3821 case BUILT_IN_VFPRINTF:
3822 if (n == 2 || n == 3)
3823 return gimple_fold_builtin_fprintf (gsi,
3824 gimple_call_arg (stmt, 0),
3825 gimple_call_arg (stmt, 1),
3826 n == 3
3827 ? gimple_call_arg (stmt, 2)
3828 : NULL_TREE,
3829 fcode);
3830 break;
3831 case BUILT_IN_FPRINTF_CHK:
3832 case BUILT_IN_VFPRINTF_CHK:
3833 if (n == 3 || n == 4)
3834 return gimple_fold_builtin_fprintf (gsi,
3835 gimple_call_arg (stmt, 0),
3836 gimple_call_arg (stmt, 2),
3837 n == 4
3838 ? gimple_call_arg (stmt, 3)
3839 : NULL_TREE,
3840 fcode);
3841 break;
ad03a744
RB
3842 case BUILT_IN_PRINTF:
3843 case BUILT_IN_PRINTF_UNLOCKED:
3844 case BUILT_IN_VPRINTF:
3845 if (n == 1 || n == 2)
3846 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3847 n == 2
3848 ? gimple_call_arg (stmt, 1)
3849 : NULL_TREE, fcode);
3850 break;
3851 case BUILT_IN_PRINTF_CHK:
3852 case BUILT_IN_VPRINTF_CHK:
3853 if (n == 2 || n == 3)
3854 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3855 n == 3
3856 ? gimple_call_arg (stmt, 2)
3857 : NULL_TREE, fcode);
242a37f1 3858 break;
48126138
NS
3859 case BUILT_IN_ACC_ON_DEVICE:
3860 return gimple_fold_builtin_acc_on_device (gsi,
3861 gimple_call_arg (stmt, 0));
fe75f732
PK
3862 case BUILT_IN_REALLOC:
3863 return gimple_fold_builtin_realloc (gsi);
3864
fef5a0d9
RB
3865 default:;
3866 }
3867
3868 /* Try the generic builtin folder. */
3869 bool ignore = (gimple_call_lhs (stmt) == NULL);
3870 tree result = fold_call_stmt (stmt, ignore);
3871 if (result)
3872 {
3873 if (ignore)
3874 STRIP_NOPS (result);
3875 else
3876 result = fold_convert (gimple_call_return_type (stmt), result);
3877 if (!update_call_from_tree (gsi, result))
3878 gimplify_and_update_call_from_tree (gsi, result);
3879 return true;
3880 }
3881
3882 return false;
3883}
3884
451e8dae
NS
3885/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3886 function calls to constants, where possible. */
3887
3888static tree
3889fold_internal_goacc_dim (const gimple *call)
3890{
629b3d75
MJ
3891 int axis = oacc_get_ifn_dim_arg (call);
3892 int size = oacc_get_fn_dim_size (current_function_decl, axis);
451e8dae 3893 tree result = NULL_TREE;
67d2229e 3894 tree type = TREE_TYPE (gimple_call_lhs (call));
451e8dae 3895
67d2229e 3896 switch (gimple_call_internal_fn (call))
451e8dae 3897 {
67d2229e
TV
3898 case IFN_GOACC_DIM_POS:
3899 /* If the size is 1, we know the answer. */
3900 if (size == 1)
3901 result = build_int_cst (type, 0);
3902 break;
3903 case IFN_GOACC_DIM_SIZE:
3904 /* If the size is not dynamic, we know the answer. */
3905 if (size)
3906 result = build_int_cst (type, size);
3907 break;
3908 default:
3909 break;
451e8dae
NS
3910 }
3911
3912 return result;
3913}
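
/* For illustration (hypothetical internal calls): if the dimension on
   AXIS is known at compile time to be 1,

     _1 = GOACC_DIM_SIZE (axis);   folds to   _1 = 1;
     _2 = GOACC_DIM_POS (axis);    folds to   _2 = 0;

   because a dimension of size one only has position zero.  */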
3914
849a76a5
JJ
3915/* Return true if STMT is an __atomic_compare_exchange_N call which is suitable
3916 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3917 &var where var is only addressable because of such calls. */
3918
3919bool
3920optimize_atomic_compare_exchange_p (gimple *stmt)
3921{
3922 if (gimple_call_num_args (stmt) != 6
3923 || !flag_inline_atomics
3924 || !optimize
45b2222a 3925 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
849a76a5
JJ
3926 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3927 || !gimple_vdef (stmt)
3928 || !gimple_vuse (stmt))
3929 return false;
3930
3931 tree fndecl = gimple_call_fndecl (stmt);
3932 switch (DECL_FUNCTION_CODE (fndecl))
3933 {
3934 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3935 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3936 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3937 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3938 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3939 break;
3940 default:
3941 return false;
3942 }
3943
3944 tree expected = gimple_call_arg (stmt, 1);
3945 if (TREE_CODE (expected) != ADDR_EXPR
1de3c940
JJ
3946 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3947 return false;
3948
3949 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3950 if (!is_gimple_reg_type (etype)
849a76a5 3951 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
1de3c940
JJ
3952 || TREE_THIS_VOLATILE (etype)
3953 || VECTOR_TYPE_P (etype)
3954 || TREE_CODE (etype) == COMPLEX_TYPE
3955 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3956 might not preserve all the bits. See PR71716. */
3957 || SCALAR_FLOAT_TYPE_P (etype)
73a699ae
RS
3958 || maybe_ne (TYPE_PRECISION (etype),
3959 GET_MODE_BITSIZE (TYPE_MODE (etype))))
849a76a5
JJ
3960 return false;
3961
3962 tree weak = gimple_call_arg (stmt, 3);
3963 if (!integer_zerop (weak) && !integer_onep (weak))
3964 return false;
3965
3966 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3967 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3968 machine_mode mode = TYPE_MODE (itype);
3969
3970 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3971 == CODE_FOR_nothing
3972 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3973 return false;
3974
cf098191 3975 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
849a76a5
JJ
3976 return false;
3977
3978 return true;
3979}
3980
3981/* Fold
3982 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3983 into
3984 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3985 i = IMAGPART_EXPR <t>;
3986 r = (_Bool) i;
3987 e = REALPART_EXPR <t>; */
3988
3989void
3990fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3991{
3992 gimple *stmt = gsi_stmt (*gsi);
3993 tree fndecl = gimple_call_fndecl (stmt);
3994 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3995 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3996 tree ctype = build_complex_type (itype);
3997 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
cc195d46
JJ
3998 bool throws = false;
3999 edge e = NULL;
849a76a5
JJ
4000 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4001 expected);
4002 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4003 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4004 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
4005 {
4006 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4007 build1 (VIEW_CONVERT_EXPR, itype,
4008 gimple_assign_lhs (g)));
4009 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4010 }
4011 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4012 + int_size_in_bytes (itype);
4013 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4014 gimple_call_arg (stmt, 0),
4015 gimple_assign_lhs (g),
4016 gimple_call_arg (stmt, 2),
4017 build_int_cst (integer_type_node, flag),
4018 gimple_call_arg (stmt, 4),
4019 gimple_call_arg (stmt, 5));
4020 tree lhs = make_ssa_name (ctype);
4021 gimple_call_set_lhs (g, lhs);
4022 gimple_set_vdef (g, gimple_vdef (stmt));
4023 gimple_set_vuse (g, gimple_vuse (stmt));
4024 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
cc195d46
JJ
4025 tree oldlhs = gimple_call_lhs (stmt);
4026 if (stmt_can_throw_internal (stmt))
4027 {
4028 throws = true;
4029 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4030 }
4031 gimple_call_set_nothrow (as_a <gcall *> (g),
4032 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4033 gimple_call_set_lhs (stmt, NULL_TREE);
4034 gsi_replace (gsi, g, true);
4035 if (oldlhs)
849a76a5 4036 {
849a76a5
JJ
4037 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4038 build1 (IMAGPART_EXPR, itype, lhs));
cc195d46
JJ
4039 if (throws)
4040 {
4041 gsi_insert_on_edge_immediate (e, g);
4042 *gsi = gsi_for_stmt (g);
4043 }
4044 else
4045 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4046 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4047 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5 4048 }
849a76a5
JJ
4049 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4050 build1 (REALPART_EXPR, itype, lhs));
cc195d46
JJ
4051 if (throws && oldlhs == NULL_TREE)
4052 {
4053 gsi_insert_on_edge_immediate (e, g);
4054 *gsi = gsi_for_stmt (g);
4055 }
4056 else
4057 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5
JJ
4058 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4059 {
4060 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4061 VIEW_CONVERT_EXPR,
4062 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4063 gimple_assign_lhs (g)));
4064 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4065 }
4066 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4067 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4068 *gsi = gsiret;
4069}
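
/* For illustration (hypothetical source-level call):

     bool r = __atomic_compare_exchange_4 (p, &e, d, 0,
                                           __ATOMIC_SEQ_CST,
                                           __ATOMIC_SEQ_CST);

   is rewritten into the internal-function form shown above; E is then
   passed and returned by value, so it no longer needs to be
   addressable and can be rewritten into SSA form.  */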
4070
1304953e
JJ
4071/* Return true if ARG0 CODE ARG1, computed in infinite signed precision,
4072 does not fit into TYPE. The overflow test is performed regardless of
4073 -fwrapv, and also for unsigned types. */
4074
4075bool
4076arith_overflowed_p (enum tree_code code, const_tree type,
4077 const_tree arg0, const_tree arg1)
4078{
1304953e
JJ
4079 widest2_int warg0 = widest2_int_cst (arg0);
4080 widest2_int warg1 = widest2_int_cst (arg1);
4081 widest2_int wres;
4082 switch (code)
4083 {
4084 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4085 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4086 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4087 default: gcc_unreachable ();
4088 }
4089 signop sign = TYPE_SIGN (type);
4090 if (sign == UNSIGNED && wi::neg_p (wres))
4091 return true;
4092 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4093}
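
/* For illustration (hypothetical operands): for an 8-bit unsigned TYPE,
   PLUS_EXPR with ARG0 = 255 and ARG1 = 1 returns true because the
   infinite-precision result 256 needs 9 bits; MINUS_EXPR with
   ARG0 = 0 and ARG1 = 1 also returns true because the result -1 is
   negative while TYPE is unsigned.  */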
4094
cbdd87d4
RG
4095/* Attempt to fold a call statement referenced by the statement iterator GSI.
4096 The statement may be replaced by another statement, e.g., if the call
4097 simplifies to a constant value. Return true if any changes were made.
4098 It is assumed that the operands have been previously folded. */
4099
e021c122 4100static bool
ceeffab0 4101gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 4102{
538dd0b7 4103 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 4104 tree callee;
e021c122
RG
4105 bool changed = false;
4106 unsigned i;
cbdd87d4 4107
e021c122
RG
4108 /* Fold *& in call arguments. */
4109 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4110 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4111 {
4112 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4113 if (tmp)
4114 {
4115 gimple_call_set_arg (stmt, i, tmp);
4116 changed = true;
4117 }
4118 }
3b45a007
RG
4119
4120 /* Check for virtual calls that became direct calls. */
4121 callee = gimple_call_fn (stmt);
25583c4f 4122 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 4123 {
49c471e3
MJ
4124 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4125 {
450ad0cd
JH
4126 if (dump_file && virtual_method_call_p (callee)
4127 && !possible_polymorphic_call_target_p
6f8091fc
JH
4128 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4129 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
4130 {
4131 fprintf (dump_file,
a70e9985 4132 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
4133 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4134 fprintf (dump_file, " to ");
4135 print_generic_expr (dump_file, callee, TDF_SLIM);
4136 fprintf (dump_file, "\n");
4137 }
4138
49c471e3 4139 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
4140 changed = true;
4141 }
a70e9985 4142 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 4143 {
61dd6a2e
JH
4144 bool final;
4145 vec <cgraph_node *>targets
058d0a90 4146 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 4147 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 4148 {
a70e9985 4149 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
4150 if (dump_enabled_p ())
4151 {
4f5b9c80 4152 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
2b5f0895
XDL
4153 "folding virtual function call to %s\n",
4154 targets.length () == 1
4155 ? targets[0]->name ()
4156 : "__builtin_unreachable");
4157 }
61dd6a2e 4158 if (targets.length () == 1)
cf3e5a89 4159 {
18954840
JJ
4160 tree fndecl = targets[0]->decl;
4161 gimple_call_set_fndecl (stmt, fndecl);
cf3e5a89 4162 changed = true;
18954840
JJ
4163 /* If changing the call to __cxa_pure_virtual
4164 or similar noreturn function, adjust gimple_call_fntype
4165 too. */
865f7046 4166 if (gimple_call_noreturn_p (stmt)
18954840
JJ
4167 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4168 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4169 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4170 == void_type_node))
4171 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
a70e9985 4172 /* If the call becomes noreturn, remove the lhs. */
3cee7e4e
MP
4173 if (lhs
4174 && gimple_call_noreturn_p (stmt)
18954840 4175 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
abd3a68c 4176 || should_remove_lhs_p (lhs)))
a70e9985
JJ
4177 {
4178 if (TREE_CODE (lhs) == SSA_NAME)
4179 {
b731b390 4180 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 4181 tree def = get_or_create_ssa_default_def (cfun, var);
355fe088 4182 gimple *new_stmt = gimple_build_assign (lhs, def);
a70e9985
JJ
4183 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4184 }
4185 gimple_call_set_lhs (stmt, NULL_TREE);
4186 }
0b986c6a 4187 maybe_remove_unused_call_args (cfun, stmt);
cf3e5a89 4188 }
a70e9985 4189 else
cf3e5a89
JJ
4190 {
4191 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
355fe088 4192 gimple *new_stmt = gimple_build_call (fndecl, 0);
cf3e5a89 4193 gimple_set_location (new_stmt, gimple_location (stmt));
2da6996c
RB
4194 /* If the call had a SSA name as lhs morph that into
4195 an uninitialized value. */
a70e9985
JJ
4196 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4197 {
b731b390 4198 tree var = create_tmp_var (TREE_TYPE (lhs));
2da6996c
RB
4199 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4200 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4201 set_ssa_default_def (cfun, var, lhs);
42e52a51 4202 }
2da6996c
RB
4203 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4204 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4205 gsi_replace (gsi, new_stmt, false);
cf3e5a89
JJ
4206 return true;
4207 }
e021c122 4208 }
49c471e3 4209 }
e021c122 4210 }
49c471e3 4211
f2d3d07e
RH
4212 /* Check for indirect calls that became direct calls, and then
4213 no longer require a static chain. */
4214 if (gimple_call_chain (stmt))
4215 {
4216 tree fn = gimple_call_fndecl (stmt);
4217 if (fn && !DECL_STATIC_CHAIN (fn))
4218 {
4219 gimple_call_set_chain (stmt, NULL);
4220 changed = true;
4221 }
4222 else
4223 {
4224 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4225 if (tmp)
4226 {
4227 gimple_call_set_chain (stmt, tmp);
4228 changed = true;
4229 }
4230 }
4231 }
4232
e021c122
RG
4233 if (inplace)
4234 return changed;
4235
4236 /* Check for builtins that CCP can handle using information not
4237 available in the generic fold routines. */
fef5a0d9
RB
4238 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4239 {
4240 if (gimple_fold_builtin (gsi))
4241 changed = true;
4242 }
4243 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 4244 {
ea679d55 4245 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 4246 }
368b454d 4247 else if (gimple_call_internal_p (stmt))
ed9c79e1 4248 {
368b454d
JJ
4249 enum tree_code subcode = ERROR_MARK;
4250 tree result = NULL_TREE;
1304953e
JJ
4251 bool cplx_result = false;
4252 tree overflow = NULL_TREE;
368b454d
JJ
4253 switch (gimple_call_internal_fn (stmt))
4254 {
4255 case IFN_BUILTIN_EXPECT:
4256 result = fold_builtin_expect (gimple_location (stmt),
4257 gimple_call_arg (stmt, 0),
4258 gimple_call_arg (stmt, 1),
1e9168b2
ML
4259 gimple_call_arg (stmt, 2),
4260 NULL_TREE);
368b454d 4261 break;
0e82f089 4262 case IFN_UBSAN_OBJECT_SIZE:
ca1150f0
JJ
4263 {
4264 tree offset = gimple_call_arg (stmt, 1);
4265 tree objsize = gimple_call_arg (stmt, 2);
4266 if (integer_all_onesp (objsize)
4267 || (TREE_CODE (offset) == INTEGER_CST
4268 && TREE_CODE (objsize) == INTEGER_CST
4269 && tree_int_cst_le (offset, objsize)))
4270 {
4271 replace_call_with_value (gsi, NULL_TREE);
4272 return true;
4273 }
4274 }
4275 break;
4276 case IFN_UBSAN_PTR:
4277 if (integer_zerop (gimple_call_arg (stmt, 1)))
0e82f089 4278 {
ca1150f0 4279 replace_call_with_value (gsi, NULL_TREE);
0e82f089
MP
4280 return true;
4281 }
4282 break;
ca1150f0
JJ
4283 case IFN_UBSAN_BOUNDS:
4284 {
4285 tree index = gimple_call_arg (stmt, 1);
4286 tree bound = gimple_call_arg (stmt, 2);
4287 if (TREE_CODE (index) == INTEGER_CST
4288 && TREE_CODE (bound) == INTEGER_CST)
4289 {
4290 index = fold_convert (TREE_TYPE (bound), index);
4291 if (TREE_CODE (index) == INTEGER_CST
4292 && tree_int_cst_le (index, bound))
4293 {
4294 replace_call_with_value (gsi, NULL_TREE);
4295 return true;
4296 }
4297 }
4298 }
4299 break;
451e8dae
NS
4300 case IFN_GOACC_DIM_SIZE:
4301 case IFN_GOACC_DIM_POS:
4302 result = fold_internal_goacc_dim (stmt);
4303 break;
368b454d
JJ
4304 case IFN_UBSAN_CHECK_ADD:
4305 subcode = PLUS_EXPR;
4306 break;
4307 case IFN_UBSAN_CHECK_SUB:
4308 subcode = MINUS_EXPR;
4309 break;
4310 case IFN_UBSAN_CHECK_MUL:
4311 subcode = MULT_EXPR;
4312 break;
1304953e
JJ
4313 case IFN_ADD_OVERFLOW:
4314 subcode = PLUS_EXPR;
4315 cplx_result = true;
4316 break;
4317 case IFN_SUB_OVERFLOW:
4318 subcode = MINUS_EXPR;
4319 cplx_result = true;
4320 break;
4321 case IFN_MUL_OVERFLOW:
4322 subcode = MULT_EXPR;
4323 cplx_result = true;
4324 break;
368b454d
JJ
4325 default:
4326 break;
4327 }
4328 if (subcode != ERROR_MARK)
4329 {
4330 tree arg0 = gimple_call_arg (stmt, 0);
4331 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
4332 tree type = TREE_TYPE (arg0);
4333 if (cplx_result)
4334 {
4335 tree lhs = gimple_call_lhs (stmt);
4336 if (lhs == NULL_TREE)
4337 type = NULL_TREE;
4338 else
4339 type = TREE_TYPE (TREE_TYPE (lhs));
4340 }
4341 if (type == NULL_TREE)
4342 ;
368b454d 4343 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
4344 else if (integer_zerop (arg1))
4345 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
4346 /* x = 0 + y; x = 0 * y; */
4347 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 4348 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
4349 /* x = y - y; */
4350 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 4351 result = integer_zero_node;
368b454d 4352 /* x = y * 1; x = 1 * y; */
1304953e
JJ
4353 else if (subcode == MULT_EXPR && integer_onep (arg1))
4354 result = arg0;
4355 else if (subcode == MULT_EXPR && integer_onep (arg0))
4356 result = arg1;
4357 else if (TREE_CODE (arg0) == INTEGER_CST
4358 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 4359 {
1304953e
JJ
4360 if (cplx_result)
4361 result = int_const_binop (subcode, fold_convert (type, arg0),
4362 fold_convert (type, arg1));
4363 else
4364 result = int_const_binop (subcode, arg0, arg1);
4365 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4366 {
4367 if (cplx_result)
4368 overflow = build_one_cst (type);
4369 else
4370 result = NULL_TREE;
4371 }
4372 }
4373 if (result)
4374 {
4375 if (result == integer_zero_node)
4376 result = build_zero_cst (type);
4377 else if (cplx_result && TREE_TYPE (result) != type)
4378 {
4379 if (TREE_CODE (result) == INTEGER_CST)
4380 {
4381 if (arith_overflowed_p (PLUS_EXPR, type, result,
4382 integer_zero_node))
4383 overflow = build_one_cst (type);
4384 }
4385 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4386 && TYPE_UNSIGNED (type))
4387 || (TYPE_PRECISION (type)
4388 < (TYPE_PRECISION (TREE_TYPE (result))
4389 + (TYPE_UNSIGNED (TREE_TYPE (result))
4390 && !TYPE_UNSIGNED (type)))))
4391 result = NULL_TREE;
4392 if (result)
4393 result = fold_convert (type, result);
4394 }
368b454d
JJ
4395 }
4396 }
1304953e 4397
ed9c79e1
JJ
4398 if (result)
4399 {
1304953e
JJ
4400 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4401 result = drop_tree_overflow (result);
4402 if (cplx_result)
4403 {
4404 if (overflow == NULL_TREE)
4405 overflow = build_zero_cst (TREE_TYPE (result));
4406 tree ctype = build_complex_type (TREE_TYPE (result));
4407 if (TREE_CODE (result) == INTEGER_CST
4408 && TREE_CODE (overflow) == INTEGER_CST)
4409 result = build_complex (ctype, result, overflow);
4410 else
4411 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4412 ctype, result, overflow);
4413 }
ed9c79e1
JJ
4414 if (!update_call_from_tree (gsi, result))
4415 gimplify_and_update_call_from_tree (gsi, result);
4416 changed = true;
4417 }
4418 }
3b45a007 4419
e021c122 4420 return changed;
cbdd87d4
RG
4421}
4422
e0ee10ed 4423
89a79e96
RB
4424/* Return true if NAME has a use on STMT. */
4425
4426static bool
355fe088 4427has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
4428{
4429 imm_use_iterator iter;
4430 use_operand_p use_p;
4431 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4432 if (USE_STMT (use_p) == stmt)
4433 return true;
4434 return false;
4435}
4436
e0ee10ed
RB
4437/* Worker for fold_stmt_1 dispatch to pattern based folding with
4438 gimple_simplify.
4439
4440 Replaces *GSI with the simplification result in *RES_OP
4441 and the associated statements in *SEQ. Does the replacement
4442 according to INPLACE and returns true if the operation succeeded. */
4443
4444static bool
4445replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5d75ad95 4446 gimple_match_op *res_op,
e0ee10ed
RB
4447 gimple_seq *seq, bool inplace)
4448{
355fe088 4449 gimple *stmt = gsi_stmt (*gsi);
5d75ad95
RS
4450 tree *ops = res_op->ops;
4451 unsigned int num_ops = res_op->num_ops;
e0ee10ed
RB
4452
4453 /* Play safe and do not allow abnormals to be mentioned in
89a79e96
RB
4454 newly created statements. See also maybe_push_res_to_seq.
4455 As an exception allow such uses if there was a use of the
4456 same SSA name on the old stmt. */
5d75ad95
RS
4457 for (unsigned int i = 0; i < num_ops; ++i)
4458 if (TREE_CODE (ops[i]) == SSA_NAME
4459 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4460 && !has_use_on_stmt (ops[i], stmt))
4461 return false;
4462
4463 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4464 for (unsigned int i = 0; i < 2; ++i)
4465 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4466 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4467 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4468 return false;
e0ee10ed 4469
fec40d06
RS
4470 /* Don't insert new statements when INPLACE is true, even if we could
4471 reuse STMT for the final statement. */
4472 if (inplace && !gimple_seq_empty_p (*seq))
4473 return false;
4474
538dd0b7 4475 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
e0ee10ed 4476 {
5d75ad95
RS
4477 gcc_assert (res_op->code.is_tree_code ());
4478 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
e0ee10ed
RB
4479 /* GIMPLE_CONDs condition may not throw. */
4480 && (!flag_exceptions
4481 || !cfun->can_throw_non_call_exceptions
5d75ad95 4482 || !operation_could_trap_p (res_op->code,
e0ee10ed
RB
4483 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4484 false, NULL_TREE)))
5d75ad95
RS
4485 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4486 else if (res_op->code == SSA_NAME)
538dd0b7 4487 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
e0ee10ed 4488 build_zero_cst (TREE_TYPE (ops[0])));
5d75ad95 4489 else if (res_op->code == INTEGER_CST)
e0ee10ed
RB
4490 {
4491 if (integer_zerop (ops[0]))
538dd0b7 4492 gimple_cond_make_false (cond_stmt);
e0ee10ed 4493 else
538dd0b7 4494 gimple_cond_make_true (cond_stmt);
e0ee10ed
RB
4495 }
4496 else if (!inplace)
4497 {
5d75ad95 4498 tree res = maybe_push_res_to_seq (res_op, seq);
e0ee10ed
RB
4499 if (!res)
4500 return false;
538dd0b7 4501 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
e0ee10ed
RB
4502 build_zero_cst (TREE_TYPE (res)));
4503 }
4504 else
4505 return false;
4506 if (dump_file && (dump_flags & TDF_DETAILS))
4507 {
4508 fprintf (dump_file, "gimple_simplified to ");
4509 if (!gimple_seq_empty_p (*seq))
4510 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4511 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4512 0, TDF_SLIM);
4513 }
4514 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4515 return true;
4516 }
4517 else if (is_gimple_assign (stmt)
5d75ad95 4518 && res_op->code.is_tree_code ())
e0ee10ed
RB
4519 {
4520 if (!inplace
5d75ad95 4521 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
e0ee10ed 4522 {
5d75ad95
RS
4523 maybe_build_generic_op (res_op);
4524 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4525 res_op->op_or_null (0),
4526 res_op->op_or_null (1),
4527 res_op->op_or_null (2));
e0ee10ed
RB
4528 if (dump_file && (dump_flags & TDF_DETAILS))
4529 {
4530 fprintf (dump_file, "gimple_simplified to ");
4531 if (!gimple_seq_empty_p (*seq))
4532 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4533 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4534 0, TDF_SLIM);
4535 }
4536 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4537 return true;
4538 }
4539 }
5d75ad95
RS
4540 else if (res_op->code.is_fn_code ()
4541 && gimple_call_combined_fn (stmt) == res_op->code)
37d486ab 4542 {
5d75ad95
RS
4543 gcc_assert (num_ops == gimple_call_num_args (stmt));
4544 for (unsigned int i = 0; i < num_ops; ++i)
4545 gimple_call_set_arg (stmt, i, ops[i]);
fec40d06
RS
4546 if (dump_file && (dump_flags & TDF_DETAILS))
4547 {
4548 fprintf (dump_file, "gimple_simplified to ");
4549 if (!gimple_seq_empty_p (*seq))
4550 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4551 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4552 }
4553 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
37d486ab
RB
4554 return true;
4555 }
e0ee10ed
RB
4556 else if (!inplace)
4557 {
4558 if (gimple_has_lhs (stmt))
4559 {
4560 tree lhs = gimple_get_lhs (stmt);
5d75ad95 4561 if (!maybe_push_res_to_seq (res_op, seq, lhs))
de665bbd 4562 return false;
e0ee10ed
RB
4563 if (dump_file && (dump_flags & TDF_DETAILS))
4564 {
4565 fprintf (dump_file, "gimple_simplified to ");
4566 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4567 }
4568 gsi_replace_with_seq_vops (gsi, *seq);
4569 return true;
4570 }
4571 else
4572 gcc_unreachable ();
4573 }
4574
4575 return false;
4576}
4577
040292e7
RB
4578/* Canonicalize a MEM_REF's invariant address operand after propagation. */
4579
4580static bool
4581maybe_canonicalize_mem_ref_addr (tree *t)
4582{
4583 bool res = false;
4584
4585 if (TREE_CODE (*t) == ADDR_EXPR)
4586 t = &TREE_OPERAND (*t, 0);
4587
f17a223d
RB
4588 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4589 generic vector extension. The actual vector referenced is
4590 view-converted to an array type for this purpose. If the index
4591 is constant the canonical representation in the middle-end is a
4592 BIT_FIELD_REF so re-write the former to the latter here. */
4593 if (TREE_CODE (*t) == ARRAY_REF
4594 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4595 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4596 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4597 {
4598 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4599 if (VECTOR_TYPE_P (vtype))
4600 {
4601 tree low = array_ref_low_bound (*t);
4602 if (TREE_CODE (low) == INTEGER_CST)
4603 {
4604 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4605 {
4606 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4607 wi::to_widest (low));
4608 idx = wi::mul (idx, wi::to_widest
4609 (TYPE_SIZE (TREE_TYPE (*t))));
4610 widest_int ext
4611 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4612 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4613 {
4614 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4615 TREE_TYPE (*t),
4616 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4617 TYPE_SIZE (TREE_TYPE (*t)),
92e29a5e 4618 wide_int_to_tree (bitsizetype, idx));
f17a223d
RB
4619 res = true;
4620 }
4621 }
4622 }
4623 }
4624 }
4625
040292e7
RB
4626 while (handled_component_p (*t))
4627 t = &TREE_OPERAND (*t, 0);
4628
4629 /* Canonicalize MEM [&foo.bar, 0] which appears after propagation
4630 of invariant addresses into an SSA name MEM_REF address. */
4631 if (TREE_CODE (*t) == MEM_REF
4632 || TREE_CODE (*t) == TARGET_MEM_REF)
4633 {
4634 tree addr = TREE_OPERAND (*t, 0);
4635 if (TREE_CODE (addr) == ADDR_EXPR
4636 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4637 || handled_component_p (TREE_OPERAND (addr, 0))))
4638 {
4639 tree base;
a90c8804 4640 poly_int64 coffset;
040292e7
RB
4641 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4642 &coffset);
4643 if (!base)
4644 gcc_unreachable ();
4645
4646 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4647 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4648 TREE_OPERAND (*t, 1),
4649 size_int (coffset));
4650 res = true;
4651 }
4652 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4653 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4654 }
4655
4656 /* Canonicalize back MEM_REFs to plain reference trees if the object
4657 accessed is a decl that has the same access semantics as the MEM_REF. */
4658 if (TREE_CODE (*t) == MEM_REF
4659 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
f3dccf50
RB
4660 && integer_zerop (TREE_OPERAND (*t, 1))
4661 && MR_DEPENDENCE_CLIQUE (*t) == 0)
040292e7
RB
4662 {
4663 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4664 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4665 if (/* Same volatile qualification. */
4666 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4667 /* Same TBAA behavior with -fstrict-aliasing. */
4668 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4669 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4670 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4671 /* Same alignment. */
4672 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4673 /* We have to look out here to not drop a required conversion
4674 from the rhs to the lhs if *t appears on the lhs or vice-versa
4675 if it appears on the rhs. Thus require strict type
4676 compatibility. */
4677 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4678 {
4679 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4680 res = true;
4681 }
4682 }
4683
4684 /* Canonicalize TARGET_MEM_REF in particular with respect to
4685 the indexes becoming constant. */
4686 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4687 {
4688 tree tem = maybe_fold_tmr (*t);
4689 if (tem)
4690 {
4691 *t = tem;
4692 res = true;
4693 }
4694 }
4695
4696 return res;
4697}
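
/* For illustration (hypothetical trees): after &a has been propagated
   into a dereference, the reference

     MEM[(int *)&a, 0]

   is canonicalized back to plain

     a

   provided A is an int decl with matching volatility, aliasing and
   alignment; likewise an address operand such as &s.f is rewritten to
   &s with the component's offset folded into the constant offset.  */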
4698
cbdd87d4
RG
4699/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4700 distinguishes both cases. */
4701
4702static bool
e0ee10ed 4703fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
cbdd87d4
RG
4704{
4705 bool changed = false;
355fe088 4706 gimple *stmt = gsi_stmt (*gsi);
a8b85ce9 4707 bool nowarning = gimple_no_warning_p (stmt);
cbdd87d4 4708 unsigned i;
a8b85ce9 4709 fold_defer_overflow_warnings ();
cbdd87d4 4710
040292e7
RB
4711 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4712 after propagation.
4713 ??? This shouldn't be done in generic folding but in the
4714 propagation helpers which also know whether an address was
89a79e96
RB
4715 propagated.
4716 Also canonicalize operand order. */
040292e7
RB
4717 switch (gimple_code (stmt))
4718 {
4719 case GIMPLE_ASSIGN:
4720 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4721 {
4722 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4723 if ((REFERENCE_CLASS_P (*rhs)
4724 || TREE_CODE (*rhs) == ADDR_EXPR)
4725 && maybe_canonicalize_mem_ref_addr (rhs))
4726 changed = true;
4727 tree *lhs = gimple_assign_lhs_ptr (stmt);
4728 if (REFERENCE_CLASS_P (*lhs)
4729 && maybe_canonicalize_mem_ref_addr (lhs))
4730 changed = true;
4731 }
89a79e96
RB
4732 else
4733 {
4734 /* Canonicalize operand order. */
4735 enum tree_code code = gimple_assign_rhs_code (stmt);
4736 if (TREE_CODE_CLASS (code) == tcc_comparison
4737 || commutative_tree_code (code)
4738 || commutative_ternary_tree_code (code))
4739 {
4740 tree rhs1 = gimple_assign_rhs1 (stmt);
4741 tree rhs2 = gimple_assign_rhs2 (stmt);
14e72812 4742 if (tree_swap_operands_p (rhs1, rhs2))
89a79e96
RB
4743 {
4744 gimple_assign_set_rhs1 (stmt, rhs2);
4745 gimple_assign_set_rhs2 (stmt, rhs1);
4746 if (TREE_CODE_CLASS (code) == tcc_comparison)
4747 gimple_assign_set_rhs_code (stmt,
4748 swap_tree_comparison (code));
4749 changed = true;
4750 }
4751 }
4752 }
040292e7
RB
4753 break;
4754 case GIMPLE_CALL:
4755 {
4756 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4757 {
4758 tree *arg = gimple_call_arg_ptr (stmt, i);
4759 if (REFERENCE_CLASS_P (*arg)
4760 && maybe_canonicalize_mem_ref_addr (arg))
4761 changed = true;
4762 }
4763 tree *lhs = gimple_call_lhs_ptr (stmt);
4764 if (*lhs
4765 && REFERENCE_CLASS_P (*lhs)
4766 && maybe_canonicalize_mem_ref_addr (lhs))
4767 changed = true;
4768 break;
4769 }
4770 case GIMPLE_ASM:
4771 {
538dd0b7
DM
4772 gasm *asm_stmt = as_a <gasm *> (stmt);
4773 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
040292e7 4774 {
538dd0b7 4775 tree link = gimple_asm_output_op (asm_stmt, i);
040292e7
RB
4776 tree op = TREE_VALUE (link);
4777 if (REFERENCE_CLASS_P (op)
4778 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4779 changed = true;
4780 }
538dd0b7 4781 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
040292e7 4782 {
538dd0b7 4783 tree link = gimple_asm_input_op (asm_stmt, i);
040292e7
RB
4784 tree op = TREE_VALUE (link);
4785 if ((REFERENCE_CLASS_P (op)
4786 || TREE_CODE (op) == ADDR_EXPR)
4787 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4788 changed = true;
4789 }
4790 }
4791 break;
4792 case GIMPLE_DEBUG:
4793 if (gimple_debug_bind_p (stmt))
4794 {
4795 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4796 if (*val
4797 && (REFERENCE_CLASS_P (*val)
4798 || TREE_CODE (*val) == ADDR_EXPR)
4799 && maybe_canonicalize_mem_ref_addr (val))
4800 changed = true;
4801 }
4802 break;
89a79e96
RB
4803 case GIMPLE_COND:
4804 {
4805 /* Canonicalize operand order. */
4806 tree lhs = gimple_cond_lhs (stmt);
4807 tree rhs = gimple_cond_rhs (stmt);
14e72812 4808 if (tree_swap_operands_p (lhs, rhs))
89a79e96
RB
4809 {
4810 gcond *gc = as_a <gcond *> (stmt);
4811 gimple_cond_set_lhs (gc, rhs);
4812 gimple_cond_set_rhs (gc, lhs);
4813 gimple_cond_set_code (gc,
4814 swap_tree_comparison (gimple_cond_code (gc)));
4815 changed = true;
4816 }
4817 }
040292e7
RB
4818 default:;
4819 }
4820
e0ee10ed
RB
4821 /* Dispatch to pattern-based folding. */
4822 if (!inplace
4823 || is_gimple_assign (stmt)
4824 || gimple_code (stmt) == GIMPLE_COND)
4825 {
4826 gimple_seq seq = NULL;
5d75ad95
RS
4827 gimple_match_op res_op;
4828 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
0ff093d8 4829 valueize, valueize))
e0ee10ed 4830 {
5d75ad95 4831 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
e0ee10ed
RB
4832 changed = true;
4833 else
4834 gimple_seq_discard (seq);
4835 }
4836 }
4837
4838 stmt = gsi_stmt (*gsi);
4839
cbdd87d4
RG
4840 /* Fold the main computation performed by the statement. */
4841 switch (gimple_code (stmt))
4842 {
4843 case GIMPLE_ASSIGN:
4844 {
819ec64c
RB
4845 /* Try to canonicalize for boolean-typed X the comparisons
4846 X == 0, X == 1, X != 0, and X != 1. */
4847 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4848 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5fbcc0ed 4849 {
819ec64c
RB
4850 tree lhs = gimple_assign_lhs (stmt);
4851 tree op1 = gimple_assign_rhs1 (stmt);
4852 tree op2 = gimple_assign_rhs2 (stmt);
4853 tree type = TREE_TYPE (op1);
4854
4855 /* Check whether the comparison operands are of the same boolean
4856 type as the result type is.
4857 Check that second operand is an integer-constant with value
4858 one or zero. */
4859 if (TREE_CODE (op2) == INTEGER_CST
4860 && (integer_zerop (op2) || integer_onep (op2))
4861 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4862 {
4863 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4864 bool is_logical_not = false;
4865
4866 /* X == 0 and X != 1 is a logical-not of X,
4867 X == 1 and X != 0 is X. */
4868 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4869 || (cmp_code == NE_EXPR && integer_onep (op2)))
4870 is_logical_not = true;
4871
4872 if (is_logical_not == false)
4873 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4874 /* Only for X of one-bit precision type is the transformation
4875 !X -> ~X valid. */
4876 else if (TYPE_PRECISION (type) == 1)
4877 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4878 /* Otherwise we use !X -> X ^ 1. */
4879 else
4880 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4881 build_int_cst (type, 1));
4882 changed = true;
4883 break;
4884 }
5fbcc0ed 4885 }
819ec64c
RB
4886
4887 unsigned old_num_ops = gimple_num_ops (stmt);
4888 tree lhs = gimple_assign_lhs (stmt);
4889 tree new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
4890 if (new_rhs
4891 && !useless_type_conversion_p (TREE_TYPE (lhs),
4892 TREE_TYPE (new_rhs)))
4893 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4894 if (new_rhs
4895 && (!inplace
4896 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4897 {
4898 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4899 changed = true;
4900 }
4901 break;
4902 }
4903
cbdd87d4 4904 case GIMPLE_CALL:
ceeffab0 4905 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
4906 break;
4907
4908 case GIMPLE_ASM:
4909 /* Fold *& in asm operands. */
38384150 4910 {
538dd0b7 4911 gasm *asm_stmt = as_a <gasm *> (stmt);
38384150
JJ
4912 size_t noutputs;
4913 const char **oconstraints;
4914 const char *constraint;
4915 bool allows_mem, allows_reg;
4916
538dd0b7 4917 noutputs = gimple_asm_noutputs (asm_stmt);
38384150
JJ
4918 oconstraints = XALLOCAVEC (const char *, noutputs);
4919
538dd0b7 4920 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
38384150 4921 {
538dd0b7 4922 tree link = gimple_asm_output_op (asm_stmt, i);
38384150
JJ
4923 tree op = TREE_VALUE (link);
4924 oconstraints[i]
4925 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4926 if (REFERENCE_CLASS_P (op)
4927 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4928 {
4929 TREE_VALUE (link) = op;
4930 changed = true;
4931 }
4932 }
538dd0b7 4933 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
38384150 4934 {
538dd0b7 4935 tree link = gimple_asm_input_op (asm_stmt, i);
38384150
JJ
4936 tree op = TREE_VALUE (link);
4937 constraint
4938 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4939 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4940 oconstraints, &allows_mem, &allows_reg);
4941 if (REFERENCE_CLASS_P (op)
4942 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4943 != NULL_TREE)
4944 {
4945 TREE_VALUE (link) = op;
4946 changed = true;
4947 }
4948 }
4949 }
cbdd87d4
RG
4950 break;
4951
bd422c4a
RG
4952 case GIMPLE_DEBUG:
4953 if (gimple_debug_bind_p (stmt))
4954 {
4955 tree val = gimple_debug_bind_get_value (stmt);
4956 if (val
4957 && REFERENCE_CLASS_P (val))
4958 {
4959 tree tem = maybe_fold_reference (val, false);
4960 if (tem)
4961 {
4962 gimple_debug_bind_set_value (stmt, tem);
4963 changed = true;
4964 }
4965 }
3e888a5e
RG
4966 else if (val
4967 && TREE_CODE (val) == ADDR_EXPR)
4968 {
4969 tree ref = TREE_OPERAND (val, 0);
4970 tree tem = maybe_fold_reference (ref, false);
4971 if (tem)
4972 {
4973 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4974 gimple_debug_bind_set_value (stmt, tem);
4975 changed = true;
4976 }
4977 }
bd422c4a
RG
4978 }
4979 break;
4980
cfe3d653
PK
4981 case GIMPLE_RETURN:
4982 {
4983 greturn *ret_stmt = as_a<greturn *> (stmt);
4984 tree ret = gimple_return_retval(ret_stmt);
4985
4986 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4987 {
4988 tree val = valueize (ret);
1af928db
RB
4989 if (val && val != ret
4990 && may_propagate_copy (ret, val))
cfe3d653
PK
4991 {
4992 gimple_return_set_retval (ret_stmt, val);
4993 changed = true;
4994 }
4995 }
4996 }
4997 break;
4998
cbdd87d4
RG
4999 default:;
5000 }
5001
5002 stmt = gsi_stmt (*gsi);
5003
37376165
RB
5004 /* Fold *& on the lhs. */
5005 if (gimple_has_lhs (stmt))
cbdd87d4
RG
5006 {
5007 tree lhs = gimple_get_lhs (stmt);
5008 if (lhs && REFERENCE_CLASS_P (lhs))
5009 {
5010 tree new_lhs = maybe_fold_reference (lhs, true);
5011 if (new_lhs)
5012 {
5013 gimple_set_lhs (stmt, new_lhs);
5014 changed = true;
5015 }
5016 }
5017 }
5018
a8b85ce9 5019 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
cbdd87d4
RG
5020 return changed;
5021}
5022
e0ee10ed
RB
5023/* Valueization callback that ends up not following SSA edges. */
5024
5025tree
5026no_follow_ssa_edges (tree)
5027{
5028 return NULL_TREE;
5029}
5030
45cc9f96
RB
5031/* Valueization callback that ends up following single-use SSA edges only. */
5032
5033tree
5034follow_single_use_edges (tree val)
5035{
5036 if (TREE_CODE (val) == SSA_NAME
5037 && !has_single_use (val))
5038 return NULL_TREE;
5039 return val;
5040}
5041
c566cc9f
RS
5042/* Valueization callback that follows all SSA edges. */
5043
5044tree
5045follow_all_ssa_edges (tree val)
5046{
5047 return val;
5048}
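
/* Illustrative sketch (not from the original sources; the lattice lookup
   is hypothetical and elided).  A valueization callback receives an SSA
   name and either returns a replacement value, returns the name itself
   to keep it, or returns NULL_TREE to stop the folder from looking
   through the name's definition.  */

static tree
example_valueize (tree name)
{
  tree cst = NULL_TREE;	/* ... consult the pass's own lattice for NAME ...  */
  if (cst && is_gimple_min_invariant (cst))
    return cst;		/* Substitute the known value.  */
  return name;		/* Keep NAME; NULL_TREE would stop the walk.  */
}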
5049
cbdd87d4
RG
5050/* Fold the statement pointed to by GSI. In some cases, this function may
5051 replace the whole statement with a new one. Returns true iff folding
5052 makes any changes.
5053 The statement pointed to by GSI should be in valid gimple form but may
5054 be in an unfolded state resulting from, for example, constant propagation
5055 which can produce *&x = 0. */
5056
5057bool
5058fold_stmt (gimple_stmt_iterator *gsi)
5059{
e0ee10ed
RB
5060 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5061}
5062
5063bool
5064fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5065{
5066 return fold_stmt_1 (gsi, false, valueize);
cbdd87d4
RG
5067}
5068
59401b92 5069/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
5070 *&x created by constant propagation are handled. The statement cannot
5071 be replaced with a new one. Return true if the statement was
5072 changed, false otherwise.
59401b92 5073 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
5074 be in an unfolded state resulting from, for example, constant propagation
5075 which can produce *&x = 0. */
5076
5077bool
59401b92 5078fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 5079{
355fe088 5080 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 5081 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 5082 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
5083 return changed;
5084}
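
/* Illustrative example (hypothetical function and variables, not part of
   the surrounding code).  After copy propagation substitutes &x for p in
   the function below, the IL briefly contains the store *&x = 0;
   fold_stmt_inplace reduces it to the direct store x = 0 without
   allocating a new statement.  */

void
example_store (void)
{
  int x;
  int *p = &x;
  *p = 0;	/* Propagates to *&x = 0, then folds to x = 0.  */
}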
5085
e89065a1
SL
5086/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5087 if EXPR is null or we don't know how.
5088 If non-null, the result always has boolean type. */
5089
5090static tree
5091canonicalize_bool (tree expr, bool invert)
5092{
5093 if (!expr)
5094 return NULL_TREE;
5095 else if (invert)
5096 {
5097 if (integer_nonzerop (expr))
5098 return boolean_false_node;
5099 else if (integer_zerop (expr))
5100 return boolean_true_node;
5101 else if (TREE_CODE (expr) == SSA_NAME)
5102 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5103 build_int_cst (TREE_TYPE (expr), 0));
98209db3 5104 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
5105 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5106 boolean_type_node,
5107 TREE_OPERAND (expr, 0),
5108 TREE_OPERAND (expr, 1));
5109 else
5110 return NULL_TREE;
5111 }
5112 else
5113 {
5114 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5115 return expr;
5116 if (integer_nonzerop (expr))
5117 return boolean_true_node;
5118 else if (integer_zerop (expr))
5119 return boolean_false_node;
5120 else if (TREE_CODE (expr) == SSA_NAME)
5121 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5122 build_int_cst (TREE_TYPE (expr), 0));
98209db3 5123 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
5124 return fold_build2 (TREE_CODE (expr),
5125 boolean_type_node,
5126 TREE_OPERAND (expr, 0),
5127 TREE_OPERAND (expr, 1));
5128 else
5129 return NULL_TREE;
5130 }
5131}
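
/* Illustrative results of canonicalize_bool, assuming integer operands
   A and B and a non-boolean SSA name X (all hypothetical):
     canonicalize_bool (A < B, false)  =>  A < B   rebuilt with boolean type
     canonicalize_bool (A < B, true)   =>  A >= B  via invert_tree_comparison
     canonicalize_bool (X, false)      =>  X != 0
     canonicalize_bool (X, true)       =>  X == 0  */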
5132
5133/* Check to see if a boolean expression EXPR is logically equivalent to the
5134 comparison (OP1 CODE OP2). Check for various identities involving
5135 SSA_NAMEs. */
5136
5137static bool
5138same_bool_comparison_p (const_tree expr, enum tree_code code,
5139 const_tree op1, const_tree op2)
5140{
355fe088 5141 gimple *s;
e89065a1
SL
5142
5143 /* The obvious case. */
5144 if (TREE_CODE (expr) == code
5145 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5146 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5147 return true;
5148
5149 /* Check for comparing (name, name != 0) and the case where expr
5150 is an SSA_NAME with a definition matching the comparison. */
5151 if (TREE_CODE (expr) == SSA_NAME
5152 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5153 {
5154 if (operand_equal_p (expr, op1, 0))
5155 return ((code == NE_EXPR && integer_zerop (op2))
5156 || (code == EQ_EXPR && integer_nonzerop (op2)));
5157 s = SSA_NAME_DEF_STMT (expr);
5158 if (is_gimple_assign (s)
5159 && gimple_assign_rhs_code (s) == code
5160 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5161 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5162 return true;
5163 }
5164
5165 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5166 of name is a comparison, recurse. */
5167 if (TREE_CODE (op1) == SSA_NAME
5168 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5169 {
5170 s = SSA_NAME_DEF_STMT (op1);
5171 if (is_gimple_assign (s)
5172 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5173 {
5174 enum tree_code c = gimple_assign_rhs_code (s);
5175 if ((c == NE_EXPR && integer_zerop (op2))
5176 || (c == EQ_EXPR && integer_nonzerop (op2)))
5177 return same_bool_comparison_p (expr, c,
5178 gimple_assign_rhs1 (s),
5179 gimple_assign_rhs2 (s));
5180 if ((c == EQ_EXPR && integer_zerop (op2))
5181 || (c == NE_EXPR && integer_nonzerop (op2)))
5182 return same_bool_comparison_p (expr,
5183 invert_tree_comparison (c, false),
5184 gimple_assign_rhs1 (s),
5185 gimple_assign_rhs2 (s));
5186 }
5187 }
5188 return false;
5189}
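
/* Illustrative case for same_bool_comparison_p (hypothetical names).
   If the boolean SSA name T is defined by the assignment T = (A == B),
   then same_bool_comparison_p (T, EQ_EXPR, A, B) holds because the
   definition matches the comparison, and
   same_bool_comparison_p (T, NE_EXPR, T, 0) holds because (T != 0)
   is just T itself.  */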
5190
5191/* Check to see if two boolean expressions OP1 and OP2 are logically
5192 equivalent. */
5193
5194static bool
5195same_bool_result_p (const_tree op1, const_tree op2)
5196{
5197 /* Simple cases first. */
5198 if (operand_equal_p (op1, op2, 0))
5199 return true;
5200
5201 /* Check the cases where at least one of the operands is a comparison.
5202 These are a bit smarter than operand_equal_p in that they apply some
5203 identities on SSA_NAMEs. */
98209db3 5204 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
5205 && same_bool_comparison_p (op1, TREE_CODE (op2),
5206 TREE_OPERAND (op2, 0),
5207 TREE_OPERAND (op2, 1)))
5208 return true;
98209db3 5209 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
5210 && same_bool_comparison_p (op2, TREE_CODE (op1),
5211 TREE_OPERAND (op1, 0),
5212 TREE_OPERAND (op1, 1)))
5213 return true;
5214
5215 /* Default case. */
5216 return false;
5217}
5218
5219/* Forward declarations for some mutually recursive functions. */
5220
5221static tree
5222and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5223 enum tree_code code2, tree op2a, tree op2b);
5224static tree
5225and_var_with_comparison (tree var, bool invert,
5226 enum tree_code code2, tree op2a, tree op2b);
5227static tree
355fe088 5228and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5229 enum tree_code code2, tree op2a, tree op2b);
5230static tree
5231or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5232 enum tree_code code2, tree op2a, tree op2b);
5233static tree
5234or_var_with_comparison (tree var, bool invert,
5235 enum tree_code code2, tree op2a, tree op2b);
5236static tree
355fe088 5237or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5238 enum tree_code code2, tree op2a, tree op2b);
5239
5240/* Helper function for and_comparisons_1: try to simplify the AND of the
5241 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5242 If INVERT is true, invert the value of the VAR before doing the AND.
5243 Return NULL_TREE if we can't simplify this to a single expression. */
5244
5245static tree
5246and_var_with_comparison (tree var, bool invert,
5247 enum tree_code code2, tree op2a, tree op2b)
5248{
5249 tree t;
355fe088 5250 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5251
5252 /* We can only deal with variables whose definitions are assignments. */
5253 if (!is_gimple_assign (stmt))
5254 return NULL_TREE;
5255
5256 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5257 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5258 Then we only have to consider the simpler non-inverted cases. */
5259 if (invert)
5260 t = or_var_with_comparison_1 (stmt,
5261 invert_tree_comparison (code2, false),
5262 op2a, op2b);
5263 else
5264 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5265 return canonicalize_bool (t, invert);
5266}
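
/* A concrete instance of the DeMorgan rewrite above (hypothetical
   operands): with INVERT set and the comparison (A < B), the call
   simplifies VAR OR (A >= B) through or_var_with_comparison_1 and lets
   canonicalize_bool invert the result, so only the non-inverted forms
   need dedicated handling.  */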
5267
5268/* Try to simplify the AND of the ssa variable defined by the assignment
5269 STMT with the comparison specified by (OP2A CODE2 OP2B).
5270 Return NULL_TREE if we can't simplify this to a single expression. */
5271
5272static tree
355fe088 5273and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5274 enum tree_code code2, tree op2a, tree op2b)
5275{
5276 tree var = gimple_assign_lhs (stmt);
5277 tree true_test_var = NULL_TREE;
5278 tree false_test_var = NULL_TREE;
5279 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5280
5281 /* Check for identities like (var AND (var == 0)) => false. */
5282 if (TREE_CODE (op2a) == SSA_NAME
5283 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5284 {
5285 if ((code2 == NE_EXPR && integer_zerop (op2b))
5286 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5287 {
5288 true_test_var = op2a;
5289 if (var == true_test_var)
5290 return var;
5291 }
5292 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5293 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5294 {
5295 false_test_var = op2a;
5296 if (var == false_test_var)
5297 return boolean_false_node;
5298 }
5299 }
5300
5301 /* If the definition is a comparison, recurse on it. */
5302 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5303 {
5304 tree t = and_comparisons_1 (innercode,
5305 gimple_assign_rhs1 (stmt),
5306 gimple_assign_rhs2 (stmt),
5307 code2,
5308 op2a,
5309 op2b);
5310 if (t)
5311 return t;
5312 }
5313
5314 /* If the definition is an AND or OR expression, we may be able to
5315 simplify by reassociating. */
eb9820c0
KT
5316 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5317 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
5318 {
5319 tree inner1 = gimple_assign_rhs1 (stmt);
5320 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 5321 gimple *s;
e89065a1
SL
5322 tree t;
5323 tree partial = NULL_TREE;
eb9820c0 5324 bool is_and = (innercode == BIT_AND_EXPR);
e89065a1
SL
5325
5326 /* Check for boolean identities that don't require recursive examination
5327 of inner1/inner2:
5328 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5329 inner1 AND (inner1 OR inner2) => inner1
5330 !inner1 AND (inner1 AND inner2) => false
5331 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5332 Likewise for similar cases involving inner2. */
5333 if (inner1 == true_test_var)
5334 return (is_and ? var : inner1);
5335 else if (inner2 == true_test_var)
5336 return (is_and ? var : inner2);
5337 else if (inner1 == false_test_var)
5338 return (is_and
5339 ? boolean_false_node
5340 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5341 else if (inner2 == false_test_var)
5342 return (is_and
5343 ? boolean_false_node
5344 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5345
5346 /* Next, redistribute/reassociate the AND across the inner tests.
5347 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5348 if (TREE_CODE (inner1) == SSA_NAME
5349 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5350 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5351 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5352 gimple_assign_rhs1 (s),
5353 gimple_assign_rhs2 (s),
5354 code2, op2a, op2b)))
5355 {
5356 /* Handle the AND case, where we are reassociating:
5357 (inner1 AND inner2) AND (op2a code2 op2b)
5358 => (t AND inner2)
5359 If the partial result t is a constant, we win. Otherwise
5360 continue on to try reassociating with the other inner test. */
5361 if (is_and)
5362 {
5363 if (integer_onep (t))
5364 return inner2;
5365 else if (integer_zerop (t))
5366 return boolean_false_node;
5367 }
5368
5369 /* Handle the OR case, where we are redistributing:
5370 (inner1 OR inner2) AND (op2a code2 op2b)
5371 => (t OR (inner2 AND (op2a code2 op2b))) */
8236c8eb
JJ
5372 else if (integer_onep (t))
5373 return boolean_true_node;
5374
5375 /* Save partial result for later. */
5376 partial = t;
e89065a1
SL
5377 }
5378
5379 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5380 if (TREE_CODE (inner2) == SSA_NAME
5381 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5382 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5383 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5384 gimple_assign_rhs1 (s),
5385 gimple_assign_rhs2 (s),
5386 code2, op2a, op2b)))
5387 {
5388 /* Handle the AND case, where we are reassociating:
5389 (inner1 AND inner2) AND (op2a code2 op2b)
5390 => (inner1 AND t) */
5391 if (is_and)
5392 {
5393 if (integer_onep (t))
5394 return inner1;
5395 else if (integer_zerop (t))
5396 return boolean_false_node;
8236c8eb
JJ
5397 /* If both are the same, we can apply the identity
5398 (x AND x) == x. */
5399 else if (partial && same_bool_result_p (t, partial))
5400 return t;
e89065a1
SL
5401 }
5402
5403 /* Handle the OR case, where we are redistributing:
5404 (inner1 OR inner2) AND (op2a code2 op2b)
5405 => (t OR (inner1 AND (op2a code2 op2b)))
5406 => (t OR partial) */
5407 else
5408 {
5409 if (integer_onep (t))
5410 return boolean_true_node;
5411 else if (partial)
5412 {
5413 /* We already got a simplification for the other
5414 operand to the redistributed OR expression. The
5415 interesting case is when at least one is false.
5416 Or, if both are the same, we can apply the identity
5417 (x OR x) == x. */
5418 if (integer_zerop (partial))
5419 return t;
5420 else if (integer_zerop (t))
5421 return partial;
5422 else if (same_bool_result_p (t, partial))
5423 return t;
5424 }
5425 }
5426 }
5427 }
5428 return NULL_TREE;
5429}
5430
5431/* Try to simplify the AND of two comparisons defined by
5432 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5433 If this can be done without constructing an intermediate value,
5434 return the resulting tree; otherwise NULL_TREE is returned.
5435 This function is deliberately asymmetric as it recurses on SSA_DEFs
5436 in the first comparison but not the second. */
5437
5438static tree
5439and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5440 enum tree_code code2, tree op2a, tree op2b)
5441{
ae22ac3c 5442 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 5443
e89065a1
SL
5444 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5445 if (operand_equal_p (op1a, op2a, 0)
5446 && operand_equal_p (op1b, op2b, 0))
5447 {
eb9820c0 5448 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5449 tree t = combine_comparisons (UNKNOWN_LOCATION,
5450 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 5451 truth_type, op1a, op1b);
e89065a1
SL
5452 if (t)
5453 return t;
5454 }
5455
5456 /* Likewise the swapped case of the above. */
5457 if (operand_equal_p (op1a, op2b, 0)
5458 && operand_equal_p (op1b, op2a, 0))
5459 {
eb9820c0 5460 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5461 tree t = combine_comparisons (UNKNOWN_LOCATION,
5462 TRUTH_ANDIF_EXPR, code1,
5463 swap_tree_comparison (code2),
31ed6226 5464 truth_type, op1a, op1b);
e89065a1
SL
5465 if (t)
5466 return t;
5467 }
5468
5469 /* If both comparisons are of the same value against constants, we might
5470 be able to merge them. */
5471 if (operand_equal_p (op1a, op2a, 0)
5472 && TREE_CODE (op1b) == INTEGER_CST
5473 && TREE_CODE (op2b) == INTEGER_CST)
5474 {
5475 int cmp = tree_int_cst_compare (op1b, op2b);
5476
5477 /* If we have (op1a == op1b), we should either be able to
5478 return that or FALSE, depending on whether the constant op1b
5479 also satisfies the other comparison against op2b. */
5480 if (code1 == EQ_EXPR)
5481 {
5482 bool done = true;
5483 bool val;
5484 switch (code2)
5485 {
5486 case EQ_EXPR: val = (cmp == 0); break;
5487 case NE_EXPR: val = (cmp != 0); break;
5488 case LT_EXPR: val = (cmp < 0); break;
5489 case GT_EXPR: val = (cmp > 0); break;
5490 case LE_EXPR: val = (cmp <= 0); break;
5491 case GE_EXPR: val = (cmp >= 0); break;
5492 default: done = false;
5493 }
5494 if (done)
5495 {
5496 if (val)
5497 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5498 else
5499 return boolean_false_node;
5500 }
5501 }
5502 /* Likewise if the second comparison is an == comparison. */
5503 else if (code2 == EQ_EXPR)
5504 {
5505 bool done = true;
5506 bool val;
5507 switch (code1)
5508 {
5509 case EQ_EXPR: val = (cmp == 0); break;
5510 case NE_EXPR: val = (cmp != 0); break;
5511 case LT_EXPR: val = (cmp > 0); break;
5512 case GT_EXPR: val = (cmp < 0); break;
5513 case LE_EXPR: val = (cmp >= 0); break;
5514 case GE_EXPR: val = (cmp <= 0); break;
5515 default: done = false;
5516 }
5517 if (done)
5518 {
5519 if (val)
5520 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5521 else
5522 return boolean_false_node;
5523 }
5524 }
5525
5526 /* Same business with inequality tests. */
5527 else if (code1 == NE_EXPR)
5528 {
5529 bool val;
5530 switch (code2)
5531 {
5532 case EQ_EXPR: val = (cmp != 0); break;
5533 case NE_EXPR: val = (cmp == 0); break;
5534 case LT_EXPR: val = (cmp >= 0); break;
5535 case GT_EXPR: val = (cmp <= 0); break;
5536 case LE_EXPR: val = (cmp > 0); break;
5537 case GE_EXPR: val = (cmp < 0); break;
5538 default:
5539 val = false;
5540 }
5541 if (val)
5542 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5543 }
5544 else if (code2 == NE_EXPR)
5545 {
5546 bool val;
5547 switch (code1)
5548 {
5549 case EQ_EXPR: val = (cmp == 0); break;
5550 case NE_EXPR: val = (cmp != 0); break;
5551 case LT_EXPR: val = (cmp <= 0); break;
5552 case GT_EXPR: val = (cmp >= 0); break;
5553 case LE_EXPR: val = (cmp < 0); break;
5554 case GE_EXPR: val = (cmp > 0); break;
5555 default:
5556 val = false;
5557 }
5558 if (val)
5559 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5560 }
5561
5562 /* Choose the more restrictive of two < or <= comparisons. */
5563 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5564 && (code2 == LT_EXPR || code2 == LE_EXPR))
5565 {
5566 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5567 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5568 else
5569 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5570 }
5571
5572 /* Likewise choose the more restrictive of two > or >= comparisons. */
5573 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5574 && (code2 == GT_EXPR || code2 == GE_EXPR))
5575 {
5576 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5577 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5578 else
5579 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5580 }
5581
5582 /* Check for singleton ranges. */
5583 else if (cmp == 0
5584 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5585 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5586 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5587
5588 /* Check for disjoint ranges. */
5589 else if (cmp <= 0
5590 && (code1 == LT_EXPR || code1 == LE_EXPR)
5591 && (code2 == GT_EXPR || code2 == GE_EXPR))
5592 return boolean_false_node;
5593 else if (cmp >= 0
5594 && (code1 == GT_EXPR || code1 == GE_EXPR)
5595 && (code2 == LT_EXPR || code2 == LE_EXPR))
5596 return boolean_false_node;
5597 }
5598
5599 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5600 NAME's definition is a truth value. See if there are any simplifications
5601 that can be done against the NAME's definition. */
5602 if (TREE_CODE (op1a) == SSA_NAME
5603 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5604 && (integer_zerop (op1b) || integer_onep (op1b)))
5605 {
5606 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5607 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 5608 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
5609 switch (gimple_code (stmt))
5610 {
5611 case GIMPLE_ASSIGN:
5612 /* Try to simplify by copy-propagating the definition. */
5613 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5614
5615 case GIMPLE_PHI:
5616 /* If every argument to the PHI produces the same result when
5617 ANDed with the second comparison, we win.
5618 Do not do this unless the type is bool since we need a bool
5619 result here anyway. */
5620 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5621 {
5622 tree result = NULL_TREE;
5623 unsigned i;
5624 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5625 {
5626 tree arg = gimple_phi_arg_def (stmt, i);
5627
5628 /* If this PHI has itself as an argument, ignore it.
5629 If all the other args produce the same result,
5630 we're still OK. */
5631 if (arg == gimple_phi_result (stmt))
5632 continue;
5633 else if (TREE_CODE (arg) == INTEGER_CST)
5634 {
5635 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5636 {
5637 if (!result)
5638 result = boolean_false_node;
5639 else if (!integer_zerop (result))
5640 return NULL_TREE;
5641 }
5642 else if (!result)
5643 result = fold_build2 (code2, boolean_type_node,
5644 op2a, op2b);
5645 else if (!same_bool_comparison_p (result,
5646 code2, op2a, op2b))
5647 return NULL_TREE;
5648 }
0e8b84ec
JJ
5649 else if (TREE_CODE (arg) == SSA_NAME
5650 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 5651 {
6c66f733 5652 tree temp;
355fe088 5653 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
5654 /* In simple cases we can look through PHI nodes,
5655 but we have to be careful with loops.
5656 See PR49073. */
5657 if (! dom_info_available_p (CDI_DOMINATORS)
5658 || gimple_bb (def_stmt) == gimple_bb (stmt)
5659 || dominated_by_p (CDI_DOMINATORS,
5660 gimple_bb (def_stmt),
5661 gimple_bb (stmt)))
5662 return NULL_TREE;
5663 temp = and_var_with_comparison (arg, invert, code2,
5664 op2a, op2b);
e89065a1
SL
5665 if (!temp)
5666 return NULL_TREE;
5667 else if (!result)
5668 result = temp;
5669 else if (!same_bool_result_p (result, temp))
5670 return NULL_TREE;
5671 }
5672 else
5673 return NULL_TREE;
5674 }
5675 return result;
5676 }
5677
5678 default:
5679 break;
5680 }
5681 }
5682 return NULL_TREE;
5683}
5684
5685/* Try to simplify the AND of two comparisons, specified by
5686 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5687 If this can be simplified to a single expression (without requiring
5688 introducing more SSA variables to hold intermediate values),
5689 return the resulting tree. Otherwise return NULL_TREE.
5690 If the result expression is non-null, it has boolean type. */
5691
5692tree
5693maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5694 enum tree_code code2, tree op2a, tree op2b)
5695{
5696 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5697 if (t)
5698 return t;
5699 else
5700 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5701}
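
/* Worked examples of the merges performed above, assuming X is an
   integer SSA name (hypothetical):
     (X < 3)  && (X < 5)   =>  X < 3	(more restrictive bound)
     (X <= 2) && (X >= 2)  =>  X == 2	(singleton range)
     (X < 2)  && (X > 5)   =>  false	(disjoint ranges)  */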
5702
5703/* Helper function for or_comparisons_1: try to simplify the OR of the
5704 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5705 If INVERT is true, invert the value of VAR before doing the OR.
5706 Return NULL_TREE if we can't simplify this to a single expression. */
5707
5708static tree
5709or_var_with_comparison (tree var, bool invert,
5710 enum tree_code code2, tree op2a, tree op2b)
5711{
5712 tree t;
355fe088 5713 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5714
5715 /* We can only deal with variables whose definitions are assignments. */
5716 if (!is_gimple_assign (stmt))
5717 return NULL_TREE;
5718
5719 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5720 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5721 Then we only have to consider the simpler non-inverted cases. */
5722 if (invert)
5723 t = and_var_with_comparison_1 (stmt,
5724 invert_tree_comparison (code2, false),
5725 op2a, op2b);
5726 else
5727 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5728 return canonicalize_bool (t, invert);
5729}
5730
5731/* Try to simplify the OR of the ssa variable defined by the assignment
5732 STMT with the comparison specified by (OP2A CODE2 OP2B).
5733 Return NULL_TREE if we can't simplify this to a single expression. */
5734
5735static tree
355fe088 5736or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5737 enum tree_code code2, tree op2a, tree op2b)
5738{
5739 tree var = gimple_assign_lhs (stmt);
5740 tree true_test_var = NULL_TREE;
5741 tree false_test_var = NULL_TREE;
5742 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5743
5744 /* Check for identities like (var OR (var != 0)) => true . */
5745 if (TREE_CODE (op2a) == SSA_NAME
5746 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5747 {
5748 if ((code2 == NE_EXPR && integer_zerop (op2b))
5749 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5750 {
5751 true_test_var = op2a;
5752 if (var == true_test_var)
5753 return var;
5754 }
5755 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5756 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5757 {
5758 false_test_var = op2a;
5759 if (var == false_test_var)
5760 return boolean_true_node;
5761 }
5762 }
5763
5764 /* If the definition is a comparison, recurse on it. */
5765 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5766 {
5767 tree t = or_comparisons_1 (innercode,
5768 gimple_assign_rhs1 (stmt),
5769 gimple_assign_rhs2 (stmt),
5770 code2,
5771 op2a,
5772 op2b);
5773 if (t)
5774 return t;
5775 }
5776
5777 /* If the definition is an AND or OR expression, we may be able to
5778 simplify by reassociating. */
eb9820c0
KT
5779 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5780 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
5781 {
5782 tree inner1 = gimple_assign_rhs1 (stmt);
5783 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 5784 gimple *s;
e89065a1
SL
5785 tree t;
5786 tree partial = NULL_TREE;
eb9820c0 5787 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
5788
5789 /* Check for boolean identities that don't require recursive examination
5790 of inner1/inner2:
5791 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5792 inner1 OR (inner1 AND inner2) => inner1
5793 !inner1 OR (inner1 OR inner2) => true
5794 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5795 */
5796 if (inner1 == true_test_var)
5797 return (is_or ? var : inner1);
5798 else if (inner2 == true_test_var)
5799 return (is_or ? var : inner2);
5800 else if (inner1 == false_test_var)
5801 return (is_or
5802 ? boolean_true_node
5803 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5804 else if (inner2 == false_test_var)
5805 return (is_or
5806 ? boolean_true_node
5807 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5808
5809 /* Next, redistribute/reassociate the OR across the inner tests.
5810 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5811 if (TREE_CODE (inner1) == SSA_NAME
5812 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5813 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5814 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5815 gimple_assign_rhs1 (s),
5816 gimple_assign_rhs2 (s),
5817 code2, op2a, op2b)))
5818 {
5819 /* Handle the OR case, where we are reassociating:
5820 (inner1 OR inner2) OR (op2a code2 op2b)
5821 => (t OR inner2)
5822 If the partial result t is a constant, we win. Otherwise
5823 continue on to try reassociating with the other inner test. */
8236c8eb 5824 if (is_or)
e89065a1
SL
5825 {
5826 if (integer_onep (t))
5827 return boolean_true_node;
5828 else if (integer_zerop (t))
5829 return inner2;
5830 }
5831
5832 /* Handle the AND case, where we are redistributing:
5833 (inner1 AND inner2) OR (op2a code2 op2b)
5834 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
5835 else if (integer_zerop (t))
5836 return boolean_false_node;
5837
5838 /* Save partial result for later. */
5839 partial = t;
e89065a1
SL
5840 }
5841
5842 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5843 if (TREE_CODE (inner2) == SSA_NAME
5844 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5845 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5846 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5847 gimple_assign_rhs1 (s),
5848 gimple_assign_rhs2 (s),
5849 code2, op2a, op2b)))
5850 {
5851 /* Handle the OR case, where we are reassociating:
5852 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
5853 => (inner1 OR t)
5854 => (t OR partial) */
5855 if (is_or)
e89065a1
SL
5856 {
5857 if (integer_zerop (t))
5858 return inner1;
5859 else if (integer_onep (t))
5860 return boolean_true_node;
8236c8eb
JJ
5861 /* If both are the same, we can apply the identity
5862 (x OR x) == x. */
5863 else if (partial && same_bool_result_p (t, partial))
5864 return t;
e89065a1
SL
5865 }
5866
5867 /* Handle the AND case, where we are redistributing:
5868 (inner1 AND inner2) OR (op2a code2 op2b)
5869 => (t AND (inner1 OR (op2a code2 op2b)))
5870 => (t AND partial) */
5871 else
5872 {
5873 if (integer_zerop (t))
5874 return boolean_false_node;
5875 else if (partial)
5876 {
5877 /* We already got a simplification for the other
5878 operand to the redistributed AND expression. The
5879 interesting case is when at least one is true.
5880 Or, if both are the same, we can apply the identity
8236c8eb 5881 (x AND x) == x. */
e89065a1
SL
5882 if (integer_onep (partial))
5883 return t;
5884 else if (integer_onep (t))
5885 return partial;
5886 else if (same_bool_result_p (t, partial))
8236c8eb 5887 return t;
e89065a1
SL
5888 }
5889 }
5890 }
5891 }
5892 return NULL_TREE;
5893}
5894
5895/* Try to simplify the OR of two comparisons defined by
5896 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5897 If this can be done without constructing an intermediate value,
5898 return the resulting tree; otherwise NULL_TREE is returned.
5899 This function is deliberately asymmetric as it recurses on SSA_DEFs
5900 in the first comparison but not the second. */
5901
5902static tree
5903or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5904 enum tree_code code2, tree op2a, tree op2b)
5905{
ae22ac3c 5906 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 5907
e89065a1
SL
5908 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5909 if (operand_equal_p (op1a, op2a, 0)
5910 && operand_equal_p (op1b, op2b, 0))
5911 {
eb9820c0 5912 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5913 tree t = combine_comparisons (UNKNOWN_LOCATION,
5914 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 5915 truth_type, op1a, op1b);
e89065a1
SL
5916 if (t)
5917 return t;
5918 }
5919
5920 /* Likewise the swapped case of the above. */
5921 if (operand_equal_p (op1a, op2b, 0)
5922 && operand_equal_p (op1b, op2a, 0))
5923 {
eb9820c0 5924 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5925 tree t = combine_comparisons (UNKNOWN_LOCATION,
5926 TRUTH_ORIF_EXPR, code1,
5927 swap_tree_comparison (code2),
31ed6226 5928 truth_type, op1a, op1b);
e89065a1
SL
5929 if (t)
5930 return t;
5931 }
5932
5933 /* If both comparisons are of the same value against constants, we might
5934 be able to merge them. */
5935 if (operand_equal_p (op1a, op2a, 0)
5936 && TREE_CODE (op1b) == INTEGER_CST
5937 && TREE_CODE (op2b) == INTEGER_CST)
5938 {
5939 int cmp = tree_int_cst_compare (op1b, op2b);
5940
5941 /* If we have (op1a != op1b), we should either be able to
5942 return that or TRUE, depending on whether the constant op1b
5943 also satisfies the other comparison against op2b. */
5944 if (code1 == NE_EXPR)
5945 {
5946 bool done = true;
5947 bool val;
5948 switch (code2)
5949 {
5950 case EQ_EXPR: val = (cmp == 0); break;
5951 case NE_EXPR: val = (cmp != 0); break;
5952 case LT_EXPR: val = (cmp < 0); break;
5953 case GT_EXPR: val = (cmp > 0); break;
5954 case LE_EXPR: val = (cmp <= 0); break;
5955 case GE_EXPR: val = (cmp >= 0); break;
5956 default: done = false;
5957 }
5958 if (done)
5959 {
5960 if (val)
5961 return boolean_true_node;
5962 else
5963 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5964 }
5965 }
5966 /* Likewise if the second comparison is a != comparison. */
5967 else if (code2 == NE_EXPR)
5968 {
5969 bool done = true;
5970 bool val;
5971 switch (code1)
5972 {
5973 case EQ_EXPR: val = (cmp == 0); break;
5974 case NE_EXPR: val = (cmp != 0); break;
5975 case LT_EXPR: val = (cmp > 0); break;
5976 case GT_EXPR: val = (cmp < 0); break;
5977 case LE_EXPR: val = (cmp >= 0); break;
5978 case GE_EXPR: val = (cmp <= 0); break;
5979 default: done = false;
5980 }
5981 if (done)
5982 {
5983 if (val)
5984 return boolean_true_node;
5985 else
5986 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5987 }
5988 }
5989
5990 /* See if an equality test is redundant with the other comparison. */
5991 else if (code1 == EQ_EXPR)
5992 {
5993 bool val;
5994 switch (code2)
5995 {
5996 case EQ_EXPR: val = (cmp == 0); break;
5997 case NE_EXPR: val = (cmp != 0); break;
5998 case LT_EXPR: val = (cmp < 0); break;
5999 case GT_EXPR: val = (cmp > 0); break;
6000 case LE_EXPR: val = (cmp <= 0); break;
6001 case GE_EXPR: val = (cmp >= 0); break;
6002 default:
6003 val = false;
6004 }
6005 if (val)
6006 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6007 }
6008 else if (code2 == EQ_EXPR)
6009 {
6010 bool val;
6011 switch (code1)
6012 {
6013 case EQ_EXPR: val = (cmp == 0); break;
6014 case NE_EXPR: val = (cmp != 0); break;
6015 case LT_EXPR: val = (cmp > 0); break;
6016 case GT_EXPR: val = (cmp < 0); break;
6017 case LE_EXPR: val = (cmp >= 0); break;
6018 case GE_EXPR: val = (cmp <= 0); break;
6019 default:
6020 val = false;
6021 }
6022 if (val)
6023 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6024 }
6025
6026 /* Choose the less restrictive of two < or <= comparisons. */
6027 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
6028 && (code2 == LT_EXPR || code2 == LE_EXPR))
6029 {
6030 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
6031 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6032 else
6033 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6034 }
6035
6036 /* Likewise choose the less restrictive of two > or >= comparisons. */
6037 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
6038 && (code2 == GT_EXPR || code2 == GE_EXPR))
6039 {
6040 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
6041 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6042 else
6043 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6044 }
6045
6046 /* Check for singleton ranges. */
6047 else if (cmp == 0
6048 && ((code1 == LT_EXPR && code2 == GT_EXPR)
6049 || (code1 == GT_EXPR && code2 == LT_EXPR)))
6050 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
6051
6052 /* Check for less/greater pairs that don't restrict the range at all. */
6053 else if (cmp >= 0
6054 && (code1 == LT_EXPR || code1 == LE_EXPR)
6055 && (code2 == GT_EXPR || code2 == GE_EXPR))
6056 return boolean_true_node;
6057 else if (cmp <= 0
6058 && (code1 == GT_EXPR || code1 == GE_EXPR)
6059 && (code2 == LT_EXPR || code2 == LE_EXPR))
6060 return boolean_true_node;
6061 }
6062
6063 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6064 NAME's definition is a truth value. See if there are any simplifications
6065 that can be done against the NAME's definition. */
6066 if (TREE_CODE (op1a) == SSA_NAME
6067 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6068 && (integer_zerop (op1b) || integer_onep (op1b)))
6069 {
6070 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6071 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 6072 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
6073 switch (gimple_code (stmt))
6074 {
6075 case GIMPLE_ASSIGN:
6076 /* Try to simplify by copy-propagating the definition. */
6077 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
6078
6079 case GIMPLE_PHI:
6080 /* If every argument to the PHI produces the same result when
6081 ORed with the second comparison, we win.
6082 Do not do this unless the type is bool since we need a bool
6083 result here anyway. */
6084 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6085 {
6086 tree result = NULL_TREE;
6087 unsigned i;
6088 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6089 {
6090 tree arg = gimple_phi_arg_def (stmt, i);
6091
6092 /* If this PHI has itself as an argument, ignore it.
6093 If all the other args produce the same result,
6094 we're still OK. */
6095 if (arg == gimple_phi_result (stmt))
6096 continue;
6097 else if (TREE_CODE (arg) == INTEGER_CST)
6098 {
6099 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6100 {
6101 if (!result)
6102 result = boolean_true_node;
6103 else if (!integer_onep (result))
6104 return NULL_TREE;
6105 }
6106 else if (!result)
6107 result = fold_build2 (code2, boolean_type_node,
6108 op2a, op2b);
6109 else if (!same_bool_comparison_p (result,
6110 code2, op2a, op2b))
6111 return NULL_TREE;
6112 }
0e8b84ec
JJ
6113 else if (TREE_CODE (arg) == SSA_NAME
6114 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 6115 {
6c66f733 6116 tree temp;
355fe088 6117 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
6118 /* In simple cases we can look through PHI nodes,
6119 but we have to be careful with loops.
6120 See PR49073. */
6121 if (! dom_info_available_p (CDI_DOMINATORS)
6122 || gimple_bb (def_stmt) == gimple_bb (stmt)
6123 || dominated_by_p (CDI_DOMINATORS,
6124 gimple_bb (def_stmt),
6125 gimple_bb (stmt)))
6126 return NULL_TREE;
6127 temp = or_var_with_comparison (arg, invert, code2,
6128 op2a, op2b);
e89065a1
SL
6129 if (!temp)
6130 return NULL_TREE;
6131 else if (!result)
6132 result = temp;
6133 else if (!same_bool_result_p (result, temp))
6134 return NULL_TREE;
6135 }
6136 else
6137 return NULL_TREE;
6138 }
6139 return result;
6140 }
6141
6142 default:
6143 break;
6144 }
6145 }
6146 return NULL_TREE;
6147}
6148
6149/* Try to simplify the OR of two comparisons, specified by
6150 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6151 If this can be simplified to a single expression (without requiring
6152 introducing more SSA variables to hold intermediate values),
6153 return the resulting tree. Otherwise return NULL_TREE.
6154 If the result expression is non-null, it has boolean type. */
6155
6156tree
6157maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6158 enum tree_code code2, tree op2a, tree op2b)
6159{
6160 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6161 if (t)
6162 return t;
6163 else
6164 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6165}
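
/* Worked examples of the merges performed above, assuming X is an
   integer SSA name (hypothetical):
     (X < 3)  || (X < 5)   =>  X < 5	(less restrictive bound)
     (X < 2)  || (X > 2)   =>  X != 2	(singleton gap)
     (X <= 5) || (X >= 2)  =>  true	(the two ranges cover everything)  */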
cfef45c8
RG
6166
6167
6168/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6169
6170 Either NULL_TREE, a simplified but non-constant expression, or a constant
6171 is returned.
6172
6173 ??? This should go into a gimple-fold-inline.h file to be eventually
6174 privatized with the single valueize function used in the various TUs
6175 to avoid the indirect function call overhead. */
6176
6177tree
355fe088 6178gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
d2a85801 6179 tree (*gvalueize) (tree))
cfef45c8 6180{
5d75ad95 6181 gimple_match_op res_op;
45cc9f96
RB
6182 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6183 edges if there are intermediate VARYING defs. For this reason
6184 do not follow SSA edges here even though SCCVN can technically
6185 just deal fine with that. */
5d75ad95 6186 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
45cc9f96 6187 {
34050b6b 6188 tree res = NULL_TREE;
5d75ad95
RS
6189 if (gimple_simplified_result_is_gimple_val (&res_op))
6190 res = res_op.ops[0];
34050b6b 6191 else if (mprts_hook)
5d75ad95 6192 res = mprts_hook (&res_op);
34050b6b 6193 if (res)
45cc9f96 6194 {
34050b6b
RB
6195 if (dump_file && dump_flags & TDF_DETAILS)
6196 {
6197 fprintf (dump_file, "Match-and-simplified ");
6198 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6199 fprintf (dump_file, " to ");
ef6cb4c7 6200 print_generic_expr (dump_file, res);
34050b6b
RB
6201 fprintf (dump_file, "\n");
6202 }
6203 return res;
45cc9f96 6204 }
45cc9f96
RB
6205 }
6206
cfef45c8
RG
6207 location_t loc = gimple_location (stmt);
6208 switch (gimple_code (stmt))
6209 {
6210 case GIMPLE_ASSIGN:
6211 {
6212 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6213
6214 switch (get_gimple_rhs_class (subcode))
6215 {
6216 case GIMPLE_SINGLE_RHS:
6217 {
6218 tree rhs = gimple_assign_rhs1 (stmt);
6219 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6220
6221 if (TREE_CODE (rhs) == SSA_NAME)
6222 {
6223 /* If the RHS is an SSA_NAME, return its known constant value,
6224 if any. */
6225 return (*valueize) (rhs);
6226 }
6227 /* Handle propagating invariant addresses into address
6228 operations. */
6229 else if (TREE_CODE (rhs) == ADDR_EXPR
6230 && !is_gimple_min_invariant (rhs))
6231 {
a90c8804 6232 poly_int64 offset = 0;
cfef45c8
RG
6233 tree base;
6234 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6235 &offset,
6236 valueize);
6237 if (base
6238 && (CONSTANT_CLASS_P (base)
6239 || decl_address_invariant_p (base)))
6240 return build_invariant_address (TREE_TYPE (rhs),
6241 base, offset);
6242 }
6243 else if (TREE_CODE (rhs) == CONSTRUCTOR
6244 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
928686b1
RS
6245 && known_eq (CONSTRUCTOR_NELTS (rhs),
6246 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
cfef45c8 6247 {
794e3180
RS
6248 unsigned i, nelts;
6249 tree val;
cfef45c8 6250
928686b1 6251 nelts = CONSTRUCTOR_NELTS (rhs);
5ebaa477 6252 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
cfef45c8
RG
6253 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6254 {
6255 val = (*valueize) (val);
6256 if (TREE_CODE (val) == INTEGER_CST
6257 || TREE_CODE (val) == REAL_CST
6258 || TREE_CODE (val) == FIXED_CST)
794e3180 6259 vec.quick_push (val);
cfef45c8
RG
6260 else
6261 return NULL_TREE;
6262 }
6263
5ebaa477 6264 return vec.build ();
cfef45c8 6265 }
bdf37f7a
JH
6266 if (subcode == OBJ_TYPE_REF)
6267 {
6268 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6269 /* If callee is constant, we can fold away the wrapper. */
6270 if (is_gimple_min_invariant (val))
6271 return val;
6272 }
cfef45c8
RG
6273
6274 if (kind == tcc_reference)
6275 {
6276 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6277 || TREE_CODE (rhs) == REALPART_EXPR
6278 || TREE_CODE (rhs) == IMAGPART_EXPR)
6279 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6280 {
6281 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6282 return fold_unary_loc (EXPR_LOCATION (rhs),
6283 TREE_CODE (rhs),
6284 TREE_TYPE (rhs), val);
6285 }
6286 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6287 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6288 {
6289 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6290 return fold_ternary_loc (EXPR_LOCATION (rhs),
6291 TREE_CODE (rhs),
6292 TREE_TYPE (rhs), val,
6293 TREE_OPERAND (rhs, 1),
6294 TREE_OPERAND (rhs, 2));
6295 }
6296 else if (TREE_CODE (rhs) == MEM_REF
6297 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6298 {
6299 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6300 if (TREE_CODE (val) == ADDR_EXPR
6301 && is_gimple_min_invariant (val))
6302 {
6303 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6304 unshare_expr (val),
6305 TREE_OPERAND (rhs, 1));
6306 if (tem)
6307 rhs = tem;
6308 }
6309 }
6310 return fold_const_aggregate_ref_1 (rhs, valueize);
6311 }
6312 else if (kind == tcc_declaration)
6313 return get_symbol_constant_value (rhs);
6314 return rhs;
6315 }
6316
6317 case GIMPLE_UNARY_RHS:
f3582e54 6318 return NULL_TREE;
cfef45c8
RG
6319
6320 case GIMPLE_BINARY_RHS:
4b1b9e64
RB
6321 /* Translate &x + CST into an invariant form suitable for
6322 further propagation. */
6323 if (subcode == POINTER_PLUS_EXPR)
6324 {
4b1b9e64
RB
6325 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6326 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4b1b9e64
RB
6327 if (TREE_CODE (op0) == ADDR_EXPR
6328 && TREE_CODE (op1) == INTEGER_CST)
6329 {
6330 tree off = fold_convert (ptr_type_node, op1);
6331 return build_fold_addr_expr_loc
6332 (loc,
6333 fold_build2 (MEM_REF,
6334 TREE_TYPE (TREE_TYPE (op0)),
6335 unshare_expr (op0), off));
6336 }
6337 }
59c20dc7
RB
6338 /* Canonicalize bool != 0 and bool == 0 appearing after
6339 valueization. While gimple_simplify handles this
6340 it can get confused by the ~X == 1 -> X == 0 transform
6341 which we can't reduce to an SSA name or a constant
6342 (and we have no way to tell gimple_simplify to not
6343 consider those transforms in the first place). */
6344 else if (subcode == EQ_EXPR
6345 || subcode == NE_EXPR)
6346 {
6347 tree lhs = gimple_assign_lhs (stmt);
6348 tree op0 = gimple_assign_rhs1 (stmt);
6349 if (useless_type_conversion_p (TREE_TYPE (lhs),
6350 TREE_TYPE (op0)))
6351 {
6352 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6353 op0 = (*valueize) (op0);
8861704d
RB
6354 if (TREE_CODE (op0) == INTEGER_CST)
6355 std::swap (op0, op1);
6356 if (TREE_CODE (op1) == INTEGER_CST
6357 && ((subcode == NE_EXPR && integer_zerop (op1))
6358 || (subcode == EQ_EXPR && integer_onep (op1))))
6359 return op0;
59c20dc7
RB
6360 }
6361 }
4b1b9e64 6362 return NULL_TREE;
cfef45c8
RG
6363
6364 case GIMPLE_TERNARY_RHS:
6365 {
6366 /* Handle ternary operators that can appear in GIMPLE form. */
6367 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6368 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6369 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
cfef45c8
RG
6370 return fold_ternary_loc (loc, subcode,
6371 gimple_expr_type (stmt), op0, op1, op2);
6372 }
6373
6374 default:
6375 gcc_unreachable ();
6376 }
6377 }
6378
6379 case GIMPLE_CALL:
6380 {
25583c4f 6381 tree fn;
538dd0b7 6382 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
6383
6384 if (gimple_call_internal_p (stmt))
31e071ae
MP
6385 {
6386 enum tree_code subcode = ERROR_MARK;
6387 switch (gimple_call_internal_fn (stmt))
6388 {
6389 case IFN_UBSAN_CHECK_ADD:
6390 subcode = PLUS_EXPR;
6391 break;
6392 case IFN_UBSAN_CHECK_SUB:
6393 subcode = MINUS_EXPR;
6394 break;
6395 case IFN_UBSAN_CHECK_MUL:
6396 subcode = MULT_EXPR;
6397 break;
68fa96d6
ML
6398 case IFN_BUILTIN_EXPECT:
6399 {
6400 tree arg0 = gimple_call_arg (stmt, 0);
6401 tree op0 = (*valueize) (arg0);
6402 if (TREE_CODE (op0) == INTEGER_CST)
6403 return op0;
6404 return NULL_TREE;
6405 }
31e071ae
MP
6406 default:
6407 return NULL_TREE;
6408 }
368b454d
JJ
6409 tree arg0 = gimple_call_arg (stmt, 0);
6410 tree arg1 = gimple_call_arg (stmt, 1);
6411 tree op0 = (*valueize) (arg0);
6412 tree op1 = (*valueize) (arg1);
31e071ae
MP
6413
6414 if (TREE_CODE (op0) != INTEGER_CST
6415 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
6416 {
6417 switch (subcode)
6418 {
6419 case MULT_EXPR:
6420 /* x * 0 = 0 * x = 0 without overflow. */
6421 if (integer_zerop (op0) || integer_zerop (op1))
6422 return build_zero_cst (TREE_TYPE (arg0));
6423 break;
6424 case MINUS_EXPR:
6425 /* y - y = 0 without overflow. */
6426 if (operand_equal_p (op0, op1, 0))
6427 return build_zero_cst (TREE_TYPE (arg0));
6428 break;
6429 default:
6430 break;
6431 }
6432 }
6433 tree res
6434 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
6435 if (res
6436 && TREE_CODE (res) == INTEGER_CST
6437 && !TREE_OVERFLOW (res))
6438 return res;
6439 return NULL_TREE;
6440 }
25583c4f
RS
6441
6442 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8 6443 if (TREE_CODE (fn) == ADDR_EXPR
3d78e008 6444 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
5c944c6c
RB
6445 && gimple_builtin_call_types_compatible_p (stmt,
6446 TREE_OPERAND (fn, 0)))
cfef45c8
RG
6447 {
6448 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 6449 tree retval;
cfef45c8
RG
6450 unsigned i;
6451 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6452 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 6453 retval = fold_builtin_call_array (loc,
538dd0b7 6454 gimple_call_return_type (call_stmt),
cfef45c8 6455 fn, gimple_call_num_args (stmt), args);
cfef45c8 6456 if (retval)
5c944c6c
RB
6457 {
6458 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6459 STRIP_NOPS (retval);
538dd0b7
DM
6460 retval = fold_convert (gimple_call_return_type (call_stmt),
6461 retval);
5c944c6c 6462 }
cfef45c8
RG
6463 return retval;
6464 }
6465 return NULL_TREE;
6466 }
6467
6468 default:
6469 return NULL_TREE;
6470 }
6471}
6472
6473/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6474 Returns NULL_TREE if folding to a constant is not possible, otherwise
6475 returns a constant according to is_gimple_min_invariant. */
6476
6477tree
355fe088 6478gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
6479{
6480 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6481 if (res && is_gimple_min_invariant (res))
6482 return res;
6483 return NULL_TREE;
6484}
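
/* A minimal usage sketch (hypothetical caller, not part of this file).
   Try to reduce STMT to a constant without following SSA use-def edges;
   a non-NULL result satisfies is_gimple_min_invariant:

     tree cst = gimple_fold_stmt_to_constant (stmt, no_follow_ssa_edges);
     if (cst)
       ... replace uses of the statement's LHS with CST ...  */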
6485
6486
6487/* The following set of functions are supposed to fold references using
6488 their constant initializers. */
6489
cfef45c8
RG
6490/* See if we can find constructor defining value of BASE.
6491 When we know the constructor with a constant offset (such as when
6492 BASE is array[40] and we know the constructor of array), then
6493 BIT_OFFSET is adjusted accordingly.
6494
6495 As a special case, return error_mark_node when constructor
6496 is not explicitly available, but it is known to be zero
6497 such as 'static const int a;'. */
6498static tree
588db50c 6499get_base_constructor (tree base, poly_int64_pod *bit_offset,
cfef45c8
RG
6500 tree (*valueize)(tree))
6501{
588db50c 6502 poly_int64 bit_offset2, size, max_size;
ee45a32d
EB
6503 bool reverse;
6504
cfef45c8
RG
6505 if (TREE_CODE (base) == MEM_REF)
6506 {
6a5aca53
ML
6507 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6508 if (!boff.to_shwi (bit_offset))
6509 return NULL_TREE;
cfef45c8
RG
6510
6511 if (valueize
6512 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6513 base = valueize (TREE_OPERAND (base, 0));
6514 if (!base || TREE_CODE (base) != ADDR_EXPR)
6515 return NULL_TREE;
6516 base = TREE_OPERAND (base, 0);
6517 }
13e88953
RB
6518 else if (valueize
6519 && TREE_CODE (base) == SSA_NAME)
6520 base = valueize (base);
cfef45c8
RG
6521
6522 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6523 DECL_INITIAL. If BASE is a nested reference into another
6524 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6525 the inner reference. */
6526 switch (TREE_CODE (base))
6527 {
6528 case VAR_DECL:
cfef45c8 6529 case CONST_DECL:
6a6dac52
JH
6530 {
6531 tree init = ctor_for_folding (base);
6532
688010ba 6533 /* Our semantics are the exact opposite of ctor_for_folding;
6a6dac52
JH
6534 NULL means unknown, while error_mark_node is 0. */
6535 if (init == error_mark_node)
6536 return NULL_TREE;
6537 if (!init)
6538 return error_mark_node;
6539 return init;
6540 }
cfef45c8 6541
13e88953
RB
6542 case VIEW_CONVERT_EXPR:
6543 return get_base_constructor (TREE_OPERAND (base, 0),
6544 bit_offset, valueize);
6545
cfef45c8
RG
6546 case ARRAY_REF:
6547 case COMPONENT_REF:
ee45a32d
EB
6548 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6549 &reverse);
588db50c 6550 if (!known_size_p (max_size) || maybe_ne (size, max_size))
cfef45c8
RG
6551 return NULL_TREE;
6552 *bit_offset += bit_offset2;
6553 return get_base_constructor (base, bit_offset, valueize);
6554
cfef45c8
RG
6555 case CONSTRUCTOR:
6556 return base;
6557
6558 default:
13e88953
RB
6559 if (CONSTANT_CLASS_P (base))
6560 return base;
6561
cfef45c8
RG
6562 return NULL_TREE;
6563 }
6564}
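
/* Illustrative behavior of get_base_constructor (hypothetical
   declarations).  For

     static const int a[40] = { ... };

   a BASE of MEM_REF (&a, 8) yields the CONSTRUCTOR of 'a' with
   *BIT_OFFSET increased by 8 * BITS_PER_UNIT, while for

     static const int b;

   the special case documented above returns error_mark_node because the
   contents are known to be zero even though no initializer is given.  */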
6565
35b4d3a6
MS
6566/* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6567 to the memory at bit OFFSET. When non-null, TYPE is the expected
6568 type of the reference; otherwise the type of the referenced element
6569 is used instead. When SIZE is zero, attempt to fold a reference to
6570 the entire element which OFFSET refers to. Increment *SUBOFF by
6571 the bit offset of the accessed element. */
cfef45c8
RG
6572
6573static tree
6574fold_array_ctor_reference (tree type, tree ctor,
6575 unsigned HOST_WIDE_INT offset,
c44c2088 6576 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
6577 tree from_decl,
6578 unsigned HOST_WIDE_INT *suboff)
cfef45c8 6579{
807e902e
KZ
6580 offset_int low_bound;
6581 offset_int elt_size;
807e902e 6582 offset_int access_index;
6a636014 6583 tree domain_type = NULL_TREE;
cfef45c8
RG
6584 HOST_WIDE_INT inner_offset;
6585
6586 /* Compute low bound and elt size. */
eb8f1123
RG
6587 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6588 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
6589 if (domain_type && TYPE_MIN_VALUE (domain_type))
6590 {
6591 /* Static constructors for variably sized objects make no sense. */
9ef2eff0
RB
6592 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6593 return NULL_TREE;
807e902e 6594 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
6595 }
6596 else
807e902e 6597 low_bound = 0;
cfef45c8 6598 /* Static constructors for variably sized objects make no sense. */
9ef2eff0
RB
6599 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6600 return NULL_TREE;
807e902e 6601 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8 6602
35b4d3a6
MS
6603 /* When TYPE is non-null, verify that it specifies a constant-sized
6604 access not larger than the size of an array element. */
6605 if (type
6606 && (!TYPE_SIZE_UNIT (type)
6607 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6608 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6609 || elt_size == 0))
cfef45c8
RG
6610 return NULL_TREE;
6611
6612 /* Compute the array index we look for. */
807e902e
KZ
6613 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6614 elt_size);
27bcd47c 6615 access_index += low_bound;
cfef45c8
RG
6616
6617 /* And offset within the access. */
27bcd47c 6618 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8
RG
6619
6620 /* See if the array field is large enough to span whole access. We do not
6621 care to fold accesses spanning multiple array indexes. */
27bcd47c 6622 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
cfef45c8 6623 return NULL_TREE;
6a636014 6624 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
35b4d3a6
MS
6625 {
6626 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6627 {
6628 /* For the final reference to the entire accessed element
6629 (SIZE is zero), reset INNER_OFFSET, disegard TYPE (which
6630 may be null) in favor of the type of the element, and set
6631 SIZE to the size of the accessed element. */
6632 inner_offset = 0;
6633 type = TREE_TYPE (val);
6634 size = elt_size.to_uhwi () * BITS_PER_UNIT;
6635 }
6636
6637 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6638 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6639 suboff);
6640 }
cfef45c8 6641
35b4d3a6
MS
6642 /* Memory not explicitly mentioned in constructor is 0 (or
6643 the reference is out of range). */
6644 return type ? build_zero_cst (type) : NULL_TREE;
cfef45c8
RG
6645}
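
/* An illustrative fold (hypothetical declaration, assuming 32-bit int).
   For

     static const int a[4] = { 1, 2, 3, 4 };

   a read of SIZE 32 at bit OFFSET 64 selects access_index 2 and folds
   to the constant 3, while memory the constructor does not mention
   (including out-of-range indices) folds to build_zero_cst (TYPE).  */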
6646
35b4d3a6
MS
6647/* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6648 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6649 is the expected type of the reference; otherwise the type of
6650 the referenced member is used instead. When SIZE is zero,
6651 attempt to fold a reference to the entire member which OFFSET
6652 refers to; in this case. Increment *SUBOFF by the bit offset
6653 of the accessed member. */
cfef45c8
RG
6654
6655static tree
6656fold_nonarray_ctor_reference (tree type, tree ctor,
6657 unsigned HOST_WIDE_INT offset,
c44c2088 6658 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
6659 tree from_decl,
6660 unsigned HOST_WIDE_INT *suboff)
cfef45c8
RG
6661{
6662 unsigned HOST_WIDE_INT cnt;
6663 tree cfield, cval;
6664
6665 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6666 cval)
6667 {
6668 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6669 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6670 tree field_size = DECL_SIZE (cfield);
35b4d3a6
MS
6671
6672 if (!field_size)
6673 {
6674 /* Determine the size of the flexible array member from
6675 the size of the initializer provided for it. */
6676 field_size = TYPE_SIZE (TREE_TYPE (cval));
6677 }
cfef45c8
RG
6678
6679 /* Variable sized objects in static constructors make no sense,
6680 but field_size can be NULL for flexible array members. */
6681 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6682 && TREE_CODE (byte_offset) == INTEGER_CST
6683 && (field_size != NULL_TREE
6684 ? TREE_CODE (field_size) == INTEGER_CST
6685 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6686
6687 /* Compute bit offset of the field. */
35b4d3a6
MS
6688 offset_int bitoffset
6689 = (wi::to_offset (field_offset)
6690 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
cfef45c8 6691 /* Compute bit offset where the field ends. */
35b4d3a6 6692 offset_int bitoffset_end;
cfef45c8 6693 if (field_size != NULL_TREE)
807e902e 6694 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 6695 else
807e902e 6696 bitoffset_end = 0;
cfef45c8 6697
35b4d3a6
MS
6698 /* Compute the bit offset of the end of the desired access.
6699 As a special case, if the size of the desired access is
6700 zero, assume the access is to the entire field (and let
6701 the caller make any necessary adjustments based on the
6702 actual bounds of the accessed field). */
6703 offset_int access_end = offset_int (offset);
6704 if (size)
6705 access_end += size;
6706 else
6707 access_end = bitoffset_end;
b8b2b009 6708
35b4d3a6
MS
6709 /* Is there any overlap between the desired access at
6710 [OFFSET, OFFSET+SIZE) and the offset of the field within
6711 the object at [BITOFFSET, BITOFFSET_END)? */
807e902e 6712 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 6713 && (field_size == NULL_TREE
807e902e 6714 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 6715 {
35b4d3a6
MS
6716 *suboff += bitoffset.to_uhwi ();
6717
6718 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6719 {
6720 /* For the final reference to the entire accessed member
6721 (SIZE is zero), reset OFFSET, disregard TYPE (which may
6722 be null) in favor of the type of the member, and set
6723 SIZE to the size of the accessed member. */
6724 offset = bitoffset.to_uhwi ();
6725 type = TREE_TYPE (cval);
6726 size = (bitoffset_end - bitoffset).to_uhwi ();
6727 }
6728
6729 /* We do have overlap. Now see if the field is large enough
6730 to cover the access. Give up for accesses that extend
6731 beyond the end of the object or that span multiple fields. */
807e902e 6732 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 6733 return NULL_TREE;
032c80e9 6734 if (offset < bitoffset)
b8b2b009 6735 return NULL_TREE;
35b4d3a6
MS
6736
6737 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8 6738 return fold_ctor_reference (type, cval,
27bcd47c 6739 inner_offset.to_uhwi (), size,
35b4d3a6 6740 from_decl, suboff);
cfef45c8
RG
6741 }
6742 }
35b4d3a6
MS
6743 /* Memory not explicitly mentioned in constructor is 0. */
6744 return type ? build_zero_cst (type) : NULL_TREE;
cfef45c8
RG
6745}
6746
35b4d3a6
MS
6747/* CTOR is the value initializing the memory. Fold a reference of TYPE and
6748 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
6749 is zero, attempt to fold a reference to the entire subobject
6750 which POLY_OFFSET refers to. This is used when folding accesses to
6751 string members of aggregates. When SUBOFF is non-null, set *SUBOFF
6752 to the bit offset of the accessed subobject. */
cfef45c8 6753
8403c2cf 6754tree
35b4d3a6
MS
6755fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6756 const poly_uint64 &poly_size, tree from_decl,
6757 unsigned HOST_WIDE_INT *suboff /* = NULL */)
cfef45c8
RG
6758{
6759 tree ret;
6760
6761 /* We found the field with exact match. */
35b4d3a6
MS
6762 if (type
6763 && useless_type_conversion_p (type, TREE_TYPE (ctor))
30acf282 6764 && known_eq (poly_offset, 0U))
9d60be38 6765 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 6766
30acf282
RS
6767 /* The remaining optimizations need a constant size and offset. */
6768 unsigned HOST_WIDE_INT size, offset;
6769 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6770 return NULL_TREE;
6771
cfef45c8
RG
6772 /* We are at the end of walk, see if we can view convert the
6773 result. */
6774 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6775 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3d8208ce
TP
6776 && !compare_tree_int (TYPE_SIZE (type), size)
6777 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
cfef45c8 6778 {
9d60be38 6779 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 6780 if (ret)
672d9f8e
RB
6781 {
6782 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6783 if (ret)
6784 STRIP_USELESS_TYPE_CONVERSION (ret);
6785 }
cfef45c8
RG
6786 return ret;
6787 }
b2505143
RB
6788 /* For constants and byte-aligned/sized reads try to go through
6789 native_encode/interpret. */
6790 if (CONSTANT_CLASS_P (ctor)
6791 && BITS_PER_UNIT == 8
6792 && offset % BITS_PER_UNIT == 0
6793 && size % BITS_PER_UNIT == 0
6794 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6795 {
6796 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1ff0a84c
JJ
6797 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6798 offset / BITS_PER_UNIT);
6799 if (len > 0)
6800 return native_interpret_expr (type, buf, len);
b2505143 6801 }
cfef45c8
RG
6802 if (TREE_CODE (ctor) == CONSTRUCTOR)
6803 {
35b4d3a6
MS
6804 unsigned HOST_WIDE_INT dummy = 0;
6805 if (!suboff)
6806 suboff = &dummy;
cfef45c8 6807
eb8f1123
RG
6808 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6809 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
c44c2088 6810 return fold_array_ctor_reference (type, ctor, offset, size,
35b4d3a6
MS
6811 from_decl, suboff);
6812
6813 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6814 from_decl, suboff);
cfef45c8
RG
6815 }
6816
6817 return NULL_TREE;
6818}
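
/* Illustrative sketch (not part of gimple-fold.c): one way a caller might
   use fold_ctor_reference to read a constant element out of a static
   initializer.  The constructor CTOR and the chosen byte offset are
   hypothetical; offsets and sizes are passed in bits.  */

static tree
example_read_ctor_word (tree ctor)
{
  unsigned HOST_WIDE_INT suboff = 0;
  /* Read one integer_type_node's worth of bits at byte offset 8 of the
     object initialized by CTOR.  Returns NULL_TREE if the reference
     cannot be folded to a constant.  */
  return fold_ctor_reference (integer_type_node, ctor,
			      8 * BITS_PER_UNIT,
			      tree_to_uhwi (TYPE_SIZE (integer_type_node)),
			      NULL_TREE, &suboff);
}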
6819
6820/* Return the tree representing the element referenced by T if T is an
6821 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
6822 names using VALUEIZE. Return NULL_TREE otherwise. */
6823
6824tree
6825fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6826{
6827 tree ctor, idx, base;
588db50c 6828 poly_int64 offset, size, max_size;
cfef45c8 6829 tree tem;
ee45a32d 6830 bool reverse;
cfef45c8 6831
f8a7df45
RG
6832 if (TREE_THIS_VOLATILE (t))
6833 return NULL_TREE;
6834
3a65ee74 6835 if (DECL_P (t))
cfef45c8
RG
6836 return get_symbol_constant_value (t);
6837
6838 tem = fold_read_from_constant_string (t);
6839 if (tem)
6840 return tem;
6841
6842 switch (TREE_CODE (t))
6843 {
6844 case ARRAY_REF:
6845 case ARRAY_RANGE_REF:
6846 /* Constant indexes are handled well by get_base_constructor.
6847 Only special case variable offsets.
6848 FIXME: This code can't handle nested references with variable indexes
6849 (they will be handled only by iteration of ccp). Perhaps we can bring
6850 get_ref_base_and_extent here and make it use a valueize callback. */
6851 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6852 && valueize
6853 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
588db50c 6854 && poly_int_tree_p (idx))
cfef45c8
RG
6855 {
6856 tree low_bound, unit_size;
6857
6858 /* If the resulting bit-offset is constant, track it. */
6859 if ((low_bound = array_ref_low_bound (t),
588db50c 6860 poly_int_tree_p (low_bound))
cfef45c8 6861 && (unit_size = array_ref_element_size (t),
807e902e 6862 tree_fits_uhwi_p (unit_size)))
cfef45c8 6863 {
588db50c
RS
6864 poly_offset_int woffset
6865 = wi::sext (wi::to_poly_offset (idx)
6866 - wi::to_poly_offset (low_bound),
807e902e
KZ
6867 TYPE_PRECISION (TREE_TYPE (idx)));
6868
588db50c 6869 if (woffset.to_shwi (&offset))
807e902e 6870 {
807e902e
KZ
6871 /* TODO: This code seems wrong, multiply then check
6872 to see if it fits. */
6873 offset *= tree_to_uhwi (unit_size);
6874 offset *= BITS_PER_UNIT;
6875
6876 base = TREE_OPERAND (t, 0);
6877 ctor = get_base_constructor (base, &offset, valueize);
6878 /* Empty constructor. Always fold to 0. */
6879 if (ctor == error_mark_node)
6880 return build_zero_cst (TREE_TYPE (t));
6881 /* Out-of-bounds array access. Value is undefined,
6882 but don't fold. */
588db50c 6883 if (maybe_lt (offset, 0))
807e902e
KZ
6884 return NULL_TREE;
6885 /* We cannot determine the ctor. */
6886 if (!ctor)
6887 return NULL_TREE;
6888 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6889 tree_to_uhwi (unit_size)
6890 * BITS_PER_UNIT,
6891 base);
6892 }
cfef45c8
RG
6893 }
6894 }
6895 /* Fallthru. */
6896
6897 case COMPONENT_REF:
6898 case BIT_FIELD_REF:
6899 case TARGET_MEM_REF:
6900 case MEM_REF:
ee45a32d 6901 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
cfef45c8
RG
6902 ctor = get_base_constructor (base, &offset, valueize);
6903
6904 /* Empty constructor. Always fold to 0. */
6905 if (ctor == error_mark_node)
6906 return build_zero_cst (TREE_TYPE (t));
6907 /* We do not know the precise address. */
588db50c 6908 if (!known_size_p (max_size) || maybe_ne (max_size, size))
cfef45c8
RG
6909 return NULL_TREE;
6910 /* We cannot determine the ctor. */
6911 if (!ctor)
6912 return NULL_TREE;
6913
6914 /* Out-of-bounds array access. Value is undefined, but don't fold. */
588db50c 6915 if (maybe_lt (offset, 0))
cfef45c8
RG
6916 return NULL_TREE;
6917
c44c2088
JH
6918 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6919 base);
cfef45c8
RG
6920
6921 case REALPART_EXPR:
6922 case IMAGPART_EXPR:
6923 {
6924 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6925 if (c && TREE_CODE (c) == COMPLEX_CST)
6926 return fold_build1_loc (EXPR_LOCATION (t),
6927 TREE_CODE (t), TREE_TYPE (t), c);
6928 break;
6929 }
6930
6931 default:
6932 break;
6933 }
6934
6935 return NULL_TREE;
6936}
6937
6938tree
6939fold_const_aggregate_ref (tree t)
6940{
6941 return fold_const_aggregate_ref_1 (t, NULL);
6942}
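
/* Illustrative sketch (not part of gimple-fold.c): a pass walking
   statements could try to replace a load from a constant aggregate,
   e.g. "x = cst_array[2]", by the constant itself.  STMT is
   hypothetical.  */

static tree
example_fold_constant_load (gimple *stmt)
{
  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;
  /* Returns the folded constant, or NULL_TREE if nothing was found.  */
  return fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
}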
06bc3ec7 6943
85942f45 6944/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
6945 at OFFSET.
6946 If CAN_REFER is non-NULL, set *CAN_REFER to false if the method
6947 is not referable or if the virtual table is ill-formed (such as one rewritten
6948 by a non-C++-produced symbol); if CAN_REFER is NULL, just return NULL_TREE in that case. */
81fa35bd
MJ
6949
6950tree
85942f45
JH
6951gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6952 tree v,
ec77d61f
JH
6953 unsigned HOST_WIDE_INT offset,
6954 bool *can_refer)
81fa35bd 6955{
85942f45
JH
6956 tree vtable = v, init, fn;
6957 unsigned HOST_WIDE_INT size;
8c311b50
JH
6958 unsigned HOST_WIDE_INT elt_size, access_index;
6959 tree domain_type;
81fa35bd 6960
ec77d61f
JH
6961 if (can_refer)
6962 *can_refer = true;
6963
9de2f554 6964 /* First of all, double-check that we have a virtual table. */
8813a647 6965 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
ec77d61f 6966 {
ec77d61f
JH
6967 /* Pass down that we lost track of the target. */
6968 if (can_refer)
6969 *can_refer = false;
6970 return NULL_TREE;
6971 }
9de2f554 6972
2aa3da06
JH
6973 init = ctor_for_folding (v);
6974
9de2f554 6975 /* The virtual tables should always be born with constructors
2aa3da06
JH
6976 and we should always assume that they are available for
6977 folding. At the moment we do not stream them in all cases,
6978 but it should never happen that the ctor seems unreachable. */
6979 gcc_assert (init);
6980 if (init == error_mark_node)
6981 {
ec77d61f
JH
6982 /* Pass down that we lost track of the target. */
6983 if (can_refer)
6984 *can_refer = false;
2aa3da06
JH
6985 return NULL_TREE;
6986 }
81fa35bd 6987 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 6988 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 6989 offset *= BITS_PER_UNIT;
81fa35bd 6990 offset += token * size;
9de2f554 6991
8c311b50
JH
6992 /* Lookup the value in the constructor that is assumed to be an array.
6993 This is equivalent to
6994 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6995 offset, size, NULL);
6996 but in constant time. We expect that the frontend produced a simple
6997 array without indexed initializers. */
6998
6999 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
7000 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
7001 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
7002 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
7003
7004 access_index = offset / BITS_PER_UNIT / elt_size;
7005 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
7006
bf8d8309
MP
7007 /* The C++ FE can now produce indexed fields, and we check if the indexes
7008 match. */
8c311b50
JH
7009 if (access_index < CONSTRUCTOR_NELTS (init))
7010 {
7011 fn = CONSTRUCTOR_ELT (init, access_index)->value;
bf8d8309
MP
7012 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7013 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8c311b50
JH
7014 STRIP_NOPS (fn);
7015 }
7016 else
7017 fn = NULL;
9de2f554
JH
7018
7019 /* For a type-inconsistent program we may end up looking up a virtual method
7020 in a virtual table that does not contain TOKEN entries. We may overrun
7021 the virtual table and pick up a constant or RTTI info pointer.
7022 In any case the call is undefined. */
7023 if (!fn
7024 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7025 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7026 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7027 else
7028 {
7029 fn = TREE_OPERAND (fn, 0);
7030
7031 /* When the cgraph node is missing and the function is not public, we cannot
7032 devirtualize. This can happen in WHOPR when the actual method
7033 ends up in another partition, because we found the devirtualization
7034 possibility too late. */
7035 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
7036 {
7037 if (can_refer)
7038 {
7039 *can_refer = false;
7040 return fn;
7041 }
7042 return NULL_TREE;
7043 }
9de2f554 7044 }
81fa35bd 7045
7501ca28
RG
7046 /* Make sure we create a cgraph node for functions we'll reference.
7047 They can be non-existent if the reference comes from an entry
7048 of an external vtable for example. */
d52f5295 7049 cgraph_node::get_create (fn);
7501ca28 7050
81fa35bd
MJ
7051 return fn;
7052}
7053
85942f45
JH
7054/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7055 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7056 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
7057 OBJ_TYPE_REF_OBJECT(REF).
7058 If CAN_REFER is non-NULL, set *CAN_REFER to false if the method
7059 is not referable or if the virtual table is ill-formed (such as one rewritten
7060 by a non-C++-produced symbol); if CAN_REFER is NULL, just return NULL_TREE in that case. */
85942f45
JH
7061
7062tree
ec77d61f
JH
7063gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7064 bool *can_refer)
85942f45
JH
7065{
7066 unsigned HOST_WIDE_INT offset;
7067 tree v;
7068
7069 v = BINFO_VTABLE (known_binfo);
7070 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
7071 if (!v)
7072 return NULL_TREE;
7073
7074 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
7075 {
7076 if (can_refer)
7077 *can_refer = false;
7078 return NULL_TREE;
7079 }
7080 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
7081}
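
/* Illustrative sketch (not part of gimple-fold.c): attempting to
   devirtualize an OBJ_TYPE_REF call once the dynamic type (and thus
   KNOWN_BINFO) is known.  CALL and KNOWN_BINFO are hypothetical.  */

static tree
example_devirtualize_call (gcall *call, tree known_binfo)
{
  tree callee = gimple_call_fn (call);
  if (!callee || TREE_CODE (callee) != OBJ_TYPE_REF)
    return NULL_TREE;

  HOST_WIDE_INT token = tree_to_shwi (OBJ_TYPE_REF_TOKEN (callee));
  bool can_refer;
  tree fndecl = gimple_get_virt_method_for_binfo (token, known_binfo,
						  &can_refer);
  /* A NULL or unreferable result means the call must be left alone.  */
  return can_refer ? fndecl : NULL_TREE;
}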
7082
737f500a
RB
7083/* Given a pointer value T, return a simplified version of an
7084 indirection through T, or NULL_TREE if no simplification is
b184c8f1
AM
7085 possible. Note that the resulting type may be different from
7086 the type pointed to in the sense that it is still compatible
7087 from the langhooks point of view. */
7088
7089tree
7090gimple_fold_indirect_ref (tree t)
7091{
7092 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
7093 tree sub = t;
7094 tree subtype;
7095
7096 STRIP_NOPS (sub);
7097 subtype = TREE_TYPE (sub);
737f500a
RB
7098 if (!POINTER_TYPE_P (subtype)
7099 || TYPE_REF_CAN_ALIAS_ALL (ptype))
b184c8f1
AM
7100 return NULL_TREE;
7101
7102 if (TREE_CODE (sub) == ADDR_EXPR)
7103 {
7104 tree op = TREE_OPERAND (sub, 0);
7105 tree optype = TREE_TYPE (op);
7106 /* *&p => p */
7107 if (useless_type_conversion_p (type, optype))
7108 return op;
7109
7110 /* *(foo *)&fooarray => fooarray[0] */
7111 if (TREE_CODE (optype) == ARRAY_TYPE
7112 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7113 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7114 {
7115 tree type_domain = TYPE_DOMAIN (optype);
7116 tree min_val = size_zero_node;
7117 if (type_domain && TYPE_MIN_VALUE (type_domain))
7118 min_val = TYPE_MIN_VALUE (type_domain);
7119 if (TREE_CODE (min_val) == INTEGER_CST)
7120 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7121 }
7122 /* *(foo *)&complexfoo => __real__ complexfoo */
7123 else if (TREE_CODE (optype) == COMPLEX_TYPE
7124 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7125 return fold_build1 (REALPART_EXPR, type, op);
7126 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7127 else if (TREE_CODE (optype) == VECTOR_TYPE
7128 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7129 {
7130 tree part_width = TYPE_SIZE (type);
7131 tree index = bitsize_int (0);
7132 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7133 }
7134 }
7135
7136 /* *(p + CST) -> ... */
7137 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7138 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7139 {
7140 tree addr = TREE_OPERAND (sub, 0);
7141 tree off = TREE_OPERAND (sub, 1);
7142 tree addrtype;
7143
7144 STRIP_NOPS (addr);
7145 addrtype = TREE_TYPE (addr);
7146
7147 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7148 if (TREE_CODE (addr) == ADDR_EXPR
7149 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7150 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
cc269bb6 7151 && tree_fits_uhwi_p (off))
b184c8f1 7152 {
ae7e9ddd 7153 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
b184c8f1
AM
7154 tree part_width = TYPE_SIZE (type);
7155 unsigned HOST_WIDE_INT part_widthi
9439e9a1 7156 = tree_to_shwi (part_width) / BITS_PER_UNIT;
b184c8f1
AM
7157 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7158 tree index = bitsize_int (indexi);
928686b1
RS
7159 if (known_lt (offset / part_widthi,
7160 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
b184c8f1
AM
7161 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7162 part_width, index);
7163 }
7164
7165 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7166 if (TREE_CODE (addr) == ADDR_EXPR
7167 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7168 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7169 {
7170 tree size = TYPE_SIZE_UNIT (type);
7171 if (tree_int_cst_equal (size, off))
7172 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7173 }
7174
7175 /* *(p + CST) -> MEM_REF <p, CST>. */
7176 if (TREE_CODE (addr) != ADDR_EXPR
7177 || DECL_P (TREE_OPERAND (addr, 0)))
7178 return fold_build2 (MEM_REF, type,
7179 addr,
8e6cdc90 7180 wide_int_to_tree (ptype, wi::to_wide (off)));
b184c8f1
AM
7181 }
7182
7183 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7184 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7185 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7186 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7187 {
7188 tree type_domain;
7189 tree min_val = size_zero_node;
7190 tree osub = sub;
7191 sub = gimple_fold_indirect_ref (sub);
7192 if (! sub)
7193 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7194 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7195 if (type_domain && TYPE_MIN_VALUE (type_domain))
7196 min_val = TYPE_MIN_VALUE (type_domain);
7197 if (TREE_CODE (min_val) == INTEGER_CST)
7198 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7199 }
7200
7201 return NULL_TREE;
7202}
19e51b40
JJ
7203
7204/* Return true if CODE is an operation that when operating on signed
7205 integer types involves undefined behavior on overflow and the
7206 operation can be expressed with unsigned arithmetic. */
7207
7208bool
7209arith_code_with_undefined_signed_overflow (tree_code code)
7210{
7211 switch (code)
7212 {
7213 case PLUS_EXPR:
7214 case MINUS_EXPR:
7215 case MULT_EXPR:
7216 case NEGATE_EXPR:
7217 case POINTER_PLUS_EXPR:
7218 return true;
7219 default:
7220 return false;
7221 }
7222}
7223
7224/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7225 operation that can be transformed to unsigned arithmetic by converting
7226 its operand, carrying out the operation in the corresponding unsigned
7227 type and converting the result back to the original type.
7228
7229 Returns a sequence of statements that replace STMT and also contain
7230 a modified form of STMT itself. */
7231
7232gimple_seq
355fe088 7233rewrite_to_defined_overflow (gimple *stmt)
19e51b40
JJ
7234{
7235 if (dump_file && (dump_flags & TDF_DETAILS))
7236 {
7237 fprintf (dump_file, "rewriting stmt with undefined signed "
7238 "overflow ");
7239 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7240 }
7241
7242 tree lhs = gimple_assign_lhs (stmt);
7243 tree type = unsigned_type_for (TREE_TYPE (lhs));
7244 gimple_seq stmts = NULL;
7245 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7246 {
74e3c262
RB
7247 tree op = gimple_op (stmt, i);
7248 op = gimple_convert (&stmts, type, op);
7249 gimple_set_op (stmt, i, op);
19e51b40
JJ
7250 }
7251 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7252 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7253 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7254 gimple_seq_add_stmt (&stmts, stmt);
355fe088 7255 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
19e51b40
JJ
7256 gimple_seq_add_stmt (&stmts, cvt);
7257
7258 return stmts;
7259}
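
/* Illustrative sketch (not part of gimple-fold.c): a pass that wants to
   move signed arithmetic to a place where its undefined-overflow
   assumptions no longer hold can rewrite it to unsigned first.  GSI is
   a hypothetical iterator at the assignment to be rewritten.  */

static void
example_rewrite_if_needed (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  if (is_gimple_assign (stmt)
      && arith_code_with_undefined_signed_overflow
	   (gimple_assign_rhs_code (stmt))
      && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt)))
      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (gimple_assign_lhs (stmt))))
    {
      /* Replace STMT by the unsigned sequence, which ends with a
	 conversion back to the original signed type.  */
      gimple_seq seq = rewrite_to_defined_overflow (stmt);
      gsi_replace_with_seq (gsi, seq, true);
    }
}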
d4f5cd5e 7260
3d2cf79f 7261
c26de36d
RB
7262/* The valueization hook we use for the gimple_build API simplification.
7263 This makes us match fold_buildN behavior by only combining with
7264 statements in the sequence(s) we are currently building. */
7265
7266static tree
7267gimple_build_valueize (tree op)
7268{
7269 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7270 return op;
7271 return NULL_TREE;
7272}
7273
3d2cf79f 7274/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 7275 simplifying it first if possible. Returns the built
3d2cf79f
RB
7276 expression value and appends statements possibly defining it
7277 to SEQ. */
7278
7279tree
7280gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7281 enum tree_code code, tree type, tree op0)
3d2cf79f 7282{
c26de36d 7283 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
7284 if (!res)
7285 {
a15ebbcd 7286 res = create_tmp_reg_or_ssa_name (type);
355fe088 7287 gimple *stmt;
3d2cf79f
RB
7288 if (code == REALPART_EXPR
7289 || code == IMAGPART_EXPR
7290 || code == VIEW_CONVERT_EXPR)
0d0e4a03 7291 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 7292 else
0d0e4a03 7293 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
7294 gimple_set_location (stmt, loc);
7295 gimple_seq_add_stmt_without_update (seq, stmt);
7296 }
7297 return res;
7298}
7299
7300/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 7301 simplifying it first if possible. Returns the built
3d2cf79f
RB
7302 expression value and appends statements possibly defining it
7303 to SEQ. */
7304
7305tree
7306gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7307 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 7308{
c26de36d 7309 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
7310 if (!res)
7311 {
a15ebbcd 7312 res = create_tmp_reg_or_ssa_name (type);
355fe088 7313 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
7314 gimple_set_location (stmt, loc);
7315 gimple_seq_add_stmt_without_update (seq, stmt);
7316 }
7317 return res;
7318}
7319
7320/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 7321 simplifying it first if possible. Returns the built
3d2cf79f
RB
7322 expression value and appends statements possibly defining it
7323 to SEQ. */
7324
7325tree
7326gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7327 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
7328{
7329 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 7330 seq, gimple_build_valueize);
3d2cf79f
RB
7331 if (!res)
7332 {
a15ebbcd 7333 res = create_tmp_reg_or_ssa_name (type);
355fe088 7334 gimple *stmt;
3d2cf79f 7335 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
7336 stmt = gimple_build_assign (res, code,
7337 build3 (code, type, op0, op1, op2));
3d2cf79f 7338 else
0d0e4a03 7339 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
7340 gimple_set_location (stmt, loc);
7341 gimple_seq_add_stmt_without_update (seq, stmt);
7342 }
7343 return res;
7344}
7345
7346/* Build the call FN (ARG0) with a result of type TYPE
7347 (or no result if TYPE is void) with location LOC,
c26de36d 7348 simplifying it first if possible. Returns the built
3d2cf79f
RB
7349 expression value (or NULL_TREE if TYPE is void) and appends
7350 statements possibly defining it to SEQ. */
7351
7352tree
eb69361d
RS
7353gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7354 tree type, tree arg0)
3d2cf79f 7355{
c26de36d 7356 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
7357 if (!res)
7358 {
eb69361d
RS
7359 gcall *stmt;
7360 if (internal_fn_p (fn))
7361 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7362 else
7363 {
7364 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7365 stmt = gimple_build_call (decl, 1, arg0);
7366 }
3d2cf79f
RB
7367 if (!VOID_TYPE_P (type))
7368 {
a15ebbcd 7369 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7370 gimple_call_set_lhs (stmt, res);
7371 }
7372 gimple_set_location (stmt, loc);
7373 gimple_seq_add_stmt_without_update (seq, stmt);
7374 }
7375 return res;
7376}
7377
7378/* Build the call FN (ARG0, ARG1) with a result of type TYPE
7379 (or no result if TYPE is void) with location LOC,
c26de36d 7380 simplifying it first if possible. Returns the built
3d2cf79f
RB
7381 expression value (or NULL_TREE if TYPE is void) and appends
7382 statements possibly defining it to SEQ. */
7383
7384tree
eb69361d
RS
7385gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7386 tree type, tree arg0, tree arg1)
3d2cf79f 7387{
c26de36d 7388 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
7389 if (!res)
7390 {
eb69361d
RS
7391 gcall *stmt;
7392 if (internal_fn_p (fn))
7393 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7394 else
7395 {
7396 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7397 stmt = gimple_build_call (decl, 2, arg0, arg1);
7398 }
3d2cf79f
RB
7399 if (!VOID_TYPE_P (type))
7400 {
a15ebbcd 7401 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7402 gimple_call_set_lhs (stmt, res);
7403 }
7404 gimple_set_location (stmt, loc);
7405 gimple_seq_add_stmt_without_update (seq, stmt);
7406 }
7407 return res;
7408}
7409
7410/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7411 (or no result if TYPE is void) with location LOC,
c26de36d 7412 simplifying it first if possible. Returns the built
3d2cf79f
RB
7413 expression value (or NULL_TREE if TYPE is void) and appends
7414 statements possibly defining it to SEQ. */
7415
7416tree
eb69361d
RS
7417gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7418 tree type, tree arg0, tree arg1, tree arg2)
3d2cf79f 7419{
c26de36d
RB
7420 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7421 seq, gimple_build_valueize);
3d2cf79f
RB
7422 if (!res)
7423 {
eb69361d
RS
7424 gcall *stmt;
7425 if (internal_fn_p (fn))
7426 stmt = gimple_build_call_internal (as_internal_fn (fn),
7427 3, arg0, arg1, arg2);
7428 else
7429 {
7430 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7431 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7432 }
3d2cf79f
RB
7433 if (!VOID_TYPE_P (type))
7434 {
a15ebbcd 7435 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7436 gimple_call_set_lhs (stmt, res);
7437 }
7438 gimple_set_location (stmt, loc);
7439 gimple_seq_add_stmt_without_update (seq, stmt);
7440 }
7441 return res;
7442}
7443
7444/* Build the conversion (TYPE) OP with a result of type TYPE
7445 with location LOC if such conversion is necessary in GIMPLE,
7446 simplifying it first.
7447 Returns the built expression value and appends
7448 statements possibly defining it to SEQ. */
d4f5cd5e
RB
7449
7450tree
7451gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7452{
7453 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7454 return op;
3d2cf79f 7455 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 7456}
68e57f04 7457
74e3c262
RB
7458/* Build the conversion (ptrofftype) OP with a result of a type
7459 compatible with ptrofftype with location LOC if such conversion
7460 is necessary in GIMPLE, simplifying it first.
7461 Returns the built expression value and appends
7462 statements possibly defining it to SEQ. */
7463
7464tree
7465gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7466{
7467 if (ptrofftype_p (TREE_TYPE (op)))
7468 return op;
7469 return gimple_convert (seq, loc, sizetype, op);
7470}
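
/* Illustrative sketch (not part of gimple-fold.c): using the gimple_build
   API to emit "a * b + c" of type TYPE, letting gimple_simplify fold the
   intermediate steps where possible.  GSI, TYPE, A, B and C are
   hypothetical.  */

static tree
example_build_mul_add (gimple_stmt_iterator *gsi, tree type,
		       tree a, tree b, tree c)
{
  gimple_seq seq = NULL;
  tree prod = gimple_build (&seq, UNKNOWN_LOCATION, MULT_EXPR, type, a, b);
  tree sum = gimple_build (&seq, UNKNOWN_LOCATION, PLUS_EXPR, type, prod, c);
  /* Any statements that were actually needed are inserted before the
     statement GSI points to.  */
  gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
  return sum;
}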
7471
e7c45b66
RS
7472/* Build a vector of type TYPE in which each element has the value OP.
7473 Return a gimple value for the result, appending any new statements
7474 to SEQ. */
7475
7476tree
7477gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7478 tree op)
7479{
928686b1
RS
7480 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7481 && !CONSTANT_CLASS_P (op))
7482 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7483
e7c45b66
RS
7484 tree res, vec = build_vector_from_val (type, op);
7485 if (is_gimple_val (vec))
7486 return vec;
7487 if (gimple_in_ssa_p (cfun))
7488 res = make_ssa_name (type);
7489 else
7490 res = create_tmp_reg (type);
7491 gimple *stmt = gimple_build_assign (res, vec);
7492 gimple_set_location (stmt, loc);
7493 gimple_seq_add_stmt_without_update (seq, stmt);
7494 return res;
7495}
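
/* Illustrative sketch (not part of gimple-fold.c): materializing the
   splat { x, x, ... } of a hypothetical vector type VECTYPE, inserting
   any statements this requires before GSI.  */

static tree
example_build_splat (gimple_stmt_iterator *gsi, tree vectype, tree x)
{
  gimple_seq seq = NULL;
  tree vec = gimple_build_vector_from_val (&seq, UNKNOWN_LOCATION,
					   vectype, x);
  gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
  return vec;
}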
7496
abe73c3d
RS
7497/* Build a vector from BUILDER, handling the case in which some elements
7498 are non-constant. Return a gimple value for the result, appending any
7499 new instructions to SEQ.
7500
7501 BUILDER must not have a stepped encoding on entry. This is because
7502 the function is not geared up to handle the arithmetic that would
7503 be needed in the variable case, and any code building a vector that
7504 is known to be constant should use BUILDER->build () directly. */
e7c45b66
RS
7505
7506tree
abe73c3d
RS
7507gimple_build_vector (gimple_seq *seq, location_t loc,
7508 tree_vector_builder *builder)
e7c45b66 7509{
abe73c3d
RS
7510 gcc_assert (builder->nelts_per_pattern () <= 2);
7511 unsigned int encoded_nelts = builder->encoded_nelts ();
7512 for (unsigned int i = 0; i < encoded_nelts; ++i)
7513 if (!TREE_CONSTANT ((*builder)[i]))
e7c45b66 7514 {
abe73c3d 7515 tree type = builder->type ();
928686b1 7516 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
e7c45b66
RS
7517 vec<constructor_elt, va_gc> *v;
7518 vec_alloc (v, nelts);
7519 for (i = 0; i < nelts; ++i)
abe73c3d 7520 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
e7c45b66
RS
7521
7522 tree res;
7523 if (gimple_in_ssa_p (cfun))
7524 res = make_ssa_name (type);
7525 else
7526 res = create_tmp_reg (type);
7527 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7528 gimple_set_location (stmt, loc);
7529 gimple_seq_add_stmt_without_update (seq, stmt);
7530 return res;
7531 }
abe73c3d 7532 return builder->build ();
e7c45b66
RS
7533}
7534
68e57f04
RS
7535/* Return true if the result of assignment STMT is known to be non-negative.
7536 If the return value is based on the assumption that signed overflow is
7537 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7538 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7539
7540static bool
7541gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7542 int depth)
7543{
7544 enum tree_code code = gimple_assign_rhs_code (stmt);
7545 switch (get_gimple_rhs_class (code))
7546 {
7547 case GIMPLE_UNARY_RHS:
7548 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7549 gimple_expr_type (stmt),
7550 gimple_assign_rhs1 (stmt),
7551 strict_overflow_p, depth);
7552 case GIMPLE_BINARY_RHS:
7553 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7554 gimple_expr_type (stmt),
7555 gimple_assign_rhs1 (stmt),
7556 gimple_assign_rhs2 (stmt),
7557 strict_overflow_p, depth);
7558 case GIMPLE_TERNARY_RHS:
7559 return false;
7560 case GIMPLE_SINGLE_RHS:
7561 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7562 strict_overflow_p, depth);
7563 case GIMPLE_INVALID_RHS:
7564 break;
7565 }
7566 gcc_unreachable ();
7567}
7568
7569/* Return true if return value of call STMT is known to be non-negative.
7570 If the return value is based on the assumption that signed overflow is
7571 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7572 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7573
7574static bool
7575gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7576 int depth)
7577{
7578 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7579 gimple_call_arg (stmt, 0) : NULL_TREE;
7580 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7581 gimple_call_arg (stmt, 1) : NULL_TREE;
7582
7583 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
1d9da71f 7584 gimple_call_combined_fn (stmt),
68e57f04
RS
7585 arg0,
7586 arg1,
7587 strict_overflow_p, depth);
7588}
7589
4534c203
RB
7590/* Return true if return value of call STMT is known to be non-negative.
7591 If the return value is based on the assumption that signed overflow is
7592 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7593 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7594
7595static bool
7596gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7597 int depth)
7598{
7599 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7600 {
7601 tree arg = gimple_phi_arg_def (stmt, i);
7602 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7603 return false;
7604 }
7605 return true;
7606}
7607
68e57f04
RS
7608/* Return true if STMT is known to compute a non-negative value.
7609 If the return value is based on the assumption that signed overflow is
7610 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7611 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7612
7613bool
7614gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7615 int depth)
7616{
7617 switch (gimple_code (stmt))
7618 {
7619 case GIMPLE_ASSIGN:
7620 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7621 depth);
7622 case GIMPLE_CALL:
7623 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7624 depth);
4534c203
RB
7625 case GIMPLE_PHI:
7626 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7627 depth);
68e57f04
RS
7628 default:
7629 return false;
7630 }
7631}
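
/* Illustrative sketch (not part of gimple-fold.c): asking whether the
   value of an SSA name is known to be non-negative by looking at its
   defining statement.  NAME is hypothetical.  */

static bool
example_ssa_name_nonnegative_p (tree name)
{
  bool strict_overflow = false;
  gimple *def_stmt = SSA_NAME_DEF_STMT (name);
  /* A true result may rely on signed overflow being undefined, in
     which case STRICT_OVERFLOW is set.  */
  return gimple_stmt_nonnegative_warnv_p (def_stmt, &strict_overflow, 0);
}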
67dbe582
RS
7632
7633/* Return true if the floating-point value computed by assignment STMT
7634 is known to have an integer value. We also allow +Inf, -Inf and NaN
5a00b0aa 7635 to be considered integer values. Return false for signaling NaN.
67dbe582
RS
7636
7637 DEPTH is the current nesting depth of the query. */
7638
7639static bool
7640gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7641{
7642 enum tree_code code = gimple_assign_rhs_code (stmt);
7643 switch (get_gimple_rhs_class (code))
7644 {
7645 case GIMPLE_UNARY_RHS:
7646 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7647 gimple_assign_rhs1 (stmt), depth);
7648 case GIMPLE_BINARY_RHS:
7649 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7650 gimple_assign_rhs1 (stmt),
7651 gimple_assign_rhs2 (stmt), depth);
7652 case GIMPLE_TERNARY_RHS:
7653 return false;
7654 case GIMPLE_SINGLE_RHS:
7655 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7656 case GIMPLE_INVALID_RHS:
7657 break;
7658 }
7659 gcc_unreachable ();
7660}
7661
7662/* Return true if the floating-point value computed by call STMT is known
7663 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7664 considered integer values. Return false for signaling NaN.
67dbe582
RS
7665
7666 DEPTH is the current nesting depth of the query. */
7667
7668static bool
7669gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7670{
7671 tree arg0 = (gimple_call_num_args (stmt) > 0
7672 ? gimple_call_arg (stmt, 0)
7673 : NULL_TREE);
7674 tree arg1 = (gimple_call_num_args (stmt) > 1
7675 ? gimple_call_arg (stmt, 1)
7676 : NULL_TREE);
1d9da71f 7677 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
7678 arg0, arg1, depth);
7679}
7680
7681/* Return true if the floating-point result of phi STMT is known to have
7682 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 7683 integer values. Return false for signaling NaN.
67dbe582
RS
7684
7685 DEPTH is the current nesting depth of the query. */
7686
7687static bool
7688gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7689{
7690 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7691 {
7692 tree arg = gimple_phi_arg_def (stmt, i);
7693 if (!integer_valued_real_single_p (arg, depth + 1))
7694 return false;
7695 }
7696 return true;
7697}
7698
7699/* Return true if the floating-point value computed by STMT is known
7700 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7701 considered integer values. Return false for signaling NaN.
67dbe582
RS
7702
7703 DEPTH is the current nesting depth of the query. */
7704
7705bool
7706gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7707{
7708 switch (gimple_code (stmt))
7709 {
7710 case GIMPLE_ASSIGN:
7711 return gimple_assign_integer_valued_real_p (stmt, depth);
7712 case GIMPLE_CALL:
7713 return gimple_call_integer_valued_real_p (stmt, depth);
7714 case GIMPLE_PHI:
7715 return gimple_phi_integer_valued_real_p (stmt, depth);
7716 default:
7717 return false;
7718 }
7719}