]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
2018-07-23 Bernd Edlinger <bernd.edlinger@hotmail.de>
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
85ec4feb 2 Copyright (C) 2010-2018 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5
AM
25#include "target.h"
26#include "rtl.h"
cbdd87d4 27#include "tree.h"
c7131fb2 28#include "gimple.h"
957060b5 29#include "predict.h"
c7131fb2 30#include "ssa.h"
957060b5
AM
31#include "cgraph.h"
32#include "gimple-pretty-print.h"
cc8bea0a 33#include "gimple-ssa-warn-restrict.h"
c7131fb2 34#include "fold-const.h"
36566b39
PK
35#include "stmt.h"
36#include "expr.h"
37#include "stor-layout.h"
7ee2468b 38#include "dumpfile.h"
2fb9a547 39#include "gimple-fold.h"
45b0be94 40#include "gimplify.h"
5be5c238 41#include "gimple-iterator.h"
442b4905
AM
42#include "tree-into-ssa.h"
43#include "tree-dfa.h"
025d57f0 44#include "tree-object-size.h"
7a300452 45#include "tree-ssa.h"
cbdd87d4 46#include "tree-ssa-propagate.h"
450ad0cd 47#include "ipa-utils.h"
4484a35a 48#include "tree-ssa-address.h"
862d0b35 49#include "langhooks.h"
19e51b40 50#include "gimplify-me.h"
2b5f0895 51#include "dbgcnt.h"
9b2b7279 52#include "builtins.h"
e0ee10ed
RB
53#include "tree-eh.h"
54#include "gimple-match.h"
48126138 55#include "gomp-constants.h"
f869c12f 56#include "optabs-query.h"
629b3d75 57#include "omp-general.h"
abd3a68c 58#include "tree-cfg.h"
a918bfbf 59#include "fold-const-call.h"
314e6352
ML
60#include "stringpool.h"
61#include "attribs.h"
45b2222a 62#include "asan.h"
025d57f0
MS
63#include "diagnostic-core.h"
64#include "intl.h"
6a33d0ff 65#include "calls.h"
5ebaa477 66#include "tree-vector-builder.h"
5d0d5d68 67#include "tree-ssa-strlen.h"
cbdd87d4 68
b3b9f3d0 69/* Return true when DECL can be referenced from current unit.
c44c2088
JH
70 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
71 We can get declarations that are not possible to reference for various
72 reasons:
1389294c 73
1389294c
JH
74 1) When analyzing C++ virtual tables.
75 C++ virtual tables do have known constructors even
76 when they are keyed to other compilation unit.
77 Those tables can contain pointers to methods and vars
78 in other units. Those methods have both STATIC and EXTERNAL
79 set.
80 2) In WHOPR mode devirtualization might lead to reference
81 to method that was partitioned elsehwere.
82 In this case we have static VAR_DECL or FUNCTION_DECL
83 that has no corresponding callgraph/varpool node
b3b9f3d0
JH
84 declaring the body.
85 3) COMDAT functions referred by external vtables that
3e89949e 86 we devirtualize only during final compilation stage.
b3b9f3d0
JH
87 At this time we already decided that we will not output
88 the function body and thus we can't reference the symbol
89 directly. */
90
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (e.g. DWARF-only) entities never have a body to refer to.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they was not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* An inlined-away clone has no independent body to reference.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
168
a15ebbcd
ML
169/* Create a temporary for TYPE for a statement STMT. If the current function
170 is in SSA form, a SSA name is created. Otherwise a temporary register
171 is made. */
172
edc19e03
WS
173tree
174create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
a15ebbcd
ML
175{
176 if (gimple_in_ssa_p (cfun))
177 return make_ssa_name (type, stmt);
178 else
179 return create_tmp_reg (type);
180}
181
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Canonicalize "invariant-pointer + constant offset" into the
     &MEM_REF[ptr + off] form that is_gimple_min_invariant accepts.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal by its underlying decl (if any)
	     so the address becomes a regular decl address.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up when the referenced decl cannot legally be referenced
	 from the current unit (see can_refer_decl_in_current_unit_p).  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      /* Taking the address here makes the variable addressable.  */
      if (VAR_P (base))
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Constants with TREE_OVERFLOW are not valid GIMPLE invariants.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
cbdd87d4
RG
243
244/* If SYM is a constant variable with known value, return the value.
245 NULL_TREE is returned otherwise. */
246
247tree
248get_symbol_constant_value (tree sym)
249{
6a6dac52
JH
250 tree val = ctor_for_folding (sym);
251 if (val != error_mark_node)
cbdd87d4 252 {
cbdd87d4
RG
253 if (val)
254 {
9d60be38 255 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 256 if (val && is_gimple_min_invariant (val))
17f39a39 257 return val;
1389294c
JH
258 else
259 return NULL_TREE;
cbdd87d4
RG
260 }
261 /* Variables declared 'const' without an initializer
262 have zero as the initializer if they may not be
263 overridden at link or run time. */
264 if (!val
b8a8c472 265 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 266 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
267 }
268
269 return NULL_TREE;
270}
271
272
cbdd87d4
RG
273
274/* Subroutine of fold_stmt. We perform several simplifications of the
275 memory reference tree EXPR and make sure to re-gimplify them properly
276 after propagation of constant addresses. IS_LHS is true if the
277 reference is supposed to be an lvalue. */
278
279static tree
280maybe_fold_reference (tree expr, bool is_lhs)
281{
17f39a39 282 tree result;
cbdd87d4 283
f0eddb90
RG
284 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
285 || TREE_CODE (expr) == REALPART_EXPR
286 || TREE_CODE (expr) == IMAGPART_EXPR)
287 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
288 return fold_unary_loc (EXPR_LOCATION (expr),
289 TREE_CODE (expr),
290 TREE_TYPE (expr),
291 TREE_OPERAND (expr, 0));
292 else if (TREE_CODE (expr) == BIT_FIELD_REF
293 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
294 return fold_ternary_loc (EXPR_LOCATION (expr),
295 TREE_CODE (expr),
296 TREE_TYPE (expr),
297 TREE_OPERAND (expr, 0),
298 TREE_OPERAND (expr, 1),
299 TREE_OPERAND (expr, 2));
300
f0eddb90
RG
301 if (!is_lhs
302 && (result = fold_const_aggregate_ref (expr))
303 && is_gimple_min_invariant (result))
304 return result;
cbdd87d4 305
cbdd87d4
RG
306 return NULL_TREE;
307}
308
309
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers carry no value to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	/* Try to devirtualize an OBJ_TYPE_REF to a direct function
	   address when the set of possible targets is known.  */
	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p + 0] simplifies to p (converted to the rhs type).  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    /* Unary and binary cases are handled elsewhere (gimple-match).  */
    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
448
fef5a0d9
RB
449
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* Statements needing a VDEF: stores, and calls with side effects.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence (first seen walking backward)
	     reuses the replaced statement's VDEF; earlier stores get
	     fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
521
cbdd87d4
RG
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* Result is ignored: gimplify EXPR purely for its side effects.  */
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Result is used: gimplify EXPR to a value and assign it to the
	 original call's lhs as the last statement of the sequence.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  /* Splice the sequence in, rewiring virtual operands.  */
  gsi_replace_with_seq_vops (si_p, stmts);
}
cbdd87d4 577
fef5a0d9
RB
578
579/* Replace the call at *GSI with the gimple value VAL. */
580
e3174bdf 581void
fef5a0d9
RB
582replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
583{
355fe088 584 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 585 tree lhs = gimple_call_lhs (stmt);
355fe088 586 gimple *repl;
fef5a0d9 587 if (lhs)
e256dfce 588 {
fef5a0d9
RB
589 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
590 val = fold_convert (TREE_TYPE (lhs), val);
591 repl = gimple_build_assign (lhs, val);
592 }
593 else
594 repl = gimple_build_nop ();
595 tree vdef = gimple_vdef (stmt);
596 if (vdef && TREE_CODE (vdef) == SSA_NAME)
597 {
598 unlink_stmt_vdef (stmt);
599 release_ssa_name (vdef);
600 }
f6b4dc28 601 gsi_replace (gsi, repl, false);
fef5a0d9
RB
602}
603
604/* Replace the call at *GSI with the new call REPL and fold that
605 again. */
606
607static void
355fe088 608replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
fef5a0d9 609{
355fe088 610 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
611 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
612 gimple_set_location (repl, gimple_location (stmt));
613 if (gimple_vdef (stmt)
614 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
615 {
616 gimple_set_vdef (repl, gimple_vdef (stmt));
fef5a0d9
RB
617 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
618 }
00296d7f
JJ
619 if (gimple_vuse (stmt))
620 gimple_set_vuse (repl, gimple_vuse (stmt));
f6b4dc28 621 gsi_replace (gsi, repl, false);
fef5a0d9
RB
622 fold_stmt (gsi);
623}
624
625/* Return true if VAR is a VAR_DECL or a component thereof. */
626
627static bool
628var_decl_component_p (tree var)
629{
630 tree inner = var;
631 while (handled_component_p (inner))
632 inner = TREE_OPERAND (inner, 0);
47cac108
RB
633 return (DECL_P (inner)
634 || (TREE_CODE (inner) == MEM_REF
635 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
fef5a0d9
RB
636}
637
6512c0f1
MS
/* If the SIZE argument representing the size of an object is in a range
   of values of which exactly one is valid (and that is zero), return
   true, otherwise false.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  /* Range info is only available for integral SSA names.  */
  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  wide_int min, max;
  enum value_range_type rtype = get_range_info (size, &min, &max);
  if (rtype != VR_ANTI_RANGE)
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  wide_int wone = wi::one (prec);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;

  /* An anti-range ~[1, N] with N >= SSIZE_MAX excludes every valid
     nonzero size, leaving zero as the only valid value.  */
  return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
}
667
cc8bea0a
MS
668/* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
669 diagnose (otherwise undefined) overlapping copies without preventing
670 folding. When folded, GCC guarantees that overlapping memcpy has
671 the same semantics as memmove. Call to the library memcpy need not
672 provide the same guarantee. Return false if no simplification can
673 be made. */
fef5a0d9
RB
674
675static bool
676gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
677 tree dest, tree src, int endp)
678{
355fe088 679 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
680 tree lhs = gimple_call_lhs (stmt);
681 tree len = gimple_call_arg (stmt, 2);
682 tree destvar, srcvar;
683 location_t loc = gimple_location (stmt);
684
cc8bea0a 685 bool nowarn = gimple_no_warning_p (stmt);
cc8bea0a 686
6512c0f1
MS
687 /* If the LEN parameter is a constant zero or in range where
688 the only valid value is zero, return DEST. */
689 if (size_must_be_zero_p (len))
fef5a0d9 690 {
355fe088 691 gimple *repl;
fef5a0d9
RB
692 if (gimple_call_lhs (stmt))
693 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
694 else
695 repl = gimple_build_nop ();
696 tree vdef = gimple_vdef (stmt);
697 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 698 {
fef5a0d9
RB
699 unlink_stmt_vdef (stmt);
700 release_ssa_name (vdef);
701 }
f6b4dc28 702 gsi_replace (gsi, repl, false);
fef5a0d9
RB
703 return true;
704 }
705
706 /* If SRC and DEST are the same (and not volatile), return
707 DEST{,+LEN,+LEN-1}. */
708 if (operand_equal_p (src, dest, 0))
709 {
cc8bea0a
MS
710 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
711 It's safe and may even be emitted by GCC itself (see bug
e9b9fa4c 712 32667). */
fef5a0d9
RB
713 unlink_stmt_vdef (stmt);
714 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
715 release_ssa_name (gimple_vdef (stmt));
716 if (!lhs)
717 {
f6b4dc28 718 gsi_replace (gsi, gimple_build_nop (), false);
fef5a0d9
RB
719 return true;
720 }
721 goto done;
722 }
723 else
724 {
725 tree srctype, desttype;
726 unsigned int src_align, dest_align;
727 tree off0;
728
729 /* Build accesses at offset zero with a ref-all character type. */
730 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
731 ptr_mode, true), 0);
732
733 /* If we can perform the copy efficiently with first doing all loads
734 and then all stores inline it that way. Currently efficiently
735 means that we can load all the memory into a single integer
736 register which is what MOVE_MAX gives us. */
737 src_align = get_pointer_alignment (src);
738 dest_align = get_pointer_alignment (dest);
739 if (tree_fits_uhwi_p (len)
740 && compare_tree_int (len, MOVE_MAX) <= 0
741 /* ??? Don't transform copies from strings with known length this
742 confuses the tree-ssa-strlen.c. This doesn't handle
743 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
744 reason. */
745 && !c_strlen (src, 2))
746 {
747 unsigned ilen = tree_to_uhwi (len);
146ec50f 748 if (pow2p_hwi (ilen))
fef5a0d9 749 {
cc8bea0a
MS
750 /* Detect invalid bounds and overlapping copies and issue
751 either -Warray-bounds or -Wrestrict. */
752 if (!nowarn
753 && check_bounds_or_overlap (as_a <gcall *>(stmt),
754 dest, src, len, len))
755 gimple_set_no_warning (stmt, true);
756
64ab8765 757 scalar_int_mode mode;
fef5a0d9
RB
758 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
759 if (type
64ab8765
RS
760 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
761 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
fef5a0d9
RB
762 /* If the destination pointer is not aligned we must be able
763 to emit an unaligned store. */
64ab8765 764 && (dest_align >= GET_MODE_ALIGNMENT (mode)
e0bd6c9f 765 || !targetm.slow_unaligned_access (mode, dest_align)
64ab8765 766 || (optab_handler (movmisalign_optab, mode)
f869c12f 767 != CODE_FOR_nothing)))
fef5a0d9
RB
768 {
769 tree srctype = type;
770 tree desttype = type;
64ab8765 771 if (src_align < GET_MODE_ALIGNMENT (mode))
fef5a0d9
RB
772 srctype = build_aligned_type (type, src_align);
773 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
774 tree tem = fold_const_aggregate_ref (srcmem);
775 if (tem)
776 srcmem = tem;
64ab8765 777 else if (src_align < GET_MODE_ALIGNMENT (mode)
e0bd6c9f 778 && targetm.slow_unaligned_access (mode, src_align)
64ab8765 779 && (optab_handler (movmisalign_optab, mode)
f869c12f 780 == CODE_FOR_nothing))
fef5a0d9
RB
781 srcmem = NULL_TREE;
782 if (srcmem)
783 {
355fe088 784 gimple *new_stmt;
fef5a0d9
RB
785 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
786 {
787 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
a15ebbcd
ML
788 srcmem
789 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
790 new_stmt);
fef5a0d9
RB
791 gimple_assign_set_lhs (new_stmt, srcmem);
792 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
793 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
794 }
64ab8765 795 if (dest_align < GET_MODE_ALIGNMENT (mode))
fef5a0d9
RB
796 desttype = build_aligned_type (type, dest_align);
797 new_stmt
798 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
799 dest, off0),
800 srcmem);
801 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
802 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
803 if (gimple_vdef (new_stmt)
804 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
805 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
806 if (!lhs)
807 {
f6b4dc28 808 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
809 return true;
810 }
811 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
812 goto done;
813 }
814 }
815 }
816 }
817
818 if (endp == 3)
819 {
820 /* Both DEST and SRC must be pointer types.
821 ??? This is what old code did. Is the testing for pointer types
822 really mandatory?
823
824 If either SRC is readonly or length is 1, we can use memcpy. */
825 if (!dest_align || !src_align)
826 return false;
827 if (readonly_data_expr (src)
828 || (tree_fits_uhwi_p (len)
829 && (MIN (src_align, dest_align) / BITS_PER_UNIT
830 >= tree_to_uhwi (len))))
831 {
832 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
833 if (!fn)
834 return false;
835 gimple_call_set_fndecl (stmt, fn);
836 gimple_call_set_arg (stmt, 0, dest);
837 gimple_call_set_arg (stmt, 1, src);
838 fold_stmt (gsi);
839 return true;
840 }
841
842 /* If *src and *dest can't overlap, optimize into memcpy as well. */
843 if (TREE_CODE (src) == ADDR_EXPR
844 && TREE_CODE (dest) == ADDR_EXPR)
845 {
846 tree src_base, dest_base, fn;
a90c8804
RS
847 poly_int64 src_offset = 0, dest_offset = 0;
848 poly_uint64 maxsize;
fef5a0d9
RB
849
850 srcvar = TREE_OPERAND (src, 0);
4fda19ef
JJ
851 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
852 if (src_base == NULL)
853 src_base = srcvar;
fef5a0d9 854 destvar = TREE_OPERAND (dest, 0);
4fda19ef
JJ
855 dest_base = get_addr_base_and_unit_offset (destvar,
856 &dest_offset);
857 if (dest_base == NULL)
858 dest_base = destvar;
a90c8804 859 if (!poly_int_tree_p (len, &maxsize))
fef5a0d9 860 maxsize = -1;
fef5a0d9
RB
861 if (SSA_VAR_P (src_base)
862 && SSA_VAR_P (dest_base))
863 {
864 if (operand_equal_p (src_base, dest_base, 0)
a90c8804
RS
865 && ranges_maybe_overlap_p (src_offset, maxsize,
866 dest_offset, maxsize))
fef5a0d9
RB
867 return false;
868 }
869 else if (TREE_CODE (src_base) == MEM_REF
870 && TREE_CODE (dest_base) == MEM_REF)
871 {
872 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
873 TREE_OPERAND (dest_base, 0), 0))
874 return false;
a90c8804
RS
875 poly_offset_int full_src_offset
876 = mem_ref_offset (src_base) + src_offset;
877 poly_offset_int full_dest_offset
878 = mem_ref_offset (dest_base) + dest_offset;
879 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
880 full_dest_offset, maxsize))
fef5a0d9
RB
881 return false;
882 }
883 else
884 return false;
885
886 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
887 if (!fn)
888 return false;
889 gimple_call_set_fndecl (stmt, fn);
890 gimple_call_set_arg (stmt, 0, dest);
891 gimple_call_set_arg (stmt, 1, src);
892 fold_stmt (gsi);
893 return true;
894 }
895
896 /* If the destination and source do not alias optimize into
897 memcpy as well. */
898 if ((is_gimple_min_invariant (dest)
899 || TREE_CODE (dest) == SSA_NAME)
900 && (is_gimple_min_invariant (src)
901 || TREE_CODE (src) == SSA_NAME))
902 {
903 ao_ref destr, srcr;
904 ao_ref_init_from_ptr_and_size (&destr, dest, len);
905 ao_ref_init_from_ptr_and_size (&srcr, src, len);
906 if (!refs_may_alias_p_1 (&destr, &srcr, false))
907 {
908 tree fn;
909 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
910 if (!fn)
911 return false;
912 gimple_call_set_fndecl (stmt, fn);
913 gimple_call_set_arg (stmt, 0, dest);
914 gimple_call_set_arg (stmt, 1, src);
915 fold_stmt (gsi);
916 return true;
917 }
918 }
919
920 return false;
921 }
922
923 if (!tree_fits_shwi_p (len))
924 return false;
fef5a0d9
RB
925 if (!POINTER_TYPE_P (TREE_TYPE (src))
926 || !POINTER_TYPE_P (TREE_TYPE (dest)))
927 return false;
928 /* In the following try to find a type that is most natural to be
929 used for the memcpy source and destination and that allows
930 the most optimization when memcpy is turned into a plain assignment
931 using that type. In theory we could always use a char[len] type
932 but that only gains us that the destination and source possibly
933 no longer will have their address taken. */
fef5a0d9
RB
934 srctype = TREE_TYPE (TREE_TYPE (src));
935 if (TREE_CODE (srctype) == ARRAY_TYPE
936 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
42f74245 937 srctype = TREE_TYPE (srctype);
fef5a0d9
RB
938 desttype = TREE_TYPE (TREE_TYPE (dest));
939 if (TREE_CODE (desttype) == ARRAY_TYPE
940 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
42f74245 941 desttype = TREE_TYPE (desttype);
fef5a0d9
RB
942 if (TREE_ADDRESSABLE (srctype)
943 || TREE_ADDRESSABLE (desttype))
944 return false;
945
946 /* Make sure we are not copying using a floating-point mode or
947 a type whose size possibly does not match its precision. */
948 if (FLOAT_MODE_P (TYPE_MODE (desttype))
949 || TREE_CODE (desttype) == BOOLEAN_TYPE
950 || TREE_CODE (desttype) == ENUMERAL_TYPE)
951 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
952 if (FLOAT_MODE_P (TYPE_MODE (srctype))
953 || TREE_CODE (srctype) == BOOLEAN_TYPE
954 || TREE_CODE (srctype) == ENUMERAL_TYPE)
955 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
956 if (!srctype)
957 srctype = desttype;
958 if (!desttype)
959 desttype = srctype;
960 if (!srctype)
961 return false;
962
963 src_align = get_pointer_alignment (src);
964 dest_align = get_pointer_alignment (dest);
965 if (dest_align < TYPE_ALIGN (desttype)
966 || src_align < TYPE_ALIGN (srctype))
967 return false;
968
42f74245
RB
969 destvar = NULL_TREE;
970 if (TREE_CODE (dest) == ADDR_EXPR
971 && var_decl_component_p (TREE_OPERAND (dest, 0))
fef5a0d9 972 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
42f74245 973 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
fef5a0d9 974
42f74245
RB
975 srcvar = NULL_TREE;
976 if (TREE_CODE (src) == ADDR_EXPR
977 && var_decl_component_p (TREE_OPERAND (src, 0))
fef5a0d9
RB
978 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
979 {
980 if (!destvar
981 || src_align >= TYPE_ALIGN (desttype))
982 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
42f74245 983 src, off0);
fef5a0d9
RB
984 else if (!STRICT_ALIGNMENT)
985 {
986 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
987 src_align);
42f74245 988 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
fef5a0d9 989 }
fef5a0d9 990 }
fef5a0d9
RB
991
992 if (srcvar == NULL_TREE && destvar == NULL_TREE)
993 return false;
994
995 if (srcvar == NULL_TREE)
996 {
fef5a0d9
RB
997 if (src_align >= TYPE_ALIGN (desttype))
998 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
999 else
1000 {
1001 if (STRICT_ALIGNMENT)
1002 return false;
1003 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1004 src_align);
1005 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1006 }
1007 }
1008 else if (destvar == NULL_TREE)
1009 {
fef5a0d9
RB
1010 if (dest_align >= TYPE_ALIGN (srctype))
1011 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1012 else
1013 {
1014 if (STRICT_ALIGNMENT)
1015 return false;
1016 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1017 dest_align);
1018 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1019 }
1020 }
1021
cc8bea0a
MS
1022 /* Detect invalid bounds and overlapping copies and issue either
1023 -Warray-bounds or -Wrestrict. */
1024 if (!nowarn)
1025 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1026
355fe088 1027 gimple *new_stmt;
fef5a0d9
RB
1028 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1029 {
921b13d0
RB
1030 tree tem = fold_const_aggregate_ref (srcvar);
1031 if (tem)
1032 srcvar = tem;
1033 if (! is_gimple_min_invariant (srcvar))
1034 {
1035 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
a15ebbcd
ML
1036 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1037 new_stmt);
921b13d0
RB
1038 gimple_assign_set_lhs (new_stmt, srcvar);
1039 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1040 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1041 }
d7257171
RB
1042 new_stmt = gimple_build_assign (destvar, srcvar);
1043 goto set_vop_and_replace;
fef5a0d9 1044 }
d7257171
RB
1045
1046 /* We get an aggregate copy. Use an unsigned char[] type to
1047 perform the copying to preserve padding and to avoid any issues
1048 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1049 desttype = build_array_type_nelts (unsigned_char_type_node,
1050 tree_to_uhwi (len));
1051 srctype = desttype;
1052 if (src_align > TYPE_ALIGN (srctype))
1053 srctype = build_aligned_type (srctype, src_align);
1054 if (dest_align > TYPE_ALIGN (desttype))
1055 desttype = build_aligned_type (desttype, dest_align);
1056 new_stmt
1057 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1058 fold_build2 (MEM_REF, srctype, src, off0));
1059set_vop_and_replace:
fef5a0d9
RB
1060 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1061 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1062 if (gimple_vdef (new_stmt)
1063 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1064 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1065 if (!lhs)
1066 {
f6b4dc28 1067 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
1068 return true;
1069 }
1070 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1071 }
1072
1073done:
74e3c262 1074 gimple_seq stmts = NULL;
fef5a0d9
RB
1075 if (endp == 0 || endp == 3)
1076 len = NULL_TREE;
1077 else if (endp == 2)
74e3c262
RB
1078 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1079 ssize_int (1));
fef5a0d9 1080 if (endp == 2 || endp == 1)
74e3c262
RB
1081 {
1082 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1083 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1084 TREE_TYPE (dest), dest, len);
1085 }
fef5a0d9 1086
74e3c262 1087 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 1088 gimple *repl = gimple_build_assign (lhs, dest);
f6b4dc28 1089 gsi_replace (gsi, repl, false);
fef5a0d9
RB
1090 return true;
1091}
1092
b3d8d88e
MS
1093/* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1094 to built-in memcmp (a, b, len). */
1095
1096static bool
1097gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1098{
1099 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1100
1101 if (!fn)
1102 return false;
1103
1104 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1105
1106 gimple *stmt = gsi_stmt (*gsi);
1107 tree a = gimple_call_arg (stmt, 0);
1108 tree b = gimple_call_arg (stmt, 1);
1109 tree len = gimple_call_arg (stmt, 2);
1110
1111 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1112 replace_call_with_call_and_fold (gsi, repl);
1113
1114 return true;
1115}
1116
1117/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1118 to built-in memmove (dest, src, len). */
1119
1120static bool
1121gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1122{
1123 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1124
1125 if (!fn)
1126 return false;
1127
1128 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1129 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1130 len) into memmove (dest, src, len). */
1131
1132 gimple *stmt = gsi_stmt (*gsi);
1133 tree src = gimple_call_arg (stmt, 0);
1134 tree dest = gimple_call_arg (stmt, 1);
1135 tree len = gimple_call_arg (stmt, 2);
1136
1137 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1138 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1139 replace_call_with_call_and_fold (gsi, repl);
1140
1141 return true;
1142}
1143
1144/* Transform a call to built-in bzero (dest, len) at *GSI into one
1145 to built-in memset (dest, 0, len). */
1146
1147static bool
1148gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1149{
1150 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1151
1152 if (!fn)
1153 return false;
1154
1155 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1156
1157 gimple *stmt = gsi_stmt (*gsi);
1158 tree dest = gimple_call_arg (stmt, 0);
1159 tree len = gimple_call_arg (stmt, 1);
1160
1161 gimple_seq seq = NULL;
1162 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1163 gimple_seq_add_stmt_without_update (&seq, repl);
1164 gsi_replace_with_seq_vops (gsi, seq);
1165 fold_stmt (gsi);
1166
1167 return true;
1168}
1169
fef5a0d9
RB
1170/* Fold function call to builtin memset or bzero at *GSI setting the
1171 memory of size LEN to VAL. Return whether a simplification was made. */
1172
1173static bool
1174gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1175{
355fe088 1176 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1177 tree etype;
1178 unsigned HOST_WIDE_INT length, cval;
1179
1180 /* If the LEN parameter is zero, return DEST. */
1181 if (integer_zerop (len))
1182 {
1183 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1184 return true;
1185 }
1186
1187 if (! tree_fits_uhwi_p (len))
1188 return false;
1189
1190 if (TREE_CODE (c) != INTEGER_CST)
1191 return false;
1192
1193 tree dest = gimple_call_arg (stmt, 0);
1194 tree var = dest;
1195 if (TREE_CODE (var) != ADDR_EXPR)
1196 return false;
1197
1198 var = TREE_OPERAND (var, 0);
1199 if (TREE_THIS_VOLATILE (var))
1200 return false;
1201
1202 etype = TREE_TYPE (var);
1203 if (TREE_CODE (etype) == ARRAY_TYPE)
1204 etype = TREE_TYPE (etype);
1205
1206 if (!INTEGRAL_TYPE_P (etype)
1207 && !POINTER_TYPE_P (etype))
1208 return NULL_TREE;
1209
1210 if (! var_decl_component_p (var))
1211 return NULL_TREE;
1212
1213 length = tree_to_uhwi (len);
7a504f33 1214 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
fef5a0d9
RB
1215 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1216 return NULL_TREE;
1217
1218 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1219 return NULL_TREE;
1220
1221 if (integer_zerop (c))
1222 cval = 0;
1223 else
1224 {
1225 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1226 return NULL_TREE;
1227
1228 cval = TREE_INT_CST_LOW (c);
1229 cval &= 0xff;
1230 cval |= cval << 8;
1231 cval |= cval << 16;
1232 cval |= (cval << 31) << 1;
1233 }
1234
1235 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1236 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
fef5a0d9
RB
1237 gimple_set_vuse (store, gimple_vuse (stmt));
1238 tree vdef = gimple_vdef (stmt);
1239 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1240 {
1241 gimple_set_vdef (store, gimple_vdef (stmt));
1242 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1243 }
1244 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1245 if (gimple_call_lhs (stmt))
1246 {
355fe088 1247 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1248 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1249 }
1250 else
1251 {
1252 gimple_stmt_iterator gsi2 = *gsi;
1253 gsi_prev (gsi);
1254 gsi_remove (&gsi2, true);
1255 }
1256
1257 return true;
1258}
1259
1260
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables (allocated lazily on first
   SSA name seen; the caller frees it).
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is non-zero and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.  If FUZZY is 2, it will handle
   PHIs and COND_EXPRs optimistically, if we can determine string length
   minimum and maximum, it will use the minimum from the ones where it
   can be determined.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  int fuzzy, bool *flexp)
{
  tree var, val = NULL_TREE;
  gimple *def_stmt;

  /* The minimum and maximum length.  */
  tree *const minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  tree op = TREE_OPERAND (arg, 0);
	  if (integer_zerop (TREE_OPERAND (op, 1)))
	    {
	      /* &a[0]: strip the zero index and recurse on *a when the
		 base is a dereferenced SSA pointer.  */
	      tree aop0 = TREE_OPERAND (op, 0);
	      if (TREE_CODE (aop0) == INDIRECT_REF
		  && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
		return get_range_strlen (TREE_OPERAND (aop0, 0),
					 length, visited, type, fuzzy, flexp);
	    }
	  else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
	    {
	      /* Fail if an array is the last member of a struct object
		 since it could be treated as a (fake) flexible array
		 member.  */
	      tree idx = TREE_OPERAND (op, 1);

	      arg = TREE_OPERAND (op, 0);
	      tree optype = TREE_TYPE (arg);
	      /* Fail when the constant index is past the array's
		 declared upper bound.  */
	      if (tree dom = TYPE_DOMAIN (optype))
		if (tree bound = TYPE_MAX_VALUE (dom))
		  if (TREE_CODE (bound) == INTEGER_CST
		      && TREE_CODE (idx) == INTEGER_CST
		      && tree_int_cst_lt (bound, idx))
		    return false;
	    }
	}

      if (type == 2)
	{
	  /* TYPE == 2: ARG itself is the value; require a non-negative
	     integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);

      /* No constant length: in fuzzy mode fall back to bounds derived
	 from array types instead of giving up immediately.  */
      if (!val && fuzzy)
	{
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy, flexp);

	  if (TREE_CODE (arg) == ARRAY_REF)
	    {
	      tree type = TREE_TYPE (TREE_OPERAND (arg, 0));

	      /* Determine the "innermost" array type.  */
	      while (TREE_CODE (type) == ARRAY_TYPE
		     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
		type = TREE_TYPE (type);

	      /* Avoid arrays of pointers.  */
	      tree eltype = TREE_TYPE (type);
	      if (TREE_CODE (type) != ARRAY_TYPE
		  || !INTEGRAL_TYPE_P (eltype))
		return false;

	      val = TYPE_SIZE_UNIT (type);
	      if (!val || integer_zerop (val))
		return false;

	      /* Maximum length is array size minus one for the NUL.  */
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);

	      if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
		  && type == TREE_TYPE (TREE_OPERAND (arg, 0))
		  && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
		*flexp = true;
	    }
	  else if (TREE_CODE (arg) == COMPONENT_REF
		   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		       == ARRAY_TYPE))
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL but that would only be considered valid if
		 the array were the last member of a struct.
		 Set *FLEXP to true if the array whose bound is being
		 used is at the end of a struct.  */
	      if (array_at_struct_end_p (arg))
		*flexp = true;

	      arg = TREE_OPERAND (arg, 1);

	      tree type = TREE_TYPE (arg);

	      while (TREE_CODE (type) == ARRAY_TYPE
		     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
		type = TREE_TYPE (type);

	      /* Fail when the array bound is unknown or zero.  */
	      val = TYPE_SIZE_UNIT (type);
	      if (!val || integer_zerop (val))
		return false;
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }

	  if (VAR_P (arg))
	    {
	      /* A plain variable of (pointer-to-)array type: bound the
		 length by the array size.  */
	      tree type = TREE_TYPE (arg);
	      if (POINTER_TYPE_P (type))
		type = TREE_TYPE (type);

	      if (TREE_CODE (type) == ARRAY_TYPE)
		{
		  val = TYPE_SIZE_UNIT (type);
		  if (!val
		      || TREE_CODE (val) != INTEGER_CST
		      || integer_zerop (val))
		    return false;
		  val = wide_int_to_tree (TREE_TYPE (val),
					  wi::sub (wi::to_wide (val), 1));
		  /* Set the minimum size to zero since the string in
		     the array could have zero length.  */
		  *minlen = ssize_int (0);
		}
	    }
	}

      if (!val)
	return false;

      /* Keep the smaller of VAL and the minimum seen so far (or adopt
	 VAL when no minimum has been recorded yet).  */
      if (!*minlen
	  || (type > 0
	      && TREE_CODE (*minlen) == INTEGER_CST
	      && TREE_CODE (val) == INTEGER_CST
	      && tree_int_cst_lt (val, *minlen)))
	*minlen = val;

      if (*maxlen)
	{
	  if (type > 0)
	    {
	      /* TYPE > 0: merge by taking the larger maximum.  */
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  /* TYPE == 0: all lengths must agree exactly.  */
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Merge the ranges of both arms of the conditional.  */
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], length, visited, type, fuzzy,
				   flexp))
	      {
		/* FUZZY == 2: be optimistic and record an unbounded
		   maximum instead of failing outright.  */
		if (fuzzy == 2)
		  *maxlen = build_all_ones_cst (size_type_node);
		else
		  return false;
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* All the arguments of the PHI node must have the same constant
	 length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
	    {
	      if (fuzzy == 2)
		*maxlen = build_all_ones_cst (size_type_node);
	      else
		return false;
	    }
	}
      return true;

    default:
      return false;
    }
}
1530
88d0c3f0
MS
1531/* Determine the minimum and maximum value or string length that ARG
1532 refers to and store each in the first two elements of MINMAXLEN.
1533 For expressions that point to strings of unknown lengths that are
1534 character arrays, use the upper bound of the array as the maximum
1535 length. For example, given an expression like 'x ? array : "xyz"'
1536 and array declared as 'char array[8]', MINMAXLEN[0] will be set
c8602fe6 1537 to 0 and MINMAXLEN[1] to 7, the longest string that could be
88d0c3f0 1538 stored in array.
3f343040
MS
1539 Return true if the range of the string lengths has been obtained
1540 from the upper bound of an array at the end of a struct. Such
1541 an array may hold a string that's longer than its upper bound
c8602fe6
JJ
1542 due to it being used as a poor-man's flexible array member.
1543
1544 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1545 and false if PHIs and COND_EXPRs are to be handled optimistically,
1546 if we can determine string length minimum and maximum; it will use
1547 the minimum from the ones where it can be determined.
1548 STRICT false should be only used for warning code. */
88d0c3f0 1549
3f343040 1550bool
c8602fe6 1551get_range_strlen (tree arg, tree minmaxlen[2], bool strict)
88d0c3f0
MS
1552{
1553 bitmap visited = NULL;
1554
1555 minmaxlen[0] = NULL_TREE;
1556 minmaxlen[1] = NULL_TREE;
1557
3f343040 1558 bool flexarray = false;
c8602fe6
JJ
1559 if (!get_range_strlen (arg, minmaxlen, &visited, 1, strict ? 1 : 2,
1560 &flexarray))
1561 {
1562 minmaxlen[0] = NULL_TREE;
1563 minmaxlen[1] = NULL_TREE;
1564 }
88d0c3f0
MS
1565
1566 if (visited)
1567 BITMAP_FREE (visited);
3f343040
MS
1568
1569 return flexarray;
88d0c3f0
MS
1570}
1571
dcb7fae2
RB
1572tree
1573get_maxval_strlen (tree arg, int type)
1574{
1575 bitmap visited = NULL;
88d0c3f0 1576 tree len[2] = { NULL_TREE, NULL_TREE };
3f343040
MS
1577
1578 bool dummy;
c8602fe6 1579 if (!get_range_strlen (arg, len, &visited, type, 0, &dummy))
88d0c3f0 1580 len[1] = NULL_TREE;
dcb7fae2
RB
1581 if (visited)
1582 BITMAP_FREE (visited);
1583
88d0c3f0 1584 return len[1];
dcb7fae2
RB
1585}
1586
fef5a0d9
RB
1587
1588/* Fold function call to builtin strcpy with arguments DEST and SRC.
1589 If LEN is not NULL, it represents the length of the string to be
1590 copied. Return NULL_TREE if no simplification can be made. */
1591
1592static bool
1593gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 1594 tree dest, tree src)
fef5a0d9 1595{
cc8bea0a
MS
1596 gimple *stmt = gsi_stmt (*gsi);
1597 location_t loc = gimple_location (stmt);
fef5a0d9
RB
1598 tree fn;
1599
1600 /* If SRC and DEST are the same (and not volatile), return DEST. */
1601 if (operand_equal_p (src, dest, 0))
1602 {
8cd95cec
MS
1603 /* Issue -Wrestrict unless the pointers are null (those do
1604 not point to objects and so do not indicate an overlap;
1605 such calls could be the result of sanitization and jump
1606 threading). */
1607 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
e9b9fa4c
MS
1608 {
1609 tree func = gimple_call_fndecl (stmt);
cc8bea0a 1610
e9b9fa4c
MS
1611 warning_at (loc, OPT_Wrestrict,
1612 "%qD source argument is the same as destination",
1613 func);
1614 }
cc8bea0a 1615
fef5a0d9
RB
1616 replace_call_with_value (gsi, dest);
1617 return true;
1618 }
1619
1620 if (optimize_function_for_size_p (cfun))
1621 return false;
1622
1623 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1624 if (!fn)
1625 return false;
1626
1579e1f8 1627 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1628 if (!len)
dcb7fae2 1629 return false;
fef5a0d9
RB
1630
1631 len = fold_convert_loc (loc, size_type_node, len);
1632 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1633 len = force_gimple_operand_gsi (gsi, len, true,
1634 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1635 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1636 replace_call_with_call_and_fold (gsi, repl);
1637 return true;
1638}
1639
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN
   at *GSI.  When LEN is zero the call reduces to DEST (with a
   -Wstringop-truncation warning); when both LEN and the length of SRC
   are constants and the whole source including its nul fits in LEN,
   the call is rewritten as memcpy.  Return true if a simplification
   was made, false otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when DEST refers to a decl carrying attribute nonstring.  */
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  gcall *call = as_a <gcall *> (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, 0);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			call, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			call, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
1715
/* Fold function call to builtin strchr or strrchr at *GSI; IS_STRRCHR
   selects which of the two is being folded.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  Return true if a simplification
   was made, false otherwise.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* All the rewrites below produce a value, so they are only useful
     when the call's result is used.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Both arguments constant: evaluate at compile time.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Found at offset p1 - p: result is STR + that offset.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The remaining transforms only apply when searching for the
     terminating nul.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ??? We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1799
c8952930
JJ
1800/* Fold function call to builtin strstr.
1801 If both arguments are constant, evaluate and fold the result,
1802 additionally fold strstr (x, "") into x and strstr (x, "c")
1803 into strchr (x, 'c'). */
1804static bool
1805gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1806{
1807 gimple *stmt = gsi_stmt (*gsi);
1808 tree haystack = gimple_call_arg (stmt, 0);
1809 tree needle = gimple_call_arg (stmt, 1);
1810 const char *p, *q;
1811
1812 if (!gimple_call_lhs (stmt))
1813 return false;
1814
1815 q = c_getstr (needle);
1816 if (q == NULL)
1817 return false;
1818
1819 if ((p = c_getstr (haystack)))
1820 {
1821 const char *r = strstr (p, q);
1822
1823 if (r == NULL)
1824 {
1825 replace_call_with_value (gsi, integer_zero_node);
1826 return true;
1827 }
1828
1829 tree len = build_int_cst (size_type_node, r - p);
1830 gimple_seq stmts = NULL;
1831 gimple *new_stmt
1832 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1833 haystack, len);
1834 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1835 gsi_replace_with_seq_vops (gsi, stmts);
1836 return true;
1837 }
1838
1839 /* For strstr (x, "") return x. */
1840 if (q[0] == '\0')
1841 {
1842 replace_call_with_value (gsi, haystack);
1843 return true;
1844 }
1845
1846 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1847 if (q[1] == '\0')
1848 {
1849 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1850 if (strchr_fn)
1851 {
1852 tree c = build_int_cst (integer_type_node, q[0]);
1853 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1854 replace_call_with_call_and_fold (gsi, repl);
1855 return true;
1856 }
1857 }
1858
1859 return false;
1860}
1861
fef5a0d9
RB
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call; GSI points at the call statement.

   Returns true if the call was simplified (the statement at GSI is
   replaced in place), false if no simplification was possible.

   Two simplifications are attempted:
     - strcat (dst, "") is replaced by DST itself;
     - when optimizing for speed and the source length is known,
       strcat (dst, src) is split into
	 tmp = strlen (dst); memcpy (dst + tmp, src, len (src) + 1);
       so the copy can be expanded by pieces later.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen+memcpy split trades size for speed; skip it when the
     block is cold or we optimize for size.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy LEN + 1 bytes so the terminating NUL is included.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns its first argument; materialize that for
	 users of the call's result.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1953
07f1cf56
RB
1954/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1955 are the arguments to the call. */
1956
1957static bool
1958gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1959{
355fe088 1960 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
1961 tree dest = gimple_call_arg (stmt, 0);
1962 tree src = gimple_call_arg (stmt, 1);
1963 tree size = gimple_call_arg (stmt, 2);
1964 tree fn;
1965 const char *p;
1966
1967
1968 p = c_getstr (src);
1969 /* If the SRC parameter is "", return DEST. */
1970 if (p && *p == '\0')
1971 {
1972 replace_call_with_value (gsi, dest);
1973 return true;
1974 }
1975
1976 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1977 return false;
1978
1979 /* If __builtin_strcat_chk is used, assume strcat is available. */
1980 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1981 if (!fn)
1982 return false;
1983
355fe088 1984 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
1985 replace_call_with_call_and_fold (gsi, repl);
1986 return true;
1987}
1988
ad03a744
RB
/* Simplify a call to the strncat builtin.  Returns true if the call
   was replaced: by DST when nothing would be appended, or by a plain
   strcat call when the bound is known to be large enough that strncat
   behaves exactly like strcat.  Also diagnoses suspicious constant
   bounds (-Wstringop-overflow) while it has the information at hand.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The remaining transformations need both a constant bound and a
     constant source string.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);
      location_t loc = gimple_location (stmt);

      /* To avoid possible overflow the specified bound should also
	 not be equal to the length of the source, even when the size
	 of the destination is unknown (it's not an uncommon mistake
	 to specify as the bound to strncpy the length of the source).  */
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  /* Here LEN >= strlen (SRC), so strncat copies exactly the whole
     source plus the NUL — identical to strcat.  */
  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2077
745583f9
RB
2078/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2079 LEN, and SIZE. */
2080
2081static bool
2082gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2083{
355fe088 2084 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
2085 tree dest = gimple_call_arg (stmt, 0);
2086 tree src = gimple_call_arg (stmt, 1);
2087 tree len = gimple_call_arg (stmt, 2);
2088 tree size = gimple_call_arg (stmt, 3);
2089 tree fn;
2090 const char *p;
2091
2092 p = c_getstr (src);
2093 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2094 if ((p && *p == '\0')
2095 || integer_zerop (len))
2096 {
2097 replace_call_with_value (gsi, dest);
2098 return true;
2099 }
2100
2101 if (! tree_fits_uhwi_p (size))
2102 return false;
2103
2104 if (! integer_all_onesp (size))
2105 {
2106 tree src_len = c_strlen (src, 1);
2107 if (src_len
2108 && tree_fits_uhwi_p (src_len)
2109 && tree_fits_uhwi_p (len)
2110 && ! tree_int_cst_lt (len, src_len))
2111 {
2112 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2113 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2114 if (!fn)
2115 return false;
2116
355fe088 2117 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
2118 replace_call_with_call_and_fold (gsi, repl);
2119 return true;
2120 }
2121 return false;
2122 }
2123
2124 /* If __builtin_strncat_chk is used, assume strncat is available. */
2125 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2126 if (!fn)
2127 return false;
2128
355fe088 2129 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
2130 replace_call_with_call_and_fold (gsi, repl);
2131 return true;
2132}
2133
a918bfbf
ML
2134/* Build and append gimple statements to STMTS that would load a first
2135 character of a memory location identified by STR. LOC is location
2136 of the statement. */
2137
2138static tree
2139gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2140{
2141 tree var;
2142
2143 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2144 tree cst_uchar_ptr_node
2145 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2146 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2147
2148 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2149 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2150 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2151
2152 gimple_assign_set_lhs (stmt, var);
2153 gimple_seq_add_stmt_without_update (stmts, stmt);
2154
2155 return var;
2156}
2157
/* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI
   iterator.  The BUILT_IN_* code is derived from the call's fndecl.
   Returns true and replaces the statement when a simplification is
   found:
     - a bound of zero or identical arguments fold to 0;
     - two constant strings are compared at compile time;
     - an empty string on either side folds to a single-byte load;
     - strncmp with bound 1 folds to a one-character difference;
     - strncmp with a bound past the end of a constant argument
       becomes strcmp.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  /* -1 means no constant bound is known (or the builtin has none).  */
  HOST_WIDE_INT length = -1;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      tree len = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (len))
	length = tree_to_uhwi (len);
    }

  /* If the LEN parameter is zero, return zero.  */
  if (length == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  const char *p1 = c_getstr (str1);
  const char *p2 = c_getstr (str2);

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  {
	    r = strcmp (p1, p2);
	    known_result = true;
	    break;
	  }
	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (length == -1)
	      break;
	    r = strncmp (p1, p2, length);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the string are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (length == -1)
	      break;
	    /* Bytewise equality implies case-insensitive equality, so
	       only a strncmp result of 0 can be decided here; any
	       other result depends on the locale's case mapping.  */
	    r = strncmp (p1, p2, length);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  /* True when at least one character is certainly compared.  */
  bool nonzero_length = length >= 1
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCMP_EQ
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg2 - *(const unsigned char*)arg1).  */
  if (fcode == BUILT_IN_STRNCMP && length == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If length is larger than the length of one constant string,
     replace strncmp with corresponding strcmp */
  if (fcode == BUILT_IN_STRNCMP
      && length > 0
      && ((p2 && (size_t) length > strlen (p2))
	  || (p1 && (size_t) length > strlen (p1))))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
2332
488c6247
ML
/* Fold a call to the memchr builtin pointed to by the GSI iterator.
   Returns true if the call could be evaluated at compile time: a zero
   LEN folds to a null pointer, and a constant haystack with constant
   LEN and character folds to either a null pointer or ARG1 + offset.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = c_getstr (arg1, &string_length);

  if (p1)
    {
      /* Only the first STRING_LENGTH bytes of P1 are known, so never
	 search past them.  */
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  /* When LENGTH exceeds the known bytes the search would
	     continue into unknown memory, so only a search confined
	     to the known bytes can be folded to "not found".  */
	  if (length <= string_length)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    /* No result used; drop the call but keep the vops chain
	       consistent via a nop.  */
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
a918bfbf 2394
fef5a0d9
RB
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the string and
   stream arguments to the call.  UNLOCKED is true if this is actually
   a call to fputs_unlocked.  Returns true if the call was simplified
   (deleted for "", turned into fputc for a one-character string, or
   into fwrite for a longer known-length string), false otherwise.
   The transformation is only done when the call's return value is
   unused, since fputc/fwrite return different values than fputs.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
2471
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call; FCODE is
   the BUILT_IN_* code of the builtin.  Returns true if the statement
   at GSI was replaced: by DEST (or DEST + LEN for __mempcpy_chk) when
   source and destination are the same, or by the unchecked
   mem{cpy,pcpy,move,set} when the check can be proven to succeed
   (SIZE is unknown, i.e. all-ones, or SIZE >= the maximum LEN).  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* The check might still fail at run time; keep the _chk call.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2572
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call; FCODE is the
   BUILT_IN_* code of the builtin.  Returns true if the statement at
   GSI was replaced: by DEST when source and destination are the same
   (diagnosing -Wrestrict), by an unchecked st[rp]cpy when the check
   can be proven to succeed, or by a __strcpy_chk / __memcpy_chk call
   in the cases noted inline below.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy LEN + 1 bytes to include the terminating NUL.  */
	      gimple_seq stmts = NULL;
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* strcpy writes MAXLEN + 1 bytes (including the NUL), so the
	 check is only known to succeed when MAXLEN < SIZE.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2676
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and
   SIZE are the arguments to the call; FCODE is the BUILT_IN_* code of
   the builtin.  Returns true if the statement at GSI was replaced: an
   ignored __stpncpy_chk becomes __strncpy_chk, and the check is
   dropped entirely (calling st{r,p}ncpy) when SIZE is unknown
   (all-ones) or provably >= the maximum LEN.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* st{r,p}ncpy writes at most MAXLEN bytes, so SIZE >= MAXLEN
	 proves the check succeeds.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2736
2625bb5d
RB
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Returns true if the statement at GSI was replaced: when the result
   is unused, stpcpy becomes strcpy; when the source length is a known
   constant, the call becomes memcpy (dest, src, len + 1) with the
   result rewritten as dest + len.  Returns false otherwise.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Build len + 1 (to copy the terminating NUL as well).  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* The memcpy call takes over the original call's virtual operands;
     wire them up by hand since the statement is built detached.  */
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2800
fef5a0d9
RB
/* Fold a call to __{,v}snprintf_chk pointed to by GSI.  FCODE is
   either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  Returns
   true if the call was rewritten in place into plain {,v}snprintf
   (dropping the FLAG and SIZE arguments); this is done when the check
   can be proven to succeed (SIZE unknown or >= the maximum LEN) and
   the format is safe per the FLAG semantics.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
cbdd87d4 2881
fef5a0d9
RB
/* Fold the __{,v}sprintf_chk call at *GSI into a plain {,v}sprintf
   call when the output length can be proven to fit the destination
   object size.  FCODE is either BUILT_IN_SPRINTF_CHK or
   BUILT_IN_VSPRINTF_CHK.

   Return true if the statement was simplified in place, false if a
   normal (checked) call should be kept.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call:
     __sprintf_chk (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  /* LEN is the provable output length (excluding the NUL), or
     NULL_TREE when it cannot be determined.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* An all-ones SIZE means the object size is unknown, so the check is
     vacuous; otherwise require a known LEN strictly below SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2,
     retaining trailing varargs: drop FLAG and SIZE and shrink the
     statement's operand count in place.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2977
35770bb2
RB
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* sprintf's return value is the number of characters written,
	     which for a %-free format is strlen (fmt_str).  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      /* Note: fmt_str is already known non-NULL here; the extra test is
	 redundant but harmless.  */
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* If the result is used we additionally need a constant length
	 for ORIG so the lhs can be set to it.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3100
d7e78447
RB
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a compile-time constant so we can
     prove no truncation occurs.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* snprintf returns the would-be output length, here the
	     constant strlen of the format.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      /* The length of ORIG must be a compile-time constant.  */
      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
35770bb2 3237
edd7ae68
RB
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if the call was simplified (into fputs/fputc or removed
   entirely), otherwise false.  FCODE is the BUILT_IN_* code of the
   function being folded.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, fold to fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3338
ad03a744
RB
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if the call was simplified (into puts/putchar or removed
   entirely), otherwise false.  FCODE is the BUILT_IN_* code of the
   function being folded.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle both a %-free format and the "%s" format with a constant
     string argument; in both cases the output text STR is known.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3500
edd7ae68 3501
fef5a0d9
RB
3502
/* Fold the __builtin_strlen call at *GSI.  If the argument's length
   can be computed exactly, replace the call with that constant and
   return true.  Otherwise record the derived [min, max] length range
   on the call's SSA lhs (when there is one) and return false.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);

  wide_int minlen;
  wide_int maxlen;

  tree lenrange[2];
  if (!get_range_strlen (gimple_call_arg (stmt, 0), lenrange, true)
      && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
      && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lenrange[0]);
      maxlen = wi::to_wide (lenrange[1]);
    }
  else
    {
      /* No usable range: fall back to [0, max_object_size - 2], the
	 widest range a strlen result can have.  */
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  if (minlen == maxlen)
    {
      /* Exact length known: substitute the constant for the call.  */
      lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
					      true, GSI_SAME_STMT);
      replace_call_with_value (gsi, lenrange[0]);
      return true;
    }

  /* Otherwise attach the range to the lhs for later passes (e.g. VRP).  */
  if (tree lhs = gimple_call_lhs (stmt))
    if (TREE_CODE (lhs) == SSA_NAME
	&& INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
      set_range_info (lhs, VR_RANGE, minlen, maxlen);

  return false;
}
3549
48126138
NS
3550/* Fold a call to __builtin_acc_on_device. */
3551
3552static bool
3553gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3554{
3555 /* Defer folding until we know which compiler we're in. */
3556 if (symtab->state != EXPANSION)
3557 return false;
3558
3559 unsigned val_host = GOMP_DEVICE_HOST;
3560 unsigned val_dev = GOMP_DEVICE_NONE;
3561
3562#ifdef ACCEL_COMPILER
3563 val_host = GOMP_DEVICE_NOT_HOST;
3564 val_dev = ACCEL_COMPILER_acc_device;
3565#endif
3566
3567 location_t loc = gimple_location (gsi_stmt (*gsi));
3568
3569 tree host_eq = make_ssa_name (boolean_type_node);
3570 gimple *host_ass = gimple_build_assign
3571 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3572 gimple_set_location (host_ass, loc);
3573 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3574
3575 tree dev_eq = make_ssa_name (boolean_type_node);
3576 gimple *dev_ass = gimple_build_assign
3577 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3578 gimple_set_location (dev_ass, loc);
3579 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3580
3581 tree result = make_ssa_name (boolean_type_node);
3582 gimple *result_ass = gimple_build_assign
3583 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3584 gimple_set_location (result_ass, loc);
3585 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3586
3587 replace_call_with_value (gsi, result);
3588
3589 return true;
3590}
cbdd87d4 3591
fe75f732
PK
3592/* Fold realloc (0, n) -> malloc (n). */
3593
3594static bool
3595gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3596{
3597 gimple *stmt = gsi_stmt (*gsi);
3598 tree arg = gimple_call_arg (stmt, 0);
3599 tree size = gimple_call_arg (stmt, 1);
3600
3601 if (operand_equal_p (arg, null_pointer_node, 0))
3602 {
3603 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3604 if (fn_malloc)
3605 {
3606 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3607 replace_call_with_call_and_fold (gsi, repl);
3608 return true;
3609 }
3610 }
3611 return false;
3612}
3613
dcb7fae2
RB
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  Dispatches to the per-builtin folders above; anything not
   handled falls through to the generic tree-level builtin folder.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    /* The trailing constant selects the memory_op flavor:
       0 = memcpy, 1 = mempcpy, 3 = memmove.  */
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    /* index/rindex are the BSD spellings of strchr/strrchr; the bool
       argument selects forward (false) or reverse (true) search.  */
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    /* For the *printf families, only attempt folding for the argument
       counts the folders understand; otherwise fall through to the
       generic folder below.  */
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* The _chk variants carry an extra flag argument at index 1,
	 hence the shifted argument positions.  */
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
3794
451e8dae
NS
3795/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3796 function calls to constants, where possible. */
3797
3798static tree
3799fold_internal_goacc_dim (const gimple *call)
3800{
629b3d75
MJ
3801 int axis = oacc_get_ifn_dim_arg (call);
3802 int size = oacc_get_fn_dim_size (current_function_decl, axis);
451e8dae 3803 tree result = NULL_TREE;
67d2229e 3804 tree type = TREE_TYPE (gimple_call_lhs (call));
451e8dae 3805
67d2229e 3806 switch (gimple_call_internal_fn (call))
451e8dae 3807 {
67d2229e
TV
3808 case IFN_GOACC_DIM_POS:
3809 /* If the size is 1, we know the answer. */
3810 if (size == 1)
3811 result = build_int_cst (type, 0);
3812 break;
3813 case IFN_GOACC_DIM_SIZE:
3814 /* If the size is not dynamic, we know the answer. */
3815 if (size)
3816 result = build_int_cst (type, size);
3817 break;
3818 default:
3819 break;
451e8dae
NS
3820 }
3821
3822 return result;
3823}
3824
849a76a5
JJ
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  /* Only consider real (6-argument) __atomic_compare_exchange_N calls,
     and only when inlining atomics is allowed and we are optimizing.
     The sanitizers want to see the original call, so bail out for them.
     The call must carry virtual operands (it is a memory operation).  */
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  /* Restrict to the sized __atomic_compare_exchange_{1,2,4,8,16}
     builtins; the generic variant is not handled here.  */
  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  /* The "expected" argument must be the address of a local variable
     (or SSA var) so that rewriting it into SSA form is possible.  */
  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  /* The expected variable must be a non-volatile scalar register type
     local to the current function.  Vectors, complex values and
     floating-point types are excluded: VIEW_CONVERT_EXPRs on them
     might not preserve all the bits (see PR71716), and padded types
     (precision != mode bitsize) have the same problem.  */
  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  /* The "weak" argument must be a literal 0 or 1 so the flag can be
     encoded into the internal-fn constant operand.  */
  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  /* The target must provide a compare-and-swap pattern for the
     access mode (third parameter type of the builtin).  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  /* Finally the expected variable must exactly fill the access mode.  */
  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
3890
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;
   i.e. turn the by-address "expected" operand into an SSA value so
   that E need not be addressable.  GSI is left pointing at the first
   statement of the expansion (the load of E).  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  /* ITYPE is the type of the third builtin parameter, i.e. the
     integral type matching the access size.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  /* The internal fn returns a complex value: real part = old memory
     contents, imaginary part = success flag.  */
  tree ctype = build_complex_type (itype);
  /* The variable whose address was passed as the "expected" operand;
     optimize_atomic_compare_exchange_p guaranteed this is an ADDR_EXPR
     of an SSA variable.  */
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the expected value into an SSA name before the call.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  /* Remember the iterator position of that load; *GSI is restored to
     it at the end.  */
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      /* Bit-cast the expected value to the integral access type.  */
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode weak flag and access size into one constant: w * 256 + N.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  /* Transfer the virtual operands from the original call.  */
  gimple_set_vdef (g, gimple_vdef (stmt));
  gimple_set_vuse (g, gimple_vuse (stmt));
  SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (stmt))
    {
      /* If the original call could throw, follow-up statements must go
	 on the fallthru edge rather than after the call.  */
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      /* r = (_Bool) IMAGPART_EXPR <t>;  -- the success flag.  */
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* e = REALPART_EXPR <t>;  -- the value read from memory.  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      /* If there was no lhs, this is the first statement after the
	 (throwing) call, so it must go on the fallthru edge.  */
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      /* Bit-cast the loaded value back to the expected variable's type.  */
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  /* Leave GSI pointing at the start of the expanded sequence.  */
  *gsi = gsiret;
}
3980
1304953e
JJ
3981/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3982 doesn't fit into TYPE. The test for overflow should be regardless of
3983 -fwrapv, and even for unsigned types. */
3984
3985bool
3986arith_overflowed_p (enum tree_code code, const_tree type,
3987 const_tree arg0, const_tree arg1)
3988{
1304953e
JJ
3989 widest2_int warg0 = widest2_int_cst (arg0);
3990 widest2_int warg1 = widest2_int_cst (arg1);
3991 widest2_int wres;
3992 switch (code)
3993 {
3994 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3995 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3996 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3997 default: gcc_unreachable ();
3998 }
3999 signop sign = TYPE_SIGN (type);
4000 if (sign == UNSIGNED && wi::neg_p (wres))
4001 return true;
4002 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4003}
4004
cbdd87d4
RG
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   If INPLACE is true, no new statements may be created and the existing
   statement may not be replaced, only modified in place.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already wraps a known function address;
	     strip the wrapper.  Warn into the dump file if the target
	     is inconsistent with the type-inheritance graph.  */
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Try speculative devirtualization via the type-inheritance
	     analysis.  Only done when not folding in place, since it may
	     insert or replace statements.  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Replace the SSA def with a default definition
			     of a new temporary so uses stay valid.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible targets: the call is unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  /* Keep the virtual operands of the original call.  */
		  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise at least fold *& in the static chain operand.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  /* Everything below may replace the statement, which INPLACE forbids.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      /* Machine-dependent builtins are folded by the target hook.  */
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* Fold internal functions: __builtin_expect, UBSan checks,
	 OpenACC dims and the arithmetic-with-overflow functions.
	 SUBCODE/CPLX_RESULT drive the shared overflow-folding code
	 below; RESULT/OVERFLOW collect the folded value.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    /* Drop the check if the object size is unknown (all-ones)
	       or the constant offset provably fits.  */
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  /* A zero offset can never overflow a pointer.  */
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    /* Drop the bounds check when both index and bound are
	       constants and the index is in range.  */
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      /* For *_OVERFLOW the result type is the element type of
		 the complex lhs; without a lhs there is nothing to do.  */
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      /* Both operands constant: compute the value and check
		 whether the infinite-precision result overflows TYPE.  */
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  /* RESULT has the argument type; check the conversion
		     to TYPE is value-preserving, otherwise punt (or
		     record the overflow for constants).  */
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Package value and overflow flag into the complex
		 result expected from the *_OVERFLOW internal fns.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
4331
e0ee10ed 4332
89a79e96
RB
4333/* Return true whether NAME has a use on STMT. */
4334
4335static bool
355fe088 4336has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
4337{
4338 imm_use_iterator iter;
4339 use_operand_p use_p;
4340 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4341 if (USE_STMT (use_p) == stmt)
4342 return true;
4343 return false;
4344}
4345
e0ee10ed
RB
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  /* A COND_EXPR-like first operand embeds two more operands; apply
     the same abnormal-SSA-name check to those.  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* For GIMPLE_COND the simplification result is folded back into
	 the condition of the existing statement.  */
      gcc_assert (res_op->code.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (res_op->code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
      else if (res_op->code == SSA_NAME)
	/* Simplified to a bare SSA name: compare it against zero.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (res_op->code == INTEGER_CST)
	{
	  /* Simplified to a constant: the branch direction is known.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Materialize the result in SEQ and compare it against zero.  */
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      /* For assignments replace the rhs, provided the new rhs does not
	 need more operand slots than the stmt has when folding in
	 place.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, res_op->code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == res_op->code)
    {
      /* The call itself was kept; only its arguments simplified.  */
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      /* Any other statement kind with a lhs: emit the simplification
	 into SEQ targeting that lhs and replace the whole statement,
	 preserving virtual operands.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
4486
040292e7
RB
/* Canonicalize MEM_REFs invariant address operand after propagation.
   *T is the reference tree to canonicalize (it is modified in place);
   returns true if anything changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the accessed element and
		     verify the access stays within the vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip component references down to the base object.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  /* Fold the component/MEM_REF path into the MEM_REF's constant
	     offset operand so the address operand is a plain &base.  */
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
4607
cbdd87d4
RG
4608/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4609 distinguishes both cases. */
4610
4611static bool
e0ee10ed 4612fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
cbdd87d4
RG
4613{
4614 bool changed = false;
355fe088 4615 gimple *stmt = gsi_stmt (*gsi);
a8b85ce9 4616 bool nowarning = gimple_no_warning_p (stmt);
cbdd87d4 4617 unsigned i;
a8b85ce9 4618 fold_defer_overflow_warnings ();
cbdd87d4 4619
040292e7
RB
4620 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4621 after propagation.
4622 ??? This shouldn't be done in generic folding but in the
4623 propagation helpers which also know whether an address was
89a79e96
RB
4624 propagated.
4625 Also canonicalize operand order. */
040292e7
RB
4626 switch (gimple_code (stmt))
4627 {
4628 case GIMPLE_ASSIGN:
4629 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4630 {
4631 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4632 if ((REFERENCE_CLASS_P (*rhs)
4633 || TREE_CODE (*rhs) == ADDR_EXPR)
4634 && maybe_canonicalize_mem_ref_addr (rhs))
4635 changed = true;
4636 tree *lhs = gimple_assign_lhs_ptr (stmt);
4637 if (REFERENCE_CLASS_P (*lhs)
4638 && maybe_canonicalize_mem_ref_addr (lhs))
4639 changed = true;
4640 }
89a79e96
RB
4641 else
4642 {
4643 /* Canonicalize operand order. */
4644 enum tree_code code = gimple_assign_rhs_code (stmt);
4645 if (TREE_CODE_CLASS (code) == tcc_comparison
4646 || commutative_tree_code (code)
4647 || commutative_ternary_tree_code (code))
4648 {
4649 tree rhs1 = gimple_assign_rhs1 (stmt);
4650 tree rhs2 = gimple_assign_rhs2 (stmt);
14e72812 4651 if (tree_swap_operands_p (rhs1, rhs2))
89a79e96
RB
4652 {
4653 gimple_assign_set_rhs1 (stmt, rhs2);
4654 gimple_assign_set_rhs2 (stmt, rhs1);
4655 if (TREE_CODE_CLASS (code) == tcc_comparison)
4656 gimple_assign_set_rhs_code (stmt,
4657 swap_tree_comparison (code));
4658 changed = true;
4659 }
4660 }
4661 }
040292e7
RB
4662 break;
4663 case GIMPLE_CALL:
4664 {
4665 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4666 {
4667 tree *arg = gimple_call_arg_ptr (stmt, i);
4668 if (REFERENCE_CLASS_P (*arg)
4669 && maybe_canonicalize_mem_ref_addr (arg))
4670 changed = true;
4671 }
4672 tree *lhs = gimple_call_lhs_ptr (stmt);
4673 if (*lhs
4674 && REFERENCE_CLASS_P (*lhs)
4675 && maybe_canonicalize_mem_ref_addr (lhs))
4676 changed = true;
4677 break;
4678 }
4679 case GIMPLE_ASM:
4680 {
538dd0b7
DM
4681 gasm *asm_stmt = as_a <gasm *> (stmt);
4682 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
040292e7 4683 {
538dd0b7 4684 tree link = gimple_asm_output_op (asm_stmt, i);
040292e7
RB
4685 tree op = TREE_VALUE (link);
4686 if (REFERENCE_CLASS_P (op)
4687 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4688 changed = true;
4689 }
538dd0b7 4690 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
040292e7 4691 {
538dd0b7 4692 tree link = gimple_asm_input_op (asm_stmt, i);
040292e7
RB
4693 tree op = TREE_VALUE (link);
4694 if ((REFERENCE_CLASS_P (op)
4695 || TREE_CODE (op) == ADDR_EXPR)
4696 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4697 changed = true;
4698 }
4699 }
4700 break;
4701 case GIMPLE_DEBUG:
4702 if (gimple_debug_bind_p (stmt))
4703 {
4704 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4705 if (*val
4706 && (REFERENCE_CLASS_P (*val)
4707 || TREE_CODE (*val) == ADDR_EXPR)
4708 && maybe_canonicalize_mem_ref_addr (val))
4709 changed = true;
4710 }
4711 break;
89a79e96
RB
4712 case GIMPLE_COND:
4713 {
4714 /* Canonicalize operand order. */
4715 tree lhs = gimple_cond_lhs (stmt);
4716 tree rhs = gimple_cond_rhs (stmt);
14e72812 4717 if (tree_swap_operands_p (lhs, rhs))
89a79e96
RB
4718 {
4719 gcond *gc = as_a <gcond *> (stmt);
4720 gimple_cond_set_lhs (gc, rhs);
4721 gimple_cond_set_rhs (gc, lhs);
4722 gimple_cond_set_code (gc,
4723 swap_tree_comparison (gimple_cond_code (gc)));
4724 changed = true;
4725 }
4726 }
040292e7
RB
4727 default:;
4728 }
4729
e0ee10ed
RB
4730 /* Dispatch to pattern-based folding. */
4731 if (!inplace
4732 || is_gimple_assign (stmt)
4733 || gimple_code (stmt) == GIMPLE_COND)
4734 {
4735 gimple_seq seq = NULL;
5d75ad95
RS
4736 gimple_match_op res_op;
4737 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
0ff093d8 4738 valueize, valueize))
e0ee10ed 4739 {
5d75ad95 4740 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
e0ee10ed
RB
4741 changed = true;
4742 else
4743 gimple_seq_discard (seq);
4744 }
4745 }
4746
4747 stmt = gsi_stmt (*gsi);
4748
cbdd87d4
RG
4749 /* Fold the main computation performed by the statement. */
4750 switch (gimple_code (stmt))
4751 {
4752 case GIMPLE_ASSIGN:
4753 {
819ec64c
RB
4754 /* Try to canonicalize for boolean-typed X the comparisons
4755 X == 0, X == 1, X != 0, and X != 1. */
4756 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4757 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5fbcc0ed 4758 {
819ec64c
RB
4759 tree lhs = gimple_assign_lhs (stmt);
4760 tree op1 = gimple_assign_rhs1 (stmt);
4761 tree op2 = gimple_assign_rhs2 (stmt);
4762 tree type = TREE_TYPE (op1);
4763
4764 /* Check whether the comparison operands are of the same boolean
4765 type as the result type is.
4766 Check that second operand is an integer-constant with value
4767 one or zero. */
4768 if (TREE_CODE (op2) == INTEGER_CST
4769 && (integer_zerop (op2) || integer_onep (op2))
4770 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4771 {
4772 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4773 bool is_logical_not = false;
4774
4775 /* X == 0 and X != 1 is a logical-not.of X
4776 X == 1 and X != 0 is X */
4777 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4778 || (cmp_code == NE_EXPR && integer_onep (op2)))
4779 is_logical_not = true;
4780
4781 if (is_logical_not == false)
4782 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4783 /* Only for one-bit precision typed X the transformation
4784 !X -> ~X is valied. */
4785 else if (TYPE_PRECISION (type) == 1)
4786 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4787 /* Otherwise we use !X -> X ^ 1. */
4788 else
4789 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4790 build_int_cst (type, 1));
4791 changed = true;
4792 break;
4793 }
5fbcc0ed 4794 }
819ec64c
RB
4795
4796 unsigned old_num_ops = gimple_num_ops (stmt);
4797 tree lhs = gimple_assign_lhs (stmt);
4798 tree new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
4799 if (new_rhs
4800 && !useless_type_conversion_p (TREE_TYPE (lhs),
4801 TREE_TYPE (new_rhs)))
4802 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4803 if (new_rhs
4804 && (!inplace
4805 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4806 {
4807 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4808 changed = true;
4809 }
4810 break;
4811 }
4812
cbdd87d4 4813 case GIMPLE_CALL:
ceeffab0 4814 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
4815 break;
4816
4817 case GIMPLE_ASM:
4818 /* Fold *& in asm operands. */
38384150 4819 {
538dd0b7 4820 gasm *asm_stmt = as_a <gasm *> (stmt);
38384150
JJ
4821 size_t noutputs;
4822 const char **oconstraints;
4823 const char *constraint;
4824 bool allows_mem, allows_reg;
4825
538dd0b7 4826 noutputs = gimple_asm_noutputs (asm_stmt);
38384150
JJ
4827 oconstraints = XALLOCAVEC (const char *, noutputs);
4828
538dd0b7 4829 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
38384150 4830 {
538dd0b7 4831 tree link = gimple_asm_output_op (asm_stmt, i);
38384150
JJ
4832 tree op = TREE_VALUE (link);
4833 oconstraints[i]
4834 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4835 if (REFERENCE_CLASS_P (op)
4836 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4837 {
4838 TREE_VALUE (link) = op;
4839 changed = true;
4840 }
4841 }
538dd0b7 4842 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
38384150 4843 {
538dd0b7 4844 tree link = gimple_asm_input_op (asm_stmt, i);
38384150
JJ
4845 tree op = TREE_VALUE (link);
4846 constraint
4847 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4848 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4849 oconstraints, &allows_mem, &allows_reg);
4850 if (REFERENCE_CLASS_P (op)
4851 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4852 != NULL_TREE)
4853 {
4854 TREE_VALUE (link) = op;
4855 changed = true;
4856 }
4857 }
4858 }
cbdd87d4
RG
4859 break;
4860
bd422c4a
RG
4861 case GIMPLE_DEBUG:
4862 if (gimple_debug_bind_p (stmt))
4863 {
4864 tree val = gimple_debug_bind_get_value (stmt);
4865 if (val
4866 && REFERENCE_CLASS_P (val))
4867 {
4868 tree tem = maybe_fold_reference (val, false);
4869 if (tem)
4870 {
4871 gimple_debug_bind_set_value (stmt, tem);
4872 changed = true;
4873 }
4874 }
3e888a5e
RG
4875 else if (val
4876 && TREE_CODE (val) == ADDR_EXPR)
4877 {
4878 tree ref = TREE_OPERAND (val, 0);
4879 tree tem = maybe_fold_reference (ref, false);
4880 if (tem)
4881 {
4882 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4883 gimple_debug_bind_set_value (stmt, tem);
4884 changed = true;
4885 }
4886 }
bd422c4a
RG
4887 }
4888 break;
4889
cfe3d653
PK
4890 case GIMPLE_RETURN:
4891 {
4892 greturn *ret_stmt = as_a<greturn *> (stmt);
4893 tree ret = gimple_return_retval(ret_stmt);
4894
4895 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4896 {
4897 tree val = valueize (ret);
1af928db
RB
4898 if (val && val != ret
4899 && may_propagate_copy (ret, val))
cfe3d653
PK
4900 {
4901 gimple_return_set_retval (ret_stmt, val);
4902 changed = true;
4903 }
4904 }
4905 }
4906 break;
4907
cbdd87d4
RG
4908 default:;
4909 }
4910
4911 stmt = gsi_stmt (*gsi);
4912
37376165
RB
4913 /* Fold *& on the lhs. */
4914 if (gimple_has_lhs (stmt))
cbdd87d4
RG
4915 {
4916 tree lhs = gimple_get_lhs (stmt);
4917 if (lhs && REFERENCE_CLASS_P (lhs))
4918 {
4919 tree new_lhs = maybe_fold_reference (lhs, true);
4920 if (new_lhs)
4921 {
4922 gimple_set_lhs (stmt, new_lhs);
4923 changed = true;
4924 }
4925 }
4926 }
4927
a8b85ce9 4928 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
cbdd87d4
RG
4929 return changed;
4930}
4931
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  /* Returning NULL_TREE tells gimple_simplify not to look through
     the defining statement of any SSA name.  */
  return NULL_TREE;
}
4939
45cc9f96
RB
4940/* Valueization callback that ends up following single-use SSA edges only. */
4941
4942tree
4943follow_single_use_edges (tree val)
4944{
4945 if (TREE_CODE (val) == SSA_NAME
4946 && !has_single_use (val))
4947 return NULL_TREE;
4948 return val;
4949}
4950
/* Valueization callback that follows all SSA edges.  */

tree
follow_all_ssa_edges (tree val)
{
  /* Returning the value itself allows gimple_simplify to look through
     every SSA definition.  */
  return val;
}
4958
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Non-inplace folding without following SSA edges.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
4971
/* Overload of fold_stmt that valueizes operands through VALUEIZE
   instead of not following SSA edges at all.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
4977
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  /* In-place folding must never replace the statement; verify the
     iterator still points at the same gimple object.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
4994
e89065a1
SL
4995/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4996 if EXPR is null or we don't know how.
4997 If non-null, the result always has boolean type. */
4998
4999static tree
5000canonicalize_bool (tree expr, bool invert)
5001{
5002 if (!expr)
5003 return NULL_TREE;
5004 else if (invert)
5005 {
5006 if (integer_nonzerop (expr))
5007 return boolean_false_node;
5008 else if (integer_zerop (expr))
5009 return boolean_true_node;
5010 else if (TREE_CODE (expr) == SSA_NAME)
5011 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5012 build_int_cst (TREE_TYPE (expr), 0));
98209db3 5013 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
5014 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5015 boolean_type_node,
5016 TREE_OPERAND (expr, 0),
5017 TREE_OPERAND (expr, 1));
5018 else
5019 return NULL_TREE;
5020 }
5021 else
5022 {
5023 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5024 return expr;
5025 if (integer_nonzerop (expr))
5026 return boolean_true_node;
5027 else if (integer_zerop (expr))
5028 return boolean_false_node;
5029 else if (TREE_CODE (expr) == SSA_NAME)
5030 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5031 build_int_cst (TREE_TYPE (expr), 0));
98209db3 5032 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
5033 return fold_build2 (TREE_CODE (expr),
5034 boolean_type_node,
5035 TREE_OPERAND (expr, 0),
5036 TREE_OPERAND (expr, 1));
5037 else
5038 return NULL_TREE;
5039 }
5040}
5041
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case: EXPR is literally the comparison (OP1 CODE OP2).  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* For boolean NAME, (NAME != 0) and (NAME == 1) are NAME itself.  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) / (name == 1) is equivalent to name's definition.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) / (name != 1) is equivalent to its inversion.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
5099
5100/* Check to see if two boolean expressions OP1 and OP2 are logically
5101 equivalent. */
5102
5103static bool
5104same_bool_result_p (const_tree op1, const_tree op2)
5105{
5106 /* Simple cases first. */
5107 if (operand_equal_p (op1, op2, 0))
5108 return true;
5109
5110 /* Check the cases where at least one of the operands is a comparison.
5111 These are a bit smarter than operand_equal_p in that they apply some
5112 identifies on SSA_NAMEs. */
98209db3 5113 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
5114 && same_bool_comparison_p (op1, TREE_CODE (op2),
5115 TREE_OPERAND (op2, 0),
5116 TREE_OPERAND (op2, 1)))
5117 return true;
98209db3 5118 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
5119 && same_bool_comparison_p (op2, TREE_CODE (op1),
5120 TREE_OPERAND (op1, 0),
5121 TREE_OPERAND (op1, 1)))
5122 return true;
5123
5124 /* Default case. */
5125 return false;
5126}
5127
/* Forward declarations for some mutually recursive functions.
   The and_* and or_* families call into each other via DeMorgan
   rewrites, so both sets must be visible before their definitions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
5148
5149/* Helper function for and_comparisons_1: try to simplify the AND of the
5150 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5151 If INVERT is true, invert the value of the VAR before doing the AND.
5152 Return NULL_EXPR if we can't simplify this to a single expression. */
5153
5154static tree
5155and_var_with_comparison (tree var, bool invert,
5156 enum tree_code code2, tree op2a, tree op2b)
5157{
5158 tree t;
355fe088 5159 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5160
5161 /* We can only deal with variables whose definitions are assignments. */
5162 if (!is_gimple_assign (stmt))
5163 return NULL_TREE;
5164
5165 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5166 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5167 Then we only have to consider the simpler non-inverted cases. */
5168 if (invert)
5169 t = or_var_with_comparison_1 (stmt,
5170 invert_tree_comparison (code2, false),
5171 op2a, op2b);
5172 else
5173 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5174 return canonicalize_bool (t, invert);
5175}
5176
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) and (op2a == 1) test op2a being true ...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ... while (op2a == 0) and (op2a != 1) test op2a being false.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b)).  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)).  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
5339
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* When the second comparison implies op1a != op1b, it alone
	     is the result of the AND.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges: (x <= c AND x >= c) => x == c.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
5593
5594/* Try to simplify the AND of two comparisons, specified by
5595 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5596 If this can be simplified to a single expression (without requiring
5597 introducing more SSA variables to hold intermediate values),
5598 return the resulting tree. Otherwise return NULL_TREE.
5599 If the result expression is non-null, it has boolean type. */
5600
5601tree
5602maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5603 enum tree_code code2, tree op2a, tree op2b)
5604{
5605 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5606 if (t)
5607 return t;
5608 else
5609 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5610}
5611
5612/* Helper function for or_comparisons_1: try to simplify the OR of the
5613 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5614 If INVERT is true, invert the value of VAR before doing the OR.
5615 Return NULL_EXPR if we can't simplify this to a single expression. */
5616
5617static tree
5618or_var_with_comparison (tree var, bool invert,
5619 enum tree_code code2, tree op2a, tree op2b)
5620{
5621 tree t;
355fe088 5622 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5623
5624 /* We can only deal with variables whose definitions are assignments. */
5625 if (!is_gimple_assign (stmt))
5626 return NULL_TREE;
5627
5628 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5629 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5630 Then we only have to consider the simpler non-inverted cases. */
5631 if (invert)
5632 t = and_var_with_comparison_1 (stmt,
5633 invert_tree_comparison (code2, false),
5634 op2a, op2b);
5635 else
5636 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5637 return canonicalize_bool (t, invert);
5638}
5639
5640/* Try to simplify the OR of the ssa variable defined by the assignment
5641 STMT with the comparison specified by (OP2A CODE2 OP2B).
5642 Return NULL_EXPR if we can't simplify this to a single expression. */
5643
5644static tree
355fe088 5645or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5646 enum tree_code code2, tree op2a, tree op2b)
5647{
5648 tree var = gimple_assign_lhs (stmt);
5649 tree true_test_var = NULL_TREE;
5650 tree false_test_var = NULL_TREE;
5651 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5652
5653 /* Check for identities like (var OR (var != 0)) => true . */
5654 if (TREE_CODE (op2a) == SSA_NAME
5655 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5656 {
5657 if ((code2 == NE_EXPR && integer_zerop (op2b))
5658 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5659 {
5660 true_test_var = op2a;
5661 if (var == true_test_var)
5662 return var;
5663 }
5664 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5665 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5666 {
5667 false_test_var = op2a;
5668 if (var == false_test_var)
5669 return boolean_true_node;
5670 }
5671 }
5672
5673 /* If the definition is a comparison, recurse on it. */
5674 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5675 {
5676 tree t = or_comparisons_1 (innercode,
5677 gimple_assign_rhs1 (stmt),
5678 gimple_assign_rhs2 (stmt),
5679 code2,
5680 op2a,
5681 op2b);
5682 if (t)
5683 return t;
5684 }
5685
5686 /* If the definition is an AND or OR expression, we may be able to
5687 simplify by reassociating. */
eb9820c0
KT
5688 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5689 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
5690 {
5691 tree inner1 = gimple_assign_rhs1 (stmt);
5692 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 5693 gimple *s;
e89065a1
SL
5694 tree t;
5695 tree partial = NULL_TREE;
eb9820c0 5696 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
5697
5698 /* Check for boolean identities that don't require recursive examination
5699 of inner1/inner2:
5700 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5701 inner1 OR (inner1 AND inner2) => inner1
5702 !inner1 OR (inner1 OR inner2) => true
5703 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5704 */
5705 if (inner1 == true_test_var)
5706 return (is_or ? var : inner1);
5707 else if (inner2 == true_test_var)
5708 return (is_or ? var : inner2);
5709 else if (inner1 == false_test_var)
5710 return (is_or
5711 ? boolean_true_node
5712 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5713 else if (inner2 == false_test_var)
5714 return (is_or
5715 ? boolean_true_node
5716 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5717
5718 /* Next, redistribute/reassociate the OR across the inner tests.
5719 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5720 if (TREE_CODE (inner1) == SSA_NAME
5721 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5722 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5723 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5724 gimple_assign_rhs1 (s),
5725 gimple_assign_rhs2 (s),
5726 code2, op2a, op2b)))
5727 {
5728 /* Handle the OR case, where we are reassociating:
5729 (inner1 OR inner2) OR (op2a code2 op2b)
5730 => (t OR inner2)
5731 If the partial result t is a constant, we win. Otherwise
5732 continue on to try reassociating with the other inner test. */
8236c8eb 5733 if (is_or)
e89065a1
SL
5734 {
5735 if (integer_onep (t))
5736 return boolean_true_node;
5737 else if (integer_zerop (t))
5738 return inner2;
5739 }
5740
5741 /* Handle the AND case, where we are redistributing:
5742 (inner1 AND inner2) OR (op2a code2 op2b)
5743 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
5744 else if (integer_zerop (t))
5745 return boolean_false_node;
5746
5747 /* Save partial result for later. */
5748 partial = t;
e89065a1
SL
5749 }
5750
5751 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5752 if (TREE_CODE (inner2) == SSA_NAME
5753 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5754 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5755 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5756 gimple_assign_rhs1 (s),
5757 gimple_assign_rhs2 (s),
5758 code2, op2a, op2b)))
5759 {
5760 /* Handle the OR case, where we are reassociating:
5761 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
5762 => (inner1 OR t)
5763 => (t OR partial) */
5764 if (is_or)
e89065a1
SL
5765 {
5766 if (integer_zerop (t))
5767 return inner1;
5768 else if (integer_onep (t))
5769 return boolean_true_node;
8236c8eb
JJ
5770 /* If both are the same, we can apply the identity
5771 (x OR x) == x. */
5772 else if (partial && same_bool_result_p (t, partial))
5773 return t;
e89065a1
SL
5774 }
5775
5776 /* Handle the AND case, where we are redistributing:
5777 (inner1 AND inner2) OR (op2a code2 op2b)
5778 => (t AND (inner1 OR (op2a code2 op2b)))
5779 => (t AND partial) */
5780 else
5781 {
5782 if (integer_zerop (t))
5783 return boolean_false_node;
5784 else if (partial)
5785 {
5786 /* We already got a simplification for the other
5787 operand to the redistributed AND expression. The
5788 interesting case is when at least one is true.
5789 Or, if both are the same, we can apply the identity
8236c8eb 5790 (x AND x) == x. */
e89065a1
SL
5791 if (integer_onep (partial))
5792 return t;
5793 else if (integer_onep (t))
5794 return partial;
5795 else if (same_bool_result_p (t, partial))
8236c8eb 5796 return t;
e89065a1
SL
5797 }
5798 }
5799 }
5800 }
5801 return NULL_TREE;
5802}
5803
5804/* Try to simplify the OR of two comparisons defined by
5805 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5806 If this can be done without constructing an intermediate value,
5807 return the resulting tree; otherwise NULL_TREE is returned.
5808 This function is deliberately asymmetric as it recurses on SSA_DEFs
5809 in the first comparison but not the second. */
5810
5811static tree
5812or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5813 enum tree_code code2, tree op2a, tree op2b)
5814{
ae22ac3c 5815 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 5816
e89065a1
SL
5817 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5818 if (operand_equal_p (op1a, op2a, 0)
5819 && operand_equal_p (op1b, op2b, 0))
5820 {
eb9820c0 5821 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5822 tree t = combine_comparisons (UNKNOWN_LOCATION,
5823 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 5824 truth_type, op1a, op1b);
e89065a1
SL
5825 if (t)
5826 return t;
5827 }
5828
5829 /* Likewise the swapped case of the above. */
5830 if (operand_equal_p (op1a, op2b, 0)
5831 && operand_equal_p (op1b, op2a, 0))
5832 {
eb9820c0 5833 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5834 tree t = combine_comparisons (UNKNOWN_LOCATION,
5835 TRUTH_ORIF_EXPR, code1,
5836 swap_tree_comparison (code2),
31ed6226 5837 truth_type, op1a, op1b);
e89065a1
SL
5838 if (t)
5839 return t;
5840 }
5841
5842 /* If both comparisons are of the same value against constants, we might
5843 be able to merge them. */
5844 if (operand_equal_p (op1a, op2a, 0)
5845 && TREE_CODE (op1b) == INTEGER_CST
5846 && TREE_CODE (op2b) == INTEGER_CST)
5847 {
5848 int cmp = tree_int_cst_compare (op1b, op2b);
5849
5850 /* If we have (op1a != op1b), we should either be able to
5851 return that or TRUE, depending on whether the constant op1b
5852 also satisfies the other comparison against op2b. */
5853 if (code1 == NE_EXPR)
5854 {
5855 bool done = true;
5856 bool val;
5857 switch (code2)
5858 {
5859 case EQ_EXPR: val = (cmp == 0); break;
5860 case NE_EXPR: val = (cmp != 0); break;
5861 case LT_EXPR: val = (cmp < 0); break;
5862 case GT_EXPR: val = (cmp > 0); break;
5863 case LE_EXPR: val = (cmp <= 0); break;
5864 case GE_EXPR: val = (cmp >= 0); break;
5865 default: done = false;
5866 }
5867 if (done)
5868 {
5869 if (val)
5870 return boolean_true_node;
5871 else
5872 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5873 }
5874 }
5875 /* Likewise if the second comparison is a != comparison. */
5876 else if (code2 == NE_EXPR)
5877 {
5878 bool done = true;
5879 bool val;
5880 switch (code1)
5881 {
5882 case EQ_EXPR: val = (cmp == 0); break;
5883 case NE_EXPR: val = (cmp != 0); break;
5884 case LT_EXPR: val = (cmp > 0); break;
5885 case GT_EXPR: val = (cmp < 0); break;
5886 case LE_EXPR: val = (cmp >= 0); break;
5887 case GE_EXPR: val = (cmp <= 0); break;
5888 default: done = false;
5889 }
5890 if (done)
5891 {
5892 if (val)
5893 return boolean_true_node;
5894 else
5895 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5896 }
5897 }
5898
5899 /* See if an equality test is redundant with the other comparison. */
5900 else if (code1 == EQ_EXPR)
5901 {
5902 bool val;
5903 switch (code2)
5904 {
5905 case EQ_EXPR: val = (cmp == 0); break;
5906 case NE_EXPR: val = (cmp != 0); break;
5907 case LT_EXPR: val = (cmp < 0); break;
5908 case GT_EXPR: val = (cmp > 0); break;
5909 case LE_EXPR: val = (cmp <= 0); break;
5910 case GE_EXPR: val = (cmp >= 0); break;
5911 default:
5912 val = false;
5913 }
5914 if (val)
5915 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5916 }
5917 else if (code2 == EQ_EXPR)
5918 {
5919 bool val;
5920 switch (code1)
5921 {
5922 case EQ_EXPR: val = (cmp == 0); break;
5923 case NE_EXPR: val = (cmp != 0); break;
5924 case LT_EXPR: val = (cmp > 0); break;
5925 case GT_EXPR: val = (cmp < 0); break;
5926 case LE_EXPR: val = (cmp >= 0); break;
5927 case GE_EXPR: val = (cmp <= 0); break;
5928 default:
5929 val = false;
5930 }
5931 if (val)
5932 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5933 }
5934
5935 /* Chose the less restrictive of two < or <= comparisons. */
5936 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5937 && (code2 == LT_EXPR || code2 == LE_EXPR))
5938 {
5939 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5940 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5941 else
5942 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5943 }
5944
5945 /* Likewise chose the less restrictive of two > or >= comparisons. */
5946 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5947 && (code2 == GT_EXPR || code2 == GE_EXPR))
5948 {
5949 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5950 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5951 else
5952 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5953 }
5954
5955 /* Check for singleton ranges. */
5956 else if (cmp == 0
5957 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5958 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5959 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5960
5961 /* Check for less/greater pairs that don't restrict the range at all. */
5962 else if (cmp >= 0
5963 && (code1 == LT_EXPR || code1 == LE_EXPR)
5964 && (code2 == GT_EXPR || code2 == GE_EXPR))
5965 return boolean_true_node;
5966 else if (cmp <= 0
5967 && (code1 == GT_EXPR || code1 == GE_EXPR)
5968 && (code2 == LT_EXPR || code2 == LE_EXPR))
5969 return boolean_true_node;
5970 }
5971
5972 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5973 NAME's definition is a truth value. See if there are any simplifications
5974 that can be done against the NAME's definition. */
5975 if (TREE_CODE (op1a) == SSA_NAME
5976 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5977 && (integer_zerop (op1b) || integer_onep (op1b)))
5978 {
5979 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5980 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 5981 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
5982 switch (gimple_code (stmt))
5983 {
5984 case GIMPLE_ASSIGN:
5985 /* Try to simplify by copy-propagating the definition. */
5986 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5987
5988 case GIMPLE_PHI:
5989 /* If every argument to the PHI produces the same result when
5990 ORed with the second comparison, we win.
5991 Do not do this unless the type is bool since we need a bool
5992 result here anyway. */
5993 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5994 {
5995 tree result = NULL_TREE;
5996 unsigned i;
5997 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5998 {
5999 tree arg = gimple_phi_arg_def (stmt, i);
6000
6001 /* If this PHI has itself as an argument, ignore it.
6002 If all the other args produce the same result,
6003 we're still OK. */
6004 if (arg == gimple_phi_result (stmt))
6005 continue;
6006 else if (TREE_CODE (arg) == INTEGER_CST)
6007 {
6008 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6009 {
6010 if (!result)
6011 result = boolean_true_node;
6012 else if (!integer_onep (result))
6013 return NULL_TREE;
6014 }
6015 else if (!result)
6016 result = fold_build2 (code2, boolean_type_node,
6017 op2a, op2b);
6018 else if (!same_bool_comparison_p (result,
6019 code2, op2a, op2b))
6020 return NULL_TREE;
6021 }
0e8b84ec
JJ
6022 else if (TREE_CODE (arg) == SSA_NAME
6023 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 6024 {
6c66f733 6025 tree temp;
355fe088 6026 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
6027 /* In simple cases we can look through PHI nodes,
6028 but we have to be careful with loops.
6029 See PR49073. */
6030 if (! dom_info_available_p (CDI_DOMINATORS)
6031 || gimple_bb (def_stmt) == gimple_bb (stmt)
6032 || dominated_by_p (CDI_DOMINATORS,
6033 gimple_bb (def_stmt),
6034 gimple_bb (stmt)))
6035 return NULL_TREE;
6036 temp = or_var_with_comparison (arg, invert, code2,
6037 op2a, op2b);
e89065a1
SL
6038 if (!temp)
6039 return NULL_TREE;
6040 else if (!result)
6041 result = temp;
6042 else if (!same_bool_result_p (result, temp))
6043 return NULL_TREE;
6044 }
6045 else
6046 return NULL_TREE;
6047 }
6048 return result;
6049 }
6050
6051 default:
6052 break;
6053 }
6054 }
6055 return NULL_TREE;
6056}
6057
6058/* Try to simplify the OR of two comparisons, specified by
6059 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6060 If this can be simplified to a single expression (without requiring
6061 introducing more SSA variables to hold intermediate values),
6062 return the resulting tree. Otherwise return NULL_TREE.
6063 If the result expression is non-null, it has boolean type. */
6064
6065tree
6066maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6067 enum tree_code code2, tree op2a, tree op2b)
6068{
6069 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6070 if (t)
6071 return t;
6072 else
6073 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6074}
cfef45c8
RG
6075
6076
6077/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6078
6079 Either NULL_TREE, a simplified but non-constant or a constant
6080 is returned.
6081
6082 ??? This should go into a gimple-fold-inline.h file to be eventually
6083 privatized with the single valueize function used in the various TUs
6084 to avoid the indirect function call overhead. */
6085
6086tree
355fe088 6087gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
d2a85801 6088 tree (*gvalueize) (tree))
cfef45c8 6089{
5d75ad95 6090 gimple_match_op res_op;
45cc9f96
RB
6091 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6092 edges if there are intermediate VARYING defs. For this reason
6093 do not follow SSA edges here even though SCCVN can technically
6094 just deal fine with that. */
5d75ad95 6095 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
45cc9f96 6096 {
34050b6b 6097 tree res = NULL_TREE;
5d75ad95
RS
6098 if (gimple_simplified_result_is_gimple_val (&res_op))
6099 res = res_op.ops[0];
34050b6b 6100 else if (mprts_hook)
5d75ad95 6101 res = mprts_hook (&res_op);
34050b6b 6102 if (res)
45cc9f96 6103 {
34050b6b
RB
6104 if (dump_file && dump_flags & TDF_DETAILS)
6105 {
6106 fprintf (dump_file, "Match-and-simplified ");
6107 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6108 fprintf (dump_file, " to ");
ef6cb4c7 6109 print_generic_expr (dump_file, res);
34050b6b
RB
6110 fprintf (dump_file, "\n");
6111 }
6112 return res;
45cc9f96 6113 }
45cc9f96
RB
6114 }
6115
cfef45c8
RG
6116 location_t loc = gimple_location (stmt);
6117 switch (gimple_code (stmt))
6118 {
6119 case GIMPLE_ASSIGN:
6120 {
6121 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6122
6123 switch (get_gimple_rhs_class (subcode))
6124 {
6125 case GIMPLE_SINGLE_RHS:
6126 {
6127 tree rhs = gimple_assign_rhs1 (stmt);
6128 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6129
6130 if (TREE_CODE (rhs) == SSA_NAME)
6131 {
6132 /* If the RHS is an SSA_NAME, return its known constant value,
6133 if any. */
6134 return (*valueize) (rhs);
6135 }
6136 /* Handle propagating invariant addresses into address
6137 operations. */
6138 else if (TREE_CODE (rhs) == ADDR_EXPR
6139 && !is_gimple_min_invariant (rhs))
6140 {
a90c8804 6141 poly_int64 offset = 0;
cfef45c8
RG
6142 tree base;
6143 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6144 &offset,
6145 valueize);
6146 if (base
6147 && (CONSTANT_CLASS_P (base)
6148 || decl_address_invariant_p (base)))
6149 return build_invariant_address (TREE_TYPE (rhs),
6150 base, offset);
6151 }
6152 else if (TREE_CODE (rhs) == CONSTRUCTOR
6153 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
928686b1
RS
6154 && known_eq (CONSTRUCTOR_NELTS (rhs),
6155 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
cfef45c8 6156 {
794e3180
RS
6157 unsigned i, nelts;
6158 tree val;
cfef45c8 6159
928686b1 6160 nelts = CONSTRUCTOR_NELTS (rhs);
5ebaa477 6161 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
cfef45c8
RG
6162 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6163 {
6164 val = (*valueize) (val);
6165 if (TREE_CODE (val) == INTEGER_CST
6166 || TREE_CODE (val) == REAL_CST
6167 || TREE_CODE (val) == FIXED_CST)
794e3180 6168 vec.quick_push (val);
cfef45c8
RG
6169 else
6170 return NULL_TREE;
6171 }
6172
5ebaa477 6173 return vec.build ();
cfef45c8 6174 }
bdf37f7a
JH
6175 if (subcode == OBJ_TYPE_REF)
6176 {
6177 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6178 /* If callee is constant, we can fold away the wrapper. */
6179 if (is_gimple_min_invariant (val))
6180 return val;
6181 }
cfef45c8
RG
6182
6183 if (kind == tcc_reference)
6184 {
6185 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6186 || TREE_CODE (rhs) == REALPART_EXPR
6187 || TREE_CODE (rhs) == IMAGPART_EXPR)
6188 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6189 {
6190 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6191 return fold_unary_loc (EXPR_LOCATION (rhs),
6192 TREE_CODE (rhs),
6193 TREE_TYPE (rhs), val);
6194 }
6195 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6196 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6197 {
6198 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6199 return fold_ternary_loc (EXPR_LOCATION (rhs),
6200 TREE_CODE (rhs),
6201 TREE_TYPE (rhs), val,
6202 TREE_OPERAND (rhs, 1),
6203 TREE_OPERAND (rhs, 2));
6204 }
6205 else if (TREE_CODE (rhs) == MEM_REF
6206 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6207 {
6208 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6209 if (TREE_CODE (val) == ADDR_EXPR
6210 && is_gimple_min_invariant (val))
6211 {
6212 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6213 unshare_expr (val),
6214 TREE_OPERAND (rhs, 1));
6215 if (tem)
6216 rhs = tem;
6217 }
6218 }
6219 return fold_const_aggregate_ref_1 (rhs, valueize);
6220 }
6221 else if (kind == tcc_declaration)
6222 return get_symbol_constant_value (rhs);
6223 return rhs;
6224 }
6225
6226 case GIMPLE_UNARY_RHS:
f3582e54 6227 return NULL_TREE;
cfef45c8
RG
6228
6229 case GIMPLE_BINARY_RHS:
4b1b9e64
RB
6230 /* Translate &x + CST into an invariant form suitable for
6231 further propagation. */
6232 if (subcode == POINTER_PLUS_EXPR)
6233 {
4b1b9e64
RB
6234 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6235 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4b1b9e64
RB
6236 if (TREE_CODE (op0) == ADDR_EXPR
6237 && TREE_CODE (op1) == INTEGER_CST)
6238 {
6239 tree off = fold_convert (ptr_type_node, op1);
6240 return build_fold_addr_expr_loc
6241 (loc,
6242 fold_build2 (MEM_REF,
6243 TREE_TYPE (TREE_TYPE (op0)),
6244 unshare_expr (op0), off));
6245 }
6246 }
59c20dc7
RB
6247 /* Canonicalize bool != 0 and bool == 0 appearing after
6248 valueization. While gimple_simplify handles this
6249 it can get confused by the ~X == 1 -> X == 0 transform
6250 which we cant reduce to a SSA name or a constant
6251 (and we have no way to tell gimple_simplify to not
6252 consider those transforms in the first place). */
6253 else if (subcode == EQ_EXPR
6254 || subcode == NE_EXPR)
6255 {
6256 tree lhs = gimple_assign_lhs (stmt);
6257 tree op0 = gimple_assign_rhs1 (stmt);
6258 if (useless_type_conversion_p (TREE_TYPE (lhs),
6259 TREE_TYPE (op0)))
6260 {
6261 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6262 op0 = (*valueize) (op0);
8861704d
RB
6263 if (TREE_CODE (op0) == INTEGER_CST)
6264 std::swap (op0, op1);
6265 if (TREE_CODE (op1) == INTEGER_CST
6266 && ((subcode == NE_EXPR && integer_zerop (op1))
6267 || (subcode == EQ_EXPR && integer_onep (op1))))
6268 return op0;
59c20dc7
RB
6269 }
6270 }
4b1b9e64 6271 return NULL_TREE;
cfef45c8
RG
6272
6273 case GIMPLE_TERNARY_RHS:
6274 {
6275 /* Handle ternary operators that can appear in GIMPLE form. */
6276 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6277 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6278 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
cfef45c8
RG
6279 return fold_ternary_loc (loc, subcode,
6280 gimple_expr_type (stmt), op0, op1, op2);
6281 }
6282
6283 default:
6284 gcc_unreachable ();
6285 }
6286 }
6287
6288 case GIMPLE_CALL:
6289 {
25583c4f 6290 tree fn;
538dd0b7 6291 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
6292
6293 if (gimple_call_internal_p (stmt))
31e071ae
MP
6294 {
6295 enum tree_code subcode = ERROR_MARK;
6296 switch (gimple_call_internal_fn (stmt))
6297 {
6298 case IFN_UBSAN_CHECK_ADD:
6299 subcode = PLUS_EXPR;
6300 break;
6301 case IFN_UBSAN_CHECK_SUB:
6302 subcode = MINUS_EXPR;
6303 break;
6304 case IFN_UBSAN_CHECK_MUL:
6305 subcode = MULT_EXPR;
6306 break;
68fa96d6
ML
6307 case IFN_BUILTIN_EXPECT:
6308 {
6309 tree arg0 = gimple_call_arg (stmt, 0);
6310 tree op0 = (*valueize) (arg0);
6311 if (TREE_CODE (op0) == INTEGER_CST)
6312 return op0;
6313 return NULL_TREE;
6314 }
31e071ae
MP
6315 default:
6316 return NULL_TREE;
6317 }
368b454d
JJ
6318 tree arg0 = gimple_call_arg (stmt, 0);
6319 tree arg1 = gimple_call_arg (stmt, 1);
6320 tree op0 = (*valueize) (arg0);
6321 tree op1 = (*valueize) (arg1);
31e071ae
MP
6322
6323 if (TREE_CODE (op0) != INTEGER_CST
6324 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
6325 {
6326 switch (subcode)
6327 {
6328 case MULT_EXPR:
6329 /* x * 0 = 0 * x = 0 without overflow. */
6330 if (integer_zerop (op0) || integer_zerop (op1))
6331 return build_zero_cst (TREE_TYPE (arg0));
6332 break;
6333 case MINUS_EXPR:
6334 /* y - y = 0 without overflow. */
6335 if (operand_equal_p (op0, op1, 0))
6336 return build_zero_cst (TREE_TYPE (arg0));
6337 break;
6338 default:
6339 break;
6340 }
6341 }
6342 tree res
6343 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
6344 if (res
6345 && TREE_CODE (res) == INTEGER_CST
6346 && !TREE_OVERFLOW (res))
6347 return res;
6348 return NULL_TREE;
6349 }
25583c4f
RS
6350
6351 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8
RG
6352 if (TREE_CODE (fn) == ADDR_EXPR
6353 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5c944c6c
RB
6354 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6355 && gimple_builtin_call_types_compatible_p (stmt,
6356 TREE_OPERAND (fn, 0)))
cfef45c8
RG
6357 {
6358 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 6359 tree retval;
cfef45c8
RG
6360 unsigned i;
6361 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6362 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 6363 retval = fold_builtin_call_array (loc,
538dd0b7 6364 gimple_call_return_type (call_stmt),
cfef45c8 6365 fn, gimple_call_num_args (stmt), args);
cfef45c8 6366 if (retval)
5c944c6c
RB
6367 {
6368 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6369 STRIP_NOPS (retval);
538dd0b7
DM
6370 retval = fold_convert (gimple_call_return_type (call_stmt),
6371 retval);
5c944c6c 6372 }
cfef45c8
RG
6373 return retval;
6374 }
6375 return NULL_TREE;
6376 }
6377
6378 default:
6379 return NULL_TREE;
6380 }
6381}
6382
6383/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6384 Returns NULL_TREE if folding to a constant is not possible, otherwise
6385 returns a constant according to is_gimple_min_invariant. */
6386
6387tree
355fe088 6388gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
6389{
6390 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6391 if (res && is_gimple_min_invariant (res))
6392 return res;
6393 return NULL_TREE;
6394}
6395
6396
6397/* The following set of functions are supposed to fold references using
6398 their constant initializers. */
6399
cfef45c8
RG
6400/* See if we can find constructor defining value of BASE.
6401 When we know the consructor with constant offset (such as
6402 base is array[40] and we do know constructor of array), then
6403 BIT_OFFSET is adjusted accordingly.
6404
6405 As a special case, return error_mark_node when constructor
6406 is not explicitly available, but it is known to be zero
6407 such as 'static const int a;'. */
6408static tree
588db50c 6409get_base_constructor (tree base, poly_int64_pod *bit_offset,
cfef45c8
RG
6410 tree (*valueize)(tree))
6411{
588db50c 6412 poly_int64 bit_offset2, size, max_size;
ee45a32d
EB
6413 bool reverse;
6414
cfef45c8
RG
6415 if (TREE_CODE (base) == MEM_REF)
6416 {
6a5aca53
ML
6417 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6418 if (!boff.to_shwi (bit_offset))
6419 return NULL_TREE;
cfef45c8
RG
6420
6421 if (valueize
6422 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6423 base = valueize (TREE_OPERAND (base, 0));
6424 if (!base || TREE_CODE (base) != ADDR_EXPR)
6425 return NULL_TREE;
6426 base = TREE_OPERAND (base, 0);
6427 }
13e88953
RB
6428 else if (valueize
6429 && TREE_CODE (base) == SSA_NAME)
6430 base = valueize (base);
cfef45c8
RG
6431
6432 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6433 DECL_INITIAL. If BASE is a nested reference into another
6434 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6435 the inner reference. */
6436 switch (TREE_CODE (base))
6437 {
6438 case VAR_DECL:
cfef45c8 6439 case CONST_DECL:
6a6dac52
JH
6440 {
6441 tree init = ctor_for_folding (base);
6442
688010ba 6443 /* Our semantic is exact opposite of ctor_for_folding;
6a6dac52
JH
6444 NULL means unknown, while error_mark_node is 0. */
6445 if (init == error_mark_node)
6446 return NULL_TREE;
6447 if (!init)
6448 return error_mark_node;
6449 return init;
6450 }
cfef45c8 6451
13e88953
RB
6452 case VIEW_CONVERT_EXPR:
6453 return get_base_constructor (TREE_OPERAND (base, 0),
6454 bit_offset, valueize);
6455
cfef45c8
RG
6456 case ARRAY_REF:
6457 case COMPONENT_REF:
ee45a32d
EB
6458 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6459 &reverse);
588db50c 6460 if (!known_size_p (max_size) || maybe_ne (size, max_size))
cfef45c8
RG
6461 return NULL_TREE;
6462 *bit_offset += bit_offset2;
6463 return get_base_constructor (base, bit_offset, valueize);
6464
cfef45c8
RG
6465 case CONSTRUCTOR:
6466 return base;
6467
6468 default:
13e88953
RB
6469 if (CONSTANT_CLASS_P (base))
6470 return base;
6471
cfef45c8
RG
6472 return NULL_TREE;
6473 }
6474}
6475
35b4d3a6
MS
6476/* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6477 to the memory at bit OFFSET. When non-null, TYPE is the expected
6478 type of the reference; otherwise the type of the referenced element
6479 is used instead. When SIZE is zero, attempt to fold a reference to
6480 the entire element which OFFSET refers to. Increment *SUBOFF by
6481 the bit offset of the accessed element. */
cfef45c8
RG
6482
6483static tree
6484fold_array_ctor_reference (tree type, tree ctor,
6485 unsigned HOST_WIDE_INT offset,
c44c2088 6486 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
6487 tree from_decl,
6488 unsigned HOST_WIDE_INT *suboff)
cfef45c8 6489{
807e902e
KZ
6490 offset_int low_bound;
6491 offset_int elt_size;
807e902e 6492 offset_int access_index;
6a636014 6493 tree domain_type = NULL_TREE;
cfef45c8
RG
6494 HOST_WIDE_INT inner_offset;
6495
6496 /* Compute low bound and elt size. */
eb8f1123
RG
6497 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6498 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
6499 if (domain_type && TYPE_MIN_VALUE (domain_type))
6500 {
6501 /* Static constructors for variably sized objects makes no sense. */
9ef2eff0
RB
6502 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6503 return NULL_TREE;
807e902e 6504 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
6505 }
6506 else
807e902e 6507 low_bound = 0;
cfef45c8 6508 /* Static constructors for variably sized objects makes no sense. */
9ef2eff0
RB
6509 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6510 return NULL_TREE;
807e902e 6511 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8 6512
35b4d3a6
MS
6513 /* When TYPE is non-null, verify that it specifies a constant-sized
6514 accessed not larger than size of array element. */
6515 if (type
6516 && (!TYPE_SIZE_UNIT (type)
6517 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6518 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6519 || elt_size == 0))
cfef45c8
RG
6520 return NULL_TREE;
6521
6522 /* Compute the array index we look for. */
807e902e
KZ
6523 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6524 elt_size);
27bcd47c 6525 access_index += low_bound;
cfef45c8
RG
6526
6527 /* And offset within the access. */
27bcd47c 6528 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8
RG
6529
6530 /* See if the array field is large enough to span whole access. We do not
6531 care to fold accesses spanning multiple array indexes. */
27bcd47c 6532 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
cfef45c8 6533 return NULL_TREE;
6a636014 6534 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
35b4d3a6
MS
6535 {
6536 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6537 {
6538 /* For the final reference to the entire accessed element
6539 (SIZE is zero), reset INNER_OFFSET, disegard TYPE (which
6540 may be null) in favor of the type of the element, and set
6541 SIZE to the size of the accessed element. */
6542 inner_offset = 0;
6543 type = TREE_TYPE (val);
6544 size = elt_size.to_uhwi () * BITS_PER_UNIT;
6545 }
6546
6547 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6548 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6549 suboff);
6550 }
cfef45c8 6551
35b4d3a6
MS
6552 /* Memory not explicitly mentioned in constructor is 0 (or
6553 the reference is out of range). */
6554 return type ? build_zero_cst (type) : NULL_TREE;
cfef45c8
RG
6555}
6556
35b4d3a6
MS
/* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference
   of SIZE bits to the memory at bit OFFSET.  When non-null, TYPE
   is the expected type of the reference; otherwise the type of
   the referenced member is used instead.  When SIZE is zero,
   attempt to fold a reference to the entire member which OFFSET
   refers to.  Increment *SUBOFF by the bit offset of the accessed
   member.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  /* Walk the constructor's (field, value) pairs looking for the one
     that overlaps the requested bit range.  */
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  DECL_FIELD_OFFSET is in
	 bytes, DECL_FIELD_BIT_OFFSET in bits.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field (and let
	 the caller make any necessary adjustments by storing
	 the actual bounds of the field in FIELDBOUNDS).  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which
		 may be null) in favor of the type of the member, and
		 set SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  /* Recurse into the field's own initializer with the offset
	     rebased to the start of the field.  */
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }
  /* Memory not explicitly mentioned in constructor is 0.  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
6656
/* CTOR is value initializing memory.  Fold a reference of TYPE and
   bit size POLY_SIZE to the memory at bit POLY_OFFSET.  When SIZE
   is zero, attempt to fold a reference to the entire subobject
   which OFFSET refers to.  This is used when folding accesses to
   string members of aggregates.  When non-null, set *SUBOFF to
   the bit offset of the accessed subobject.  */

tree
fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
		     const poly_uint64 &poly_size, tree from_decl,
		     unsigned HOST_WIDE_INT *suboff /* = NULL */)
{
  tree ret;

  /* We found the field with exact match.  */
  if (type
      && useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Callers that don't care about the subobject offset still need
	 a valid pointer for the recursion to accumulate into.  */
      unsigned HOST_WIDE_INT dummy = 0;
      if (!suboff)
	suboff = &dummy;

      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl, suboff);

      return fold_nonarray_ctor_reference (type, ctor, offset, size,
					   from_decl, suboff);
    }

  return NULL_TREE;
}
6729
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Volatile accesses must not be folded away.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Index relative to the array's lower bound, sign
		 extended to the index type's precision.  */
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (woffset.to_shwi (&offset))
		{
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first, then extract the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
6847
6848tree
6849fold_const_aggregate_ref (tree t)
6850{
6851 return fold_const_aggregate_ref_1 (t, NULL);
6852}
06bc3ec7 6853
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as
   rewritten by non-C++ produced symbol).  Otherwise just return NULL
   in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  /* Convert OFFSET (bytes) plus TOKEN (slot index) into a bit offset
     into the vtable array.  */
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the
     array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
6962
85942f45
JH
6963/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6964 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6965 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
6966 OBJ_TYPE_REF_OBJECT(REF).
6967 Set CAN_REFER if non-NULL to false if method
6968 is not referable or if the virtual table is ill-formed (such as rewriten
6969 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
6970
6971tree
ec77d61f
JH
6972gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6973 bool *can_refer)
85942f45
JH
6974{
6975 unsigned HOST_WIDE_INT offset;
6976 tree v;
6977
6978 v = BINFO_VTABLE (known_binfo);
6979 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6980 if (!v)
6981 return NULL_TREE;
6982
6983 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
6984 {
6985 if (can_refer)
6986 *can_refer = false;
6987 return NULL_TREE;
6988 }
6989 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
6990}
6991
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold accesses that stay within the vector.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively simplify the inner indirection first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
19e51b40
JJ
7112
7113/* Return true if CODE is an operation that when operating on signed
7114 integer types involves undefined behavior on overflow and the
7115 operation can be expressed with unsigned arithmetic. */
7116
7117bool
7118arith_code_with_undefined_signed_overflow (tree_code code)
7119{
7120 switch (code)
7121 {
7122 case PLUS_EXPR:
7123 case MINUS_EXPR:
7124 case MULT_EXPR:
7125 case NEGATE_EXPR:
7126 case POINTER_PLUS_EXPR:
7127 return true;
7128 default:
7129 return false;
7130 }
7131}
7132
7133/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7134 operation that can be transformed to unsigned arithmetic by converting
7135 its operand, carrying out the operation in the corresponding unsigned
7136 type and converting the result back to the original type.
7137
7138 Returns a sequence of statements that replace STMT and also contain
7139 a modified form of STMT itself. */
7140
7141gimple_seq
355fe088 7142rewrite_to_defined_overflow (gimple *stmt)
19e51b40
JJ
7143{
7144 if (dump_file && (dump_flags & TDF_DETAILS))
7145 {
7146 fprintf (dump_file, "rewriting stmt with undefined signed "
7147 "overflow ");
7148 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7149 }
7150
7151 tree lhs = gimple_assign_lhs (stmt);
7152 tree type = unsigned_type_for (TREE_TYPE (lhs));
7153 gimple_seq stmts = NULL;
7154 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7155 {
74e3c262
RB
7156 tree op = gimple_op (stmt, i);
7157 op = gimple_convert (&stmts, type, op);
7158 gimple_set_op (stmt, i, op);
19e51b40
JJ
7159 }
7160 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7161 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7162 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7163 gimple_seq_add_stmt (&stmts, stmt);
355fe088 7164 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
19e51b40
JJ
7165 gimple_seq_add_stmt (&stmts, cvt);
7166
7167 return stmts;
7168}
d4f5cd5e 7169
3d2cf79f 7170
c26de36d
RB
7171/* The valueization hook we use for the gimple_build API simplification.
7172 This makes us match fold_buildN behavior by only combining with
7173 statements in the sequence(s) we are currently building. */
7174
7175static tree
7176gimple_build_valueize (tree op)
7177{
7178 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7179 return op;
7180 return NULL_TREE;
7181}
7182
3d2cf79f 7183/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 7184 simplifying it first if possible. Returns the built
3d2cf79f
RB
7185 expression value and appends statements possibly defining it
7186 to SEQ. */
7187
7188tree
7189gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7190 enum tree_code code, tree type, tree op0)
3d2cf79f 7191{
c26de36d 7192 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
7193 if (!res)
7194 {
a15ebbcd 7195 res = create_tmp_reg_or_ssa_name (type);
355fe088 7196 gimple *stmt;
3d2cf79f
RB
7197 if (code == REALPART_EXPR
7198 || code == IMAGPART_EXPR
7199 || code == VIEW_CONVERT_EXPR)
0d0e4a03 7200 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 7201 else
0d0e4a03 7202 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
7203 gimple_set_location (stmt, loc);
7204 gimple_seq_add_stmt_without_update (seq, stmt);
7205 }
7206 return res;
7207}
7208
7209/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 7210 simplifying it first if possible. Returns the built
3d2cf79f
RB
7211 expression value and appends statements possibly defining it
7212 to SEQ. */
7213
7214tree
7215gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7216 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 7217{
c26de36d 7218 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
7219 if (!res)
7220 {
a15ebbcd 7221 res = create_tmp_reg_or_ssa_name (type);
355fe088 7222 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
7223 gimple_set_location (stmt, loc);
7224 gimple_seq_add_stmt_without_update (seq, stmt);
7225 }
7226 return res;
7227}
7228
7229/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 7230 simplifying it first if possible. Returns the built
3d2cf79f
RB
7231 expression value and appends statements possibly defining it
7232 to SEQ. */
7233
7234tree
7235gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7236 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
7237{
7238 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 7239 seq, gimple_build_valueize);
3d2cf79f
RB
7240 if (!res)
7241 {
a15ebbcd 7242 res = create_tmp_reg_or_ssa_name (type);
355fe088 7243 gimple *stmt;
3d2cf79f 7244 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
7245 stmt = gimple_build_assign (res, code,
7246 build3 (code, type, op0, op1, op2));
3d2cf79f 7247 else
0d0e4a03 7248 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
7249 gimple_set_location (stmt, loc);
7250 gimple_seq_add_stmt_without_update (seq, stmt);
7251 }
7252 return res;
7253}
7254
7255/* Build the call FN (ARG0) with a result of type TYPE
7256 (or no result if TYPE is void) with location LOC,
c26de36d 7257 simplifying it first if possible. Returns the built
3d2cf79f
RB
7258 expression value (or NULL_TREE if TYPE is void) and appends
7259 statements possibly defining it to SEQ. */
7260
7261tree
eb69361d
RS
7262gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7263 tree type, tree arg0)
3d2cf79f 7264{
c26de36d 7265 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
7266 if (!res)
7267 {
eb69361d
RS
7268 gcall *stmt;
7269 if (internal_fn_p (fn))
7270 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7271 else
7272 {
7273 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7274 stmt = gimple_build_call (decl, 1, arg0);
7275 }
3d2cf79f
RB
7276 if (!VOID_TYPE_P (type))
7277 {
a15ebbcd 7278 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7279 gimple_call_set_lhs (stmt, res);
7280 }
7281 gimple_set_location (stmt, loc);
7282 gimple_seq_add_stmt_without_update (seq, stmt);
7283 }
7284 return res;
7285}
7286
7287/* Build the call FN (ARG0, ARG1) with a result of type TYPE
7288 (or no result if TYPE is void) with location LOC,
c26de36d 7289 simplifying it first if possible. Returns the built
3d2cf79f
RB
7290 expression value (or NULL_TREE if TYPE is void) and appends
7291 statements possibly defining it to SEQ. */
7292
7293tree
eb69361d
RS
7294gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7295 tree type, tree arg0, tree arg1)
3d2cf79f 7296{
c26de36d 7297 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
7298 if (!res)
7299 {
eb69361d
RS
7300 gcall *stmt;
7301 if (internal_fn_p (fn))
7302 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7303 else
7304 {
7305 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7306 stmt = gimple_build_call (decl, 2, arg0, arg1);
7307 }
3d2cf79f
RB
7308 if (!VOID_TYPE_P (type))
7309 {
a15ebbcd 7310 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7311 gimple_call_set_lhs (stmt, res);
7312 }
7313 gimple_set_location (stmt, loc);
7314 gimple_seq_add_stmt_without_update (seq, stmt);
7315 }
7316 return res;
7317}
7318
7319/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7320 (or no result if TYPE is void) with location LOC,
c26de36d 7321 simplifying it first if possible. Returns the built
3d2cf79f
RB
7322 expression value (or NULL_TREE if TYPE is void) and appends
7323 statements possibly defining it to SEQ. */
7324
7325tree
eb69361d
RS
7326gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7327 tree type, tree arg0, tree arg1, tree arg2)
3d2cf79f 7328{
c26de36d
RB
7329 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7330 seq, gimple_build_valueize);
3d2cf79f
RB
7331 if (!res)
7332 {
eb69361d
RS
7333 gcall *stmt;
7334 if (internal_fn_p (fn))
7335 stmt = gimple_build_call_internal (as_internal_fn (fn),
7336 3, arg0, arg1, arg2);
7337 else
7338 {
7339 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7340 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7341 }
3d2cf79f
RB
7342 if (!VOID_TYPE_P (type))
7343 {
a15ebbcd 7344 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7345 gimple_call_set_lhs (stmt, res);
7346 }
7347 gimple_set_location (stmt, loc);
7348 gimple_seq_add_stmt_without_update (seq, stmt);
7349 }
7350 return res;
7351}
7352
7353/* Build the conversion (TYPE) OP with a result of type TYPE
7354 with location LOC if such conversion is neccesary in GIMPLE,
7355 simplifying it first.
7356 Returns the built expression value and appends
7357 statements possibly defining it to SEQ. */
d4f5cd5e
RB
7358
7359tree
7360gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7361{
7362 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7363 return op;
3d2cf79f 7364 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 7365}
68e57f04 7366
74e3c262
RB
7367/* Build the conversion (ptrofftype) OP with a result of a type
7368 compatible with ptrofftype with location LOC if such conversion
7369 is neccesary in GIMPLE, simplifying it first.
7370 Returns the built expression value and appends
7371 statements possibly defining it to SEQ. */
7372
7373tree
7374gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7375{
7376 if (ptrofftype_p (TREE_TYPE (op)))
7377 return op;
7378 return gimple_convert (seq, loc, sizetype, op);
7379}
7380
/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, appending any new statements
   to SEQ.  */

tree
gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
			      tree op)
{
  /* With a variable element count and a non-constant element, the
     duplication must be expressed as VEC_DUPLICATE_EXPR.  */
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);

  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  /* Otherwise materialize the duplicate with an explicit assignment
     to a new register (an SSA name when in SSA form).  */
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (seq, stmt);
  return res;
}
7405
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  /* Only the encoded elements need checking; the remaining elements
     repeat them.  */
  unsigned int encoded_nelts = builder->encoded_nelts ();
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!TREE_CONSTANT ((*builder)[i]))
      {
	/* At least one element is non-constant, so build an explicit
	   CONSTRUCTOR of all elements and assign it to a register.  */
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  /* All encoded elements are constant; a VECTOR_CST suffices.  */
  return builder->build ();
}
7443
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  /* Dispatch on the shape of the RHS to the matching fold-const
     query.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     gimple_expr_type (stmt),
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      gimple_expr_type (stmt),
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      /* No ternary query exists; conservatively answer "unknown".  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
7477
7478/* Return true if return value of call STMT is known to be non-negative.
7479 If the return value is based on the assumption that signed overflow is
7480 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7481 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7482
7483static bool
7484gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7485 int depth)
7486{
7487 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7488 gimple_call_arg (stmt, 0) : NULL_TREE;
7489 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7490 gimple_call_arg (stmt, 1) : NULL_TREE;
7491
7492 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
1d9da71f 7493 gimple_call_combined_fn (stmt),
68e57f04
RS
7494 arg0,
7495 arg1,
7496 strict_overflow_p, depth);
7497}
7498
4534c203
RB
7499/* Return true if return value of call STMT is known to be non-negative.
7500 If the return value is based on the assumption that signed overflow is
7501 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7502 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7503
7504static bool
7505gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7506 int depth)
7507{
7508 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7509 {
7510 tree arg = gimple_phi_arg_def (stmt, i);
7511 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7512 return false;
7513 }
7514 return true;
7515}
7516
68e57f04
RS
7517/* Return true if STMT is known to compute a non-negative value.
7518 If the return value is based on the assumption that signed overflow is
7519 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7520 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7521
7522bool
7523gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7524 int depth)
7525{
7526 switch (gimple_code (stmt))
7527 {
7528 case GIMPLE_ASSIGN:
7529 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7530 depth);
7531 case GIMPLE_CALL:
7532 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7533 depth);
4534c203
RB
7534 case GIMPLE_PHI:
7535 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7536 depth);
68e57f04
RS
7537 default:
7538 return false;
7539 }
7540}
67dbe582
RS
7541
7542/* Return true if the floating-point value computed by assignment STMT
7543 is known to have an integer value. We also allow +Inf, -Inf and NaN
5a00b0aa 7544 to be considered integer values. Return false for signaling NaN.
67dbe582
RS
7545
7546 DEPTH is the current nesting depth of the query. */
7547
7548static bool
7549gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7550{
7551 enum tree_code code = gimple_assign_rhs_code (stmt);
7552 switch (get_gimple_rhs_class (code))
7553 {
7554 case GIMPLE_UNARY_RHS:
7555 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7556 gimple_assign_rhs1 (stmt), depth);
7557 case GIMPLE_BINARY_RHS:
7558 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7559 gimple_assign_rhs1 (stmt),
7560 gimple_assign_rhs2 (stmt), depth);
7561 case GIMPLE_TERNARY_RHS:
7562 return false;
7563 case GIMPLE_SINGLE_RHS:
7564 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7565 case GIMPLE_INVALID_RHS:
7566 break;
7567 }
7568 gcc_unreachable ();
7569}
7570
7571/* Return true if the floating-point value computed by call STMT is known
7572 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7573 considered integer values. Return false for signaling NaN.
67dbe582
RS
7574
7575 DEPTH is the current nesting depth of the query. */
7576
7577static bool
7578gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7579{
7580 tree arg0 = (gimple_call_num_args (stmt) > 0
7581 ? gimple_call_arg (stmt, 0)
7582 : NULL_TREE);
7583 tree arg1 = (gimple_call_num_args (stmt) > 1
7584 ? gimple_call_arg (stmt, 1)
7585 : NULL_TREE);
1d9da71f 7586 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
7587 arg0, arg1, depth);
7588}
7589
7590/* Return true if the floating-point result of phi STMT is known to have
7591 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 7592 integer values. Return false for signaling NaN.
67dbe582
RS
7593
7594 DEPTH is the current nesting depth of the query. */
7595
7596static bool
7597gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7598{
7599 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7600 {
7601 tree arg = gimple_phi_arg_def (stmt, i);
7602 if (!integer_valued_real_single_p (arg, depth + 1))
7603 return false;
7604 }
7605 return true;
7606}
7607
7608/* Return true if the floating-point value computed by STMT is known
7609 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7610 considered integer values. Return false for signaling NaN.
67dbe582
RS
7611
7612 DEPTH is the current nesting depth of the query. */
7613
7614bool
7615gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7616{
7617 switch (gimple_code (stmt))
7618 {
7619 case GIMPLE_ASSIGN:
7620 return gimple_assign_integer_valued_real_p (stmt, depth);
7621 case GIMPLE_CALL:
7622 return gimple_call_integer_valued_real_p (stmt, depth);
7623 case GIMPLE_PHI:
7624 return gimple_phi_integer_valued_real_p (stmt, depth);
7625 default:
7626 return false;
7627 }
7628}