]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
rs6000.c (rs6000_gimple_fold_builtin): Add handling for early GIMPLE expansion of...
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
cbe34bb5 2 Copyright (C) 2010-2017 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5
AM
25#include "target.h"
26#include "rtl.h"
cbdd87d4 27#include "tree.h"
c7131fb2 28#include "gimple.h"
957060b5 29#include "predict.h"
c7131fb2 30#include "ssa.h"
957060b5
AM
31#include "cgraph.h"
32#include "gimple-pretty-print.h"
c7131fb2 33#include "fold-const.h"
36566b39
PK
34#include "stmt.h"
35#include "expr.h"
36#include "stor-layout.h"
7ee2468b 37#include "dumpfile.h"
2fb9a547 38#include "gimple-fold.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
442b4905
AM
41#include "tree-into-ssa.h"
42#include "tree-dfa.h"
7a300452 43#include "tree-ssa.h"
cbdd87d4 44#include "tree-ssa-propagate.h"
450ad0cd 45#include "ipa-utils.h"
4484a35a 46#include "tree-ssa-address.h"
862d0b35 47#include "langhooks.h"
19e51b40 48#include "gimplify-me.h"
2b5f0895 49#include "dbgcnt.h"
9b2b7279 50#include "builtins.h"
e0ee10ed
RB
51#include "tree-eh.h"
52#include "gimple-match.h"
48126138 53#include "gomp-constants.h"
f869c12f 54#include "optabs-query.h"
629b3d75 55#include "omp-general.h"
3de2a40e 56#include "ipa-chkp.h"
abd3a68c 57#include "tree-cfg.h"
a918bfbf 58#include "fold-const-call.h"
cbdd87d4 59
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (e.g. DWARF-only) instances never have an output body.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function fully inlined everywhere has no independent body left.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
159
a15ebbcd
ML
160/* Create a temporary for TYPE for a statement STMT. If the current function
161 is in SSA form, a SSA name is created. Otherwise a temporary register
162 is made. */
163
164static tree
165create_tmp_reg_or_ssa_name (tree type, gimple *stmt = NULL)
166{
167 if (gimple_in_ssa_p (cfun))
168 return make_ssa_name (type, stmt);
169 else
170 return create_tmp_reg (type);
171}
172
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE when the value cannot be
   referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Turn PTR p+ CST into the equivalent &MEM[ptr, CST] so the result is a
     single ADDR_EXPR that is_gimple_min_invariant accepts.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal with its underlying decl, when
	     one exists, so the address refers to a real object.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Refuse addresses of symbols the current unit is not allowed to
	 reference (see can_refer_decl_in_current_unit_p).  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* Taking the address makes the variable addressable.  */
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      /* Preserve the type the caller handed us.  */
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Constants with TREE_OVERFLOW are not min-invariant; drop the flag.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
cbdd87d4
RG
234
235/* If SYM is a constant variable with known value, return the value.
236 NULL_TREE is returned otherwise. */
237
238tree
239get_symbol_constant_value (tree sym)
240{
6a6dac52
JH
241 tree val = ctor_for_folding (sym);
242 if (val != error_mark_node)
cbdd87d4 243 {
cbdd87d4
RG
244 if (val)
245 {
9d60be38 246 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 247 if (val && is_gimple_min_invariant (val))
17f39a39 248 return val;
1389294c
JH
249 else
250 return NULL_TREE;
cbdd87d4
RG
251 }
252 /* Variables declared 'const' without an initializer
253 have zero as the initializer if they may not be
254 overridden at link or run time. */
255 if (!val
b8a8c472 256 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 257 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
258 }
259
260 return NULL_TREE;
261}
262
263
cbdd87d4
RG
264
265/* Subroutine of fold_stmt. We perform several simplifications of the
266 memory reference tree EXPR and make sure to re-gimplify them properly
267 after propagation of constant addresses. IS_LHS is true if the
268 reference is supposed to be an lvalue. */
269
270static tree
271maybe_fold_reference (tree expr, bool is_lhs)
272{
17f39a39 273 tree result;
cbdd87d4 274
f0eddb90
RG
275 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
276 || TREE_CODE (expr) == REALPART_EXPR
277 || TREE_CODE (expr) == IMAGPART_EXPR)
278 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
279 return fold_unary_loc (EXPR_LOCATION (expr),
280 TREE_CODE (expr),
281 TREE_TYPE (expr),
282 TREE_OPERAND (expr, 0));
283 else if (TREE_CODE (expr) == BIT_FIELD_REF
284 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
285 return fold_ternary_loc (EXPR_LOCATION (expr),
286 TREE_CODE (expr),
287 TREE_TYPE (expr),
288 TREE_OPERAND (expr, 0),
289 TREE_OPERAND (expr, 1),
290 TREE_OPERAND (expr, 2));
291
f0eddb90
RG
292 if (!is_lhs
293 && (result = fold_const_aggregate_ref (expr))
294 && is_gimple_min_invariant (result))
295 return result;
cbdd87d4 296
cbdd87d4
RG
297 return NULL_TREE;
298}
299
300
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers are kept as-is; there is nothing to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		/* Only devirtualize when the target list is complete and at
		   most one target remains (and the debug counter allows).  */
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p, 0] simplifies to p (converted to the rhs type).  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    /* Unary and binary rhs folding is handled elsewhere; nothing to do
       here.  */
    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
440
fef5a0d9
RB
441
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF when it is a store or a call with
	 memory side effects (not NOVOPS/pure/const/noreturn).  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence reuses the original statement's
	     VDEF; earlier stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
513
cbdd87d4
RG
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* No result is wanted; gimplify EXPR for its side effects only.  */
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a value and assign it to the original lhs as
	 the last statement of the sequence.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
cbdd87d4 569
fef5a0d9
RB
570
571/* Replace the call at *GSI with the gimple value VAL. */
572
573static void
574replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
575{
355fe088 576 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 577 tree lhs = gimple_call_lhs (stmt);
355fe088 578 gimple *repl;
fef5a0d9 579 if (lhs)
e256dfce 580 {
fef5a0d9
RB
581 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
582 val = fold_convert (TREE_TYPE (lhs), val);
583 repl = gimple_build_assign (lhs, val);
584 }
585 else
586 repl = gimple_build_nop ();
587 tree vdef = gimple_vdef (stmt);
588 if (vdef && TREE_CODE (vdef) == SSA_NAME)
589 {
590 unlink_stmt_vdef (stmt);
591 release_ssa_name (vdef);
592 }
f6b4dc28 593 gsi_replace (gsi, repl, false);
fef5a0d9
RB
594}
595
596/* Replace the call at *GSI with the new call REPL and fold that
597 again. */
598
599static void
355fe088 600replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
fef5a0d9 601{
355fe088 602 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
603 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
604 gimple_set_location (repl, gimple_location (stmt));
605 if (gimple_vdef (stmt)
606 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
607 {
608 gimple_set_vdef (repl, gimple_vdef (stmt));
609 gimple_set_vuse (repl, gimple_vuse (stmt));
610 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
611 }
f6b4dc28 612 gsi_replace (gsi, repl, false);
fef5a0d9
RB
613 fold_stmt (gsi);
614}
615
616/* Return true if VAR is a VAR_DECL or a component thereof. */
617
618static bool
619var_decl_component_p (tree var)
620{
621 tree inner = var;
622 while (handled_component_p (inner))
623 inner = TREE_OPERAND (inner, 0);
624 return SSA_VAR_P (inner);
625}
626
627/* Fold function call to builtin mem{{,p}cpy,move}. Return
86c5a5c3 628 false if no simplification can be made.
fef5a0d9
RB
629 If ENDP is 0, return DEST (like memcpy).
630 If ENDP is 1, return DEST+LEN (like mempcpy).
631 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
632 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
633 (memmove). */
634
635static bool
636gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
637 tree dest, tree src, int endp)
638{
355fe088 639 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
640 tree lhs = gimple_call_lhs (stmt);
641 tree len = gimple_call_arg (stmt, 2);
642 tree destvar, srcvar;
643 location_t loc = gimple_location (stmt);
644
645 /* If the LEN parameter is zero, return DEST. */
646 if (integer_zerop (len))
647 {
355fe088 648 gimple *repl;
fef5a0d9
RB
649 if (gimple_call_lhs (stmt))
650 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
651 else
652 repl = gimple_build_nop ();
653 tree vdef = gimple_vdef (stmt);
654 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 655 {
fef5a0d9
RB
656 unlink_stmt_vdef (stmt);
657 release_ssa_name (vdef);
658 }
f6b4dc28 659 gsi_replace (gsi, repl, false);
fef5a0d9
RB
660 return true;
661 }
662
663 /* If SRC and DEST are the same (and not volatile), return
664 DEST{,+LEN,+LEN-1}. */
665 if (operand_equal_p (src, dest, 0))
666 {
667 unlink_stmt_vdef (stmt);
668 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
669 release_ssa_name (gimple_vdef (stmt));
670 if (!lhs)
671 {
f6b4dc28 672 gsi_replace (gsi, gimple_build_nop (), false);
fef5a0d9
RB
673 return true;
674 }
675 goto done;
676 }
677 else
678 {
679 tree srctype, desttype;
680 unsigned int src_align, dest_align;
681 tree off0;
682
3de2a40e
IE
683 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
684 pointers as wide integer) and also may result in huge function
685 size because of inlined bounds copy. Thus don't inline for
686 functions we want to instrument. */
687 if (flag_check_pointer_bounds
688 && chkp_instrumentable_p (cfun->decl)
689 /* Even if data may contain pointers we can inline if copy
690 less than a pointer size. */
691 && (!tree_fits_uhwi_p (len)
692 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
693 return false;
694
fef5a0d9
RB
695 /* Build accesses at offset zero with a ref-all character type. */
696 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
697 ptr_mode, true), 0);
698
699 /* If we can perform the copy efficiently with first doing all loads
700 and then all stores inline it that way. Currently efficiently
701 means that we can load all the memory into a single integer
702 register which is what MOVE_MAX gives us. */
703 src_align = get_pointer_alignment (src);
704 dest_align = get_pointer_alignment (dest);
705 if (tree_fits_uhwi_p (len)
706 && compare_tree_int (len, MOVE_MAX) <= 0
707 /* ??? Don't transform copies from strings with known length this
708 confuses the tree-ssa-strlen.c. This doesn't handle
709 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
710 reason. */
711 && !c_strlen (src, 2))
712 {
713 unsigned ilen = tree_to_uhwi (len);
146ec50f 714 if (pow2p_hwi (ilen))
fef5a0d9
RB
715 {
716 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
717 if (type
718 && TYPE_MODE (type) != BLKmode
719 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
720 == ilen * 8)
721 /* If the destination pointer is not aligned we must be able
722 to emit an unaligned store. */
723 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
f869c12f
RR
724 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
725 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
726 != CODE_FOR_nothing)))
fef5a0d9
RB
727 {
728 tree srctype = type;
729 tree desttype = type;
730 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
731 srctype = build_aligned_type (type, src_align);
732 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
733 tree tem = fold_const_aggregate_ref (srcmem);
734 if (tem)
735 srcmem = tem;
736 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
737 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
f869c12f
RR
738 src_align)
739 && (optab_handler (movmisalign_optab,
740 TYPE_MODE (type))
741 == CODE_FOR_nothing))
fef5a0d9
RB
742 srcmem = NULL_TREE;
743 if (srcmem)
744 {
355fe088 745 gimple *new_stmt;
fef5a0d9
RB
746 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
747 {
748 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
a15ebbcd
ML
749 srcmem
750 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
751 new_stmt);
fef5a0d9
RB
752 gimple_assign_set_lhs (new_stmt, srcmem);
753 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
754 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
755 }
756 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
757 desttype = build_aligned_type (type, dest_align);
758 new_stmt
759 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
760 dest, off0),
761 srcmem);
762 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
763 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
764 if (gimple_vdef (new_stmt)
765 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
766 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
767 if (!lhs)
768 {
f6b4dc28 769 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
770 return true;
771 }
772 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
773 goto done;
774 }
775 }
776 }
777 }
778
779 if (endp == 3)
780 {
781 /* Both DEST and SRC must be pointer types.
782 ??? This is what old code did. Is the testing for pointer types
783 really mandatory?
784
785 If either SRC is readonly or length is 1, we can use memcpy. */
786 if (!dest_align || !src_align)
787 return false;
788 if (readonly_data_expr (src)
789 || (tree_fits_uhwi_p (len)
790 && (MIN (src_align, dest_align) / BITS_PER_UNIT
791 >= tree_to_uhwi (len))))
792 {
793 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
794 if (!fn)
795 return false;
796 gimple_call_set_fndecl (stmt, fn);
797 gimple_call_set_arg (stmt, 0, dest);
798 gimple_call_set_arg (stmt, 1, src);
799 fold_stmt (gsi);
800 return true;
801 }
802
803 /* If *src and *dest can't overlap, optimize into memcpy as well. */
804 if (TREE_CODE (src) == ADDR_EXPR
805 && TREE_CODE (dest) == ADDR_EXPR)
806 {
807 tree src_base, dest_base, fn;
808 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
4fda19ef 809 HOST_WIDE_INT maxsize;
fef5a0d9
RB
810
811 srcvar = TREE_OPERAND (src, 0);
4fda19ef
JJ
812 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
813 if (src_base == NULL)
814 src_base = srcvar;
fef5a0d9 815 destvar = TREE_OPERAND (dest, 0);
4fda19ef
JJ
816 dest_base = get_addr_base_and_unit_offset (destvar,
817 &dest_offset);
818 if (dest_base == NULL)
819 dest_base = destvar;
fef5a0d9
RB
820 if (tree_fits_uhwi_p (len))
821 maxsize = tree_to_uhwi (len);
822 else
823 maxsize = -1;
fef5a0d9
RB
824 if (SSA_VAR_P (src_base)
825 && SSA_VAR_P (dest_base))
826 {
827 if (operand_equal_p (src_base, dest_base, 0)
828 && ranges_overlap_p (src_offset, maxsize,
829 dest_offset, maxsize))
830 return false;
831 }
832 else if (TREE_CODE (src_base) == MEM_REF
833 && TREE_CODE (dest_base) == MEM_REF)
834 {
835 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
836 TREE_OPERAND (dest_base, 0), 0))
837 return false;
838 offset_int off = mem_ref_offset (src_base) + src_offset;
839 if (!wi::fits_shwi_p (off))
840 return false;
841 src_offset = off.to_shwi ();
842
843 off = mem_ref_offset (dest_base) + dest_offset;
844 if (!wi::fits_shwi_p (off))
845 return false;
846 dest_offset = off.to_shwi ();
847 if (ranges_overlap_p (src_offset, maxsize,
848 dest_offset, maxsize))
849 return false;
850 }
851 else
852 return false;
853
854 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
855 if (!fn)
856 return false;
857 gimple_call_set_fndecl (stmt, fn);
858 gimple_call_set_arg (stmt, 0, dest);
859 gimple_call_set_arg (stmt, 1, src);
860 fold_stmt (gsi);
861 return true;
862 }
863
864 /* If the destination and source do not alias optimize into
865 memcpy as well. */
866 if ((is_gimple_min_invariant (dest)
867 || TREE_CODE (dest) == SSA_NAME)
868 && (is_gimple_min_invariant (src)
869 || TREE_CODE (src) == SSA_NAME))
870 {
871 ao_ref destr, srcr;
872 ao_ref_init_from_ptr_and_size (&destr, dest, len);
873 ao_ref_init_from_ptr_and_size (&srcr, src, len);
874 if (!refs_may_alias_p_1 (&destr, &srcr, false))
875 {
876 tree fn;
877 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
878 if (!fn)
879 return false;
880 gimple_call_set_fndecl (stmt, fn);
881 gimple_call_set_arg (stmt, 0, dest);
882 gimple_call_set_arg (stmt, 1, src);
883 fold_stmt (gsi);
884 return true;
885 }
886 }
887
888 return false;
889 }
890
891 if (!tree_fits_shwi_p (len))
892 return false;
893 /* FIXME:
894 This logic lose for arguments like (type *)malloc (sizeof (type)),
895 since we strip the casts of up to VOID return value from malloc.
896 Perhaps we ought to inherit type from non-VOID argument here? */
897 STRIP_NOPS (src);
898 STRIP_NOPS (dest);
899 if (!POINTER_TYPE_P (TREE_TYPE (src))
900 || !POINTER_TYPE_P (TREE_TYPE (dest)))
901 return false;
902 /* In the following try to find a type that is most natural to be
903 used for the memcpy source and destination and that allows
904 the most optimization when memcpy is turned into a plain assignment
905 using that type. In theory we could always use a char[len] type
906 but that only gains us that the destination and source possibly
907 no longer will have their address taken. */
908 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
909 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
910 {
911 tree tem = TREE_OPERAND (src, 0);
912 STRIP_NOPS (tem);
913 if (tem != TREE_OPERAND (src, 0))
914 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
915 }
916 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
917 {
918 tree tem = TREE_OPERAND (dest, 0);
919 STRIP_NOPS (tem);
920 if (tem != TREE_OPERAND (dest, 0))
921 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
922 }
923 srctype = TREE_TYPE (TREE_TYPE (src));
924 if (TREE_CODE (srctype) == ARRAY_TYPE
925 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
926 {
927 srctype = TREE_TYPE (srctype);
928 STRIP_NOPS (src);
929 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
930 }
931 desttype = TREE_TYPE (TREE_TYPE (dest));
932 if (TREE_CODE (desttype) == ARRAY_TYPE
933 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
934 {
935 desttype = TREE_TYPE (desttype);
936 STRIP_NOPS (dest);
937 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
938 }
939 if (TREE_ADDRESSABLE (srctype)
940 || TREE_ADDRESSABLE (desttype))
941 return false;
942
943 /* Make sure we are not copying using a floating-point mode or
944 a type whose size possibly does not match its precision. */
945 if (FLOAT_MODE_P (TYPE_MODE (desttype))
946 || TREE_CODE (desttype) == BOOLEAN_TYPE
947 || TREE_CODE (desttype) == ENUMERAL_TYPE)
948 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
949 if (FLOAT_MODE_P (TYPE_MODE (srctype))
950 || TREE_CODE (srctype) == BOOLEAN_TYPE
951 || TREE_CODE (srctype) == ENUMERAL_TYPE)
952 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
953 if (!srctype)
954 srctype = desttype;
955 if (!desttype)
956 desttype = srctype;
957 if (!srctype)
958 return false;
959
960 src_align = get_pointer_alignment (src);
961 dest_align = get_pointer_alignment (dest);
962 if (dest_align < TYPE_ALIGN (desttype)
963 || src_align < TYPE_ALIGN (srctype))
964 return false;
965
966 destvar = dest;
967 STRIP_NOPS (destvar);
968 if (TREE_CODE (destvar) == ADDR_EXPR
969 && var_decl_component_p (TREE_OPERAND (destvar, 0))
970 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
971 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
972 else
973 destvar = NULL_TREE;
974
975 srcvar = src;
976 STRIP_NOPS (srcvar);
977 if (TREE_CODE (srcvar) == ADDR_EXPR
978 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
979 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
980 {
981 if (!destvar
982 || src_align >= TYPE_ALIGN (desttype))
983 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
984 srcvar, off0);
985 else if (!STRICT_ALIGNMENT)
986 {
987 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
988 src_align);
989 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
990 }
e256dfce 991 else
fef5a0d9
RB
992 srcvar = NULL_TREE;
993 }
994 else
995 srcvar = NULL_TREE;
996
997 if (srcvar == NULL_TREE && destvar == NULL_TREE)
998 return false;
999
1000 if (srcvar == NULL_TREE)
1001 {
1002 STRIP_NOPS (src);
1003 if (src_align >= TYPE_ALIGN (desttype))
1004 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1005 else
1006 {
1007 if (STRICT_ALIGNMENT)
1008 return false;
1009 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1010 src_align);
1011 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1012 }
1013 }
1014 else if (destvar == NULL_TREE)
1015 {
1016 STRIP_NOPS (dest);
1017 if (dest_align >= TYPE_ALIGN (srctype))
1018 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1019 else
1020 {
1021 if (STRICT_ALIGNMENT)
1022 return false;
1023 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1024 dest_align);
1025 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1026 }
1027 }
1028
355fe088 1029 gimple *new_stmt;
fef5a0d9
RB
1030 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1031 {
921b13d0
RB
1032 tree tem = fold_const_aggregate_ref (srcvar);
1033 if (tem)
1034 srcvar = tem;
1035 if (! is_gimple_min_invariant (srcvar))
1036 {
1037 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
a15ebbcd
ML
1038 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1039 new_stmt);
921b13d0
RB
1040 gimple_assign_set_lhs (new_stmt, srcvar);
1041 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1042 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1043 }
fef5a0d9
RB
1044 }
1045 new_stmt = gimple_build_assign (destvar, srcvar);
1046 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1047 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1048 if (gimple_vdef (new_stmt)
1049 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1050 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1051 if (!lhs)
1052 {
f6b4dc28 1053 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
1054 return true;
1055 }
1056 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1057 }
1058
1059done:
74e3c262 1060 gimple_seq stmts = NULL;
fef5a0d9
RB
1061 if (endp == 0 || endp == 3)
1062 len = NULL_TREE;
1063 else if (endp == 2)
74e3c262
RB
1064 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1065 ssize_int (1));
fef5a0d9 1066 if (endp == 2 || endp == 1)
74e3c262
RB
1067 {
1068 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1069 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1070 TREE_TYPE (dest), dest, len);
1071 }
fef5a0d9 1072
74e3c262 1073 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 1074 gimple *repl = gimple_build_assign (lhs, dest);
f6b4dc28 1075 gsi_replace (gsi, repl, false);
fef5a0d9
RB
1076 return true;
1077}
1078
1079/* Fold function call to builtin memset or bzero at *GSI setting the
1080 memory of size LEN to VAL. Return whether a simplification was made. */
1081
1082static bool
1083gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1084{
355fe088 1085 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1086 tree etype;
1087 unsigned HOST_WIDE_INT length, cval;
1088
1089 /* If the LEN parameter is zero, return DEST. */
1090 if (integer_zerop (len))
1091 {
1092 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1093 return true;
1094 }
1095
1096 if (! tree_fits_uhwi_p (len))
1097 return false;
1098
1099 if (TREE_CODE (c) != INTEGER_CST)
1100 return false;
1101
1102 tree dest = gimple_call_arg (stmt, 0);
1103 tree var = dest;
1104 if (TREE_CODE (var) != ADDR_EXPR)
1105 return false;
1106
1107 var = TREE_OPERAND (var, 0);
1108 if (TREE_THIS_VOLATILE (var))
1109 return false;
1110
1111 etype = TREE_TYPE (var);
1112 if (TREE_CODE (etype) == ARRAY_TYPE)
1113 etype = TREE_TYPE (etype);
1114
1115 if (!INTEGRAL_TYPE_P (etype)
1116 && !POINTER_TYPE_P (etype))
1117 return NULL_TREE;
1118
1119 if (! var_decl_component_p (var))
1120 return NULL_TREE;
1121
1122 length = tree_to_uhwi (len);
1123 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1124 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1125 return NULL_TREE;
1126
1127 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1128 return NULL_TREE;
1129
1130 if (integer_zerop (c))
1131 cval = 0;
1132 else
1133 {
1134 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1135 return NULL_TREE;
1136
1137 cval = TREE_INT_CST_LOW (c);
1138 cval &= 0xff;
1139 cval |= cval << 8;
1140 cval |= cval << 16;
1141 cval |= (cval << 31) << 1;
1142 }
1143
1144 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1145 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
fef5a0d9
RB
1146 gimple_set_vuse (store, gimple_vuse (stmt));
1147 tree vdef = gimple_vdef (stmt);
1148 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1149 {
1150 gimple_set_vdef (store, gimple_vdef (stmt));
1151 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1152 }
1153 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1154 if (gimple_call_lhs (stmt))
1155 {
355fe088 1156 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1157 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1158 }
1159 else
1160 {
1161 gimple_stmt_iterator gsi2 = *gsi;
1162 gsi_prev (gsi);
1163 gsi_remove (&gsi2, true);
1164 }
1165
1166 return true;
1167}
1168
1169
88d0c3f0
MS
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return False.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is set and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  bool fuzzy, bool *flexp)
{
  tree var, val;
  gimple *def_stmt;

  /* The minimum and maximum length.  The MAXLEN pointer stays unchanged
     but MINLEN may be cleared during the execution of the function.  */
  tree *minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0),
				     length, visited, type, fuzzy, flexp);
	}

      if (type == 2)
	{
	  /* TYPE == 2: ARG itself is the value; require a non-negative
	     integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);

      if (!val && fuzzy)
	{
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy, flexp);

	  if (TREE_CODE (arg) == COMPONENT_REF
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL.
		 Set *FLEXP to true if the array whose bound is being
		 used is at the end of a struct.  */
	      if (array_at_struct_end_p (arg))
		*flexp = true;

	      /* Upper bound is the array size minus one for the NUL.  */
	      arg = TREE_OPERAND (arg, 1);
	      val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
	      if (!val || integer_zerop (val))
		return false;
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }
	}

      if (!val)
	return false;

      /* Keep the smallest candidate as the minimum length.  */
      if (minlen
	  && (!*minlen
	      || (type > 0
		  && TREE_CODE (*minlen) == INTEGER_CST
		  && TREE_CODE (val) == INTEGER_CST
		  && tree_int_cst_lt (val, *minlen))))
	*minlen = val;

      if (*maxlen)
	{
	  if (type > 0)
	    {
	      /* TYPE > 0: track the largest value seen so far; both
		 bounds must be integer constants to compare.  */
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  /* TYPE == 0: all paths must agree on one exact length.  */
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* A COND_EXPR contributes both arms to the range.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
	    && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
	      {
		/* In fuzzy mode an undetermined arm degrades the upper
		   bound to "unbounded" instead of failing outright.  */
		if (fuzzy)
		  *maxlen = build_all_ones_cst (size_type_node);
		else
		  return false;
	      }
	  }
      }
      return true;

    default:
      return false;
    }
}
1353
88d0c3f0
MS
1354/* Determine the minimum and maximum value or string length that ARG
1355 refers to and store each in the first two elements of MINMAXLEN.
1356 For expressions that point to strings of unknown lengths that are
1357 character arrays, use the upper bound of the array as the maximum
1358 length. For example, given an expression like 'x ? array : "xyz"'
1359 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1360 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1361 stored in array.
3f343040
MS
1362 Return true if the range of the string lengths has been obtained
1363 from the upper bound of an array at the end of a struct. Such
1364 an array may hold a string that's longer than its upper bound
1365 due to it being used as a poor-man's flexible array member. */
88d0c3f0 1366
3f343040
MS
1367bool
1368get_range_strlen (tree arg, tree minmaxlen[2])
88d0c3f0
MS
1369{
1370 bitmap visited = NULL;
1371
1372 minmaxlen[0] = NULL_TREE;
1373 minmaxlen[1] = NULL_TREE;
1374
3f343040
MS
1375 bool flexarray = false;
1376 get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);
88d0c3f0
MS
1377
1378 if (visited)
1379 BITMAP_FREE (visited);
3f343040
MS
1380
1381 return flexarray;
88d0c3f0
MS
1382}
1383
dcb7fae2
RB
1384tree
1385get_maxval_strlen (tree arg, int type)
1386{
1387 bitmap visited = NULL;
88d0c3f0 1388 tree len[2] = { NULL_TREE, NULL_TREE };
3f343040
MS
1389
1390 bool dummy;
1391 if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
88d0c3f0 1392 len[1] = NULL_TREE;
dcb7fae2
RB
1393 if (visited)
1394 BITMAP_FREE (visited);
1395
88d0c3f0 1396 return len[1];
dcb7fae2
RB
1397}
1398
fef5a0d9
RB
1399
1400/* Fold function call to builtin strcpy with arguments DEST and SRC.
1401 If LEN is not NULL, it represents the length of the string to be
1402 copied. Return NULL_TREE if no simplification can be made. */
1403
1404static bool
1405gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 1406 tree dest, tree src)
fef5a0d9 1407{
dcb7fae2 1408 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1409 tree fn;
1410
1411 /* If SRC and DEST are the same (and not volatile), return DEST. */
1412 if (operand_equal_p (src, dest, 0))
1413 {
1414 replace_call_with_value (gsi, dest);
1415 return true;
1416 }
1417
1418 if (optimize_function_for_size_p (cfun))
1419 return false;
1420
1421 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1422 if (!fn)
1423 return false;
1424
1579e1f8 1425 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1426 if (!len)
dcb7fae2 1427 return false;
fef5a0d9
RB
1428
1429 len = fold_convert_loc (loc, size_type_node, len);
1430 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1431 len = force_gimple_operand_gsi (gsi, len, true,
1432 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1433 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1434 replace_call_with_call_and_fold (gsi, repl);
1435 return true;
1436}
1437
1438/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1439 If SLEN is not NULL, it represents the length of the source string.
1440 Return NULL_TREE if no simplification can be made. */
1441
1442static bool
dcb7fae2
RB
1443gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1444 tree dest, tree src, tree len)
fef5a0d9 1445{
dcb7fae2 1446 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1447 tree fn;
1448
1449 /* If the LEN parameter is zero, return DEST. */
1450 if (integer_zerop (len))
1451 {
1452 replace_call_with_value (gsi, dest);
1453 return true;
1454 }
1455
1456 /* We can't compare slen with len as constants below if len is not a
1457 constant. */
dcb7fae2 1458 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
1459 return false;
1460
fef5a0d9 1461 /* Now, we must be passed a constant src ptr parameter. */
1579e1f8 1462 tree slen = get_maxval_strlen (src, 0);
dcb7fae2 1463 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
1464 return false;
1465
1466 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1467
1468 /* We do not support simplification of this case, though we do
1469 support it when expanding trees into RTL. */
1470 /* FIXME: generate a call to __builtin_memset. */
1471 if (tree_int_cst_lt (slen, len))
1472 return false;
1473
1474 /* OK transform into builtin memcpy. */
1475 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1476 if (!fn)
1477 return false;
1478
1479 len = fold_convert_loc (loc, size_type_node, len);
1480 len = force_gimple_operand_gsi (gsi, len, true,
1481 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1482 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1483 replace_call_with_call_and_fold (gsi, repl);
1484 return true;
1485}
1486
71dea1dd
WD
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* Without a used result there is nothing to simplify into.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Both arguments constant: compute the answer at compile time.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      /* Character not found: result is NULL.  */
      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Found: result is STR plus the constant offset.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The strlen-based transforms below only apply when searching
     for the terminating NUL.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ??? We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1570
c8952930
JJ
1571/* Fold function call to builtin strstr.
1572 If both arguments are constant, evaluate and fold the result,
1573 additionally fold strstr (x, "") into x and strstr (x, "c")
1574 into strchr (x, 'c'). */
1575static bool
1576gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1577{
1578 gimple *stmt = gsi_stmt (*gsi);
1579 tree haystack = gimple_call_arg (stmt, 0);
1580 tree needle = gimple_call_arg (stmt, 1);
1581 const char *p, *q;
1582
1583 if (!gimple_call_lhs (stmt))
1584 return false;
1585
1586 q = c_getstr (needle);
1587 if (q == NULL)
1588 return false;
1589
1590 if ((p = c_getstr (haystack)))
1591 {
1592 const char *r = strstr (p, q);
1593
1594 if (r == NULL)
1595 {
1596 replace_call_with_value (gsi, integer_zero_node);
1597 return true;
1598 }
1599
1600 tree len = build_int_cst (size_type_node, r - p);
1601 gimple_seq stmts = NULL;
1602 gimple *new_stmt
1603 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1604 haystack, len);
1605 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1606 gsi_replace_with_seq_vops (gsi, stmts);
1607 return true;
1608 }
1609
1610 /* For strstr (x, "") return x. */
1611 if (q[0] == '\0')
1612 {
1613 replace_call_with_value (gsi, haystack);
1614 return true;
1615 }
1616
1617 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1618 if (q[1] == '\0')
1619 {
1620 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1621 if (strchr_fn)
1622 {
1623 tree c = build_int_cst (integer_type_node, q[0]);
1624 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1625 replace_call_with_call_and_fold (gsi, repl);
1626 return true;
1627 }
1628 }
1629
1630 return false;
1631}
1632
fef5a0d9
RB
/* Simplify a call to the strcat builtin at *GSI.  DST and SRC are the
   arguments to the call.

   strcat (d, "") folds to d.  Otherwise, when the source length is a
   known constant and the block is optimized for speed, the call is
   expanded into strlen (dst) followed by a memcpy of length + 1 bytes
   at dst + strlen (dst).

   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen + memcpy expansion trades size for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (src) + 1 bytes, including the terminating NUL.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* The result of strcat is DST.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1724
07f1cf56
RB
1725/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1726 are the arguments to the call. */
1727
1728static bool
1729gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1730{
355fe088 1731 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
1732 tree dest = gimple_call_arg (stmt, 0);
1733 tree src = gimple_call_arg (stmt, 1);
1734 tree size = gimple_call_arg (stmt, 2);
1735 tree fn;
1736 const char *p;
1737
1738
1739 p = c_getstr (src);
1740 /* If the SRC parameter is "", return DEST. */
1741 if (p && *p == '\0')
1742 {
1743 replace_call_with_value (gsi, dest);
1744 return true;
1745 }
1746
1747 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1748 return false;
1749
1750 /* If __builtin_strcat_chk is used, assume strcat is available. */
1751 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1752 if (!fn)
1753 return false;
1754
355fe088 1755 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
1756 replace_call_with_call_and_fold (gsi, repl);
1757 return true;
1758}
1759
ad03a744
RB
1760/* Simplify a call to the strncat builtin. */
1761
1762static bool
1763gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1764{
1765 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1766 tree dst = gimple_call_arg (stmt, 0);
1767 tree src = gimple_call_arg (stmt, 1);
1768 tree len = gimple_call_arg (stmt, 2);
1769
1770 const char *p = c_getstr (src);
1771
1772 /* If the requested length is zero, or the src parameter string
1773 length is zero, return the dst parameter. */
1774 if (integer_zerop (len) || (p && *p == '\0'))
1775 {
1776 replace_call_with_value (gsi, dst);
1777 return true;
1778 }
1779
1780 /* If the requested len is greater than or equal to the string
1781 length, call strcat. */
1782 if (TREE_CODE (len) == INTEGER_CST && p
1783 && compare_tree_int (len, strlen (p)) >= 0)
1784 {
1785 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1786
1787 /* If the replacement _DECL isn't initialized, don't do the
1788 transformation. */
1789 if (!fn)
1790 return false;
1791
1792 gcall *repl = gimple_build_call (fn, 2, dst, src);
1793 replace_call_with_call_and_fold (gsi, repl);
1794 return true;
1795 }
1796
1797 return false;
1798}
1799
745583f9
RB
1800/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1801 LEN, and SIZE. */
1802
1803static bool
1804gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1805{
355fe088 1806 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
1807 tree dest = gimple_call_arg (stmt, 0);
1808 tree src = gimple_call_arg (stmt, 1);
1809 tree len = gimple_call_arg (stmt, 2);
1810 tree size = gimple_call_arg (stmt, 3);
1811 tree fn;
1812 const char *p;
1813
1814 p = c_getstr (src);
1815 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1816 if ((p && *p == '\0')
1817 || integer_zerop (len))
1818 {
1819 replace_call_with_value (gsi, dest);
1820 return true;
1821 }
1822
1823 if (! tree_fits_uhwi_p (size))
1824 return false;
1825
1826 if (! integer_all_onesp (size))
1827 {
1828 tree src_len = c_strlen (src, 1);
1829 if (src_len
1830 && tree_fits_uhwi_p (src_len)
1831 && tree_fits_uhwi_p (len)
1832 && ! tree_int_cst_lt (len, src_len))
1833 {
1834 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1835 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1836 if (!fn)
1837 return false;
1838
355fe088 1839 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
1840 replace_call_with_call_and_fold (gsi, repl);
1841 return true;
1842 }
1843 return false;
1844 }
1845
1846 /* If __builtin_strncat_chk is used, assume strncat is available. */
1847 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1848 if (!fn)
1849 return false;
1850
355fe088 1851 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
1852 replace_call_with_call_and_fold (gsi, repl);
1853 return true;
1854}
1855
a918bfbf
ML
1856/* Build and append gimple statements to STMTS that would load a first
1857 character of a memory location identified by STR. LOC is location
1858 of the statement. */
1859
1860static tree
1861gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
1862{
1863 tree var;
1864
1865 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
1866 tree cst_uchar_ptr_node
1867 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
1868 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
1869
1870 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
1871 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
1872 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
1873
1874 gimple_assign_set_lhs (stmt, var);
1875 gimple_seq_add_stmt_without_update (stmts, stmt);
1876
1877 return var;
1878}
1879
1880/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
1881 FCODE is the name of the builtin. */
1882
1883static bool
1884gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
1885{
1886 gimple *stmt = gsi_stmt (*gsi);
1887 tree callee = gimple_call_fndecl (stmt);
1888 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
1889
1890 tree type = integer_type_node;
1891 tree str1 = gimple_call_arg (stmt, 0);
1892 tree str2 = gimple_call_arg (stmt, 1);
1893 tree lhs = gimple_call_lhs (stmt);
1894 HOST_WIDE_INT length = -1;
1895
1896 /* Handle strncmp and strncasecmp functions. */
1897 if (gimple_call_num_args (stmt) == 3)
1898 {
1899 tree len = gimple_call_arg (stmt, 2);
1900 if (tree_fits_uhwi_p (len))
1901 length = tree_to_uhwi (len);
1902 }
1903
1904 /* If the LEN parameter is zero, return zero. */
1905 if (length == 0)
1906 {
1907 replace_call_with_value (gsi, integer_zero_node);
1908 return true;
1909 }
1910
1911 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
1912 if (operand_equal_p (str1, str2, 0))
1913 {
1914 replace_call_with_value (gsi, integer_zero_node);
1915 return true;
1916 }
1917
1918 const char *p1 = c_getstr (str1);
1919 const char *p2 = c_getstr (str2);
1920
1921 /* For known strings, return an immediate value. */
1922 if (p1 && p2)
1923 {
1924 int r = 0;
1925 bool known_result = false;
1926
1927 switch (fcode)
1928 {
1929 case BUILT_IN_STRCMP:
1930 {
1931 r = strcmp (p1, p2);
1932 known_result = true;
1933 break;
1934 }
1935 case BUILT_IN_STRNCMP:
1936 {
1937 if (length == -1)
1938 break;
1939 r = strncmp (p1, p2, length);
1940 known_result = true;
1941 break;
1942 }
1943 /* Only handleable situation is where the string are equal (result 0),
1944 which is already handled by operand_equal_p case. */
1945 case BUILT_IN_STRCASECMP:
1946 break;
1947 case BUILT_IN_STRNCASECMP:
1948 {
1949 if (length == -1)
1950 break;
1951 r = strncmp (p1, p2, length);
1952 if (r == 0)
1953 known_result = true;
1954 break;;
1955 }
1956 default:
1957 gcc_unreachable ();
1958 }
1959
1960 if (known_result)
1961 {
1962 replace_call_with_value (gsi, build_cmp_result (type, r));
1963 return true;
1964 }
1965 }
1966
1967 bool nonzero_length = length >= 1
1968 || fcode == BUILT_IN_STRCMP
1969 || fcode == BUILT_IN_STRCASECMP;
1970
1971 location_t loc = gimple_location (stmt);
1972
1973 /* If the second arg is "", return *(const unsigned char*)arg1. */
1974 if (p2 && *p2 == '\0' && nonzero_length)
1975 {
1976 gimple_seq stmts = NULL;
1977 tree var = gimple_load_first_char (loc, str1, &stmts);
1978 if (lhs)
1979 {
1980 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
1981 gimple_seq_add_stmt_without_update (&stmts, stmt);
1982 }
1983
1984 gsi_replace_with_seq_vops (gsi, stmts);
1985 return true;
1986 }
1987
1988 /* If the first arg is "", return -*(const unsigned char*)arg2. */
1989 if (p1 && *p1 == '\0' && nonzero_length)
1990 {
1991 gimple_seq stmts = NULL;
1992 tree var = gimple_load_first_char (loc, str2, &stmts);
1993
1994 if (lhs)
1995 {
1996 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
1997 stmt = gimple_build_assign (c, NOP_EXPR, var);
1998 gimple_seq_add_stmt_without_update (&stmts, stmt);
1999
2000 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2001 gimple_seq_add_stmt_without_update (&stmts, stmt);
2002 }
2003
2004 gsi_replace_with_seq_vops (gsi, stmts);
2005 return true;
2006 }
2007
2008 /* If len parameter is one, return an expression corresponding to
2009 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2010 if (fcode == BUILT_IN_STRNCMP && length == 1)
2011 {
2012 gimple_seq stmts = NULL;
2013 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2014 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2015
2016 if (lhs)
2017 {
2018 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2019 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2020 gimple_seq_add_stmt_without_update (&stmts, convert1);
2021
2022 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2023 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2024 gimple_seq_add_stmt_without_update (&stmts, convert2);
2025
2026 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2027 gimple_seq_add_stmt_without_update (&stmts, stmt);
2028 }
2029
2030 gsi_replace_with_seq_vops (gsi, stmts);
2031 return true;
2032 }
2033
2034 return false;
2035}
2036
488c6247
ML
/* Fold a call to the memchr pointed by GSI iterator.  Returns true if
   the call was replaced, false if no simplification was possible.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* Punt unless both the searched character and the length are
     compile-time constants.  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = c_getstr (arg1, &string_length);

  if (p1)
    {
      /* Search only the part of the string we actually know about.  */
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  /* If LENGTH exceeds the known string, bytes past the string
	     could still match, so only fold when the whole searched
	     range was covered.  */
	  if (length <= string_length)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  /* Found: result is ARG1 + offset of the match.  */
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    /* Result unused: replace the call with a no-op.  */
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
a918bfbf 2098
fef5a0d9
RB
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Returns true if the call was simplified, false if
   no simplification was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
2175
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  Returns true if the
   call was simplified, false otherwise.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  /* __mempcpy_chk returns DEST + LEN; materialize that sum.  */
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound on LEN derived from range info.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* Keep the checking variant when the copy might overflow SIZE.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2276
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  Returns true if the
   call was simplified, false otherwise.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound on the length of the SRC string.  */
  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy LEN + 1 bytes to include the terminating NUL.  */
	      gimple_seq stmts = NULL;
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* Keep the checking variant unless the buffer is provably large
	 enough (SIZE > MAXLEN, i.e. room for the NUL as well).  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2367
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin.  Returns true if the call was simplified, false otherwise.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound on LEN derived from range info.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* Keep the checking variant when the copy might overflow SIZE.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2427
2625bb5d
RB
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Returns true if the call was simplified, false if no simplification
   can be made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Emit memcpy (dest, src, len + 1) to copy the NUL as well.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* Transfer the virtual operands from the original call so the new
     memcpy keeps its place in the memory SSA chain.  */
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2491
fef5a0d9
RB
/* Fold a call to {,v}snprintf_chk pointed by GSI iterator.  FCODE is
   either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  Returns
   true if the call was rewritten into plain {,v}snprintf, false if a
   normal call should be emitted instead.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      /* MAXLEN is an upper bound on LEN derived from range info.  */
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* Keep the checking variant when the output might overflow SIZE.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
cbdd87d4 2572
fef5a0d9
RB
/* Fold a call to __{,v}sprintf_chk pointed by GSI iterator.  FCODE is
   either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  Returns true
   if the call was rewritten into plain {,v}sprintf, false if a normal
   call should be emitted instead.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* Keep the checking variant unless the output provably fits in SIZE
     (LEN < SIZE leaves room for the terminating NUL).  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2668
35770bb2
RB
/* Simplify a call to the sprintf builtin pointed by GSI iterator.
   The call has arguments DEST, FMT, and optionally ORIG; we don't
   attempt to simplify calls with more than 3 arguments.

   Returns true if the call was simplified (into strcpy plus an
   optional length assignment), false if no simplification was
   possible.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, which
	     for a %-free format is just strlen (fmt).  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* The return value (strlen of ORIG) is only needed when the
	 lhs is used.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
2793
d7e78447
RB
2794/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2795 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2796 attempt to simplify calls with more than 4 arguments.
35770bb2 2797
d7e78447
RB
2798 Return NULL_TREE if no simplification was possible, otherwise return the
2799 simplified form of the call as a tree. If IGNORED is true, it means that
2800 the caller does not use the returned value of the function. */
2801
2802static bool
dcb7fae2 2803gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
d7e78447 2804{
538dd0b7 2805 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
d7e78447
RB
2806 tree dest = gimple_call_arg (stmt, 0);
2807 tree destsize = gimple_call_arg (stmt, 1);
2808 tree fmt = gimple_call_arg (stmt, 2);
2809 tree orig = NULL_TREE;
2810 const char *fmt_str = NULL;
2811
2812 if (gimple_call_num_args (stmt) > 4)
2813 return false;
2814
2815 if (gimple_call_num_args (stmt) == 4)
2816 orig = gimple_call_arg (stmt, 3);
2817
2818 if (!tree_fits_uhwi_p (destsize))
2819 return false;
2820 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2821
2822 /* Check whether the format is a literal string constant. */
2823 fmt_str = c_getstr (fmt);
2824 if (fmt_str == NULL)
2825 return false;
2826
2827 if (!init_target_chars ())
2828 return false;
2829
2830 /* If the format doesn't contain % args or %%, use strcpy. */
2831 if (strchr (fmt_str, target_percent) == NULL)
2832 {
2833 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2834 if (!fn)
2835 return false;
2836
2837 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2838 if (orig)
2839 return false;
2840
2841 /* We could expand this as
2842 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2843 or to
2844 memcpy (str, fmt_with_nul_at_cstm1, cst);
2845 but in the former case that might increase code size
2846 and in the latter case grow .rodata section too much.
2847 So punt for now. */
2848 size_t len = strlen (fmt_str);
2849 if (len >= destlen)
2850 return false;
2851
2852 gimple_seq stmts = NULL;
355fe088 2853 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
d7e78447
RB
2854 gimple_seq_add_stmt_without_update (&stmts, repl);
2855 if (gimple_call_lhs (stmt))
2856 {
2857 repl = gimple_build_assign (gimple_call_lhs (stmt),
2858 build_int_cst (integer_type_node, len));
2859 gimple_seq_add_stmt_without_update (&stmts, repl);
2860 gsi_replace_with_seq_vops (gsi, stmts);
2861 /* gsi now points at the assignment to the lhs, get a
2862 stmt iterator to the memcpy call.
2863 ??? We can't use gsi_for_stmt as that doesn't work when the
2864 CFG isn't built yet. */
2865 gimple_stmt_iterator gsi2 = *gsi;
2866 gsi_prev (&gsi2);
2867 fold_stmt (&gsi2);
2868 }
2869 else
2870 {
2871 gsi_replace_with_seq_vops (gsi, stmts);
2872 fold_stmt (gsi);
2873 }
2874 return true;
2875 }
2876
2877 /* If the format is "%s", use strcpy if the result isn't used. */
2878 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2879 {
2880 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2881 if (!fn)
2882 return false;
2883
2884 /* Don't crash on snprintf (str1, cst, "%s"). */
2885 if (!orig)
2886 return false;
2887
dcb7fae2 2888 tree orig_len = get_maxval_strlen (orig, 0);
af9db3a7 2889 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
dcb7fae2 2890 return false;
d7e78447
RB
2891
2892 /* We could expand this as
2893 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2894 or to
2895 memcpy (str1, str2_with_nul_at_cstm1, cst);
2896 but in the former case that might increase code size
2897 and in the latter case grow .rodata section too much.
2898 So punt for now. */
2899 if (compare_tree_int (orig_len, destlen) >= 0)
2900 return false;
2901
2902 /* Convert snprintf (str1, cst, "%s", str2) into
2903 strcpy (str1, str2) if strlen (str2) < cst. */
2904 gimple_seq stmts = NULL;
355fe088 2905 gimple *repl = gimple_build_call (fn, 2, dest, orig);
d7e78447
RB
2906 gimple_seq_add_stmt_without_update (&stmts, repl);
2907 if (gimple_call_lhs (stmt))
2908 {
2909 if (!useless_type_conversion_p (integer_type_node,
2910 TREE_TYPE (orig_len)))
2911 orig_len = fold_convert (integer_type_node, orig_len);
2912 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2913 gimple_seq_add_stmt_without_update (&stmts, repl);
2914 gsi_replace_with_seq_vops (gsi, stmts);
2915 /* gsi now points at the assignment to the lhs, get a
2916 stmt iterator to the memcpy call.
2917 ??? We can't use gsi_for_stmt as that doesn't work when the
2918 CFG isn't built yet. */
2919 gimple_stmt_iterator gsi2 = *gsi;
2920 gsi_prev (&gsi2);
2921 fold_stmt (&gsi2);
2922 }
2923 else
2924 {
2925 gsi_replace_with_seq_vops (gsi, stmts);
2926 fold_stmt (gsi);
2927 }
2928 return true;
2929 }
2930 return false;
2931}
35770bb2 2932
edd7ae68
RB
2933/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2934 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2935 more than 3 arguments, and ARG may be null in the 2-argument case.
2936
2937 Return NULL_TREE if no simplification was possible, otherwise return the
2938 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2939 code of the function to be simplified. */
2940
2941static bool
2942gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2943 tree fp, tree fmt, tree arg,
2944 enum built_in_function fcode)
2945{
2946 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2947 tree fn_fputc, fn_fputs;
2948 const char *fmt_str = NULL;
2949
2950 /* If the return value is used, don't do the transformation. */
2951 if (gimple_call_lhs (stmt) != NULL_TREE)
2952 return false;
2953
2954 /* Check whether the format is a literal string constant. */
2955 fmt_str = c_getstr (fmt);
2956 if (fmt_str == NULL)
2957 return false;
2958
2959 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2960 {
2961 /* If we're using an unlocked function, assume the other
2962 unlocked functions exist explicitly. */
2963 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2964 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2965 }
2966 else
2967 {
2968 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2969 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2970 }
2971
2972 if (!init_target_chars ())
2973 return false;
2974
2975 /* If the format doesn't contain % args or %%, use strcpy. */
2976 if (strchr (fmt_str, target_percent) == NULL)
2977 {
2978 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2979 && arg)
2980 return false;
2981
2982 /* If the format specifier was "", fprintf does nothing. */
2983 if (fmt_str[0] == '\0')
2984 {
2985 replace_call_with_value (gsi, NULL_TREE);
2986 return true;
2987 }
2988
2989 /* When "string" doesn't contain %, replace all cases of
2990 fprintf (fp, string) with fputs (string, fp). The fputs
2991 builtin will take care of special cases like length == 1. */
2992 if (fn_fputs)
2993 {
2994 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2995 replace_call_with_call_and_fold (gsi, repl);
2996 return true;
2997 }
2998 }
2999
3000 /* The other optimizations can be done only on the non-va_list variants. */
3001 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3002 return false;
3003
3004 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3005 else if (strcmp (fmt_str, target_percent_s) == 0)
3006 {
3007 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3008 return false;
3009 if (fn_fputs)
3010 {
3011 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3012 replace_call_with_call_and_fold (gsi, repl);
3013 return true;
3014 }
3015 }
3016
3017 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3018 else if (strcmp (fmt_str, target_percent_c) == 0)
3019 {
3020 if (!arg
3021 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3022 return false;
3023 if (fn_fputc)
3024 {
3025 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3026 replace_call_with_call_and_fold (gsi, repl);
3027 return true;
3028 }
3029 }
3030
3031 return false;
3032}
3033
ad03a744
RB
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   FCODE is the BUILT_IN_* code of the function to be simplified.  Replace
   the call at *GSI (with puts/putchar or nothing) and return true if a
   simplification was made, otherwise return false.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation: the
     replacements below do not produce the character count.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle printf ("%s", arg) and formats without any % directive
     uniformly: in both cases a known constant string STR is printed.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* "%s" needs a real (non-va_list) pointer argument whose
	     contents are a known literal.  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string"): puts
	     appends the newline itself.  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3195
edd7ae68 3196
fef5a0d9
RB
3197
3198/* Fold a call to __builtin_strlen with known length LEN. */
3199
3200static bool
dcb7fae2 3201gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 3202{
355fe088 3203 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 3204 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
fef5a0d9
RB
3205 if (!len)
3206 return false;
2813904b 3207 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
fef5a0d9
RB
3208 replace_call_with_value (gsi, len);
3209 return true;
cbdd87d4
RG
3210}
3211
48126138
NS
3212/* Fold a call to __builtin_acc_on_device. */
3213
3214static bool
3215gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3216{
3217 /* Defer folding until we know which compiler we're in. */
3218 if (symtab->state != EXPANSION)
3219 return false;
3220
3221 unsigned val_host = GOMP_DEVICE_HOST;
3222 unsigned val_dev = GOMP_DEVICE_NONE;
3223
3224#ifdef ACCEL_COMPILER
3225 val_host = GOMP_DEVICE_NOT_HOST;
3226 val_dev = ACCEL_COMPILER_acc_device;
3227#endif
3228
3229 location_t loc = gimple_location (gsi_stmt (*gsi));
3230
3231 tree host_eq = make_ssa_name (boolean_type_node);
3232 gimple *host_ass = gimple_build_assign
3233 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3234 gimple_set_location (host_ass, loc);
3235 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3236
3237 tree dev_eq = make_ssa_name (boolean_type_node);
3238 gimple *dev_ass = gimple_build_assign
3239 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3240 gimple_set_location (dev_ass, loc);
3241 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3242
3243 tree result = make_ssa_name (boolean_type_node);
3244 gimple *result_ass = gimple_build_assign
3245 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3246 gimple_set_location (result_ass, loc);
3247 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3248
3249 replace_call_with_value (gsi, result);
3250
3251 return true;
3252}
cbdd87d4 3253
fe75f732
PK
3254/* Fold realloc (0, n) -> malloc (n). */
3255
3256static bool
3257gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3258{
3259 gimple *stmt = gsi_stmt (*gsi);
3260 tree arg = gimple_call_arg (stmt, 0);
3261 tree size = gimple_call_arg (stmt, 1);
3262
3263 if (operand_equal_p (arg, null_pointer_node, 0))
3264 {
3265 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3266 if (fn_malloc)
3267 {
3268 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3269 replace_call_with_call_and_fold (gsi, repl);
3270 return true;
3271 }
3272 }
3273 return false;
3274}
3275
dcb7fae2
RB
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  Dispatches to the per-builtin folders above; if none applies,
   falls back to the generic tree-level builtin folder.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    /* bzero (p, n) is memset (p, 0, n); note the argument reshuffle.  */
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    /* bcopy (src, dst, n) has src/dst swapped relative to memmove.
       The trailing constant selects the memory_op flavor (0 = memcpy,
       1 = mempcpy, 3 = memmove).  */
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
					    gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    /* index/rindex are the BSD spellings of strchr/strrchr; the bool
       argument selects forward (false) or reverse (true) search.  */
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);
    case BUILT_IN_SNPRINTF:
      return gimple_fold_builtin_snprintf (gsi);
    case BUILT_IN_SPRINTF:
      return gimple_fold_builtin_sprintf (gsi);
    /* The *printf folders below are only invoked for the argument counts
       they support; the _CHK variants carry extra leading arguments, which
       shifts the format/arg positions.  */
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
3456
451e8dae
NS
3457/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3458 function calls to constants, where possible. */
3459
3460static tree
3461fold_internal_goacc_dim (const gimple *call)
3462{
629b3d75
MJ
3463 int axis = oacc_get_ifn_dim_arg (call);
3464 int size = oacc_get_fn_dim_size (current_function_decl, axis);
451e8dae
NS
3465 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
3466 tree result = NULL_TREE;
3467
3468 /* If the size is 1, or we only want the size and it is not dynamic,
3469 we know the answer. */
3470 if (size == 1 || (!is_pos && size))
3471 {
3472 tree type = TREE_TYPE (gimple_call_lhs (call));
3473 result = build_int_cst (type, size - is_pos);
3474 }
3475
3476 return result;
3477}
3478
849a76a5
JJ
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  /* Require the canonical 6-argument form, atomics inlining enabled,
     optimization on, no TSan/ASan instrumentation (they intercept the
     library calls), a recognized builtin, and virtual operands (the call
     reads and writes memory).  */
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || (flag_sanitize & (SANITIZE_THREAD | SANITIZE_ADDRESS)) != 0
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  /* The "expected" argument must literally be the address of a local
     variable.  */
  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  /* The variable must be a register-type automatic in this function,
     non-volatile, and neither vector nor complex; its precision must
     fill its mode so a VIEW_CONVERT_EXPR round-trip is lossless.  */
  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
    return false;

  /* The weak flag must be a compile-time 0 or 1, since it is encoded
     into the internal call's flag operand.  */
  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  /* Derive the integral mode from the builtin's third parameter type and
     check the target has a compare-and-swap instruction for it.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  /* Finally the expected variable must occupy exactly the mode's size.  */
  if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
    return false;

  return true;
}
3543
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;
   so that E need no longer be addressable.  Caller has checked
   optimize_atomic_compare_exchange_p.  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  /* ITYPE is the uintN_t the builtin operates on; the internal fn
     returns a _Complex of it: real part = old value, imag = success.  */
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the current value of EXPECTED so it can be passed by value.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  /* Remember where to leave *GSI when done (the first inserted stmt).  */
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      /* Reinterpret EXPECTED's bits as the integral ITYPE.  */
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode weak flag and byte size into a single operand: w * 256 + N.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  /* Transfer the original call's virtual operands to the replacement.  */
  gimple_set_vdef (g, gimple_vdef (stmt));
  gimple_set_vuse (g, gimple_vuse (stmt));
  SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (stmt))
    {
      /* If the original call could throw, follow-up statements must go
	 on the fallthru edge, after the (possibly throwing) call.  */
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      /* r = (_Bool) IMAGPART_EXPR <t>;  */
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* e = REALPART_EXPR <t>;  (the value seen by the atomic op).  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      /* Convert the integral result back to EXPECTED's type.  */
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}
3633
1304953e
JJ
3634/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3635 doesn't fit into TYPE. The test for overflow should be regardless of
3636 -fwrapv, and even for unsigned types. */
3637
3638bool
3639arith_overflowed_p (enum tree_code code, const_tree type,
3640 const_tree arg0, const_tree arg1)
3641{
3642 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3643 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3644 widest2_int_cst;
3645 widest2_int warg0 = widest2_int_cst (arg0);
3646 widest2_int warg1 = widest2_int_cst (arg1);
3647 widest2_int wres;
3648 switch (code)
3649 {
3650 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3651 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3652 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3653 default: gcc_unreachable ();
3654 }
3655 signop sign = TYPE_SIGN (type);
3656 if (sign == UNSIGNED && wi::neg_p (wres))
3657 return true;
3658 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3659}
3660
cbdd87d4
RG
3661/* Attempt to fold a call statement referenced by the statement iterator GSI.
3662 The statement may be replaced by another statement, e.g., if the call
3663 simplifies to a constant value. Return true if any changes were made.
3664 It is assumed that the operands have been previously folded. */
3665
e021c122 3666static bool
ceeffab0 3667gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 3668{
538dd0b7 3669 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 3670 tree callee;
e021c122
RG
3671 bool changed = false;
3672 unsigned i;
cbdd87d4 3673
e021c122
RG
3674 /* Fold *& in call arguments. */
3675 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3676 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3677 {
3678 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3679 if (tmp)
3680 {
3681 gimple_call_set_arg (stmt, i, tmp);
3682 changed = true;
3683 }
3684 }
3b45a007
RG
3685
3686 /* Check for virtual calls that became direct calls. */
3687 callee = gimple_call_fn (stmt);
25583c4f 3688 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 3689 {
49c471e3
MJ
3690 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3691 {
450ad0cd
JH
3692 if (dump_file && virtual_method_call_p (callee)
3693 && !possible_polymorphic_call_target_p
6f8091fc
JH
3694 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3695 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
3696 {
3697 fprintf (dump_file,
a70e9985 3698 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
3699 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3700 fprintf (dump_file, " to ");
3701 print_generic_expr (dump_file, callee, TDF_SLIM);
3702 fprintf (dump_file, "\n");
3703 }
3704
49c471e3 3705 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
3706 changed = true;
3707 }
a70e9985 3708 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 3709 {
61dd6a2e
JH
3710 bool final;
3711 vec <cgraph_node *>targets
058d0a90 3712 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 3713 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 3714 {
a70e9985 3715 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
3716 if (dump_enabled_p ())
3717 {
807b7d62 3718 location_t loc = gimple_location_safe (stmt);
2b5f0895
XDL
3719 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3720 "folding virtual function call to %s\n",
3721 targets.length () == 1
3722 ? targets[0]->name ()
3723 : "__builtin_unreachable");
3724 }
61dd6a2e 3725 if (targets.length () == 1)
cf3e5a89 3726 {
18954840
JJ
3727 tree fndecl = targets[0]->decl;
3728 gimple_call_set_fndecl (stmt, fndecl);
cf3e5a89 3729 changed = true;
18954840
JJ
3730 /* If changing the call to __cxa_pure_virtual
3731 or similar noreturn function, adjust gimple_call_fntype
3732 too. */
865f7046 3733 if (gimple_call_noreturn_p (stmt)
18954840
JJ
3734 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3735 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3736 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3737 == void_type_node))
3738 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
a70e9985 3739 /* If the call becomes noreturn, remove the lhs. */
3cee7e4e
MP
3740 if (lhs
3741 && gimple_call_noreturn_p (stmt)
18954840 3742 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
abd3a68c 3743 || should_remove_lhs_p (lhs)))
a70e9985
JJ
3744 {
3745 if (TREE_CODE (lhs) == SSA_NAME)
3746 {
b731b390 3747 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 3748 tree def = get_or_create_ssa_default_def (cfun, var);
355fe088 3749 gimple *new_stmt = gimple_build_assign (lhs, def);
a70e9985
JJ
3750 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3751 }
3752 gimple_call_set_lhs (stmt, NULL_TREE);
3753 }
0b986c6a 3754 maybe_remove_unused_call_args (cfun, stmt);
cf3e5a89 3755 }
a70e9985 3756 else
cf3e5a89
JJ
3757 {
3758 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
355fe088 3759 gimple *new_stmt = gimple_build_call (fndecl, 0);
cf3e5a89 3760 gimple_set_location (new_stmt, gimple_location (stmt));
a70e9985
JJ
3761 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3762 {
b731b390 3763 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 3764 tree def = get_or_create_ssa_default_def (cfun, var);
eb14a79f
ML
3765
3766 /* To satisfy condition for
3767 cgraph_update_edges_for_call_stmt_node,
3768 we need to preserve GIMPLE_CALL statement
3769 at position of GSI iterator. */
a70e9985 3770 update_call_from_tree (gsi, def);
eb14a79f 3771 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
a70e9985
JJ
3772 }
3773 else
42e52a51
RB
3774 {
3775 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3776 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3777 gsi_replace (gsi, new_stmt, false);
3778 }
cf3e5a89
JJ
3779 return true;
3780 }
e021c122 3781 }
49c471e3 3782 }
e021c122 3783 }
49c471e3 3784
f2d3d07e
RH
3785 /* Check for indirect calls that became direct calls, and then
3786 no longer require a static chain. */
3787 if (gimple_call_chain (stmt))
3788 {
3789 tree fn = gimple_call_fndecl (stmt);
3790 if (fn && !DECL_STATIC_CHAIN (fn))
3791 {
3792 gimple_call_set_chain (stmt, NULL);
3793 changed = true;
3794 }
3795 else
3796 {
3797 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3798 if (tmp)
3799 {
3800 gimple_call_set_chain (stmt, tmp);
3801 changed = true;
3802 }
3803 }
3804 }
3805
e021c122
RG
3806 if (inplace)
3807 return changed;
3808
3809 /* Check for builtins that CCP can handle using information not
3810 available in the generic fold routines. */
fef5a0d9
RB
3811 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3812 {
3813 if (gimple_fold_builtin (gsi))
3814 changed = true;
3815 }
3816 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 3817 {
ea679d55 3818 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 3819 }
368b454d 3820 else if (gimple_call_internal_p (stmt))
ed9c79e1 3821 {
368b454d
JJ
3822 enum tree_code subcode = ERROR_MARK;
3823 tree result = NULL_TREE;
1304953e
JJ
3824 bool cplx_result = false;
3825 tree overflow = NULL_TREE;
368b454d
JJ
3826 switch (gimple_call_internal_fn (stmt))
3827 {
3828 case IFN_BUILTIN_EXPECT:
3829 result = fold_builtin_expect (gimple_location (stmt),
3830 gimple_call_arg (stmt, 0),
3831 gimple_call_arg (stmt, 1),
3832 gimple_call_arg (stmt, 2));
3833 break;
0e82f089
MP
3834 case IFN_UBSAN_OBJECT_SIZE:
3835 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3836 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3837 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3838 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3839 gimple_call_arg (stmt, 2))))
3840 {
f6b4dc28 3841 gsi_replace (gsi, gimple_build_nop (), false);
0e82f089
MP
3842 unlink_stmt_vdef (stmt);
3843 release_defs (stmt);
3844 return true;
3845 }
3846 break;
451e8dae
NS
3847 case IFN_GOACC_DIM_SIZE:
3848 case IFN_GOACC_DIM_POS:
3849 result = fold_internal_goacc_dim (stmt);
3850 break;
368b454d
JJ
3851 case IFN_UBSAN_CHECK_ADD:
3852 subcode = PLUS_EXPR;
3853 break;
3854 case IFN_UBSAN_CHECK_SUB:
3855 subcode = MINUS_EXPR;
3856 break;
3857 case IFN_UBSAN_CHECK_MUL:
3858 subcode = MULT_EXPR;
3859 break;
1304953e
JJ
3860 case IFN_ADD_OVERFLOW:
3861 subcode = PLUS_EXPR;
3862 cplx_result = true;
3863 break;
3864 case IFN_SUB_OVERFLOW:
3865 subcode = MINUS_EXPR;
3866 cplx_result = true;
3867 break;
3868 case IFN_MUL_OVERFLOW:
3869 subcode = MULT_EXPR;
3870 cplx_result = true;
3871 break;
368b454d
JJ
3872 default:
3873 break;
3874 }
3875 if (subcode != ERROR_MARK)
3876 {
3877 tree arg0 = gimple_call_arg (stmt, 0);
3878 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
3879 tree type = TREE_TYPE (arg0);
3880 if (cplx_result)
3881 {
3882 tree lhs = gimple_call_lhs (stmt);
3883 if (lhs == NULL_TREE)
3884 type = NULL_TREE;
3885 else
3886 type = TREE_TYPE (TREE_TYPE (lhs));
3887 }
3888 if (type == NULL_TREE)
3889 ;
368b454d 3890 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
3891 else if (integer_zerop (arg1))
3892 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
3893 /* x = 0 + y; x = 0 * y; */
3894 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 3895 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
3896 /* x = y - y; */
3897 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 3898 result = integer_zero_node;
368b454d 3899 /* x = y * 1; x = 1 * y; */
1304953e
JJ
3900 else if (subcode == MULT_EXPR && integer_onep (arg1))
3901 result = arg0;
3902 else if (subcode == MULT_EXPR && integer_onep (arg0))
3903 result = arg1;
3904 else if (TREE_CODE (arg0) == INTEGER_CST
3905 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 3906 {
1304953e
JJ
3907 if (cplx_result)
3908 result = int_const_binop (subcode, fold_convert (type, arg0),
3909 fold_convert (type, arg1));
3910 else
3911 result = int_const_binop (subcode, arg0, arg1);
3912 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3913 {
3914 if (cplx_result)
3915 overflow = build_one_cst (type);
3916 else
3917 result = NULL_TREE;
3918 }
3919 }
3920 if (result)
3921 {
3922 if (result == integer_zero_node)
3923 result = build_zero_cst (type);
3924 else if (cplx_result && TREE_TYPE (result) != type)
3925 {
3926 if (TREE_CODE (result) == INTEGER_CST)
3927 {
3928 if (arith_overflowed_p (PLUS_EXPR, type, result,
3929 integer_zero_node))
3930 overflow = build_one_cst (type);
3931 }
3932 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3933 && TYPE_UNSIGNED (type))
3934 || (TYPE_PRECISION (type)
3935 < (TYPE_PRECISION (TREE_TYPE (result))
3936 + (TYPE_UNSIGNED (TREE_TYPE (result))
3937 && !TYPE_UNSIGNED (type)))))
3938 result = NULL_TREE;
3939 if (result)
3940 result = fold_convert (type, result);
3941 }
368b454d
JJ
3942 }
3943 }
1304953e 3944
ed9c79e1
JJ
3945 if (result)
3946 {
1304953e
JJ
3947 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3948 result = drop_tree_overflow (result);
3949 if (cplx_result)
3950 {
3951 if (overflow == NULL_TREE)
3952 overflow = build_zero_cst (TREE_TYPE (result));
3953 tree ctype = build_complex_type (TREE_TYPE (result));
3954 if (TREE_CODE (result) == INTEGER_CST
3955 && TREE_CODE (overflow) == INTEGER_CST)
3956 result = build_complex (ctype, result, overflow);
3957 else
3958 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3959 ctype, result, overflow);
3960 }
ed9c79e1
JJ
3961 if (!update_call_from_tree (gsi, result))
3962 gimplify_and_update_call_from_tree (gsi, result);
3963 changed = true;
3964 }
3965 }
3b45a007 3966
e021c122 3967 return changed;
cbdd87d4
RG
3968}
3969
e0ee10ed 3970
89a79e96
RB
3971/* Return true whether NAME has a use on STMT. */
3972
3973static bool
355fe088 3974has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
3975{
3976 imm_use_iterator iter;
3977 use_operand_p use_p;
3978 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3979 if (USE_STMT (use_p) == stmt)
3980 return true;
3981 return false;
3982}
3983
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.
   GSI is left pointing at the (possibly replaced) statement.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt))
      /* ops[0] may itself be a comparison tree for GIMPLE_CONDs; check
	 its operands the same way.  */
      || (COMPARISON_CLASS_P (ops[0])
	  && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
	       && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
	      || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
		  && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
    return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	/* Result reduced to a bare SSA name: rewrite as NAME != 0.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  /* Constant condition: fold the branch statically.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Materialize the result as a new SSA name and compare it
	     against zero.  */
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      /* Only rewrite in place when the new RHS does not need more
	 operand slots than the statement currently has.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)), ops);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (rcode.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == rcode)
    {
      /* Same (combined) function call: only the arguments changed.  */
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  /* Push the result to *SEQ, forcing the final value into LHS,
	     and replace the statement with the sequence.  */
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
4134
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the reference tree to canonicalize; it is rewritten in
   place.  Returns true if *T was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the accessed element and
		     check the access stays within the vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (sizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip the component-reference chain to reach the base object.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the constant offset slot.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
4255
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE maps SSA names to values during
   pattern-based simplification; it may decline by returning NULL_TREE.
   Returns true if any change was made to the statement at *GSI.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  /* Defer overflow warnings until we know whether folding changed
     anything; see fold_undefer_overflow_warnings at the end.  */
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* NOTE(review): no break here — falls through into the empty
	 default, which is harmless.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* The statement may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding may not grow the number of operand slots.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    /* An input may only be folded to a memory reference when the
	       constraint allows memory but not a register.  */
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold &*p and friends underneath the ADDR_EXPR.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  /* Emit deferred overflow warnings only if folding changed something
     and the statement was not marked no-warning.  */
  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
4580
/* Valueization callback that ends up not following SSA edges.
   Returning NULL_TREE declines to provide a value for any name.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}
4588
45cc9f96
RB
4589/* Valueization callback that ends up following single-use SSA edges only. */
4590
4591tree
4592follow_single_use_edges (tree val)
4593{
4594 if (TREE_CODE (val) == SSA_NAME
4595 && !has_single_use (val))
4596 return NULL_TREE;
4597 return val;
4598}
4599
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.
   This overload does not follow SSA use-def edges while simplifying.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
4612
/* Likewise, but let the caller supply VALUEIZE, a callback mapping SSA
   names to values (returning NULL_TREE to decline) used during
   pattern-based simplification.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
4618
59401b92 4619/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
4620 *&x created by constant propagation are handled. The statement cannot
4621 be replaced with a new one. Return true if the statement was
4622 changed, false otherwise.
59401b92 4623 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
4624 be in unfolded state as resulting from for example constant propagation
4625 which can produce *&x = 0. */
4626
4627bool
59401b92 4628fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 4629{
355fe088 4630 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 4631 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 4632 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
4633 return changed;
4634}
4635
e89065a1
SL
4636/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4637 if EXPR is null or we don't know how.
4638 If non-null, the result always has boolean type. */
4639
4640static tree
4641canonicalize_bool (tree expr, bool invert)
4642{
4643 if (!expr)
4644 return NULL_TREE;
4645 else if (invert)
4646 {
4647 if (integer_nonzerop (expr))
4648 return boolean_false_node;
4649 else if (integer_zerop (expr))
4650 return boolean_true_node;
4651 else if (TREE_CODE (expr) == SSA_NAME)
4652 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4653 build_int_cst (TREE_TYPE (expr), 0));
98209db3 4654 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
4655 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4656 boolean_type_node,
4657 TREE_OPERAND (expr, 0),
4658 TREE_OPERAND (expr, 1));
4659 else
4660 return NULL_TREE;
4661 }
4662 else
4663 {
4664 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4665 return expr;
4666 if (integer_nonzerop (expr))
4667 return boolean_true_node;
4668 else if (integer_zerop (expr))
4669 return boolean_false_node;
4670 else if (TREE_CODE (expr) == SSA_NAME)
4671 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4672 build_int_cst (TREE_TYPE (expr), 0));
98209db3 4673 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
4674 return fold_build2 (TREE_CODE (expr),
4675 boolean_type_node,
4676 TREE_OPERAND (expr, 0),
4677 TREE_OPERAND (expr, 1));
4678 else
4679 return NULL_TREE;
4680 }
4681}
4682
4683/* Check to see if a boolean expression EXPR is logically equivalent to the
4684 comparison (OP1 CODE OP2). Check for various identities involving
4685 SSA_NAMEs. */
4686
4687static bool
4688same_bool_comparison_p (const_tree expr, enum tree_code code,
4689 const_tree op1, const_tree op2)
4690{
355fe088 4691 gimple *s;
e89065a1
SL
4692
4693 /* The obvious case. */
4694 if (TREE_CODE (expr) == code
4695 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
4696 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
4697 return true;
4698
4699 /* Check for comparing (name, name != 0) and the case where expr
4700 is an SSA_NAME with a definition matching the comparison. */
4701 if (TREE_CODE (expr) == SSA_NAME
4702 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4703 {
4704 if (operand_equal_p (expr, op1, 0))
4705 return ((code == NE_EXPR && integer_zerop (op2))
4706 || (code == EQ_EXPR && integer_nonzerop (op2)));
4707 s = SSA_NAME_DEF_STMT (expr);
4708 if (is_gimple_assign (s)
4709 && gimple_assign_rhs_code (s) == code
4710 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
4711 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
4712 return true;
4713 }
4714
4715 /* If op1 is of the form (name != 0) or (name == 0), and the definition
4716 of name is a comparison, recurse. */
4717 if (TREE_CODE (op1) == SSA_NAME
4718 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
4719 {
4720 s = SSA_NAME_DEF_STMT (op1);
4721 if (is_gimple_assign (s)
4722 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
4723 {
4724 enum tree_code c = gimple_assign_rhs_code (s);
4725 if ((c == NE_EXPR && integer_zerop (op2))
4726 || (c == EQ_EXPR && integer_nonzerop (op2)))
4727 return same_bool_comparison_p (expr, c,
4728 gimple_assign_rhs1 (s),
4729 gimple_assign_rhs2 (s));
4730 if ((c == EQ_EXPR && integer_zerop (op2))
4731 || (c == NE_EXPR && integer_nonzerop (op2)))
4732 return same_bool_comparison_p (expr,
4733 invert_tree_comparison (c, false),
4734 gimple_assign_rhs1 (s),
4735 gimple_assign_rhs2 (s));
4736 }
4737 }
4738 return false;
4739}
4740
4741/* Check to see if two boolean expressions OP1 and OP2 are logically
4742 equivalent. */
4743
4744static bool
4745same_bool_result_p (const_tree op1, const_tree op2)
4746{
4747 /* Simple cases first. */
4748 if (operand_equal_p (op1, op2, 0))
4749 return true;
4750
4751 /* Check the cases where at least one of the operands is a comparison.
4752 These are a bit smarter than operand_equal_p in that they apply some
4753 identifies on SSA_NAMEs. */
98209db3 4754 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
4755 && same_bool_comparison_p (op1, TREE_CODE (op2),
4756 TREE_OPERAND (op2, 0),
4757 TREE_OPERAND (op2, 1)))
4758 return true;
98209db3 4759 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
4760 && same_bool_comparison_p (op2, TREE_CODE (op1),
4761 TREE_OPERAND (op1, 0),
4762 TREE_OPERAND (op1, 1)))
4763 return true;
4764
4765 /* Default case. */
4766 return false;
4767}
4768
4769/* Forward declarations for some mutually recursive functions. */
4770
4771static tree
4772and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4773 enum tree_code code2, tree op2a, tree op2b);
4774static tree
4775and_var_with_comparison (tree var, bool invert,
4776 enum tree_code code2, tree op2a, tree op2b);
4777static tree
355fe088 4778and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
4779 enum tree_code code2, tree op2a, tree op2b);
4780static tree
4781or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4782 enum tree_code code2, tree op2a, tree op2b);
4783static tree
4784or_var_with_comparison (tree var, bool invert,
4785 enum tree_code code2, tree op2a, tree op2b);
4786static tree
355fe088 4787or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
4788 enum tree_code code2, tree op2a, tree op2b);
4789
4790/* Helper function for and_comparisons_1: try to simplify the AND of the
4791 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4792 If INVERT is true, invert the value of the VAR before doing the AND.
4793 Return NULL_EXPR if we can't simplify this to a single expression. */
4794
4795static tree
4796and_var_with_comparison (tree var, bool invert,
4797 enum tree_code code2, tree op2a, tree op2b)
4798{
4799 tree t;
355fe088 4800 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
4801
4802 /* We can only deal with variables whose definitions are assignments. */
4803 if (!is_gimple_assign (stmt))
4804 return NULL_TREE;
4805
4806 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4807 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4808 Then we only have to consider the simpler non-inverted cases. */
4809 if (invert)
4810 t = or_var_with_comparison_1 (stmt,
4811 invert_tree_comparison (code2, false),
4812 op2a, op2b);
4813 else
4814 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4815 return canonicalize_bool (t, invert);
4816}
4817
4818/* Try to simplify the AND of the ssa variable defined by the assignment
4819 STMT with the comparison specified by (OP2A CODE2 OP2B).
4820 Return NULL_EXPR if we can't simplify this to a single expression. */
4821
4822static tree
355fe088 4823and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
4824 enum tree_code code2, tree op2a, tree op2b)
4825{
4826 tree var = gimple_assign_lhs (stmt);
4827 tree true_test_var = NULL_TREE;
4828 tree false_test_var = NULL_TREE;
4829 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4830
4831 /* Check for identities like (var AND (var == 0)) => false. */
4832 if (TREE_CODE (op2a) == SSA_NAME
4833 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4834 {
4835 if ((code2 == NE_EXPR && integer_zerop (op2b))
4836 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4837 {
4838 true_test_var = op2a;
4839 if (var == true_test_var)
4840 return var;
4841 }
4842 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4843 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4844 {
4845 false_test_var = op2a;
4846 if (var == false_test_var)
4847 return boolean_false_node;
4848 }
4849 }
4850
4851 /* If the definition is a comparison, recurse on it. */
4852 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4853 {
4854 tree t = and_comparisons_1 (innercode,
4855 gimple_assign_rhs1 (stmt),
4856 gimple_assign_rhs2 (stmt),
4857 code2,
4858 op2a,
4859 op2b);
4860 if (t)
4861 return t;
4862 }
4863
4864 /* If the definition is an AND or OR expression, we may be able to
4865 simplify by reassociating. */
eb9820c0
KT
4866 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4867 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
4868 {
4869 tree inner1 = gimple_assign_rhs1 (stmt);
4870 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 4871 gimple *s;
e89065a1
SL
4872 tree t;
4873 tree partial = NULL_TREE;
eb9820c0 4874 bool is_and = (innercode == BIT_AND_EXPR);
e89065a1
SL
4875
4876 /* Check for boolean identities that don't require recursive examination
4877 of inner1/inner2:
4878 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4879 inner1 AND (inner1 OR inner2) => inner1
4880 !inner1 AND (inner1 AND inner2) => false
4881 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4882 Likewise for similar cases involving inner2. */
4883 if (inner1 == true_test_var)
4884 return (is_and ? var : inner1);
4885 else if (inner2 == true_test_var)
4886 return (is_and ? var : inner2);
4887 else if (inner1 == false_test_var)
4888 return (is_and
4889 ? boolean_false_node
4890 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4891 else if (inner2 == false_test_var)
4892 return (is_and
4893 ? boolean_false_node
4894 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4895
4896 /* Next, redistribute/reassociate the AND across the inner tests.
4897 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4898 if (TREE_CODE (inner1) == SSA_NAME
4899 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4900 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4901 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4902 gimple_assign_rhs1 (s),
4903 gimple_assign_rhs2 (s),
4904 code2, op2a, op2b)))
4905 {
4906 /* Handle the AND case, where we are reassociating:
4907 (inner1 AND inner2) AND (op2a code2 op2b)
4908 => (t AND inner2)
4909 If the partial result t is a constant, we win. Otherwise
4910 continue on to try reassociating with the other inner test. */
4911 if (is_and)
4912 {
4913 if (integer_onep (t))
4914 return inner2;
4915 else if (integer_zerop (t))
4916 return boolean_false_node;
4917 }
4918
4919 /* Handle the OR case, where we are redistributing:
4920 (inner1 OR inner2) AND (op2a code2 op2b)
4921 => (t OR (inner2 AND (op2a code2 op2b))) */
8236c8eb
JJ
4922 else if (integer_onep (t))
4923 return boolean_true_node;
4924
4925 /* Save partial result for later. */
4926 partial = t;
e89065a1
SL
4927 }
4928
4929 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4930 if (TREE_CODE (inner2) == SSA_NAME
4931 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4932 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4933 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4934 gimple_assign_rhs1 (s),
4935 gimple_assign_rhs2 (s),
4936 code2, op2a, op2b)))
4937 {
4938 /* Handle the AND case, where we are reassociating:
4939 (inner1 AND inner2) AND (op2a code2 op2b)
4940 => (inner1 AND t) */
4941 if (is_and)
4942 {
4943 if (integer_onep (t))
4944 return inner1;
4945 else if (integer_zerop (t))
4946 return boolean_false_node;
8236c8eb
JJ
4947 /* If both are the same, we can apply the identity
4948 (x AND x) == x. */
4949 else if (partial && same_bool_result_p (t, partial))
4950 return t;
e89065a1
SL
4951 }
4952
4953 /* Handle the OR case. where we are redistributing:
4954 (inner1 OR inner2) AND (op2a code2 op2b)
4955 => (t OR (inner1 AND (op2a code2 op2b)))
4956 => (t OR partial) */
4957 else
4958 {
4959 if (integer_onep (t))
4960 return boolean_true_node;
4961 else if (partial)
4962 {
4963 /* We already got a simplification for the other
4964 operand to the redistributed OR expression. The
4965 interesting case is when at least one is false.
4966 Or, if both are the same, we can apply the identity
4967 (x OR x) == x. */
4968 if (integer_zerop (partial))
4969 return t;
4970 else if (integer_zerop (t))
4971 return partial;
4972 else if (same_bool_result_p (t, partial))
4973 return t;
4974 }
4975 }
4976 }
4977 }
4978 return NULL_TREE;
4979}
4980
4981/* Try to simplify the AND of two comparisons defined by
4982 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4983 If this can be done without constructing an intermediate value,
4984 return the resulting tree; otherwise NULL_TREE is returned.
4985 This function is deliberately asymmetric as it recurses on SSA_DEFs
4986 in the first comparison but not the second. */
4987
4988static tree
4989and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4990 enum tree_code code2, tree op2a, tree op2b)
4991{
ae22ac3c 4992 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 4993
e89065a1
SL
4994 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4995 if (operand_equal_p (op1a, op2a, 0)
4996 && operand_equal_p (op1b, op2b, 0))
4997 {
eb9820c0 4998 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4999 tree t = combine_comparisons (UNKNOWN_LOCATION,
5000 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 5001 truth_type, op1a, op1b);
e89065a1
SL
5002 if (t)
5003 return t;
5004 }
5005
5006 /* Likewise the swapped case of the above. */
5007 if (operand_equal_p (op1a, op2b, 0)
5008 && operand_equal_p (op1b, op2a, 0))
5009 {
eb9820c0 5010 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5011 tree t = combine_comparisons (UNKNOWN_LOCATION,
5012 TRUTH_ANDIF_EXPR, code1,
5013 swap_tree_comparison (code2),
31ed6226 5014 truth_type, op1a, op1b);
e89065a1
SL
5015 if (t)
5016 return t;
5017 }
5018
5019 /* If both comparisons are of the same value against constants, we might
5020 be able to merge them. */
5021 if (operand_equal_p (op1a, op2a, 0)
5022 && TREE_CODE (op1b) == INTEGER_CST
5023 && TREE_CODE (op2b) == INTEGER_CST)
5024 {
5025 int cmp = tree_int_cst_compare (op1b, op2b);
5026
5027 /* If we have (op1a == op1b), we should either be able to
5028 return that or FALSE, depending on whether the constant op1b
5029 also satisfies the other comparison against op2b. */
5030 if (code1 == EQ_EXPR)
5031 {
5032 bool done = true;
5033 bool val;
5034 switch (code2)
5035 {
5036 case EQ_EXPR: val = (cmp == 0); break;
5037 case NE_EXPR: val = (cmp != 0); break;
5038 case LT_EXPR: val = (cmp < 0); break;
5039 case GT_EXPR: val = (cmp > 0); break;
5040 case LE_EXPR: val = (cmp <= 0); break;
5041 case GE_EXPR: val = (cmp >= 0); break;
5042 default: done = false;
5043 }
5044 if (done)
5045 {
5046 if (val)
5047 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5048 else
5049 return boolean_false_node;
5050 }
5051 }
5052 /* Likewise if the second comparison is an == comparison. */
5053 else if (code2 == EQ_EXPR)
5054 {
5055 bool done = true;
5056 bool val;
5057 switch (code1)
5058 {
5059 case EQ_EXPR: val = (cmp == 0); break;
5060 case NE_EXPR: val = (cmp != 0); break;
5061 case LT_EXPR: val = (cmp > 0); break;
5062 case GT_EXPR: val = (cmp < 0); break;
5063 case LE_EXPR: val = (cmp >= 0); break;
5064 case GE_EXPR: val = (cmp <= 0); break;
5065 default: done = false;
5066 }
5067 if (done)
5068 {
5069 if (val)
5070 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5071 else
5072 return boolean_false_node;
5073 }
5074 }
5075
5076 /* Same business with inequality tests. */
5077 else if (code1 == NE_EXPR)
5078 {
5079 bool val;
5080 switch (code2)
5081 {
5082 case EQ_EXPR: val = (cmp != 0); break;
5083 case NE_EXPR: val = (cmp == 0); break;
5084 case LT_EXPR: val = (cmp >= 0); break;
5085 case GT_EXPR: val = (cmp <= 0); break;
5086 case LE_EXPR: val = (cmp > 0); break;
5087 case GE_EXPR: val = (cmp < 0); break;
5088 default:
5089 val = false;
5090 }
5091 if (val)
5092 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5093 }
5094 else if (code2 == NE_EXPR)
5095 {
5096 bool val;
5097 switch (code1)
5098 {
5099 case EQ_EXPR: val = (cmp == 0); break;
5100 case NE_EXPR: val = (cmp != 0); break;
5101 case LT_EXPR: val = (cmp <= 0); break;
5102 case GT_EXPR: val = (cmp >= 0); break;
5103 case LE_EXPR: val = (cmp < 0); break;
5104 case GE_EXPR: val = (cmp > 0); break;
5105 default:
5106 val = false;
5107 }
5108 if (val)
5109 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5110 }
5111
5112 /* Chose the more restrictive of two < or <= comparisons. */
5113 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5114 && (code2 == LT_EXPR || code2 == LE_EXPR))
5115 {
5116 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5117 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5118 else
5119 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5120 }
5121
5122 /* Likewise chose the more restrictive of two > or >= comparisons. */
5123 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5124 && (code2 == GT_EXPR || code2 == GE_EXPR))
5125 {
5126 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5127 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5128 else
5129 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5130 }
5131
5132 /* Check for singleton ranges. */
5133 else if (cmp == 0
5134 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5135 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5136 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5137
5138 /* Check for disjoint ranges. */
5139 else if (cmp <= 0
5140 && (code1 == LT_EXPR || code1 == LE_EXPR)
5141 && (code2 == GT_EXPR || code2 == GE_EXPR))
5142 return boolean_false_node;
5143 else if (cmp >= 0
5144 && (code1 == GT_EXPR || code1 == GE_EXPR)
5145 && (code2 == LT_EXPR || code2 == LE_EXPR))
5146 return boolean_false_node;
5147 }
5148
5149 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5150 NAME's definition is a truth value. See if there are any simplifications
5151 that can be done against the NAME's definition. */
5152 if (TREE_CODE (op1a) == SSA_NAME
5153 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5154 && (integer_zerop (op1b) || integer_onep (op1b)))
5155 {
5156 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5157 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 5158 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
5159 switch (gimple_code (stmt))
5160 {
5161 case GIMPLE_ASSIGN:
5162 /* Try to simplify by copy-propagating the definition. */
5163 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5164
5165 case GIMPLE_PHI:
5166 /* If every argument to the PHI produces the same result when
5167 ANDed with the second comparison, we win.
5168 Do not do this unless the type is bool since we need a bool
5169 result here anyway. */
5170 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5171 {
5172 tree result = NULL_TREE;
5173 unsigned i;
5174 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5175 {
5176 tree arg = gimple_phi_arg_def (stmt, i);
5177
5178 /* If this PHI has itself as an argument, ignore it.
5179 If all the other args produce the same result,
5180 we're still OK. */
5181 if (arg == gimple_phi_result (stmt))
5182 continue;
5183 else if (TREE_CODE (arg) == INTEGER_CST)
5184 {
5185 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5186 {
5187 if (!result)
5188 result = boolean_false_node;
5189 else if (!integer_zerop (result))
5190 return NULL_TREE;
5191 }
5192 else if (!result)
5193 result = fold_build2 (code2, boolean_type_node,
5194 op2a, op2b);
5195 else if (!same_bool_comparison_p (result,
5196 code2, op2a, op2b))
5197 return NULL_TREE;
5198 }
0e8b84ec
JJ
5199 else if (TREE_CODE (arg) == SSA_NAME
5200 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 5201 {
6c66f733 5202 tree temp;
355fe088 5203 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
5204 /* In simple cases we can look through PHI nodes,
5205 but we have to be careful with loops.
5206 See PR49073. */
5207 if (! dom_info_available_p (CDI_DOMINATORS)
5208 || gimple_bb (def_stmt) == gimple_bb (stmt)
5209 || dominated_by_p (CDI_DOMINATORS,
5210 gimple_bb (def_stmt),
5211 gimple_bb (stmt)))
5212 return NULL_TREE;
5213 temp = and_var_with_comparison (arg, invert, code2,
5214 op2a, op2b);
e89065a1
SL
5215 if (!temp)
5216 return NULL_TREE;
5217 else if (!result)
5218 result = temp;
5219 else if (!same_bool_result_p (result, temp))
5220 return NULL_TREE;
5221 }
5222 else
5223 return NULL_TREE;
5224 }
5225 return result;
5226 }
5227
5228 default:
5229 break;
5230 }
5231 }
5232 return NULL_TREE;
5233}
5234
5235/* Try to simplify the AND of two comparisons, specified by
5236 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5237 If this can be simplified to a single expression (without requiring
5238 introducing more SSA variables to hold intermediate values),
5239 return the resulting tree. Otherwise return NULL_TREE.
5240 If the result expression is non-null, it has boolean type. */
5241
5242tree
5243maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5244 enum tree_code code2, tree op2a, tree op2b)
5245{
5246 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5247 if (t)
5248 return t;
5249 else
5250 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5251}
5252
5253/* Helper function for or_comparisons_1: try to simplify the OR of the
5254 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5255 If INVERT is true, invert the value of VAR before doing the OR.
5256 Return NULL_EXPR if we can't simplify this to a single expression. */
5257
5258static tree
5259or_var_with_comparison (tree var, bool invert,
5260 enum tree_code code2, tree op2a, tree op2b)
5261{
5262 tree t;
355fe088 5263 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5264
5265 /* We can only deal with variables whose definitions are assignments. */
5266 if (!is_gimple_assign (stmt))
5267 return NULL_TREE;
5268
5269 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5270 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5271 Then we only have to consider the simpler non-inverted cases. */
5272 if (invert)
5273 t = and_var_with_comparison_1 (stmt,
5274 invert_tree_comparison (code2, false),
5275 op2a, op2b);
5276 else
5277 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5278 return canonicalize_bool (t, invert);
5279}
5280
5281/* Try to simplify the OR of the ssa variable defined by the assignment
5282 STMT with the comparison specified by (OP2A CODE2 OP2B).
5283 Return NULL_EXPR if we can't simplify this to a single expression. */
5284
5285static tree
355fe088 5286or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
5287 enum tree_code code2, tree op2a, tree op2b)
5288{
5289 tree var = gimple_assign_lhs (stmt);
5290 tree true_test_var = NULL_TREE;
5291 tree false_test_var = NULL_TREE;
5292 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5293
5294 /* Check for identities like (var OR (var != 0)) => true . */
5295 if (TREE_CODE (op2a) == SSA_NAME
5296 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5297 {
5298 if ((code2 == NE_EXPR && integer_zerop (op2b))
5299 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5300 {
5301 true_test_var = op2a;
5302 if (var == true_test_var)
5303 return var;
5304 }
5305 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5306 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5307 {
5308 false_test_var = op2a;
5309 if (var == false_test_var)
5310 return boolean_true_node;
5311 }
5312 }
5313
5314 /* If the definition is a comparison, recurse on it. */
5315 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5316 {
5317 tree t = or_comparisons_1 (innercode,
5318 gimple_assign_rhs1 (stmt),
5319 gimple_assign_rhs2 (stmt),
5320 code2,
5321 op2a,
5322 op2b);
5323 if (t)
5324 return t;
5325 }
5326
5327 /* If the definition is an AND or OR expression, we may be able to
5328 simplify by reassociating. */
eb9820c0
KT
5329 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5330 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
5331 {
5332 tree inner1 = gimple_assign_rhs1 (stmt);
5333 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 5334 gimple *s;
e89065a1
SL
5335 tree t;
5336 tree partial = NULL_TREE;
eb9820c0 5337 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
5338
5339 /* Check for boolean identities that don't require recursive examination
5340 of inner1/inner2:
5341 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5342 inner1 OR (inner1 AND inner2) => inner1
5343 !inner1 OR (inner1 OR inner2) => true
5344 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5345 */
5346 if (inner1 == true_test_var)
5347 return (is_or ? var : inner1);
5348 else if (inner2 == true_test_var)
5349 return (is_or ? var : inner2);
5350 else if (inner1 == false_test_var)
5351 return (is_or
5352 ? boolean_true_node
5353 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5354 else if (inner2 == false_test_var)
5355 return (is_or
5356 ? boolean_true_node
5357 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5358
5359 /* Next, redistribute/reassociate the OR across the inner tests.
5360 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5361 if (TREE_CODE (inner1) == SSA_NAME
5362 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5363 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5364 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5365 gimple_assign_rhs1 (s),
5366 gimple_assign_rhs2 (s),
5367 code2, op2a, op2b)))
5368 {
5369 /* Handle the OR case, where we are reassociating:
5370 (inner1 OR inner2) OR (op2a code2 op2b)
5371 => (t OR inner2)
5372 If the partial result t is a constant, we win. Otherwise
5373 continue on to try reassociating with the other inner test. */
8236c8eb 5374 if (is_or)
e89065a1
SL
5375 {
5376 if (integer_onep (t))
5377 return boolean_true_node;
5378 else if (integer_zerop (t))
5379 return inner2;
5380 }
5381
5382 /* Handle the AND case, where we are redistributing:
5383 (inner1 AND inner2) OR (op2a code2 op2b)
5384 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
5385 else if (integer_zerop (t))
5386 return boolean_false_node;
5387
5388 /* Save partial result for later. */
5389 partial = t;
e89065a1
SL
5390 }
5391
5392 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5393 if (TREE_CODE (inner2) == SSA_NAME
5394 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5395 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5396 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5397 gimple_assign_rhs1 (s),
5398 gimple_assign_rhs2 (s),
5399 code2, op2a, op2b)))
5400 {
5401 /* Handle the OR case, where we are reassociating:
5402 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
5403 => (inner1 OR t)
5404 => (t OR partial) */
5405 if (is_or)
e89065a1
SL
5406 {
5407 if (integer_zerop (t))
5408 return inner1;
5409 else if (integer_onep (t))
5410 return boolean_true_node;
8236c8eb
JJ
5411 /* If both are the same, we can apply the identity
5412 (x OR x) == x. */
5413 else if (partial && same_bool_result_p (t, partial))
5414 return t;
e89065a1
SL
5415 }
5416
5417 /* Handle the AND case, where we are redistributing:
5418 (inner1 AND inner2) OR (op2a code2 op2b)
5419 => (t AND (inner1 OR (op2a code2 op2b)))
5420 => (t AND partial) */
5421 else
5422 {
5423 if (integer_zerop (t))
5424 return boolean_false_node;
5425 else if (partial)
5426 {
5427 /* We already got a simplification for the other
5428 operand to the redistributed AND expression. The
5429 interesting case is when at least one is true.
5430 Or, if both are the same, we can apply the identity
8236c8eb 5431 (x AND x) == x. */
e89065a1
SL
5432 if (integer_onep (partial))
5433 return t;
5434 else if (integer_onep (t))
5435 return partial;
5436 else if (same_bool_result_p (t, partial))
8236c8eb 5437 return t;
e89065a1
SL
5438 }
5439 }
5440 }
5441 }
5442 return NULL_TREE;
5443}
5444
5445/* Try to simplify the OR of two comparisons defined by
5446 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5447 If this can be done without constructing an intermediate value,
5448 return the resulting tree; otherwise NULL_TREE is returned.
5449 This function is deliberately asymmetric as it recurses on SSA_DEFs
5450 in the first comparison but not the second. */
5451
5452static tree
5453or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5454 enum tree_code code2, tree op2a, tree op2b)
5455{
ae22ac3c 5456 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 5457
e89065a1
SL
5458 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5459 if (operand_equal_p (op1a, op2a, 0)
5460 && operand_equal_p (op1b, op2b, 0))
5461 {
eb9820c0 5462 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5463 tree t = combine_comparisons (UNKNOWN_LOCATION,
5464 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 5465 truth_type, op1a, op1b);
e89065a1
SL
5466 if (t)
5467 return t;
5468 }
5469
5470 /* Likewise the swapped case of the above. */
5471 if (operand_equal_p (op1a, op2b, 0)
5472 && operand_equal_p (op1b, op2a, 0))
5473 {
eb9820c0 5474 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5475 tree t = combine_comparisons (UNKNOWN_LOCATION,
5476 TRUTH_ORIF_EXPR, code1,
5477 swap_tree_comparison (code2),
31ed6226 5478 truth_type, op1a, op1b);
e89065a1
SL
5479 if (t)
5480 return t;
5481 }
5482
5483 /* If both comparisons are of the same value against constants, we might
5484 be able to merge them. */
5485 if (operand_equal_p (op1a, op2a, 0)
5486 && TREE_CODE (op1b) == INTEGER_CST
5487 && TREE_CODE (op2b) == INTEGER_CST)
5488 {
5489 int cmp = tree_int_cst_compare (op1b, op2b);
5490
5491 /* If we have (op1a != op1b), we should either be able to
5492 return that or TRUE, depending on whether the constant op1b
5493 also satisfies the other comparison against op2b. */
5494 if (code1 == NE_EXPR)
5495 {
5496 bool done = true;
5497 bool val;
5498 switch (code2)
5499 {
5500 case EQ_EXPR: val = (cmp == 0); break;
5501 case NE_EXPR: val = (cmp != 0); break;
5502 case LT_EXPR: val = (cmp < 0); break;
5503 case GT_EXPR: val = (cmp > 0); break;
5504 case LE_EXPR: val = (cmp <= 0); break;
5505 case GE_EXPR: val = (cmp >= 0); break;
5506 default: done = false;
5507 }
5508 if (done)
5509 {
5510 if (val)
5511 return boolean_true_node;
5512 else
5513 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5514 }
5515 }
5516 /* Likewise if the second comparison is a != comparison. */
5517 else if (code2 == NE_EXPR)
5518 {
5519 bool done = true;
5520 bool val;
5521 switch (code1)
5522 {
5523 case EQ_EXPR: val = (cmp == 0); break;
5524 case NE_EXPR: val = (cmp != 0); break;
5525 case LT_EXPR: val = (cmp > 0); break;
5526 case GT_EXPR: val = (cmp < 0); break;
5527 case LE_EXPR: val = (cmp >= 0); break;
5528 case GE_EXPR: val = (cmp <= 0); break;
5529 default: done = false;
5530 }
5531 if (done)
5532 {
5533 if (val)
5534 return boolean_true_node;
5535 else
5536 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5537 }
5538 }
5539
5540 /* See if an equality test is redundant with the other comparison. */
5541 else if (code1 == EQ_EXPR)
5542 {
5543 bool val;
5544 switch (code2)
5545 {
5546 case EQ_EXPR: val = (cmp == 0); break;
5547 case NE_EXPR: val = (cmp != 0); break;
5548 case LT_EXPR: val = (cmp < 0); break;
5549 case GT_EXPR: val = (cmp > 0); break;
5550 case LE_EXPR: val = (cmp <= 0); break;
5551 case GE_EXPR: val = (cmp >= 0); break;
5552 default:
5553 val = false;
5554 }
5555 if (val)
5556 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5557 }
5558 else if (code2 == EQ_EXPR)
5559 {
5560 bool val;
5561 switch (code1)
5562 {
5563 case EQ_EXPR: val = (cmp == 0); break;
5564 case NE_EXPR: val = (cmp != 0); break;
5565 case LT_EXPR: val = (cmp > 0); break;
5566 case GT_EXPR: val = (cmp < 0); break;
5567 case LE_EXPR: val = (cmp >= 0); break;
5568 case GE_EXPR: val = (cmp <= 0); break;
5569 default:
5570 val = false;
5571 }
5572 if (val)
5573 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5574 }
5575
5576 /* Chose the less restrictive of two < or <= comparisons. */
5577 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5578 && (code2 == LT_EXPR || code2 == LE_EXPR))
5579 {
5580 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5581 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5582 else
5583 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5584 }
5585
5586 /* Likewise chose the less restrictive of two > or >= comparisons. */
5587 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5588 && (code2 == GT_EXPR || code2 == GE_EXPR))
5589 {
5590 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5591 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5592 else
5593 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5594 }
5595
5596 /* Check for singleton ranges. */
5597 else if (cmp == 0
5598 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5599 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5600 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5601
5602 /* Check for less/greater pairs that don't restrict the range at all. */
5603 else if (cmp >= 0
5604 && (code1 == LT_EXPR || code1 == LE_EXPR)
5605 && (code2 == GT_EXPR || code2 == GE_EXPR))
5606 return boolean_true_node;
5607 else if (cmp <= 0
5608 && (code1 == GT_EXPR || code1 == GE_EXPR)
5609 && (code2 == LT_EXPR || code2 == LE_EXPR))
5610 return boolean_true_node;
5611 }
5612
5613 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5614 NAME's definition is a truth value. See if there are any simplifications
5615 that can be done against the NAME's definition. */
5616 if (TREE_CODE (op1a) == SSA_NAME
5617 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5618 && (integer_zerop (op1b) || integer_onep (op1b)))
5619 {
5620 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5621 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 5622 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
5623 switch (gimple_code (stmt))
5624 {
5625 case GIMPLE_ASSIGN:
5626 /* Try to simplify by copy-propagating the definition. */
5627 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5628
5629 case GIMPLE_PHI:
5630 /* If every argument to the PHI produces the same result when
5631 ORed with the second comparison, we win.
5632 Do not do this unless the type is bool since we need a bool
5633 result here anyway. */
5634 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5635 {
5636 tree result = NULL_TREE;
5637 unsigned i;
5638 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5639 {
5640 tree arg = gimple_phi_arg_def (stmt, i);
5641
5642 /* If this PHI has itself as an argument, ignore it.
5643 If all the other args produce the same result,
5644 we're still OK. */
5645 if (arg == gimple_phi_result (stmt))
5646 continue;
5647 else if (TREE_CODE (arg) == INTEGER_CST)
5648 {
5649 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5650 {
5651 if (!result)
5652 result = boolean_true_node;
5653 else if (!integer_onep (result))
5654 return NULL_TREE;
5655 }
5656 else if (!result)
5657 result = fold_build2 (code2, boolean_type_node,
5658 op2a, op2b);
5659 else if (!same_bool_comparison_p (result,
5660 code2, op2a, op2b))
5661 return NULL_TREE;
5662 }
0e8b84ec
JJ
5663 else if (TREE_CODE (arg) == SSA_NAME
5664 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 5665 {
6c66f733 5666 tree temp;
355fe088 5667 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
5668 /* In simple cases we can look through PHI nodes,
5669 but we have to be careful with loops.
5670 See PR49073. */
5671 if (! dom_info_available_p (CDI_DOMINATORS)
5672 || gimple_bb (def_stmt) == gimple_bb (stmt)
5673 || dominated_by_p (CDI_DOMINATORS,
5674 gimple_bb (def_stmt),
5675 gimple_bb (stmt)))
5676 return NULL_TREE;
5677 temp = or_var_with_comparison (arg, invert, code2,
5678 op2a, op2b);
e89065a1
SL
5679 if (!temp)
5680 return NULL_TREE;
5681 else if (!result)
5682 result = temp;
5683 else if (!same_bool_result_p (result, temp))
5684 return NULL_TREE;
5685 }
5686 else
5687 return NULL_TREE;
5688 }
5689 return result;
5690 }
5691
5692 default:
5693 break;
5694 }
5695 }
5696 return NULL_TREE;
5697}
5698
5699/* Try to simplify the OR of two comparisons, specified by
5700 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5701 If this can be simplified to a single expression (without requiring
5702 introducing more SSA variables to hold intermediate values),
5703 return the resulting tree. Otherwise return NULL_TREE.
5704 If the result expression is non-null, it has boolean type. */
5705
5706tree
5707maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5708 enum tree_code code2, tree op2a, tree op2b)
5709{
5710 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5711 if (t)
5712 return t;
5713 else
5714 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5715}
cfef45c8
RG
5716
5717
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   GVALUEIZE is used by the match-and-simplify machinery; VALUEIZE by
   the hand-written folding below.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (rcode, ops))
	res = ops[0];
      else if (mprts_hook)
	res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res, 0);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  /* Fall back to hand-written folding per statement kind.  */
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  HOST_WIDE_INT offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
                                                          &offset,
                                                          valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              /* A vector CONSTRUCTOR with one value per subpart folds to a
                 VECTOR_CST when all valueized elements are constants.  */
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree val, *vec;

                  vec = XALLOCAVEC (tree,
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec[i] = val;
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), vec);
                }
              if (subcode == OBJ_TYPE_REF)
                {
                  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
                  /* If callee is constant, we can fold away the wrapper.  */
                  if (is_gimple_min_invariant (val))
                    return val;
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      /* Rewrite *&decl into a direct reference so the
                         constant-aggregate walk below can see through it.  */
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            /* Unary RHSes are fully handled by gimple_simplify above.  */
            return NULL_TREE;

          case GIMPLE_BINARY_RHS:
            /* Translate &x + CST into an invariant form suitable for
               further propagation.  */
            if (subcode == POINTER_PLUS_EXPR)
              {
                tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
                tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
                if (TREE_CODE (op0) == ADDR_EXPR
                    && TREE_CODE (op1) == INTEGER_CST)
                  {
                    tree off = fold_convert (ptr_type_node, op1);
                    return build_fold_addr_expr_loc
                        (loc,
                         fold_build2 (MEM_REF,
                                      TREE_TYPE (TREE_TYPE (op0)),
                                      unshare_expr (op0), off));
                  }
              }
            /* Canonicalize bool != 0 and bool == 0 appearing after
               valueization.  While gimple_simplify handles this
               it can get confused by the ~X == 1 -> X == 0 transform
               which we can't reduce to a SSA name or a constant
               (and we have no way to tell gimple_simplify to not
               consider those transforms in the first place).  */
            else if (subcode == EQ_EXPR
                     || subcode == NE_EXPR)
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree op0 = gimple_assign_rhs1 (stmt);
                if (useless_type_conversion_p (TREE_TYPE (lhs),
                                               TREE_TYPE (op0)))
                  {
                    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
                    op0 = (*valueize) (op0);
                    /* Put the constant (if any) in op1 so the tests below
                       only have to check one operand shape.  */
                    if (TREE_CODE (op0) == INTEGER_CST)
                      std::swap (op0, op1);
                    if (TREE_CODE (op1) == INTEGER_CST
                        && ((subcode == NE_EXPR && integer_zerop (op1))
                            || (subcode == EQ_EXPR && integer_onep (op1))))
                      return op0;
                  }
              }
            return NULL_TREE;

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    /* Map overflow-checking internal functions to the tree code of
	       the operation they check; other internal fns are not folded.  */
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		  {
		    tree arg0 = gimple_call_arg (stmt, 0);
		    tree op0 = (*valueize) (arg0);
		    if (TREE_CODE (op0) == INTEGER_CST)
		      return op0;
		    return NULL_TREE;
		  }
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		/* Some identities hold even without knowing both operands.  */
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    /* Only fold when the checked operation provably does not
	       overflow; otherwise the UBSAN check must stay.  */
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
6024
6025/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6026 Returns NULL_TREE if folding to a constant is not possible, otherwise
6027 returns a constant according to is_gimple_min_invariant. */
6028
6029tree
355fe088 6030gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
6031{
6032 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6033 if (res && is_gimple_min_invariant (res))
6034 return res;
6035 return NULL_TREE;
6036}
6037
6038
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   VALUEIZE, if non-NULL, is used to look through SSA names.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF offset into *BIT_OFFSET and strip the MEM_REF.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      /* A view-convert does not move bits; just look through it.  */
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Resolve the reference to its base and accumulate the constant
	 offset; give up on variable-sized accesses.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
6121
cfef45c8
RG
6122/* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
6123 SIZE to the memory at bit OFFSET. */
6124
6125static tree
6126fold_array_ctor_reference (tree type, tree ctor,
6127 unsigned HOST_WIDE_INT offset,
c44c2088
JH
6128 unsigned HOST_WIDE_INT size,
6129 tree from_decl)
cfef45c8 6130{
807e902e
KZ
6131 offset_int low_bound;
6132 offset_int elt_size;
807e902e 6133 offset_int access_index;
6a636014 6134 tree domain_type = NULL_TREE;
cfef45c8
RG
6135 HOST_WIDE_INT inner_offset;
6136
6137 /* Compute low bound and elt size. */
eb8f1123
RG
6138 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6139 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
6140 if (domain_type && TYPE_MIN_VALUE (domain_type))
6141 {
6142 /* Static constructors for variably sized objects makes no sense. */
9ef2eff0
RB
6143 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6144 return NULL_TREE;
807e902e 6145 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
6146 }
6147 else
807e902e 6148 low_bound = 0;
cfef45c8 6149 /* Static constructors for variably sized objects makes no sense. */
9ef2eff0
RB
6150 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6151 return NULL_TREE;
807e902e 6152 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8
RG
6153
6154 /* We can handle only constantly sized accesses that are known to not
6155 be larger than size of array element. */
6156 if (!TYPE_SIZE_UNIT (type)
6157 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
032c80e9 6158 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
807e902e 6159 || elt_size == 0)
cfef45c8
RG
6160 return NULL_TREE;
6161
6162 /* Compute the array index we look for. */
807e902e
KZ
6163 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6164 elt_size);
27bcd47c 6165 access_index += low_bound;
cfef45c8
RG
6166
6167 /* And offset within the access. */
27bcd47c 6168 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8
RG
6169
6170 /* See if the array field is large enough to span whole access. We do not
6171 care to fold accesses spanning multiple array indexes. */
27bcd47c 6172 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
cfef45c8 6173 return NULL_TREE;
6a636014
AL
6174 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6175 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
cfef45c8 6176
cfef45c8
RG
6177 /* When memory is not explicitely mentioned in constructor,
6178 it is 0 (or out of range). */
6179 return build_zero_cst (type);
6180}
6181
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   FROM_DECL is the declaration the constructor came from and is passed on
   for canonicalization purposes.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  /* Scan the constructor elements for a field overlapping the access.  */
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;
	  /* Recurse into the matched field's value.  */
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
6244
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  FROM_DECL is the declaration CTOR came from,
   used when canonicalizing constructor values.  Returns the folded value or
   NULL_TREE on failure.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  /* Dispatch on the aggregate kind for CONSTRUCTORs.  */
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {

      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
6303
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold away a volatile access.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the operand first; a COMPLEX_CST lets us extract the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
6421
6422tree
6423fold_const_aggregate_ref (tree t)
6424{
6425 return fold_const_aggregate_ref_1 (t, NULL);
6426}
06bc3ec7 6427
85942f45 6428/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
6429 at OFFSET.
6430 Set CAN_REFER if non-NULL to false if method
6431 is not referable or if the virtual table is ill-formed (such as rewriten
6432 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
81fa35bd
MJ
6433
6434tree
85942f45
JH
6435gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6436 tree v,
ec77d61f
JH
6437 unsigned HOST_WIDE_INT offset,
6438 bool *can_refer)
81fa35bd 6439{
85942f45
JH
6440 tree vtable = v, init, fn;
6441 unsigned HOST_WIDE_INT size;
8c311b50
JH
6442 unsigned HOST_WIDE_INT elt_size, access_index;
6443 tree domain_type;
81fa35bd 6444
ec77d61f
JH
6445 if (can_refer)
6446 *can_refer = true;
6447
9de2f554 6448 /* First of all double check we have virtual table. */
8813a647 6449 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
ec77d61f 6450 {
ec77d61f
JH
6451 /* Pass down that we lost track of the target. */
6452 if (can_refer)
6453 *can_refer = false;
6454 return NULL_TREE;
6455 }
9de2f554 6456
2aa3da06
JH
6457 init = ctor_for_folding (v);
6458
9de2f554 6459 /* The virtual tables should always be born with constructors
2aa3da06
JH
6460 and we always should assume that they are avaialble for
6461 folding. At the moment we do not stream them in all cases,
6462 but it should never happen that ctor seem unreachable. */
6463 gcc_assert (init);
6464 if (init == error_mark_node)
6465 {
6466 gcc_assert (in_lto_p);
ec77d61f
JH
6467 /* Pass down that we lost track of the target. */
6468 if (can_refer)
6469 *can_refer = false;
2aa3da06
JH
6470 return NULL_TREE;
6471 }
81fa35bd 6472 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 6473 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 6474 offset *= BITS_PER_UNIT;
81fa35bd 6475 offset += token * size;
9de2f554 6476
8c311b50
JH
6477 /* Lookup the value in the constructor that is assumed to be array.
6478 This is equivalent to
6479 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6480 offset, size, NULL);
6481 but in a constant time. We expect that frontend produced a simple
6482 array without indexed initializers. */
6483
6484 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6485 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6486 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6487 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6488
6489 access_index = offset / BITS_PER_UNIT / elt_size;
6490 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6491
6492 /* This code makes an assumption that there are no
6493 indexed fileds produced by C++ FE, so we can directly index the array. */
6494 if (access_index < CONSTRUCTOR_NELTS (init))
6495 {
6496 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6497 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6498 STRIP_NOPS (fn);
6499 }
6500 else
6501 fn = NULL;
9de2f554
JH
6502
6503 /* For type inconsistent program we may end up looking up virtual method
6504 in virtual table that does not contain TOKEN entries. We may overrun
6505 the virtual table and pick up a constant or RTTI info pointer.
6506 In any case the call is undefined. */
6507 if (!fn
6508 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6509 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6510 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6511 else
6512 {
6513 fn = TREE_OPERAND (fn, 0);
6514
6515 /* When cgraph node is missing and function is not public, we cannot
6516 devirtualize. This can happen in WHOPR when the actual method
6517 ends up in other partition, because we found devirtualization
6518 possibility too late. */
6519 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
6520 {
6521 if (can_refer)
6522 {
6523 *can_refer = false;
6524 return fn;
6525 }
6526 return NULL_TREE;
6527 }
9de2f554 6528 }
81fa35bd 6529
7501ca28
RG
6530 /* Make sure we create a cgraph node for functions we'll reference.
6531 They can be non-existent if the reference comes from an entry
6532 of an external vtable for example. */
d52f5295 6533 cgraph_node::get_create (fn);
7501ca28 6534
81fa35bd
MJ
6535 return fn;
6536}
6537
85942f45
JH
6538/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6539 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6540 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
6541 OBJ_TYPE_REF_OBJECT(REF).
6542 Set CAN_REFER if non-NULL to false if method
6543 is not referable or if the virtual table is ill-formed (such as rewriten
6544 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
6545
6546tree
ec77d61f
JH
6547gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6548 bool *can_refer)
85942f45
JH
6549{
6550 unsigned HOST_WIDE_INT offset;
6551 tree v;
6552
6553 v = BINFO_VTABLE (known_binfo);
6554 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6555 if (!v)
6556 return NULL_TREE;
6557
6558 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
6559 {
6560 if (can_refer)
6561 *can_refer = false;
6562 return NULL_TREE;
6563 }
6564 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
6565}
6566
737f500a
RB
6567/* Given a pointer value T, return a simplified version of an
6568 indirection through T, or NULL_TREE if no simplification is
b184c8f1
AM
6569 possible. Note that the resulting type may be different from
6570 the type pointed to in the sense that it is still compatible
6571 from the langhooks point of view. */
6572
tree
gimple_fold_indirect_ref (tree t)
{
  /* PTYPE is the pointer type of T, TYPE the type being dereferenced.  */
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* Give up on non-pointers and on ref-all pointers where replacing the
     dereference with a typed access would lose the alias-all property.  */
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
       {
	 /* Index with the array's lower bound if there is one, else 0.  */
	 tree type_domain = TYPE_DOMAIN (optype);
	 tree min_val = size_zero_node;
	 if (type_domain && TYPE_MIN_VALUE (type_domain))
	   min_val = TYPE_MIN_VALUE (type_domain);
	 if (TREE_CODE (min_val) == INTEGER_CST)
	   return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
       }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the byte offset selects an element that is
	     within the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
	  /* The imaginary part lives exactly one element size past the
	     real part, so the offset must equal TYPE_SIZE_UNIT.  */
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively fold the inner dereference; fall back to building
	 an INDIRECT_REF of the original operand if that fails.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  /* No folding opportunity found.  */
  return NULL_TREE;
}
19e51b40
JJ
6687
6688/* Return true if CODE is an operation that when operating on signed
6689 integer types involves undefined behavior on overflow and the
6690 operation can be expressed with unsigned arithmetic. */
6691
6692bool
6693arith_code_with_undefined_signed_overflow (tree_code code)
6694{
6695 switch (code)
6696 {
6697 case PLUS_EXPR:
6698 case MINUS_EXPR:
6699 case MULT_EXPR:
6700 case NEGATE_EXPR:
6701 case POINTER_PLUS_EXPR:
6702 return true;
6703 default:
6704 return false;
6705 }
6706}
6707
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  /* The whole computation is redone in the unsigned variant of the
     lhs type, where overflow is well-defined (wraps).  */
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert the rhs operands (ops 1..n-1; op 0 is the lhs, handled
     separately below) to the unsigned type, accumulating any needed
     conversion statements in STMTS.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  /* Give STMT a fresh unsigned result; the original LHS is defined by
     the conversion statement appended last.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR has no meaning on the unsigned integer type;
     it becomes a plain addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original lhs type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
d4f5cd5e 6744
3d2cf79f 6745
c26de36d
RB
6746/* The valueization hook we use for the gimple_build API simplification.
6747 This makes us match fold_buildN behavior by only combining with
6748 statements in the sequence(s) we are currently building. */
6749
6750static tree
6751gimple_build_valueize (tree op)
6752{
6753 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6754 return op;
6755 return NULL_TREE;
6756}
6757
3d2cf79f 6758/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 6759 simplifying it first if possible. Returns the built
3d2cf79f
RB
6760 expression value and appends statements possibly defining it
6761 to SEQ. */
6762
6763tree
6764gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6765 enum tree_code code, tree type, tree op0)
3d2cf79f 6766{
c26de36d 6767 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
6768 if (!res)
6769 {
a15ebbcd 6770 res = create_tmp_reg_or_ssa_name (type);
355fe088 6771 gimple *stmt;
3d2cf79f
RB
6772 if (code == REALPART_EXPR
6773 || code == IMAGPART_EXPR
6774 || code == VIEW_CONVERT_EXPR)
0d0e4a03 6775 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 6776 else
0d0e4a03 6777 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
6778 gimple_set_location (stmt, loc);
6779 gimple_seq_add_stmt_without_update (seq, stmt);
6780 }
6781 return res;
6782}
6783
6784/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 6785 simplifying it first if possible. Returns the built
3d2cf79f
RB
6786 expression value and appends statements possibly defining it
6787 to SEQ. */
6788
6789tree
6790gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6791 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 6792{
c26de36d 6793 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
6794 if (!res)
6795 {
a15ebbcd 6796 res = create_tmp_reg_or_ssa_name (type);
355fe088 6797 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
6798 gimple_set_location (stmt, loc);
6799 gimple_seq_add_stmt_without_update (seq, stmt);
6800 }
6801 return res;
6802}
6803
6804/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 6805 simplifying it first if possible. Returns the built
3d2cf79f
RB
6806 expression value and appends statements possibly defining it
6807 to SEQ. */
6808
6809tree
6810gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6811 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
6812{
6813 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 6814 seq, gimple_build_valueize);
3d2cf79f
RB
6815 if (!res)
6816 {
a15ebbcd 6817 res = create_tmp_reg_or_ssa_name (type);
355fe088 6818 gimple *stmt;
3d2cf79f 6819 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
6820 stmt = gimple_build_assign (res, code,
6821 build3 (code, type, op0, op1, op2));
3d2cf79f 6822 else
0d0e4a03 6823 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
6824 gimple_set_location (stmt, loc);
6825 gimple_seq_add_stmt_without_update (seq, stmt);
6826 }
6827 return res;
6828}
6829
6830/* Build the call FN (ARG0) with a result of type TYPE
6831 (or no result if TYPE is void) with location LOC,
c26de36d 6832 simplifying it first if possible. Returns the built
3d2cf79f
RB
6833 expression value (or NULL_TREE if TYPE is void) and appends
6834 statements possibly defining it to SEQ. */
6835
6836tree
6837gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6838 enum built_in_function fn, tree type, tree arg0)
3d2cf79f 6839{
c26de36d 6840 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
6841 if (!res)
6842 {
6843 tree decl = builtin_decl_implicit (fn);
355fe088 6844 gimple *stmt = gimple_build_call (decl, 1, arg0);
3d2cf79f
RB
6845 if (!VOID_TYPE_P (type))
6846 {
a15ebbcd 6847 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
6848 gimple_call_set_lhs (stmt, res);
6849 }
6850 gimple_set_location (stmt, loc);
6851 gimple_seq_add_stmt_without_update (seq, stmt);
6852 }
6853 return res;
6854}
6855
6856/* Build the call FN (ARG0, ARG1) with a result of type TYPE
6857 (or no result if TYPE is void) with location LOC,
c26de36d 6858 simplifying it first if possible. Returns the built
3d2cf79f
RB
6859 expression value (or NULL_TREE if TYPE is void) and appends
6860 statements possibly defining it to SEQ. */
6861
6862tree
6863gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6864 enum built_in_function fn, tree type, tree arg0, tree arg1)
3d2cf79f 6865{
c26de36d 6866 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
6867 if (!res)
6868 {
6869 tree decl = builtin_decl_implicit (fn);
355fe088 6870 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
3d2cf79f
RB
6871 if (!VOID_TYPE_P (type))
6872 {
a15ebbcd 6873 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
6874 gimple_call_set_lhs (stmt, res);
6875 }
6876 gimple_set_location (stmt, loc);
6877 gimple_seq_add_stmt_without_update (seq, stmt);
6878 }
6879 return res;
6880}
6881
6882/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6883 (or no result if TYPE is void) with location LOC,
c26de36d 6884 simplifying it first if possible. Returns the built
3d2cf79f
RB
6885 expression value (or NULL_TREE if TYPE is void) and appends
6886 statements possibly defining it to SEQ. */
6887
6888tree
6889gimple_build (gimple_seq *seq, location_t loc,
6890 enum built_in_function fn, tree type,
c26de36d 6891 tree arg0, tree arg1, tree arg2)
3d2cf79f 6892{
c26de36d
RB
6893 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6894 seq, gimple_build_valueize);
3d2cf79f
RB
6895 if (!res)
6896 {
6897 tree decl = builtin_decl_implicit (fn);
355fe088 6898 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
3d2cf79f
RB
6899 if (!VOID_TYPE_P (type))
6900 {
a15ebbcd 6901 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
6902 gimple_call_set_lhs (stmt, res);
6903 }
6904 gimple_set_location (stmt, loc);
6905 gimple_seq_add_stmt_without_update (seq, stmt);
6906 }
6907 return res;
6908}
6909
6910/* Build the conversion (TYPE) OP with a result of type TYPE
6911 with location LOC if such conversion is neccesary in GIMPLE,
6912 simplifying it first.
6913 Returns the built expression value and appends
6914 statements possibly defining it to SEQ. */
d4f5cd5e
RB
6915
6916tree
6917gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6918{
6919 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6920 return op;
3d2cf79f 6921 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 6922}
68e57f04 6923
74e3c262
RB
6924/* Build the conversion (ptrofftype) OP with a result of a type
6925 compatible with ptrofftype with location LOC if such conversion
6926 is neccesary in GIMPLE, simplifying it first.
6927 Returns the built expression value and appends
6928 statements possibly defining it to SEQ. */
6929
6930tree
6931gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6932{
6933 if (ptrofftype_p (TREE_TYPE (op)))
6934 return op;
6935 return gimple_convert (seq, loc, sizetype, op);
6936}
6937
68e57f04
RS
6938/* Return true if the result of assignment STMT is known to be non-negative.
6939 If the return value is based on the assumption that signed overflow is
6940 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6941 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6942
6943static bool
6944gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6945 int depth)
6946{
6947 enum tree_code code = gimple_assign_rhs_code (stmt);
6948 switch (get_gimple_rhs_class (code))
6949 {
6950 case GIMPLE_UNARY_RHS:
6951 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6952 gimple_expr_type (stmt),
6953 gimple_assign_rhs1 (stmt),
6954 strict_overflow_p, depth);
6955 case GIMPLE_BINARY_RHS:
6956 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6957 gimple_expr_type (stmt),
6958 gimple_assign_rhs1 (stmt),
6959 gimple_assign_rhs2 (stmt),
6960 strict_overflow_p, depth);
6961 case GIMPLE_TERNARY_RHS:
6962 return false;
6963 case GIMPLE_SINGLE_RHS:
6964 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6965 strict_overflow_p, depth);
6966 case GIMPLE_INVALID_RHS:
6967 break;
6968 }
6969 gcc_unreachable ();
6970}
6971
6972/* Return true if return value of call STMT is known to be non-negative.
6973 If the return value is based on the assumption that signed overflow is
6974 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6975 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6976
6977static bool
6978gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6979 int depth)
6980{
6981 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6982 gimple_call_arg (stmt, 0) : NULL_TREE;
6983 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6984 gimple_call_arg (stmt, 1) : NULL_TREE;
6985
6986 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
1d9da71f 6987 gimple_call_combined_fn (stmt),
68e57f04
RS
6988 arg0,
6989 arg1,
6990 strict_overflow_p, depth);
6991}
6992
4534c203
RB
6993/* Return true if return value of call STMT is known to be non-negative.
6994 If the return value is based on the assumption that signed overflow is
6995 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6996 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6997
6998static bool
6999gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7000 int depth)
7001{
7002 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7003 {
7004 tree arg = gimple_phi_arg_def (stmt, i);
7005 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7006 return false;
7007 }
7008 return true;
7009}
7010
68e57f04
RS
7011/* Return true if STMT is known to compute a non-negative value.
7012 If the return value is based on the assumption that signed overflow is
7013 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7014 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7015
7016bool
7017gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7018 int depth)
7019{
7020 switch (gimple_code (stmt))
7021 {
7022 case GIMPLE_ASSIGN:
7023 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7024 depth);
7025 case GIMPLE_CALL:
7026 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7027 depth);
4534c203
RB
7028 case GIMPLE_PHI:
7029 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7030 depth);
68e57f04
RS
7031 default:
7032 return false;
7033 }
7034}
67dbe582
RS
7035
7036/* Return true if the floating-point value computed by assignment STMT
7037 is known to have an integer value. We also allow +Inf, -Inf and NaN
5a00b0aa 7038 to be considered integer values. Return false for signaling NaN.
67dbe582
RS
7039
7040 DEPTH is the current nesting depth of the query. */
7041
7042static bool
7043gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7044{
7045 enum tree_code code = gimple_assign_rhs_code (stmt);
7046 switch (get_gimple_rhs_class (code))
7047 {
7048 case GIMPLE_UNARY_RHS:
7049 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7050 gimple_assign_rhs1 (stmt), depth);
7051 case GIMPLE_BINARY_RHS:
7052 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7053 gimple_assign_rhs1 (stmt),
7054 gimple_assign_rhs2 (stmt), depth);
7055 case GIMPLE_TERNARY_RHS:
7056 return false;
7057 case GIMPLE_SINGLE_RHS:
7058 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7059 case GIMPLE_INVALID_RHS:
7060 break;
7061 }
7062 gcc_unreachable ();
7063}
7064
7065/* Return true if the floating-point value computed by call STMT is known
7066 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7067 considered integer values. Return false for signaling NaN.
67dbe582
RS
7068
7069 DEPTH is the current nesting depth of the query. */
7070
7071static bool
7072gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7073{
7074 tree arg0 = (gimple_call_num_args (stmt) > 0
7075 ? gimple_call_arg (stmt, 0)
7076 : NULL_TREE);
7077 tree arg1 = (gimple_call_num_args (stmt) > 1
7078 ? gimple_call_arg (stmt, 1)
7079 : NULL_TREE);
1d9da71f 7080 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
7081 arg0, arg1, depth);
7082}
7083
7084/* Return true if the floating-point result of phi STMT is known to have
7085 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 7086 integer values. Return false for signaling NaN.
67dbe582
RS
7087
7088 DEPTH is the current nesting depth of the query. */
7089
7090static bool
7091gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7092{
7093 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7094 {
7095 tree arg = gimple_phi_arg_def (stmt, i);
7096 if (!integer_valued_real_single_p (arg, depth + 1))
7097 return false;
7098 }
7099 return true;
7100}
7101
7102/* Return true if the floating-point value computed by STMT is known
7103 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7104 considered integer values. Return false for signaling NaN.
67dbe582
RS
7105
7106 DEPTH is the current nesting depth of the query. */
7107
7108bool
7109gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7110{
7111 switch (gimple_code (stmt))
7112 {
7113 case GIMPLE_ASSIGN:
7114 return gimple_assign_integer_valued_real_p (stmt, depth);
7115 case GIMPLE_CALL:
7116 return gimple_call_integer_valued_real_p (stmt, depth);
7117 case GIMPLE_PHI:
7118 return gimple_phi_integer_valued_real_p (stmt, depth);
7119 default:
7120 return false;
7121 }
7122}