]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
fix PR68343: disable fuse-*.c tests for isl 0.14 or earlier
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
818ab71a 2 Copyright (C) 2010-2016 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5
AM
25#include "target.h"
26#include "rtl.h"
cbdd87d4 27#include "tree.h"
c7131fb2 28#include "gimple.h"
957060b5 29#include "predict.h"
c7131fb2 30#include "ssa.h"
957060b5
AM
31#include "cgraph.h"
32#include "gimple-pretty-print.h"
c7131fb2 33#include "fold-const.h"
36566b39
PK
34#include "stmt.h"
35#include "expr.h"
36#include "stor-layout.h"
7ee2468b 37#include "dumpfile.h"
2fb9a547 38#include "gimple-fold.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
442b4905
AM
41#include "tree-into-ssa.h"
42#include "tree-dfa.h"
7a300452 43#include "tree-ssa.h"
cbdd87d4 44#include "tree-ssa-propagate.h"
450ad0cd 45#include "ipa-utils.h"
4484a35a 46#include "tree-ssa-address.h"
862d0b35 47#include "langhooks.h"
19e51b40 48#include "gimplify-me.h"
2b5f0895 49#include "dbgcnt.h"
9b2b7279 50#include "builtins.h"
e0ee10ed
RB
51#include "tree-eh.h"
52#include "gimple-match.h"
48126138 53#include "gomp-constants.h"
f869c12f 54#include "optabs-query.h"
451e8dae 55#include "omp-low.h"
3de2a40e 56#include "ipa-chkp.h"
f869c12f 57
cbdd87d4 58
b3b9f3d0 59/* Return true when DECL can be referenced from current unit.
c44c2088
JH
60 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
61 We can get declarations that are not possible to reference for various
62 reasons:
1389294c 63
1389294c
JH
64 1) When analyzing C++ virtual tables.
65 C++ virtual tables do have known constructors even
66 when they are keyed to other compilation unit.
67 Those tables can contain pointers to methods and vars
68 in other units. Those methods have both STATIC and EXTERNAL
69 set.
70 2) In WHOPR mode devirtualization might lead to reference
71 to method that was partitioned elsehwere.
72 In this case we have static VAR_DECL or FUNCTION_DECL
73 that has no corresponding callgraph/varpool node
b3b9f3d0
JH
74 declaring the body.
75 3) COMDAT functions referred by external vtables that
3e89949e 76 we devirtualize only during final compilation stage.
b3b9f3d0
JH
77 At this time we already decided that we will not output
78 the function body and thus we can't reference the symbol
79 directly. */
80
1389294c 81static bool
c44c2088 82can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
1389294c 83{
2c8326a5 84 varpool_node *vnode;
1389294c 85 struct cgraph_node *node;
5e20cdc9 86 symtab_node *snode;
c44c2088 87
00de328a 88 if (DECL_ABSTRACT_P (decl))
1632a686
JH
89 return false;
90
91 /* We are concerned only about static/external vars and functions. */
92 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
93 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
94 return true;
95
96 /* Static objects can be referred only if they was not optimized out yet. */
97 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
98 {
3aaf0529
JH
99 /* Before we start optimizing unreachable code we can be sure all
100 static objects are defined. */
3dafb85c 101 if (symtab->function_flags_ready)
3aaf0529 102 return true;
d52f5295 103 snode = symtab_node::get (decl);
3aaf0529 104 if (!snode || !snode->definition)
1632a686 105 return false;
7de90a6c 106 node = dyn_cast <cgraph_node *> (snode);
1632a686
JH
107 return !node || !node->global.inlined_to;
108 }
109
6da8be89 110 /* We will later output the initializer, so we can refer to it.
c44c2088 111 So we are concerned only when DECL comes from initializer of
3aaf0529 112 external var or var that has been optimized out. */
c44c2088
JH
113 if (!from_decl
114 || TREE_CODE (from_decl) != VAR_DECL
3aaf0529 115 || (!DECL_EXTERNAL (from_decl)
9041d2e6 116 && (vnode = varpool_node::get (from_decl)) != NULL
3aaf0529 117 && vnode->definition)
6da8be89 118 || (flag_ltrans
9041d2e6 119 && (vnode = varpool_node::get (from_decl)) != NULL
6adda80b 120 && vnode->in_other_partition))
c44c2088 121 return true;
c44c2088
JH
122 /* We are folding reference from external vtable. The vtable may reffer
123 to a symbol keyed to other compilation unit. The other compilation
124 unit may be in separate DSO and the symbol may be hidden. */
125 if (DECL_VISIBILITY_SPECIFIED (decl)
126 && DECL_EXTERNAL (decl)
a33a931b 127 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
d52f5295 128 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
c44c2088 129 return false;
b3b9f3d0
JH
130 /* When function is public, we always can introduce new reference.
131 Exception are the COMDAT functions where introducing a direct
132 reference imply need to include function body in the curren tunit. */
133 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
134 return true;
3aaf0529
JH
135 /* We have COMDAT. We are going to check if we still have definition
136 or if the definition is going to be output in other partition.
137 Bypass this when gimplifying; all needed functions will be produced.
c44c2088
JH
138
139 As observed in PR20991 for already optimized out comdat virtual functions
073a8998 140 it may be tempting to not necessarily give up because the copy will be
c44c2088
JH
141 output elsewhere when corresponding vtable is output.
142 This is however not possible - ABI specify that COMDATs are output in
143 units where they are used and when the other unit was compiled with LTO
144 it is possible that vtable was kept public while the function itself
145 was privatized. */
3dafb85c 146 if (!symtab->function_flags_ready)
b3b9f3d0 147 return true;
c44c2088 148
d52f5295 149 snode = symtab_node::get (decl);
3aaf0529
JH
150 if (!snode
151 || ((!snode->definition || DECL_EXTERNAL (decl))
152 && (!snode->in_other_partition
153 || (!snode->forced_by_abi && !snode->force_output))))
154 return false;
155 node = dyn_cast <cgraph_node *> (snode);
156 return !node || !node->global.inlined_to;
1389294c
JH
157}
158
0038d4e0 159/* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
c44c2088
JH
160 acceptable form for is_gimple_min_invariant.
161 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
17f39a39
JH
162
163tree
c44c2088 164canonicalize_constructor_val (tree cval, tree from_decl)
17f39a39 165{
50619002
EB
166 tree orig_cval = cval;
167 STRIP_NOPS (cval);
315f5f1b
RG
168 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
169 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
17f39a39 170 {
315f5f1b
RG
171 tree ptr = TREE_OPERAND (cval, 0);
172 if (is_gimple_min_invariant (ptr))
173 cval = build1_loc (EXPR_LOCATION (cval),
174 ADDR_EXPR, TREE_TYPE (ptr),
175 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
176 ptr,
177 fold_convert (ptr_type_node,
178 TREE_OPERAND (cval, 1))));
17f39a39
JH
179 }
180 if (TREE_CODE (cval) == ADDR_EXPR)
181 {
5a27a197
RG
182 tree base = NULL_TREE;
183 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
ca5f4331
MM
184 {
185 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
186 if (base)
187 TREE_OPERAND (cval, 0) = base;
188 }
5a27a197
RG
189 else
190 base = get_base_address (TREE_OPERAND (cval, 0));
7501ca28
RG
191 if (!base)
192 return NULL_TREE;
b3b9f3d0 193
7501ca28
RG
194 if ((TREE_CODE (base) == VAR_DECL
195 || TREE_CODE (base) == FUNCTION_DECL)
c44c2088 196 && !can_refer_decl_in_current_unit_p (base, from_decl))
1389294c 197 return NULL_TREE;
7501ca28 198 if (TREE_CODE (base) == VAR_DECL)
46eb666a 199 TREE_ADDRESSABLE (base) = 1;
7501ca28
RG
200 else if (TREE_CODE (base) == FUNCTION_DECL)
201 {
202 /* Make sure we create a cgraph node for functions we'll reference.
203 They can be non-existent if the reference comes from an entry
204 of an external vtable for example. */
d52f5295 205 cgraph_node::get_create (base);
7501ca28 206 }
0038d4e0 207 /* Fixup types in global initializers. */
73aef89e
RG
208 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
209 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
50619002
EB
210
211 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
212 cval = fold_convert (TREE_TYPE (orig_cval), cval);
213 return cval;
17f39a39 214 }
846abd0d
RB
215 if (TREE_OVERFLOW_P (cval))
216 return drop_tree_overflow (cval);
50619002 217 return orig_cval;
17f39a39 218}
cbdd87d4
RG
219
220/* If SYM is a constant variable with known value, return the value.
221 NULL_TREE is returned otherwise. */
222
223tree
224get_symbol_constant_value (tree sym)
225{
6a6dac52
JH
226 tree val = ctor_for_folding (sym);
227 if (val != error_mark_node)
cbdd87d4 228 {
cbdd87d4
RG
229 if (val)
230 {
9d60be38 231 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 232 if (val && is_gimple_min_invariant (val))
17f39a39 233 return val;
1389294c
JH
234 else
235 return NULL_TREE;
cbdd87d4
RG
236 }
237 /* Variables declared 'const' without an initializer
238 have zero as the initializer if they may not be
239 overridden at link or run time. */
240 if (!val
b8a8c472 241 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 242 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
243 }
244
245 return NULL_TREE;
246}
247
248
cbdd87d4
RG
249
250/* Subroutine of fold_stmt. We perform several simplifications of the
251 memory reference tree EXPR and make sure to re-gimplify them properly
252 after propagation of constant addresses. IS_LHS is true if the
253 reference is supposed to be an lvalue. */
254
255static tree
256maybe_fold_reference (tree expr, bool is_lhs)
257{
17f39a39 258 tree result;
cbdd87d4 259
f0eddb90
RG
260 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
261 || TREE_CODE (expr) == REALPART_EXPR
262 || TREE_CODE (expr) == IMAGPART_EXPR)
263 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
264 return fold_unary_loc (EXPR_LOCATION (expr),
265 TREE_CODE (expr),
266 TREE_TYPE (expr),
267 TREE_OPERAND (expr, 0));
268 else if (TREE_CODE (expr) == BIT_FIELD_REF
269 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
270 return fold_ternary_loc (EXPR_LOCATION (expr),
271 TREE_CODE (expr),
272 TREE_TYPE (expr),
273 TREE_OPERAND (expr, 0),
274 TREE_OPERAND (expr, 1),
275 TREE_OPERAND (expr, 2));
276
f0eddb90
RG
277 if (!is_lhs
278 && (result = fold_const_aggregate_ref (expr))
279 && is_gimple_min_invariant (result))
280 return result;
cbdd87d4 281
cbdd87d4
RG
282 return NULL_TREE;
283}
284
285
286/* Attempt to fold an assignment statement pointed-to by SI. Returns a
287 replacement rhs for the statement or NULL_TREE if no simplification
288 could be made. It is assumed that the operands have been previously
289 folded. */
290
291static tree
292fold_gimple_assign (gimple_stmt_iterator *si)
293{
355fe088 294 gimple *stmt = gsi_stmt (*si);
cbdd87d4
RG
295 enum tree_code subcode = gimple_assign_rhs_code (stmt);
296 location_t loc = gimple_location (stmt);
297
298 tree result = NULL_TREE;
299
300 switch (get_gimple_rhs_class (subcode))
301 {
302 case GIMPLE_SINGLE_RHS:
303 {
304 tree rhs = gimple_assign_rhs1 (stmt);
305
8c00ba08
JW
306 if (TREE_CLOBBER_P (rhs))
307 return NULL_TREE;
308
4e71066d 309 if (REFERENCE_CLASS_P (rhs))
cbdd87d4
RG
310 return maybe_fold_reference (rhs, false);
311
bdf37f7a
JH
312 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
313 {
314 tree val = OBJ_TYPE_REF_EXPR (rhs);
315 if (is_gimple_min_invariant (val))
316 return val;
f8a39967 317 else if (flag_devirtualize && virtual_method_call_p (rhs))
bdf37f7a
JH
318 {
319 bool final;
320 vec <cgraph_node *>targets
f8a39967 321 = possible_polymorphic_call_targets (rhs, stmt, &final);
2b5f0895 322 if (final && targets.length () <= 1 && dbg_cnt (devirt))
bdf37f7a 323 {
2b5f0895
XDL
324 if (dump_enabled_p ())
325 {
807b7d62 326 location_t loc = gimple_location_safe (stmt);
2b5f0895
XDL
327 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
328 "resolving virtual function address "
329 "reference to function %s\n",
330 targets.length () == 1
331 ? targets[0]->name ()
3ef276e4 332 : "NULL");
2b5f0895 333 }
3ef276e4
RB
334 if (targets.length () == 1)
335 {
336 val = fold_convert (TREE_TYPE (val),
337 build_fold_addr_expr_loc
338 (loc, targets[0]->decl));
339 STRIP_USELESS_TYPE_CONVERSION (val);
340 }
341 else
342 /* We can not use __builtin_unreachable here because it
343 can not have address taken. */
344 val = build_int_cst (TREE_TYPE (val), 0);
bdf37f7a
JH
345 return val;
346 }
347 }
bdf37f7a 348 }
7524f419 349
cbdd87d4
RG
350 else if (TREE_CODE (rhs) == ADDR_EXPR)
351 {
70f34814
RG
352 tree ref = TREE_OPERAND (rhs, 0);
353 tree tem = maybe_fold_reference (ref, true);
354 if (tem
355 && TREE_CODE (tem) == MEM_REF
356 && integer_zerop (TREE_OPERAND (tem, 1)))
357 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
358 else if (tem)
cbdd87d4
RG
359 result = fold_convert (TREE_TYPE (rhs),
360 build_fold_addr_expr_loc (loc, tem));
70f34814
RG
361 else if (TREE_CODE (ref) == MEM_REF
362 && integer_zerop (TREE_OPERAND (ref, 1)))
363 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
7524f419
RB
364
365 if (result)
366 {
367 /* Strip away useless type conversions. Both the
368 NON_LVALUE_EXPR that may have been added by fold, and
369 "useless" type conversions that might now be apparent
370 due to propagation. */
371 STRIP_USELESS_TYPE_CONVERSION (result);
372
373 if (result != rhs && valid_gimple_rhs_p (result))
374 return result;
375 }
cbdd87d4
RG
376 }
377
378 else if (TREE_CODE (rhs) == CONSTRUCTOR
7524f419 379 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
cbdd87d4
RG
380 {
381 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
382 unsigned i;
383 tree val;
384
385 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7524f419 386 if (! CONSTANT_CLASS_P (val))
cbdd87d4
RG
387 return NULL_TREE;
388
389 return build_vector_from_ctor (TREE_TYPE (rhs),
390 CONSTRUCTOR_ELTS (rhs));
391 }
392
393 else if (DECL_P (rhs))
9d60be38 394 return get_symbol_constant_value (rhs);
cbdd87d4
RG
395 }
396 break;
397
398 case GIMPLE_UNARY_RHS:
cbdd87d4
RG
399 break;
400
401 case GIMPLE_BINARY_RHS:
cbdd87d4
RG
402 break;
403
0354c0c7 404 case GIMPLE_TERNARY_RHS:
5c099d40
RB
405 result = fold_ternary_loc (loc, subcode,
406 TREE_TYPE (gimple_assign_lhs (stmt)),
407 gimple_assign_rhs1 (stmt),
408 gimple_assign_rhs2 (stmt),
409 gimple_assign_rhs3 (stmt));
0354c0c7
BS
410
411 if (result)
412 {
413 STRIP_USELESS_TYPE_CONVERSION (result);
414 if (valid_gimple_rhs_p (result))
415 return result;
0354c0c7
BS
416 }
417 break;
418
cbdd87d4
RG
419 case GIMPLE_INVALID_RHS:
420 gcc_unreachable ();
421 }
422
423 return NULL_TREE;
424}
425
fef5a0d9
RB
426
427/* Replace a statement at *SI_P with a sequence of statements in STMTS,
428 adjusting the replacement stmts location and virtual operands.
429 If the statement has a lhs the last stmt in the sequence is expected
430 to assign to that lhs. */
431
432static void
433gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
434{
355fe088 435 gimple *stmt = gsi_stmt (*si_p);
fef5a0d9
RB
436
437 if (gimple_has_location (stmt))
438 annotate_all_with_location (stmts, gimple_location (stmt));
439
440 /* First iterate over the replacement statements backward, assigning
441 virtual operands to their defining statements. */
355fe088 442 gimple *laststore = NULL;
fef5a0d9
RB
443 for (gimple_stmt_iterator i = gsi_last (stmts);
444 !gsi_end_p (i); gsi_prev (&i))
445 {
355fe088 446 gimple *new_stmt = gsi_stmt (i);
fef5a0d9
RB
447 if ((gimple_assign_single_p (new_stmt)
448 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
449 || (is_gimple_call (new_stmt)
450 && (gimple_call_flags (new_stmt)
451 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
452 {
453 tree vdef;
454 if (!laststore)
455 vdef = gimple_vdef (stmt);
456 else
457 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
458 gimple_set_vdef (new_stmt, vdef);
459 if (vdef && TREE_CODE (vdef) == SSA_NAME)
460 SSA_NAME_DEF_STMT (vdef) = new_stmt;
461 laststore = new_stmt;
462 }
463 }
464
465 /* Second iterate over the statements forward, assigning virtual
466 operands to their uses. */
467 tree reaching_vuse = gimple_vuse (stmt);
468 for (gimple_stmt_iterator i = gsi_start (stmts);
469 !gsi_end_p (i); gsi_next (&i))
470 {
355fe088 471 gimple *new_stmt = gsi_stmt (i);
fef5a0d9
RB
472 /* If the new statement possibly has a VUSE, update it with exact SSA
473 name we know will reach this one. */
474 if (gimple_has_mem_ops (new_stmt))
475 gimple_set_vuse (new_stmt, reaching_vuse);
476 gimple_set_modified (new_stmt, true);
477 if (gimple_vdef (new_stmt))
478 reaching_vuse = gimple_vdef (new_stmt);
479 }
480
481 /* If the new sequence does not do a store release the virtual
482 definition of the original statement. */
483 if (reaching_vuse
484 && reaching_vuse == gimple_vuse (stmt))
485 {
486 tree vdef = gimple_vdef (stmt);
487 if (vdef
488 && TREE_CODE (vdef) == SSA_NAME)
489 {
490 unlink_stmt_vdef (stmt);
491 release_ssa_name (vdef);
492 }
493 }
494
495 /* Finally replace the original statement with the sequence. */
496 gsi_replace_with_seq (si_p, stmts, false);
497}
498
cbdd87d4
RG
499/* Convert EXPR into a GIMPLE value suitable for substitution on the
500 RHS of an assignment. Insert the necessary statements before
501 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
502 is replaced. If the call is expected to produces a result, then it
503 is replaced by an assignment of the new RHS to the result variable.
504 If the result is to be ignored, then the call is replaced by a
fe2ef088
MM
505 GIMPLE_NOP. A proper VDEF chain is retained by making the first
506 VUSE and the last VDEF of the whole sequence be the same as the replaced
507 statement and using new SSA names for stores in between. */
cbdd87d4
RG
508
509void
510gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
511{
512 tree lhs;
355fe088 513 gimple *stmt, *new_stmt;
cbdd87d4 514 gimple_stmt_iterator i;
355a7673 515 gimple_seq stmts = NULL;
cbdd87d4
RG
516
517 stmt = gsi_stmt (*si_p);
518
519 gcc_assert (is_gimple_call (stmt));
520
45852dcc 521 push_gimplify_context (gimple_in_ssa_p (cfun));
cbdd87d4 522
e256dfce 523 lhs = gimple_call_lhs (stmt);
cbdd87d4 524 if (lhs == NULL_TREE)
6e572326
RG
525 {
526 gimplify_and_add (expr, &stmts);
527 /* We can end up with folding a memcpy of an empty class assignment
528 which gets optimized away by C++ gimplification. */
529 if (gimple_seq_empty_p (stmts))
530 {
9fdc58de 531 pop_gimplify_context (NULL);
6e572326
RG
532 if (gimple_in_ssa_p (cfun))
533 {
534 unlink_stmt_vdef (stmt);
535 release_defs (stmt);
536 }
f6b4dc28 537 gsi_replace (si_p, gimple_build_nop (), false);
6e572326
RG
538 return;
539 }
540 }
cbdd87d4 541 else
e256dfce
RG
542 {
543 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
544 new_stmt = gimple_build_assign (lhs, tmp);
545 i = gsi_last (stmts);
546 gsi_insert_after_without_update (&i, new_stmt,
547 GSI_CONTINUE_LINKING);
548 }
cbdd87d4
RG
549
550 pop_gimplify_context (NULL);
551
fef5a0d9
RB
552 gsi_replace_with_seq_vops (si_p, stmts);
553}
cbdd87d4 554
fef5a0d9
RB
555
556/* Replace the call at *GSI with the gimple value VAL. */
557
558static void
559replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
560{
355fe088 561 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 562 tree lhs = gimple_call_lhs (stmt);
355fe088 563 gimple *repl;
fef5a0d9 564 if (lhs)
e256dfce 565 {
fef5a0d9
RB
566 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
567 val = fold_convert (TREE_TYPE (lhs), val);
568 repl = gimple_build_assign (lhs, val);
569 }
570 else
571 repl = gimple_build_nop ();
572 tree vdef = gimple_vdef (stmt);
573 if (vdef && TREE_CODE (vdef) == SSA_NAME)
574 {
575 unlink_stmt_vdef (stmt);
576 release_ssa_name (vdef);
577 }
f6b4dc28 578 gsi_replace (gsi, repl, false);
fef5a0d9
RB
579}
580
581/* Replace the call at *GSI with the new call REPL and fold that
582 again. */
583
584static void
355fe088 585replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
fef5a0d9 586{
355fe088 587 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
588 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
589 gimple_set_location (repl, gimple_location (stmt));
590 if (gimple_vdef (stmt)
591 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
592 {
593 gimple_set_vdef (repl, gimple_vdef (stmt));
594 gimple_set_vuse (repl, gimple_vuse (stmt));
595 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
596 }
f6b4dc28 597 gsi_replace (gsi, repl, false);
fef5a0d9
RB
598 fold_stmt (gsi);
599}
600
601/* Return true if VAR is a VAR_DECL or a component thereof. */
602
603static bool
604var_decl_component_p (tree var)
605{
606 tree inner = var;
607 while (handled_component_p (inner))
608 inner = TREE_OPERAND (inner, 0);
609 return SSA_VAR_P (inner);
610}
611
612/* Fold function call to builtin mem{{,p}cpy,move}. Return
86c5a5c3 613 false if no simplification can be made.
fef5a0d9
RB
614 If ENDP is 0, return DEST (like memcpy).
615 If ENDP is 1, return DEST+LEN (like mempcpy).
616 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
617 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
618 (memmove). */
619
620static bool
621gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
622 tree dest, tree src, int endp)
623{
355fe088 624 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
625 tree lhs = gimple_call_lhs (stmt);
626 tree len = gimple_call_arg (stmt, 2);
627 tree destvar, srcvar;
628 location_t loc = gimple_location (stmt);
629
630 /* If the LEN parameter is zero, return DEST. */
631 if (integer_zerop (len))
632 {
355fe088 633 gimple *repl;
fef5a0d9
RB
634 if (gimple_call_lhs (stmt))
635 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
636 else
637 repl = gimple_build_nop ();
638 tree vdef = gimple_vdef (stmt);
639 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 640 {
fef5a0d9
RB
641 unlink_stmt_vdef (stmt);
642 release_ssa_name (vdef);
643 }
f6b4dc28 644 gsi_replace (gsi, repl, false);
fef5a0d9
RB
645 return true;
646 }
647
648 /* If SRC and DEST are the same (and not volatile), return
649 DEST{,+LEN,+LEN-1}. */
650 if (operand_equal_p (src, dest, 0))
651 {
652 unlink_stmt_vdef (stmt);
653 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
654 release_ssa_name (gimple_vdef (stmt));
655 if (!lhs)
656 {
f6b4dc28 657 gsi_replace (gsi, gimple_build_nop (), false);
fef5a0d9
RB
658 return true;
659 }
660 goto done;
661 }
662 else
663 {
664 tree srctype, desttype;
665 unsigned int src_align, dest_align;
666 tree off0;
667
3de2a40e
IE
668 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
669 pointers as wide integer) and also may result in huge function
670 size because of inlined bounds copy. Thus don't inline for
671 functions we want to instrument. */
672 if (flag_check_pointer_bounds
673 && chkp_instrumentable_p (cfun->decl)
674 /* Even if data may contain pointers we can inline if copy
675 less than a pointer size. */
676 && (!tree_fits_uhwi_p (len)
677 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
678 return false;
679
fef5a0d9
RB
680 /* Build accesses at offset zero with a ref-all character type. */
681 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
682 ptr_mode, true), 0);
683
684 /* If we can perform the copy efficiently with first doing all loads
685 and then all stores inline it that way. Currently efficiently
686 means that we can load all the memory into a single integer
687 register which is what MOVE_MAX gives us. */
688 src_align = get_pointer_alignment (src);
689 dest_align = get_pointer_alignment (dest);
690 if (tree_fits_uhwi_p (len)
691 && compare_tree_int (len, MOVE_MAX) <= 0
692 /* ??? Don't transform copies from strings with known length this
693 confuses the tree-ssa-strlen.c. This doesn't handle
694 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
695 reason. */
696 && !c_strlen (src, 2))
697 {
698 unsigned ilen = tree_to_uhwi (len);
699 if (exact_log2 (ilen) != -1)
700 {
701 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
702 if (type
703 && TYPE_MODE (type) != BLKmode
704 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
705 == ilen * 8)
706 /* If the destination pointer is not aligned we must be able
707 to emit an unaligned store. */
708 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
f869c12f
RR
709 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
710 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
711 != CODE_FOR_nothing)))
fef5a0d9
RB
712 {
713 tree srctype = type;
714 tree desttype = type;
715 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
716 srctype = build_aligned_type (type, src_align);
717 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
718 tree tem = fold_const_aggregate_ref (srcmem);
719 if (tem)
720 srcmem = tem;
721 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
722 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
f869c12f
RR
723 src_align)
724 && (optab_handler (movmisalign_optab,
725 TYPE_MODE (type))
726 == CODE_FOR_nothing))
fef5a0d9
RB
727 srcmem = NULL_TREE;
728 if (srcmem)
729 {
355fe088 730 gimple *new_stmt;
fef5a0d9
RB
731 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
732 {
733 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
734 if (gimple_in_ssa_p (cfun))
735 srcmem = make_ssa_name (TREE_TYPE (srcmem),
736 new_stmt);
737 else
b731b390 738 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
fef5a0d9
RB
739 gimple_assign_set_lhs (new_stmt, srcmem);
740 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
741 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
742 }
743 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
744 desttype = build_aligned_type (type, dest_align);
745 new_stmt
746 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
747 dest, off0),
748 srcmem);
749 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
750 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
751 if (gimple_vdef (new_stmt)
752 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
753 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
754 if (!lhs)
755 {
f6b4dc28 756 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
757 return true;
758 }
759 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
760 goto done;
761 }
762 }
763 }
764 }
765
766 if (endp == 3)
767 {
768 /* Both DEST and SRC must be pointer types.
769 ??? This is what old code did. Is the testing for pointer types
770 really mandatory?
771
772 If either SRC is readonly or length is 1, we can use memcpy. */
773 if (!dest_align || !src_align)
774 return false;
775 if (readonly_data_expr (src)
776 || (tree_fits_uhwi_p (len)
777 && (MIN (src_align, dest_align) / BITS_PER_UNIT
778 >= tree_to_uhwi (len))))
779 {
780 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
781 if (!fn)
782 return false;
783 gimple_call_set_fndecl (stmt, fn);
784 gimple_call_set_arg (stmt, 0, dest);
785 gimple_call_set_arg (stmt, 1, src);
786 fold_stmt (gsi);
787 return true;
788 }
789
790 /* If *src and *dest can't overlap, optimize into memcpy as well. */
791 if (TREE_CODE (src) == ADDR_EXPR
792 && TREE_CODE (dest) == ADDR_EXPR)
793 {
794 tree src_base, dest_base, fn;
795 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
796 HOST_WIDE_INT size = -1;
797 HOST_WIDE_INT maxsize = -1;
ee45a32d 798 bool reverse;
fef5a0d9
RB
799
800 srcvar = TREE_OPERAND (src, 0);
801 src_base = get_ref_base_and_extent (srcvar, &src_offset,
ee45a32d 802 &size, &maxsize, &reverse);
fef5a0d9
RB
803 destvar = TREE_OPERAND (dest, 0);
804 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
ee45a32d 805 &size, &maxsize, &reverse);
fef5a0d9
RB
806 if (tree_fits_uhwi_p (len))
807 maxsize = tree_to_uhwi (len);
808 else
809 maxsize = -1;
810 src_offset /= BITS_PER_UNIT;
811 dest_offset /= BITS_PER_UNIT;
812 if (SSA_VAR_P (src_base)
813 && SSA_VAR_P (dest_base))
814 {
815 if (operand_equal_p (src_base, dest_base, 0)
816 && ranges_overlap_p (src_offset, maxsize,
817 dest_offset, maxsize))
818 return false;
819 }
820 else if (TREE_CODE (src_base) == MEM_REF
821 && TREE_CODE (dest_base) == MEM_REF)
822 {
823 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
824 TREE_OPERAND (dest_base, 0), 0))
825 return false;
826 offset_int off = mem_ref_offset (src_base) + src_offset;
827 if (!wi::fits_shwi_p (off))
828 return false;
829 src_offset = off.to_shwi ();
830
831 off = mem_ref_offset (dest_base) + dest_offset;
832 if (!wi::fits_shwi_p (off))
833 return false;
834 dest_offset = off.to_shwi ();
835 if (ranges_overlap_p (src_offset, maxsize,
836 dest_offset, maxsize))
837 return false;
838 }
839 else
840 return false;
841
842 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
843 if (!fn)
844 return false;
845 gimple_call_set_fndecl (stmt, fn);
846 gimple_call_set_arg (stmt, 0, dest);
847 gimple_call_set_arg (stmt, 1, src);
848 fold_stmt (gsi);
849 return true;
850 }
851
852 /* If the destination and source do not alias optimize into
853 memcpy as well. */
854 if ((is_gimple_min_invariant (dest)
855 || TREE_CODE (dest) == SSA_NAME)
856 && (is_gimple_min_invariant (src)
857 || TREE_CODE (src) == SSA_NAME))
858 {
859 ao_ref destr, srcr;
860 ao_ref_init_from_ptr_and_size (&destr, dest, len);
861 ao_ref_init_from_ptr_and_size (&srcr, src, len);
862 if (!refs_may_alias_p_1 (&destr, &srcr, false))
863 {
864 tree fn;
865 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
866 if (!fn)
867 return false;
868 gimple_call_set_fndecl (stmt, fn);
869 gimple_call_set_arg (stmt, 0, dest);
870 gimple_call_set_arg (stmt, 1, src);
871 fold_stmt (gsi);
872 return true;
873 }
874 }
875
876 return false;
877 }
878
879 if (!tree_fits_shwi_p (len))
880 return false;
881 /* FIXME:
882 This logic lose for arguments like (type *)malloc (sizeof (type)),
883 since we strip the casts of up to VOID return value from malloc.
884 Perhaps we ought to inherit type from non-VOID argument here? */
885 STRIP_NOPS (src);
886 STRIP_NOPS (dest);
887 if (!POINTER_TYPE_P (TREE_TYPE (src))
888 || !POINTER_TYPE_P (TREE_TYPE (dest)))
889 return false;
890 /* In the following try to find a type that is most natural to be
891 used for the memcpy source and destination and that allows
892 the most optimization when memcpy is turned into a plain assignment
893 using that type. In theory we could always use a char[len] type
894 but that only gains us that the destination and source possibly
895 no longer will have their address taken. */
896 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
897 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
898 {
899 tree tem = TREE_OPERAND (src, 0);
900 STRIP_NOPS (tem);
901 if (tem != TREE_OPERAND (src, 0))
902 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
903 }
904 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
905 {
906 tree tem = TREE_OPERAND (dest, 0);
907 STRIP_NOPS (tem);
908 if (tem != TREE_OPERAND (dest, 0))
909 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
910 }
911 srctype = TREE_TYPE (TREE_TYPE (src));
912 if (TREE_CODE (srctype) == ARRAY_TYPE
913 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
914 {
915 srctype = TREE_TYPE (srctype);
916 STRIP_NOPS (src);
917 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
918 }
919 desttype = TREE_TYPE (TREE_TYPE (dest));
920 if (TREE_CODE (desttype) == ARRAY_TYPE
921 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
922 {
923 desttype = TREE_TYPE (desttype);
924 STRIP_NOPS (dest);
925 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
926 }
927 if (TREE_ADDRESSABLE (srctype)
928 || TREE_ADDRESSABLE (desttype))
929 return false;
930
931 /* Make sure we are not copying using a floating-point mode or
932 a type whose size possibly does not match its precision. */
933 if (FLOAT_MODE_P (TYPE_MODE (desttype))
934 || TREE_CODE (desttype) == BOOLEAN_TYPE
935 || TREE_CODE (desttype) == ENUMERAL_TYPE)
936 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
937 if (FLOAT_MODE_P (TYPE_MODE (srctype))
938 || TREE_CODE (srctype) == BOOLEAN_TYPE
939 || TREE_CODE (srctype) == ENUMERAL_TYPE)
940 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
941 if (!srctype)
942 srctype = desttype;
943 if (!desttype)
944 desttype = srctype;
945 if (!srctype)
946 return false;
947
948 src_align = get_pointer_alignment (src);
949 dest_align = get_pointer_alignment (dest);
950 if (dest_align < TYPE_ALIGN (desttype)
951 || src_align < TYPE_ALIGN (srctype))
952 return false;
953
954 destvar = dest;
955 STRIP_NOPS (destvar);
956 if (TREE_CODE (destvar) == ADDR_EXPR
957 && var_decl_component_p (TREE_OPERAND (destvar, 0))
958 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
959 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
960 else
961 destvar = NULL_TREE;
962
963 srcvar = src;
964 STRIP_NOPS (srcvar);
965 if (TREE_CODE (srcvar) == ADDR_EXPR
966 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
967 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
968 {
969 if (!destvar
970 || src_align >= TYPE_ALIGN (desttype))
971 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
972 srcvar, off0);
973 else if (!STRICT_ALIGNMENT)
974 {
975 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
976 src_align);
977 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
978 }
e256dfce 979 else
fef5a0d9
RB
980 srcvar = NULL_TREE;
981 }
982 else
983 srcvar = NULL_TREE;
984
985 if (srcvar == NULL_TREE && destvar == NULL_TREE)
986 return false;
987
988 if (srcvar == NULL_TREE)
989 {
990 STRIP_NOPS (src);
991 if (src_align >= TYPE_ALIGN (desttype))
992 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
993 else
994 {
995 if (STRICT_ALIGNMENT)
996 return false;
997 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
998 src_align);
999 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1000 }
1001 }
1002 else if (destvar == NULL_TREE)
1003 {
1004 STRIP_NOPS (dest);
1005 if (dest_align >= TYPE_ALIGN (srctype))
1006 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1007 else
1008 {
1009 if (STRICT_ALIGNMENT)
1010 return false;
1011 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1012 dest_align);
1013 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1014 }
1015 }
1016
355fe088 1017 gimple *new_stmt;
fef5a0d9
RB
1018 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1019 {
1020 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1021 if (gimple_in_ssa_p (cfun))
1022 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1023 else
b731b390 1024 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
fef5a0d9
RB
1025 gimple_assign_set_lhs (new_stmt, srcvar);
1026 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1027 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1028 }
1029 new_stmt = gimple_build_assign (destvar, srcvar);
1030 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1031 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1032 if (gimple_vdef (new_stmt)
1033 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1034 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1035 if (!lhs)
1036 {
f6b4dc28 1037 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
1038 return true;
1039 }
1040 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1041 }
1042
1043done:
74e3c262 1044 gimple_seq stmts = NULL;
fef5a0d9
RB
1045 if (endp == 0 || endp == 3)
1046 len = NULL_TREE;
1047 else if (endp == 2)
74e3c262
RB
1048 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1049 ssize_int (1));
fef5a0d9 1050 if (endp == 2 || endp == 1)
74e3c262
RB
1051 {
1052 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1053 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1054 TREE_TYPE (dest), dest, len);
1055 }
fef5a0d9 1056
74e3c262 1057 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 1058 gimple *repl = gimple_build_assign (lhs, dest);
f6b4dc28 1059 gsi_replace (gsi, repl, false);
fef5a0d9
RB
1060 return true;
1061}
1062
1063/* Fold function call to builtin memset or bzero at *GSI setting the
1064 memory of size LEN to VAL. Return whether a simplification was made. */
1065
1066static bool
1067gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1068{
355fe088 1069 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1070 tree etype;
1071 unsigned HOST_WIDE_INT length, cval;
1072
1073 /* If the LEN parameter is zero, return DEST. */
1074 if (integer_zerop (len))
1075 {
1076 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1077 return true;
1078 }
1079
1080 if (! tree_fits_uhwi_p (len))
1081 return false;
1082
1083 if (TREE_CODE (c) != INTEGER_CST)
1084 return false;
1085
1086 tree dest = gimple_call_arg (stmt, 0);
1087 tree var = dest;
1088 if (TREE_CODE (var) != ADDR_EXPR)
1089 return false;
1090
1091 var = TREE_OPERAND (var, 0);
1092 if (TREE_THIS_VOLATILE (var))
1093 return false;
1094
1095 etype = TREE_TYPE (var);
1096 if (TREE_CODE (etype) == ARRAY_TYPE)
1097 etype = TREE_TYPE (etype);
1098
1099 if (!INTEGRAL_TYPE_P (etype)
1100 && !POINTER_TYPE_P (etype))
1101 return NULL_TREE;
1102
1103 if (! var_decl_component_p (var))
1104 return NULL_TREE;
1105
1106 length = tree_to_uhwi (len);
1107 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1108 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1109 return NULL_TREE;
1110
1111 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1112 return NULL_TREE;
1113
1114 if (integer_zerop (c))
1115 cval = 0;
1116 else
1117 {
1118 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1119 return NULL_TREE;
1120
1121 cval = TREE_INT_CST_LOW (c);
1122 cval &= 0xff;
1123 cval |= cval << 8;
1124 cval |= cval << 16;
1125 cval |= (cval << 31) << 1;
1126 }
1127
1128 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1129 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
fef5a0d9
RB
1130 gimple_set_vuse (store, gimple_vuse (stmt));
1131 tree vdef = gimple_vdef (stmt);
1132 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1133 {
1134 gimple_set_vdef (store, gimple_vdef (stmt));
1135 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1136 }
1137 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1138 if (gimple_call_lhs (stmt))
1139 {
355fe088 1140 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1141 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1142 }
1143 else
1144 {
1145 gimple_stmt_iterator gsi2 = *gsi;
1146 gsi_prev (gsi);
1147 gsi_remove (&gsi2, true);
1148 }
1149
1150 return true;
1151}
1152
1153
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables, allocated
   lazily on first use.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple *def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* Maximum-value mode: only non-negative integer constants
	     are usable.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* For maximum length/value, keep the larger of the two
		 constants seen so far.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* For exact length (TYPE == 0) every source must agree.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* A COND_EXPR yields either operand; both arms must have a
	     determinable length/value.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_maxval_strlen (op2, length, visited, type)
		 && get_maxval_strlen (op3, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
1279
dcb7fae2
RB
1280tree
1281get_maxval_strlen (tree arg, int type)
1282{
1283 bitmap visited = NULL;
1284 tree len = NULL_TREE;
1285 if (!get_maxval_strlen (arg, &len, &visited, type))
1286 len = NULL_TREE;
1287 if (visited)
1288 BITMAP_FREE (visited);
1289
1290 return len;
1291}
1292
fef5a0d9
RB
1293
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If the length of SRC is a known constant, replace the call at *GSI
   with memcpy (DEST, SRC, strlen (SRC) + 1).  Return true if a
   simplification was made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* memcpy with an explicit length is typically larger code.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* The transformation is only valid when the source length (and thus
     the number of bytes to copy, including the NUL) is known.  */
  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1331
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   When both LEN and the length of SRC are constants and LEN does not
   truncate SRC, replace the call at *GSI with memcpy.  Return true if a
   simplification was made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* SLEN + 1 accounts for the terminating NUL copied by strncpy.  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1380
/* Simplify a call to the strcat builtin at *GSI.  DST and SRC are the
   arguments to the call.

   If SRC is the empty string the call is replaced by DST.  Otherwise,
   when optimizing for speed and the length of SRC is known, the call
   is split into strlen (DST) followed by
   memcpy (DST + strlen (DST), SRC, strlen (SRC) + 1).

   Return true if a simplification was made.

   (NOTE(review): the previous header described the old tree-returning
   COMPOUND_EXPR interface; this function edits the GIMPLE stream in
   place and returns bool.)  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (SRC) + 1 bytes so the NUL terminator is included.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; materialize that for any LHS user.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1475
07f1cf56
RB
/* Fold a call to the __strcat_chk builtin at *GSI.  DEST, SRC, and SIZE
   are the arguments to the call.  When SRC is "" the call is replaced
   by DEST; when SIZE is -1 (no checking possible) the call is lowered
   to plain strcat.  Return true if a simplification was made.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;


  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* Only an all-ones SIZE (i.e. unknown object size) permits dropping
     the check.  */
  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1510
ad03a744
RB
/* Simplify a call to the strncat builtin at *GSI.  Replaces the call
   by DST when nothing is appended, or by strcat when the bound cannot
   truncate the known-constant SRC.  Return true if a simplification
   was made.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* If the requested len is greater than or equal to the string
     length, call strcat.  */
  if (TREE_CODE (len) == INTEGER_CST && p
      && compare_tree_int (len, strlen (p)) >= 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

      /* If the replacement _DECL isn't initialized, don't do the
	 transformation.  */
      if (!fn)
	return false;

      gcall *repl = gimple_build_call (fn, 2, dst, src);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
1550
745583f9
RB
/* Fold a call to the __strncat_chk builtin at *GSI with arguments DEST,
   SRC, LEN, and SIZE.  Lowers to __strcat_chk when LEN cannot truncate
   SRC, or to plain strncat when SIZE is -1 (no checking possible).
   Return true if a simplification was made.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1606
fef5a0d9
RB
/* Fold a call to the fputs builtin at *GSI.  ARG0 and ARG1 are the
   arguments to the call (the string and the stream).  UNLOCKED is true
   if this is actually a call to fputs_unlocked.  Depending on the
   constant length of ARG0 the call is deleted, turned into fputc, or
   turned into fwrite.  Return true if a simplification was made.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
1683
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin at *GSI.
   DEST, SRC, LEN, and SIZE are the arguments to the call.  FCODE is
   the BUILT_IN_* code of the builtin.  The object-size check is
   dropped (lowering to the plain mem* function) when SIZE is unknown
   (-1) or provably >= the number of bytes written.  Return true if a
   simplification was made.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is the maximum value LEN can have, if determinable.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1784
/* Fold a call to the __st[rp]cpy_chk builtin at *GSI.
   DEST, SRC, and SIZE are the arguments to the call.  FCODE is the
   BUILT_IN_* code of the builtin.  Lowers to st[rp]cpy when the check
   can be proven unnecessary, or to __memcpy_chk when the source length
   is known but not constant.  Return true if a simplification was
   made.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is the maximum length SRC can have, if determinable.  */
  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      gimple_seq stmts = NULL;
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* The copy writes MAXLEN + 1 bytes (including the NUL), so the
	 check is only removable when MAXLEN < SIZE.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1875
/* Fold a call to the __st{r,p}ncpy_chk builtin at *GSI.  DEST, SRC,
   LEN, and SIZE are the arguments to the call.  FCODE is the
   BUILT_IN_* code of the builtin.  Lowers to st{r,p}ncpy when the
   object-size check can be proven unnecessary.  Return true if a
   simplification was made.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* True when the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is the maximum value LEN can have, if determinable.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1935
/* Fold a call to the stpcpy builtin at *GSI with arguments DEST and SRC.
   Returns true if the statement was simplified, false otherwise.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* The remaining transform needs a compile-time constant source
     length.  */
  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Build LEN + 1 in size_type (copying the terminating NUL as well)
     and emit it ahead of the call.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* Move the virtual operands from the stpcpy call onto the new memcpy
     call by hand, including redirecting the VDEF's defining statement,
     since the call is inserted without the usual update machinery.  */
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1999
/* Fold a call to __{,v}snprintf_chk at *GSI.  FCODE is either
   BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  Returns true if the
   call was converted to a plain {,v}snprintf call, false if a normal
   call should be emitted instead.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  /* An all-ones SIZE skips the length comparison below; otherwise we
     must prove the length fits in the object.  */
  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  The FLAG and SIZE arguments are dropped, the
     remaining varargs are shifted down in place.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
cbdd87d4 2080
/* Fold a call to __{,v}sprintf_chk at *GSI.  FCODE is either
   BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  Returns true if the
   call was converted to a plain {,v}sprintf call, false if a normal
   call should be emitted instead.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* Unless SIZE is all ones, require the known output length to be
     strictly smaller than the object size.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  The FLAG and SIZE arguments are dropped, the
     remaining varargs are shifted down in place.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2176
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Returns true if the statement at *GSI was simplified, false if a
   normal call should be emitted instead.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, which for
	     a %-free format is simply the format's length.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* The result is only needed when the lhs is used, and it requires
	 a known length for ORIG.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
2301
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Returns true if the statement at *GSI was simplified, false if a
   normal call should be emitted instead.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a compile-time constant.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters that would have
	     been written; with no % directives that is the format's
	     length.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
35770bb2 2440
edd7ae68
RB
2441/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2442 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2443 more than 3 arguments, and ARG may be null in the 2-argument case.
2444
2445 Return NULL_TREE if no simplification was possible, otherwise return the
2446 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2447 code of the function to be simplified. */
2448
2449static bool
2450gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2451 tree fp, tree fmt, tree arg,
2452 enum built_in_function fcode)
2453{
2454 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2455 tree fn_fputc, fn_fputs;
2456 const char *fmt_str = NULL;
2457
2458 /* If the return value is used, don't do the transformation. */
2459 if (gimple_call_lhs (stmt) != NULL_TREE)
2460 return false;
2461
2462 /* Check whether the format is a literal string constant. */
2463 fmt_str = c_getstr (fmt);
2464 if (fmt_str == NULL)
2465 return false;
2466
2467 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2468 {
2469 /* If we're using an unlocked function, assume the other
2470 unlocked functions exist explicitly. */
2471 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2472 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2473 }
2474 else
2475 {
2476 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2477 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2478 }
2479
2480 if (!init_target_chars ())
2481 return false;
2482
2483 /* If the format doesn't contain % args or %%, use strcpy. */
2484 if (strchr (fmt_str, target_percent) == NULL)
2485 {
2486 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2487 && arg)
2488 return false;
2489
2490 /* If the format specifier was "", fprintf does nothing. */
2491 if (fmt_str[0] == '\0')
2492 {
2493 replace_call_with_value (gsi, NULL_TREE);
2494 return true;
2495 }
2496
2497 /* When "string" doesn't contain %, replace all cases of
2498 fprintf (fp, string) with fputs (string, fp). The fputs
2499 builtin will take care of special cases like length == 1. */
2500 if (fn_fputs)
2501 {
2502 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2503 replace_call_with_call_and_fold (gsi, repl);
2504 return true;
2505 }
2506 }
2507
2508 /* The other optimizations can be done only on the non-va_list variants. */
2509 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2510 return false;
2511
2512 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2513 else if (strcmp (fmt_str, target_percent_s) == 0)
2514 {
2515 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2516 return false;
2517 if (fn_fputs)
2518 {
2519 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2520 replace_call_with_call_and_fold (gsi, repl);
2521 return true;
2522 }
2523 }
2524
2525 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2526 else if (strcmp (fmt_str, target_percent_c) == 0)
2527 {
2528 if (!arg
2529 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2530 return false;
2531 if (fn_fputc)
2532 {
2533 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2534 replace_call_with_call_and_fold (gsi, repl);
2535 return true;
2536 }
2537 }
2538
2539 return false;
2540}
2541
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Returns true if the call at *GSI was simplified, false if a normal
   call should be emitted instead.  FCODE is the BUILT_IN_* code of the
   function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle "%s" and %-free formats together: in both cases the string
     actually printed (STR below) is known at compile time.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2703
edd7ae68 2704
fef5a0d9
RB
2705
2706/* Fold a call to __builtin_strlen with known length LEN. */
2707
2708static bool
dcb7fae2 2709gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 2710{
355fe088 2711 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2712 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
fef5a0d9
RB
2713 if (!len)
2714 return false;
2813904b 2715 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
fef5a0d9
RB
2716 replace_call_with_value (gsi, len);
2717 return true;
cbdd87d4
RG
2718}
2719
48126138
NS
2720/* Fold a call to __builtin_acc_on_device. */
2721
2722static bool
2723gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2724{
2725 /* Defer folding until we know which compiler we're in. */
2726 if (symtab->state != EXPANSION)
2727 return false;
2728
2729 unsigned val_host = GOMP_DEVICE_HOST;
2730 unsigned val_dev = GOMP_DEVICE_NONE;
2731
2732#ifdef ACCEL_COMPILER
2733 val_host = GOMP_DEVICE_NOT_HOST;
2734 val_dev = ACCEL_COMPILER_acc_device;
2735#endif
2736
2737 location_t loc = gimple_location (gsi_stmt (*gsi));
2738
2739 tree host_eq = make_ssa_name (boolean_type_node);
2740 gimple *host_ass = gimple_build_assign
2741 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2742 gimple_set_location (host_ass, loc);
2743 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2744
2745 tree dev_eq = make_ssa_name (boolean_type_node);
2746 gimple *dev_ass = gimple_build_assign
2747 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2748 gimple_set_location (dev_ass, loc);
2749 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2750
2751 tree result = make_ssa_name (boolean_type_node);
2752 gimple *result_ass = gimple_build_assign
2753 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2754 gimple_set_location (result_ass, loc);
2755 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2756
2757 replace_call_with_value (gsi, result);
2758
2759 return true;
2760}
cbdd87d4 2761
dcb7fae2
RB
2762/* Fold the non-target builtin at *GSI and return whether any simplification
2763 was made. */
cbdd87d4 2764
fef5a0d9 2765static bool
dcb7fae2 2766gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 2767{
538dd0b7 2768 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 2769 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 2770
dcb7fae2
RB
2771 /* Give up for always_inline inline builtins until they are
2772 inlined. */
2773 if (avoid_folding_inline_builtin (callee))
2774 return false;
cbdd87d4 2775
edd7ae68
RB
2776 unsigned n = gimple_call_num_args (stmt);
2777 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2778 switch (fcode)
cbdd87d4 2779 {
dcb7fae2
RB
2780 case BUILT_IN_BZERO:
2781 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2782 gimple_call_arg (stmt, 1));
2783 case BUILT_IN_MEMSET:
2784 return gimple_fold_builtin_memset (gsi,
2785 gimple_call_arg (stmt, 1),
2786 gimple_call_arg (stmt, 2));
2787 case BUILT_IN_BCOPY:
2788 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2789 gimple_call_arg (stmt, 0), 3);
2790 case BUILT_IN_MEMCPY:
2791 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2792 gimple_call_arg (stmt, 1), 0);
2793 case BUILT_IN_MEMPCPY:
2794 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2795 gimple_call_arg (stmt, 1), 1);
2796 case BUILT_IN_MEMMOVE:
2797 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2798 gimple_call_arg (stmt, 1), 3);
2799 case BUILT_IN_SPRINTF_CHK:
2800 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 2801 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
2802 case BUILT_IN_STRCAT_CHK:
2803 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
2804 case BUILT_IN_STRNCAT_CHK:
2805 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 2806 case BUILT_IN_STRLEN:
dcb7fae2 2807 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 2808 case BUILT_IN_STRCPY:
dcb7fae2 2809 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 2810 gimple_call_arg (stmt, 0),
dcb7fae2 2811 gimple_call_arg (stmt, 1));
cbdd87d4 2812 case BUILT_IN_STRNCPY:
dcb7fae2 2813 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
2814 gimple_call_arg (stmt, 0),
2815 gimple_call_arg (stmt, 1),
dcb7fae2 2816 gimple_call_arg (stmt, 2));
9a7eefec 2817 case BUILT_IN_STRCAT:
dcb7fae2
RB
2818 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2819 gimple_call_arg (stmt, 1));
ad03a744
RB
2820 case BUILT_IN_STRNCAT:
2821 return gimple_fold_builtin_strncat (gsi);
cbdd87d4 2822 case BUILT_IN_FPUTS:
dcb7fae2
RB
2823 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2824 gimple_call_arg (stmt, 1), false);
cbdd87d4 2825 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
2826 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2827 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
2828 case BUILT_IN_MEMCPY_CHK:
2829 case BUILT_IN_MEMPCPY_CHK:
2830 case BUILT_IN_MEMMOVE_CHK:
2831 case BUILT_IN_MEMSET_CHK:
dcb7fae2 2832 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
2833 gimple_call_arg (stmt, 0),
2834 gimple_call_arg (stmt, 1),
2835 gimple_call_arg (stmt, 2),
2836 gimple_call_arg (stmt, 3),
edd7ae68 2837 fcode);
2625bb5d
RB
2838 case BUILT_IN_STPCPY:
2839 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
2840 case BUILT_IN_STRCPY_CHK:
2841 case BUILT_IN_STPCPY_CHK:
dcb7fae2 2842 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
2843 gimple_call_arg (stmt, 0),
2844 gimple_call_arg (stmt, 1),
2845 gimple_call_arg (stmt, 2),
edd7ae68 2846 fcode);
cbdd87d4 2847 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 2848 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
2849 return gimple_fold_builtin_stxncpy_chk (gsi,
2850 gimple_call_arg (stmt, 0),
2851 gimple_call_arg (stmt, 1),
2852 gimple_call_arg (stmt, 2),
2853 gimple_call_arg (stmt, 3),
edd7ae68 2854 fcode);
cbdd87d4
RG
2855 case BUILT_IN_SNPRINTF_CHK:
2856 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 2857 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
d7e78447 2858 case BUILT_IN_SNPRINTF:
dcb7fae2 2859 return gimple_fold_builtin_snprintf (gsi);
d7e78447 2860 case BUILT_IN_SPRINTF:
dcb7fae2 2861 return gimple_fold_builtin_sprintf (gsi);
edd7ae68
RB
2862 case BUILT_IN_FPRINTF:
2863 case BUILT_IN_FPRINTF_UNLOCKED:
2864 case BUILT_IN_VFPRINTF:
2865 if (n == 2 || n == 3)
2866 return gimple_fold_builtin_fprintf (gsi,
2867 gimple_call_arg (stmt, 0),
2868 gimple_call_arg (stmt, 1),
2869 n == 3
2870 ? gimple_call_arg (stmt, 2)
2871 : NULL_TREE,
2872 fcode);
2873 break;
2874 case BUILT_IN_FPRINTF_CHK:
2875 case BUILT_IN_VFPRINTF_CHK:
2876 if (n == 3 || n == 4)
2877 return gimple_fold_builtin_fprintf (gsi,
2878 gimple_call_arg (stmt, 0),
2879 gimple_call_arg (stmt, 2),
2880 n == 4
2881 ? gimple_call_arg (stmt, 3)
2882 : NULL_TREE,
2883 fcode);
2884 break;
ad03a744
RB
2885 case BUILT_IN_PRINTF:
2886 case BUILT_IN_PRINTF_UNLOCKED:
2887 case BUILT_IN_VPRINTF:
2888 if (n == 1 || n == 2)
2889 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2890 n == 2
2891 ? gimple_call_arg (stmt, 1)
2892 : NULL_TREE, fcode);
2893 break;
2894 case BUILT_IN_PRINTF_CHK:
2895 case BUILT_IN_VPRINTF_CHK:
2896 if (n == 2 || n == 3)
2897 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2898 n == 3
2899 ? gimple_call_arg (stmt, 2)
2900 : NULL_TREE, fcode);
242a37f1 2901 break;
48126138
NS
2902 case BUILT_IN_ACC_ON_DEVICE:
2903 return gimple_fold_builtin_acc_on_device (gsi,
2904 gimple_call_arg (stmt, 0));
fef5a0d9
RB
2905 default:;
2906 }
2907
2908 /* Try the generic builtin folder. */
2909 bool ignore = (gimple_call_lhs (stmt) == NULL);
2910 tree result = fold_call_stmt (stmt, ignore);
2911 if (result)
2912 {
2913 if (ignore)
2914 STRIP_NOPS (result);
2915 else
2916 result = fold_convert (gimple_call_return_type (stmt), result);
2917 if (!update_call_from_tree (gsi, result))
2918 gimplify_and_update_call_from_tree (gsi, result);
2919 return true;
2920 }
2921
2922 return false;
2923}
2924
451e8dae
NS
2925/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
2926 function calls to constants, where possible. */
2927
2928static tree
2929fold_internal_goacc_dim (const gimple *call)
2930{
2931 int axis = get_oacc_ifn_dim_arg (call);
2932 int size = get_oacc_fn_dim_size (current_function_decl, axis);
2933 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
2934 tree result = NULL_TREE;
2935
2936 /* If the size is 1, or we only want the size and it is not dynamic,
2937 we know the answer. */
2938 if (size == 1 || (!is_pos && size))
2939 {
2940 tree type = TREE_TYPE (gimple_call_lhs (call));
2941 result = build_int_cst (type, size - is_pos);
2942 }
2943
2944 return result;
2945}
2946
1304953e
JJ
2947/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2948 doesn't fit into TYPE. The test for overflow should be regardless of
2949 -fwrapv, and even for unsigned types. */
2950
2951bool
2952arith_overflowed_p (enum tree_code code, const_tree type,
2953 const_tree arg0, const_tree arg1)
2954{
2955 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2956 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2957 widest2_int_cst;
2958 widest2_int warg0 = widest2_int_cst (arg0);
2959 widest2_int warg1 = widest2_int_cst (arg1);
2960 widest2_int wres;
2961 switch (code)
2962 {
2963 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2964 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2965 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2966 default: gcc_unreachable ();
2967 }
2968 signop sign = TYPE_SIGN (type);
2969 if (sign == UNSIGNED && wi::neg_p (wres))
2970 return true;
2971 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2972}
2973
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   When INPLACE is true, only transformations that keep the statement
   in place (no insertion of new statements) are performed.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already wraps a known function address:
	     strip the wrapper, after optionally diagnosing a
	     devirtualization that contradicts the type-inheritance
	     graph.  */
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						      (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Try speculative devirtualization: if type analysis proves
	     there is at most one possible target, call it directly, or
	     replace an impossible call by __builtin_unreachable.  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Give the dangling SSA lhs a harmless
			     default definition instead.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible target at all: the path is dead, so
		     replace the call by __builtin_unreachable ().  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    {
		      /* Carry over the virtual operands before replacing
			 the statement wholesale.  */
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      gsi_replace (gsi, new_stmt, false);
		    }
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise fold *& in the static-chain operand, too.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  /* Everything below may replace or insert statements, which an
     in-place folding must not do.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      /* Machine-dependent builtins are delegated to the target.  */
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* Fold internal functions: __builtin_expect lowering, UBSAN
	 checks that statically cannot fire, OpenACC dimension queries,
	 and the arithmetic-with-overflow families.  SUBCODE is the
	 tree arithmetic code for the *_CHK/*_OVERFLOW cases,
	 CPLX_RESULT whether the ifn returns a {value, overflow-flag}
	 complex pair.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  /* Drop the check when the bound is unknown (all-ones) or the
	     access is provably within the object size.  */
	  if (integer_all_onesp (gimple_call_arg (stmt, 2))
	      || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
		  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
		  && tree_int_cst_le (gimple_call_arg (stmt, 1),
				      gimple_call_arg (stmt, 2))))
	    {
	      gsi_replace (gsi, gimple_build_nop (), false);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      return true;
	    }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      /* For *_OVERFLOW the scalar result type is the element
		 type of the complex lhs; without a lhs there is
		 nothing to fold.  */
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      /* Constant-fold, recording a detected overflow either in
		 the complex flag or by refusing to fold.  */
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      /* Re-check representability of the constant in
			 the narrower result type.  */
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    /* The conversion could change the value; give up.  */
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Package {result, overflow-flag} into the complex value
		 the *_OVERFLOW internal functions return.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
3269
e0ee10ed 3270
89a79e96
RB
3271/* Return true whether NAME has a use on STMT. */
3272
3273static bool
355fe088 3274has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
3275{
3276 imm_use_iterator iter;
3277 use_operand_p use_p;
3278 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3279 if (USE_STMT (use_p) == stmt)
3280 return true;
3281 return false;
3282}
3283
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt))
      || (COMPARISON_CLASS_P (ops[0])
	  && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
	       && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
	      || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
		  && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
    return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* For a GIMPLE_COND the result may be a comparison, a bare SSA
	 name, or a constant; rewrite the condition accordingly.  */
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Materialize the simplified value into SEQ and compare the
	     resulting SSA name against zero.  */
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      /* In place we can only rewrite the rhs if the statement already
	 has room for the (smaller or equal) number of operands.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  &ops[0], ops[1], ops[2]);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (rcode.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == rcode)
    {
      /* Same function, simplified arguments: update them in place.  */
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  /* Build a new statement sequence computing the lhs and
	     replace STMT with it, preserving virtual operands.  */
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
3435
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the reference tree to canonicalize; returns true if
   anything was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through an outer ADDR_EXPR and any component references to
     the base memory reference.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  /* Fold the component offset into the MEM_REF's constant
	     offset operand so the address operand is a plain &base.  */
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
3518
cbdd87d4
RG
3519/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3520 distinguishes both cases. */
3521
3522static bool
e0ee10ed 3523fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
cbdd87d4
RG
3524{
3525 bool changed = false;
355fe088 3526 gimple *stmt = gsi_stmt (*gsi);
cbdd87d4
RG
3527 unsigned i;
3528
040292e7
RB
3529 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3530 after propagation.
3531 ??? This shouldn't be done in generic folding but in the
3532 propagation helpers which also know whether an address was
89a79e96
RB
3533 propagated.
3534 Also canonicalize operand order. */
040292e7
RB
3535 switch (gimple_code (stmt))
3536 {
3537 case GIMPLE_ASSIGN:
3538 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3539 {
3540 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3541 if ((REFERENCE_CLASS_P (*rhs)
3542 || TREE_CODE (*rhs) == ADDR_EXPR)
3543 && maybe_canonicalize_mem_ref_addr (rhs))
3544 changed = true;
3545 tree *lhs = gimple_assign_lhs_ptr (stmt);
3546 if (REFERENCE_CLASS_P (*lhs)
3547 && maybe_canonicalize_mem_ref_addr (lhs))
3548 changed = true;
3549 }
89a79e96
RB
3550 else
3551 {
3552 /* Canonicalize operand order. */
3553 enum tree_code code = gimple_assign_rhs_code (stmt);
3554 if (TREE_CODE_CLASS (code) == tcc_comparison
3555 || commutative_tree_code (code)
3556 || commutative_ternary_tree_code (code))
3557 {
3558 tree rhs1 = gimple_assign_rhs1 (stmt);
3559 tree rhs2 = gimple_assign_rhs2 (stmt);
3560 if (tree_swap_operands_p (rhs1, rhs2, false))
3561 {
3562 gimple_assign_set_rhs1 (stmt, rhs2);
3563 gimple_assign_set_rhs2 (stmt, rhs1);
3564 if (TREE_CODE_CLASS (code) == tcc_comparison)
3565 gimple_assign_set_rhs_code (stmt,
3566 swap_tree_comparison (code));
3567 changed = true;
3568 }
3569 }
3570 }
040292e7
RB
3571 break;
3572 case GIMPLE_CALL:
3573 {
3574 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3575 {
3576 tree *arg = gimple_call_arg_ptr (stmt, i);
3577 if (REFERENCE_CLASS_P (*arg)
3578 && maybe_canonicalize_mem_ref_addr (arg))
3579 changed = true;
3580 }
3581 tree *lhs = gimple_call_lhs_ptr (stmt);
3582 if (*lhs
3583 && REFERENCE_CLASS_P (*lhs)
3584 && maybe_canonicalize_mem_ref_addr (lhs))
3585 changed = true;
3586 break;
3587 }
3588 case GIMPLE_ASM:
3589 {
538dd0b7
DM
3590 gasm *asm_stmt = as_a <gasm *> (stmt);
3591 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
040292e7 3592 {
538dd0b7 3593 tree link = gimple_asm_output_op (asm_stmt, i);
040292e7
RB
3594 tree op = TREE_VALUE (link);
3595 if (REFERENCE_CLASS_P (op)
3596 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3597 changed = true;
3598 }
538dd0b7 3599 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
040292e7 3600 {
538dd0b7 3601 tree link = gimple_asm_input_op (asm_stmt, i);
040292e7
RB
3602 tree op = TREE_VALUE (link);
3603 if ((REFERENCE_CLASS_P (op)
3604 || TREE_CODE (op) == ADDR_EXPR)
3605 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3606 changed = true;
3607 }
3608 }
3609 break;
3610 case GIMPLE_DEBUG:
3611 if (gimple_debug_bind_p (stmt))
3612 {
3613 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3614 if (*val
3615 && (REFERENCE_CLASS_P (*val)
3616 || TREE_CODE (*val) == ADDR_EXPR)
3617 && maybe_canonicalize_mem_ref_addr (val))
3618 changed = true;
3619 }
3620 break;
89a79e96
RB
3621 case GIMPLE_COND:
3622 {
3623 /* Canonicalize operand order. */
3624 tree lhs = gimple_cond_lhs (stmt);
3625 tree rhs = gimple_cond_rhs (stmt);
3626 if (tree_swap_operands_p (lhs, rhs, false))
3627 {
3628 gcond *gc = as_a <gcond *> (stmt);
3629 gimple_cond_set_lhs (gc, rhs);
3630 gimple_cond_set_rhs (gc, lhs);
3631 gimple_cond_set_code (gc,
3632 swap_tree_comparison (gimple_cond_code (gc)));
3633 changed = true;
3634 }
3635 }
040292e7
RB
3636 default:;
3637 }
3638
e0ee10ed
RB
3639 /* Dispatch to pattern-based folding. */
3640 if (!inplace
3641 || is_gimple_assign (stmt)
3642 || gimple_code (stmt) == GIMPLE_COND)
3643 {
3644 gimple_seq seq = NULL;
3645 code_helper rcode;
3646 tree ops[3] = {};
0ff093d8
RB
3647 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3648 valueize, valueize))
e0ee10ed
RB
3649 {
3650 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3651 changed = true;
3652 else
3653 gimple_seq_discard (seq);
3654 }
3655 }
3656
3657 stmt = gsi_stmt (*gsi);
3658
cbdd87d4
RG
3659 /* Fold the main computation performed by the statement. */
3660 switch (gimple_code (stmt))
3661 {
3662 case GIMPLE_ASSIGN:
3663 {
819ec64c
RB
3664 /* Try to canonicalize for boolean-typed X the comparisons
3665 X == 0, X == 1, X != 0, and X != 1. */
3666 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
3667 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5fbcc0ed 3668 {
819ec64c
RB
3669 tree lhs = gimple_assign_lhs (stmt);
3670 tree op1 = gimple_assign_rhs1 (stmt);
3671 tree op2 = gimple_assign_rhs2 (stmt);
3672 tree type = TREE_TYPE (op1);
3673
3674 /* Check whether the comparison operands are of the same boolean
3675 type as the result type is.
3676 Check that second operand is an integer-constant with value
3677 one or zero. */
3678 if (TREE_CODE (op2) == INTEGER_CST
3679 && (integer_zerop (op2) || integer_onep (op2))
3680 && useless_type_conversion_p (TREE_TYPE (lhs), type))
3681 {
3682 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
3683 bool is_logical_not = false;
3684
3685 /* X == 0 and X != 1 is a logical-not.of X
3686 X == 1 and X != 0 is X */
3687 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
3688 || (cmp_code == NE_EXPR && integer_onep (op2)))
3689 is_logical_not = true;
3690
3691 if (is_logical_not == false)
3692 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
3693 /* Only for one-bit precision typed X the transformation
3694 !X -> ~X is valied. */
3695 else if (TYPE_PRECISION (type) == 1)
3696 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
3697 /* Otherwise we use !X -> X ^ 1. */
3698 else
3699 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
3700 build_int_cst (type, 1));
3701 changed = true;
3702 break;
3703 }
5fbcc0ed 3704 }
819ec64c
RB
3705
3706 unsigned old_num_ops = gimple_num_ops (stmt);
3707 tree lhs = gimple_assign_lhs (stmt);
3708 tree new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
3709 if (new_rhs
3710 && !useless_type_conversion_p (TREE_TYPE (lhs),
3711 TREE_TYPE (new_rhs)))
3712 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3713 if (new_rhs
3714 && (!inplace
3715 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3716 {
3717 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3718 changed = true;
3719 }
3720 break;
3721 }
3722
cbdd87d4 3723 case GIMPLE_CALL:
ceeffab0 3724 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
3725 break;
3726
3727 case GIMPLE_ASM:
3728 /* Fold *& in asm operands. */
38384150 3729 {
538dd0b7 3730 gasm *asm_stmt = as_a <gasm *> (stmt);
38384150
JJ
3731 size_t noutputs;
3732 const char **oconstraints;
3733 const char *constraint;
3734 bool allows_mem, allows_reg;
3735
538dd0b7 3736 noutputs = gimple_asm_noutputs (asm_stmt);
38384150
JJ
3737 oconstraints = XALLOCAVEC (const char *, noutputs);
3738
538dd0b7 3739 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
38384150 3740 {
538dd0b7 3741 tree link = gimple_asm_output_op (asm_stmt, i);
38384150
JJ
3742 tree op = TREE_VALUE (link);
3743 oconstraints[i]
3744 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3745 if (REFERENCE_CLASS_P (op)
3746 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3747 {
3748 TREE_VALUE (link) = op;
3749 changed = true;
3750 }
3751 }
538dd0b7 3752 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
38384150 3753 {
538dd0b7 3754 tree link = gimple_asm_input_op (asm_stmt, i);
38384150
JJ
3755 tree op = TREE_VALUE (link);
3756 constraint
3757 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3758 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3759 oconstraints, &allows_mem, &allows_reg);
3760 if (REFERENCE_CLASS_P (op)
3761 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3762 != NULL_TREE)
3763 {
3764 TREE_VALUE (link) = op;
3765 changed = true;
3766 }
3767 }
3768 }
cbdd87d4
RG
3769 break;
3770
bd422c4a
RG
3771 case GIMPLE_DEBUG:
3772 if (gimple_debug_bind_p (stmt))
3773 {
3774 tree val = gimple_debug_bind_get_value (stmt);
3775 if (val
3776 && REFERENCE_CLASS_P (val))
3777 {
3778 tree tem = maybe_fold_reference (val, false);
3779 if (tem)
3780 {
3781 gimple_debug_bind_set_value (stmt, tem);
3782 changed = true;
3783 }
3784 }
3e888a5e
RG
3785 else if (val
3786 && TREE_CODE (val) == ADDR_EXPR)
3787 {
3788 tree ref = TREE_OPERAND (val, 0);
3789 tree tem = maybe_fold_reference (ref, false);
3790 if (tem)
3791 {
3792 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3793 gimple_debug_bind_set_value (stmt, tem);
3794 changed = true;
3795 }
3796 }
bd422c4a
RG
3797 }
3798 break;
3799
cbdd87d4
RG
3800 default:;
3801 }
3802
3803 stmt = gsi_stmt (*gsi);
3804
37376165
RB
3805 /* Fold *& on the lhs. */
3806 if (gimple_has_lhs (stmt))
cbdd87d4
RG
3807 {
3808 tree lhs = gimple_get_lhs (stmt);
3809 if (lhs && REFERENCE_CLASS_P (lhs))
3810 {
3811 tree new_lhs = maybe_fold_reference (lhs, true);
3812 if (new_lhs)
3813 {
3814 gimple_set_lhs (stmt, new_lhs);
3815 changed = true;
3816 }
3817 }
3818 }
3819
3820 return changed;
3821}
3822
e0ee10ed
RB
3823/* Valueziation callback that ends up not following SSA edges. */
3824
3825tree
3826no_follow_ssa_edges (tree)
3827{
3828 return NULL_TREE;
3829}
3830
45cc9f96
RB
3831/* Valueization callback that ends up following single-use SSA edges only. */
3832
3833tree
3834follow_single_use_edges (tree val)
3835{
3836 if (TREE_CODE (val) == SSA_NAME
3837 && !has_single_use (val))
3838 return NULL_TREE;
3839 return val;
3840}
3841
cbdd87d4
RG
3842/* Fold the statement pointed to by GSI. In some cases, this function may
3843 replace the whole statement with a new one. Returns true iff folding
3844 makes any changes.
3845 The statement pointed to by GSI should be in valid gimple form but may
3846 be in unfolded state as resulting from for example constant propagation
3847 which can produce *&x = 0. */
3848
3849bool
3850fold_stmt (gimple_stmt_iterator *gsi)
3851{
e0ee10ed
RB
3852 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3853}
3854
3855bool
3856fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3857{
3858 return fold_stmt_1 (gsi, false, valueize);
cbdd87d4
RG
3859}
3860
59401b92 3861/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
3862 *&x created by constant propagation are handled. The statement cannot
3863 be replaced with a new one. Return true if the statement was
3864 changed, false otherwise.
59401b92 3865 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
3866 be in unfolded state as resulting from for example constant propagation
3867 which can produce *&x = 0. */
3868
3869bool
59401b92 3870fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 3871{
355fe088 3872 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 3873 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 3874 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
3875 return changed;
3876}
3877
e89065a1
SL
3878/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3879 if EXPR is null or we don't know how.
3880 If non-null, the result always has boolean type. */
3881
3882static tree
3883canonicalize_bool (tree expr, bool invert)
3884{
3885 if (!expr)
3886 return NULL_TREE;
3887 else if (invert)
3888 {
3889 if (integer_nonzerop (expr))
3890 return boolean_false_node;
3891 else if (integer_zerop (expr))
3892 return boolean_true_node;
3893 else if (TREE_CODE (expr) == SSA_NAME)
3894 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3895 build_int_cst (TREE_TYPE (expr), 0));
98209db3 3896 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
3897 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3898 boolean_type_node,
3899 TREE_OPERAND (expr, 0),
3900 TREE_OPERAND (expr, 1));
3901 else
3902 return NULL_TREE;
3903 }
3904 else
3905 {
3906 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3907 return expr;
3908 if (integer_nonzerop (expr))
3909 return boolean_true_node;
3910 else if (integer_zerop (expr))
3911 return boolean_false_node;
3912 else if (TREE_CODE (expr) == SSA_NAME)
3913 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3914 build_int_cst (TREE_TYPE (expr), 0));
98209db3 3915 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
3916 return fold_build2 (TREE_CODE (expr),
3917 boolean_type_node,
3918 TREE_OPERAND (expr, 0),
3919 TREE_OPERAND (expr, 1));
3920 else
3921 return NULL_TREE;
3922 }
3923}
3924
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  Returns true only when equivalence can be proven; false
   means "unknown", not "different".  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case: EXPR is literally the same comparison.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean NAME is equivalent to NAME != 0 and NAME == 1.  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise look at EXPR's defining assignment.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse on that comparison (inverting it
     for the == 0 / != 1 forms).  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
3982
3983/* Check to see if two boolean expressions OP1 and OP2 are logically
3984 equivalent. */
3985
3986static bool
3987same_bool_result_p (const_tree op1, const_tree op2)
3988{
3989 /* Simple cases first. */
3990 if (operand_equal_p (op1, op2, 0))
3991 return true;
3992
3993 /* Check the cases where at least one of the operands is a comparison.
3994 These are a bit smarter than operand_equal_p in that they apply some
3995 identifies on SSA_NAMEs. */
98209db3 3996 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
3997 && same_bool_comparison_p (op1, TREE_CODE (op2),
3998 TREE_OPERAND (op2, 0),
3999 TREE_OPERAND (op2, 1)))
4000 return true;
98209db3 4001 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
4002 && same_bool_comparison_p (op2, TREE_CODE (op1),
4003 TREE_OPERAND (op1, 0),
4004 TREE_OPERAND (op1, 1)))
4005 return true;
4006
4007 /* Default case. */
4008 return false;
4009}
4010
4011/* Forward declarations for some mutually recursive functions. */
4012
4013static tree
4014and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4015 enum tree_code code2, tree op2a, tree op2b);
4016static tree
4017and_var_with_comparison (tree var, bool invert,
4018 enum tree_code code2, tree op2a, tree op2b);
4019static tree
355fe088 4020and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
4021 enum tree_code code2, tree op2a, tree op2b);
4022static tree
4023or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4024 enum tree_code code2, tree op2a, tree op2b);
4025static tree
4026or_var_with_comparison (tree var, bool invert,
4027 enum tree_code code2, tree op2a, tree op2b);
4028static tree
355fe088 4029or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
4030 enum tree_code code2, tree op2a, tree op2b);
4031
4032/* Helper function for and_comparisons_1: try to simplify the AND of the
4033 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4034 If INVERT is true, invert the value of the VAR before doing the AND.
4035 Return NULL_EXPR if we can't simplify this to a single expression. */
4036
4037static tree
4038and_var_with_comparison (tree var, bool invert,
4039 enum tree_code code2, tree op2a, tree op2b)
4040{
4041 tree t;
355fe088 4042 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
4043
4044 /* We can only deal with variables whose definitions are assignments. */
4045 if (!is_gimple_assign (stmt))
4046 return NULL_TREE;
4047
4048 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4049 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4050 Then we only have to consider the simpler non-inverted cases. */
4051 if (invert)
4052 t = or_var_with_comparison_1 (stmt,
4053 invert_tree_comparison (code2, false),
4054 op2a, op2b);
4055 else
4056 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4057 return canonicalize_bool (t, invert);
4058}
4059
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* Set below when (op2a code2 op2b) is equivalent to op2a,
     resp. !op2a; NULL_TREE if neither pattern matched.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* Simplification of the first inner operand AND the comparison,
	 saved so it can be combined with the second one's result.  */
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4222
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* Sign of the comparison op1b <=> op2b, per tree_int_cst_compare.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* If (op1a != op1b) subsumes the second test, return it.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges: (x >= c AND x <= c) => x == c.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true for the (NAME == 0) / (NAME != 1) forms.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* Non-boolean PHI: fall through to give up.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4476
4477/* Try to simplify the AND of two comparisons, specified by
4478 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4479 If this can be simplified to a single expression (without requiring
4480 introducing more SSA variables to hold intermediate values),
4481 return the resulting tree. Otherwise return NULL_TREE.
4482 If the result expression is non-null, it has boolean type. */
4483
4484tree
4485maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4486 enum tree_code code2, tree op2a, tree op2b)
4487{
4488 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4489 if (t)
4490 return t;
4491 else
4492 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4493}
4494
4495/* Helper function for or_comparisons_1: try to simplify the OR of the
4496 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4497 If INVERT is true, invert the value of VAR before doing the OR.
4498 Return NULL_EXPR if we can't simplify this to a single expression. */
4499
4500static tree
4501or_var_with_comparison (tree var, bool invert,
4502 enum tree_code code2, tree op2a, tree op2b)
4503{
4504 tree t;
355fe088 4505 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
4506
4507 /* We can only deal with variables whose definitions are assignments. */
4508 if (!is_gimple_assign (stmt))
4509 return NULL_TREE;
4510
4511 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4512 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4513 Then we only have to consider the simpler non-inverted cases. */
4514 if (invert)
4515 t = and_var_with_comparison_1 (stmt,
4516 invert_tree_comparison (code2, false),
4517 op2a, op2b);
4518 else
4519 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4520 return canonicalize_bool (t, invert);
4521}
4522
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* Set below when (op2a code2 op2b) is equivalent to op2a,
     resp. !op2a; NULL_TREE if neither pattern matched.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* Simplification of the first inner operand OR the comparison,
	 saved so it can be combined with the second one's result.  */
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4686
4687/* Try to simplify the OR of two comparisons defined by
4688 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4689 If this can be done without constructing an intermediate value,
4690 return the resulting tree; otherwise NULL_TREE is returned.
4691 This function is deliberately asymmetric as it recurses on SSA_DEFs
4692 in the first comparison but not the second. */
4693
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP orders the two constant bounds: <0, 0 or >0 when
	 op1b is smaller than, equal to, or larger than op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests that NAME is
	 false rather than true.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
						        code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4940
4941/* Try to simplify the OR of two comparisons, specified by
4942 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4943 If this can be simplified to a single expression (without requiring
4944 introducing more SSA variables to hold intermediate values),
4945 return the resulting tree. Otherwise return NULL_TREE.
4946 If the result expression is non-null, it has boolean type. */
4947
4948tree
4949maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4950 enum tree_code code2, tree op2a, tree op2b)
4951{
4952 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4953 if (t)
4954 return t;
4955 else
4956 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4957}
cfef45c8
RG
4958
4959
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* ??? The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (rcode, ops))
	res = ops[0];
      else if (mprts_hook)
	res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res, 0);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  /* Fold a fully-initialized vector CONSTRUCTOR to a
		     VECTOR_CST when every element valueizes to a
		     constant.  */
		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build_fold_addr_expr_loc
			(loc,
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we can't reduce to a SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    /* Keep the constant, if any, in op1.  */
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    /* Only accept foldings that provably did not overflow,
	       since these internal fns exist to detect overflow.  */
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
5258
5259/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5260 Returns NULL_TREE if folding to a constant is not possible, otherwise
5261 returns a constant according to is_gimple_min_invariant. */
5262
5263tree
355fe088 5264gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
5265{
5266 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5267 if (res && is_gimple_min_invariant (res))
5268 return res;
5269 return NULL_TREE;
5270}
5271
5272
5273/* The following set of functions are supposed to fold references using
5274 their constant initializers. */
5275
cfef45c8
RG
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the constant byte offset of the MEM_REF into *BIT_OFFSET
	 and strip the address operation.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      /* Give up on variable-sized or variably-placed accesses.  */
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
5346
cfef45c8
RG
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
5405
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
5469
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Dispatch on the shape of the aggregate initializer.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
5525
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;
  bool reverse;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
5643
/* Public wrapper around fold_const_aggregate_ref_1 that performs no
   SSA name valueization.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
06bc3ec7 5649
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol). Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  /* Be optimistic until proven otherwise.  */
  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* A missing ctor is only expected in LTO mode.  */
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Convert OFFSET from bytes to bits and add the entry displacement.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      /* Return the method anyway so the caller can see what was
		 found even though it must not be referenced.  */
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
5760
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol). Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      /* The vtable pointer expression did not resolve to a known vtable
	 plus offset; report that the target is not referable.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
5789
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view. */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  /* Look through no-op conversions of the pointer.  */
  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  /* Only fold when the low bound is a constant index.  */
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Reject accesses past the last vector element.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  /* The offset must equal the imaginary part's displacement.  */
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively simplify the inner dereference first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
19e51b40
JJ
5909
5910/* Return true if CODE is an operation that when operating on signed
5911 integer types involves undefined behavior on overflow and the
5912 operation can be expressed with unsigned arithmetic. */
5913
5914bool
5915arith_code_with_undefined_signed_overflow (tree_code code)
5916{
5917 switch (code)
5918 {
5919 case PLUS_EXPR:
5920 case MINUS_EXPR:
5921 case MULT_EXPR:
5922 case NEGATE_EXPR:
5923 case POINTER_PLUS_EXPR:
5924 return true;
5925 default:
5926 return false;
5927 }
5928}
5929
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each RHS operand (ops 1..n-1) to the unsigned type,
     collecting any conversion statements into STMTS.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  /* Give STMT a fresh unsigned result.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* Pointer addition becomes plain addition in the unsigned domain.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original LHS and type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
d4f5cd5e 5966
3d2cf79f 5967
c26de36d
RB
5968/* The valueization hook we use for the gimple_build API simplification.
5969 This makes us match fold_buildN behavior by only combining with
5970 statements in the sequence(s) we are currently building. */
5971
5972static tree
5973gimple_build_valueize (tree op)
5974{
5975 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
5976 return op;
5977 return NULL_TREE;
5978}
5979
3d2cf79f 5980/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 5981 simplifying it first if possible. Returns the built
3d2cf79f
RB
5982 expression value and appends statements possibly defining it
5983 to SEQ. */
5984
5985tree
5986gimple_build (gimple_seq *seq, location_t loc,
c26de36d 5987 enum tree_code code, tree type, tree op0)
3d2cf79f 5988{
c26de36d 5989 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
5990 if (!res)
5991 {
5992 if (gimple_in_ssa_p (cfun))
b731b390 5993 res = make_ssa_name (type);
3d2cf79f 5994 else
b731b390 5995 res = create_tmp_reg (type);
355fe088 5996 gimple *stmt;
3d2cf79f
RB
5997 if (code == REALPART_EXPR
5998 || code == IMAGPART_EXPR
5999 || code == VIEW_CONVERT_EXPR)
0d0e4a03 6000 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 6001 else
0d0e4a03 6002 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
6003 gimple_set_location (stmt, loc);
6004 gimple_seq_add_stmt_without_update (seq, stmt);
6005 }
6006 return res;
6007}
6008
6009/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 6010 simplifying it first if possible. Returns the built
3d2cf79f
RB
6011 expression value and appends statements possibly defining it
6012 to SEQ. */
6013
6014tree
6015gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6016 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 6017{
c26de36d 6018 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
6019 if (!res)
6020 {
6021 if (gimple_in_ssa_p (cfun))
b731b390 6022 res = make_ssa_name (type);
3d2cf79f 6023 else
b731b390 6024 res = create_tmp_reg (type);
355fe088 6025 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
6026 gimple_set_location (stmt, loc);
6027 gimple_seq_add_stmt_without_update (seq, stmt);
6028 }
6029 return res;
6030}
6031
6032/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 6033 simplifying it first if possible. Returns the built
3d2cf79f
RB
6034 expression value and appends statements possibly defining it
6035 to SEQ. */
6036
6037tree
6038gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6039 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
6040{
6041 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 6042 seq, gimple_build_valueize);
3d2cf79f
RB
6043 if (!res)
6044 {
6045 if (gimple_in_ssa_p (cfun))
b731b390 6046 res = make_ssa_name (type);
3d2cf79f 6047 else
b731b390 6048 res = create_tmp_reg (type);
355fe088 6049 gimple *stmt;
3d2cf79f 6050 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
6051 stmt = gimple_build_assign (res, code,
6052 build3 (code, type, op0, op1, op2));
3d2cf79f 6053 else
0d0e4a03 6054 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
6055 gimple_set_location (stmt, loc);
6056 gimple_seq_add_stmt_without_update (seq, stmt);
6057 }
6058 return res;
6059}
6060
6061/* Build the call FN (ARG0) with a result of type TYPE
6062 (or no result if TYPE is void) with location LOC,
c26de36d 6063 simplifying it first if possible. Returns the built
3d2cf79f
RB
6064 expression value (or NULL_TREE if TYPE is void) and appends
6065 statements possibly defining it to SEQ. */
6066
6067tree
6068gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6069 enum built_in_function fn, tree type, tree arg0)
3d2cf79f 6070{
c26de36d 6071 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
6072 if (!res)
6073 {
6074 tree decl = builtin_decl_implicit (fn);
355fe088 6075 gimple *stmt = gimple_build_call (decl, 1, arg0);
3d2cf79f
RB
6076 if (!VOID_TYPE_P (type))
6077 {
6078 if (gimple_in_ssa_p (cfun))
b731b390 6079 res = make_ssa_name (type);
3d2cf79f 6080 else
b731b390 6081 res = create_tmp_reg (type);
3d2cf79f
RB
6082 gimple_call_set_lhs (stmt, res);
6083 }
6084 gimple_set_location (stmt, loc);
6085 gimple_seq_add_stmt_without_update (seq, stmt);
6086 }
6087 return res;
6088}
6089
6090/* Build the call FN (ARG0, ARG1) with a result of type TYPE
6091 (or no result if TYPE is void) with location LOC,
c26de36d 6092 simplifying it first if possible. Returns the built
3d2cf79f
RB
6093 expression value (or NULL_TREE if TYPE is void) and appends
6094 statements possibly defining it to SEQ. */
6095
6096tree
6097gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6098 enum built_in_function fn, tree type, tree arg0, tree arg1)
3d2cf79f 6099{
c26de36d 6100 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
6101 if (!res)
6102 {
6103 tree decl = builtin_decl_implicit (fn);
355fe088 6104 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
3d2cf79f
RB
6105 if (!VOID_TYPE_P (type))
6106 {
6107 if (gimple_in_ssa_p (cfun))
b731b390 6108 res = make_ssa_name (type);
3d2cf79f 6109 else
b731b390 6110 res = create_tmp_reg (type);
3d2cf79f
RB
6111 gimple_call_set_lhs (stmt, res);
6112 }
6113 gimple_set_location (stmt, loc);
6114 gimple_seq_add_stmt_without_update (seq, stmt);
6115 }
6116 return res;
6117}
6118
6119/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6120 (or no result if TYPE is void) with location LOC,
c26de36d 6121 simplifying it first if possible. Returns the built
3d2cf79f
RB
6122 expression value (or NULL_TREE if TYPE is void) and appends
6123 statements possibly defining it to SEQ. */
6124
6125tree
6126gimple_build (gimple_seq *seq, location_t loc,
6127 enum built_in_function fn, tree type,
c26de36d 6128 tree arg0, tree arg1, tree arg2)
3d2cf79f 6129{
c26de36d
RB
6130 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6131 seq, gimple_build_valueize);
3d2cf79f
RB
6132 if (!res)
6133 {
6134 tree decl = builtin_decl_implicit (fn);
355fe088 6135 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
3d2cf79f
RB
6136 if (!VOID_TYPE_P (type))
6137 {
6138 if (gimple_in_ssa_p (cfun))
b731b390 6139 res = make_ssa_name (type);
3d2cf79f 6140 else
b731b390 6141 res = create_tmp_reg (type);
3d2cf79f
RB
6142 gimple_call_set_lhs (stmt, res);
6143 }
6144 gimple_set_location (stmt, loc);
6145 gimple_seq_add_stmt_without_update (seq, stmt);
6146 }
6147 return res;
6148}
6149
6150/* Build the conversion (TYPE) OP with a result of type TYPE
6151 with location LOC if such conversion is neccesary in GIMPLE,
6152 simplifying it first.
6153 Returns the built expression value and appends
6154 statements possibly defining it to SEQ. */
d4f5cd5e
RB
6155
6156tree
6157gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6158{
6159 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6160 return op;
3d2cf79f 6161 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 6162}
68e57f04 6163
74e3c262
RB
6164/* Build the conversion (ptrofftype) OP with a result of a type
6165 compatible with ptrofftype with location LOC if such conversion
6166 is neccesary in GIMPLE, simplifying it first.
6167 Returns the built expression value and appends
6168 statements possibly defining it to SEQ. */
6169
6170tree
6171gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6172{
6173 if (ptrofftype_p (TREE_TYPE (op)))
6174 return op;
6175 return gimple_convert (seq, loc, sizetype, op);
6176}
6177
68e57f04
RS
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  /* Dispatch on the RHS shape to the matching fold-const predicate.  */
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     gimple_expr_type (stmt),
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      gimple_expr_type (stmt),
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      /* No ternary predicate is available; stay conservative.  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
6211
6212/* Return true if return value of call STMT is known to be non-negative.
6213 If the return value is based on the assumption that signed overflow is
6214 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6215 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6216
6217static bool
6218gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6219 int depth)
6220{
6221 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6222 gimple_call_arg (stmt, 0) : NULL_TREE;
6223 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6224 gimple_call_arg (stmt, 1) : NULL_TREE;
6225
6226 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
1d9da71f 6227 gimple_call_combined_fn (stmt),
68e57f04
RS
6228 arg0,
6229 arg1,
6230 strict_overflow_p, depth);
6231}
6232
4534c203
RB
6233/* Return true if return value of call STMT is known to be non-negative.
6234 If the return value is based on the assumption that signed overflow is
6235 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6236 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6237
6238static bool
6239gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6240 int depth)
6241{
6242 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6243 {
6244 tree arg = gimple_phi_arg_def (stmt, i);
6245 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6246 return false;
6247 }
6248 return true;
6249}
6250
68e57f04
RS
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  /* Dispatch to the per-statement-kind helpers; anything else is
     conservatively assumed possibly negative.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      return false;
    }
}
67dbe582
RS
6275
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  /* Dispatch on the RHS shape to the matching fold-const predicate.  */
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      /* No ternary predicate is available; stay conservative.  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
6304
6305/* Return true if the floating-point value computed by call STMT is known
6306 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 6307 considered integer values. Return false for signaling NaN.
67dbe582
RS
6308
6309 DEPTH is the current nesting depth of the query. */
6310
6311static bool
6312gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6313{
6314 tree arg0 = (gimple_call_num_args (stmt) > 0
6315 ? gimple_call_arg (stmt, 0)
6316 : NULL_TREE);
6317 tree arg1 = (gimple_call_num_args (stmt) > 1
6318 ? gimple_call_arg (stmt, 1)
6319 : NULL_TREE);
1d9da71f 6320 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
6321 arg0, arg1, depth);
6322}
6323
6324/* Return true if the floating-point result of phi STMT is known to have
6325 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 6326 integer values. Return false for signaling NaN.
67dbe582
RS
6327
6328 DEPTH is the current nesting depth of the query. */
6329
6330static bool
6331gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6332{
6333 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6334 {
6335 tree arg = gimple_phi_arg_def (stmt, i);
6336 if (!integer_valued_real_single_p (arg, depth + 1))
6337 return false;
6338 }
6339 return true;
6340}
6341
/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  /* Dispatch to the per-statement-kind helpers; anything else is
     conservatively assumed non-integer-valued.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);
    default:
      return false;
    }
}