]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
Extend fold_const_call to combined_fn
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
5624e564 2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5
AM
25#include "target.h"
26#include "rtl.h"
cbdd87d4 27#include "tree.h"
c7131fb2 28#include "gimple.h"
957060b5 29#include "predict.h"
c7131fb2 30#include "ssa.h"
957060b5
AM
31#include "cgraph.h"
32#include "gimple-pretty-print.h"
c7131fb2 33#include "fold-const.h"
36566b39
PK
34#include "stmt.h"
35#include "expr.h"
36#include "stor-layout.h"
7ee2468b 37#include "dumpfile.h"
2fb9a547 38#include "gimple-fold.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
442b4905
AM
41#include "tree-into-ssa.h"
42#include "tree-dfa.h"
7a300452 43#include "tree-ssa.h"
cbdd87d4 44#include "tree-ssa-propagate.h"
450ad0cd 45#include "ipa-utils.h"
4484a35a 46#include "tree-ssa-address.h"
862d0b35 47#include "langhooks.h"
19e51b40 48#include "gimplify-me.h"
2b5f0895 49#include "dbgcnt.h"
9b2b7279 50#include "builtins.h"
e0ee10ed
RB
51#include "tree-eh.h"
52#include "gimple-match.h"
48126138 53#include "gomp-constants.h"
f869c12f 54#include "optabs-query.h"
451e8dae 55#include "omp-low.h"
f869c12f 56
cbdd87d4 57
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (e.g. DWARF-only) instances never have a body to refer to.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function already inlined everywhere has no body to refer to.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
157
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE if the value refers to a
   declaration that cannot be referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Canonicalize PTR p+ CST into &MEM[ptr + CST] so the result is an
     ADDR_EXPR handled by the code below.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal by its underlying decl.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      /* Taking the address of BASE below makes it addressable.  */
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      /* Preserve the type of the original expression.  */
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
cbdd87d4
RG
218
219/* If SYM is a constant variable with known value, return the value.
220 NULL_TREE is returned otherwise. */
221
222tree
223get_symbol_constant_value (tree sym)
224{
6a6dac52
JH
225 tree val = ctor_for_folding (sym);
226 if (val != error_mark_node)
cbdd87d4 227 {
cbdd87d4
RG
228 if (val)
229 {
9d60be38 230 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 231 if (val && is_gimple_min_invariant (val))
17f39a39 232 return val;
1389294c
JH
233 else
234 return NULL_TREE;
cbdd87d4
RG
235 }
236 /* Variables declared 'const' without an initializer
237 have zero as the initializer if they may not be
238 overridden at link or run time. */
239 if (!val
b8a8c472 240 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 241 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
242 }
243
244 return NULL_TREE;
245}
246
247
cbdd87d4
RG
248
249/* Subroutine of fold_stmt. We perform several simplifications of the
250 memory reference tree EXPR and make sure to re-gimplify them properly
251 after propagation of constant addresses. IS_LHS is true if the
252 reference is supposed to be an lvalue. */
253
254static tree
255maybe_fold_reference (tree expr, bool is_lhs)
256{
17f39a39 257 tree result;
cbdd87d4 258
f0eddb90
RG
259 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
260 || TREE_CODE (expr) == REALPART_EXPR
261 || TREE_CODE (expr) == IMAGPART_EXPR)
262 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
263 return fold_unary_loc (EXPR_LOCATION (expr),
264 TREE_CODE (expr),
265 TREE_TYPE (expr),
266 TREE_OPERAND (expr, 0));
267 else if (TREE_CODE (expr) == BIT_FIELD_REF
268 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
269 return fold_ternary_loc (EXPR_LOCATION (expr),
270 TREE_CODE (expr),
271 TREE_TYPE (expr),
272 TREE_OPERAND (expr, 0),
273 TREE_OPERAND (expr, 1),
274 TREE_OPERAND (expr, 2));
275
f0eddb90
RG
276 if (!is_lhs
277 && (result = fold_const_aggregate_ref (expr))
278 && is_gimple_min_invariant (result))
279 return result;
cbdd87d4 280
cbdd87d4
RG
281 return NULL_TREE;
282}
283
284
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers carry no value to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		/* Try to resolve the virtual call to a single (or no)
		   possible target and replace the OBJ_TYPE_REF by its
		   address.  */
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p + 0] simplifies to just p.  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
424
fef5a0d9
RB
425
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  The last store in
     the sequence reuses the VDEF of the replaced statement; earlier
     stores get fresh SSA names for the virtual operand.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
497
cbdd87d4
RG
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* The call result is ignored; gimplify EXPR purely for its
	 side effects.  */
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a temporary and assign that to the
	 original call's LHS as the last statement of the sequence.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
cbdd87d4 553
fef5a0d9
RB
554
555/* Replace the call at *GSI with the gimple value VAL. */
556
557static void
558replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
559{
355fe088 560 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 561 tree lhs = gimple_call_lhs (stmt);
355fe088 562 gimple *repl;
fef5a0d9 563 if (lhs)
e256dfce 564 {
fef5a0d9
RB
565 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
566 val = fold_convert (TREE_TYPE (lhs), val);
567 repl = gimple_build_assign (lhs, val);
568 }
569 else
570 repl = gimple_build_nop ();
571 tree vdef = gimple_vdef (stmt);
572 if (vdef && TREE_CODE (vdef) == SSA_NAME)
573 {
574 unlink_stmt_vdef (stmt);
575 release_ssa_name (vdef);
576 }
f6b4dc28 577 gsi_replace (gsi, repl, false);
fef5a0d9
RB
578}
579
580/* Replace the call at *GSI with the new call REPL and fold that
581 again. */
582
583static void
355fe088 584replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
fef5a0d9 585{
355fe088 586 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
587 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
588 gimple_set_location (repl, gimple_location (stmt));
589 if (gimple_vdef (stmt)
590 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
591 {
592 gimple_set_vdef (repl, gimple_vdef (stmt));
593 gimple_set_vuse (repl, gimple_vuse (stmt));
594 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
595 }
f6b4dc28 596 gsi_replace (gsi, repl, false);
fef5a0d9
RB
597 fold_stmt (gsi);
598}
599
600/* Return true if VAR is a VAR_DECL or a component thereof. */
601
602static bool
603var_decl_component_p (tree var)
604{
605 tree inner = var;
606 while (handled_component_p (inner))
607 inner = TREE_OPERAND (inner, 0);
608 return SSA_VAR_P (inner);
609}
610
611/* Fold function call to builtin mem{{,p}cpy,move}. Return
86c5a5c3 612 false if no simplification can be made.
fef5a0d9
RB
613 If ENDP is 0, return DEST (like memcpy).
614 If ENDP is 1, return DEST+LEN (like mempcpy).
615 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
616 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
617 (memmove). */
618
619static bool
620gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
621 tree dest, tree src, int endp)
622{
355fe088 623 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
624 tree lhs = gimple_call_lhs (stmt);
625 tree len = gimple_call_arg (stmt, 2);
626 tree destvar, srcvar;
627 location_t loc = gimple_location (stmt);
628
629 /* If the LEN parameter is zero, return DEST. */
630 if (integer_zerop (len))
631 {
355fe088 632 gimple *repl;
fef5a0d9
RB
633 if (gimple_call_lhs (stmt))
634 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
635 else
636 repl = gimple_build_nop ();
637 tree vdef = gimple_vdef (stmt);
638 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 639 {
fef5a0d9
RB
640 unlink_stmt_vdef (stmt);
641 release_ssa_name (vdef);
642 }
f6b4dc28 643 gsi_replace (gsi, repl, false);
fef5a0d9
RB
644 return true;
645 }
646
647 /* If SRC and DEST are the same (and not volatile), return
648 DEST{,+LEN,+LEN-1}. */
649 if (operand_equal_p (src, dest, 0))
650 {
651 unlink_stmt_vdef (stmt);
652 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
653 release_ssa_name (gimple_vdef (stmt));
654 if (!lhs)
655 {
f6b4dc28 656 gsi_replace (gsi, gimple_build_nop (), false);
fef5a0d9
RB
657 return true;
658 }
659 goto done;
660 }
661 else
662 {
663 tree srctype, desttype;
664 unsigned int src_align, dest_align;
665 tree off0;
666
667 /* Build accesses at offset zero with a ref-all character type. */
668 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
669 ptr_mode, true), 0);
670
671 /* If we can perform the copy efficiently with first doing all loads
672 and then all stores inline it that way. Currently efficiently
673 means that we can load all the memory into a single integer
674 register which is what MOVE_MAX gives us. */
675 src_align = get_pointer_alignment (src);
676 dest_align = get_pointer_alignment (dest);
677 if (tree_fits_uhwi_p (len)
678 && compare_tree_int (len, MOVE_MAX) <= 0
679 /* ??? Don't transform copies from strings with known length this
680 confuses the tree-ssa-strlen.c. This doesn't handle
681 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
682 reason. */
683 && !c_strlen (src, 2))
684 {
685 unsigned ilen = tree_to_uhwi (len);
686 if (exact_log2 (ilen) != -1)
687 {
688 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
689 if (type
690 && TYPE_MODE (type) != BLKmode
691 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
692 == ilen * 8)
693 /* If the destination pointer is not aligned we must be able
694 to emit an unaligned store. */
695 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
f869c12f
RR
696 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
697 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
698 != CODE_FOR_nothing)))
fef5a0d9
RB
699 {
700 tree srctype = type;
701 tree desttype = type;
702 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
703 srctype = build_aligned_type (type, src_align);
704 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
705 tree tem = fold_const_aggregate_ref (srcmem);
706 if (tem)
707 srcmem = tem;
708 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
709 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
f869c12f
RR
710 src_align)
711 && (optab_handler (movmisalign_optab,
712 TYPE_MODE (type))
713 == CODE_FOR_nothing))
fef5a0d9
RB
714 srcmem = NULL_TREE;
715 if (srcmem)
716 {
355fe088 717 gimple *new_stmt;
fef5a0d9
RB
718 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
719 {
720 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
721 if (gimple_in_ssa_p (cfun))
722 srcmem = make_ssa_name (TREE_TYPE (srcmem),
723 new_stmt);
724 else
b731b390 725 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
fef5a0d9
RB
726 gimple_assign_set_lhs (new_stmt, srcmem);
727 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
728 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
729 }
730 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
731 desttype = build_aligned_type (type, dest_align);
732 new_stmt
733 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
734 dest, off0),
735 srcmem);
736 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
737 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
738 if (gimple_vdef (new_stmt)
739 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
740 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
741 if (!lhs)
742 {
f6b4dc28 743 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
744 return true;
745 }
746 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
747 goto done;
748 }
749 }
750 }
751 }
752
753 if (endp == 3)
754 {
755 /* Both DEST and SRC must be pointer types.
756 ??? This is what old code did. Is the testing for pointer types
757 really mandatory?
758
759 If either SRC is readonly or length is 1, we can use memcpy. */
760 if (!dest_align || !src_align)
761 return false;
762 if (readonly_data_expr (src)
763 || (tree_fits_uhwi_p (len)
764 && (MIN (src_align, dest_align) / BITS_PER_UNIT
765 >= tree_to_uhwi (len))))
766 {
767 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
768 if (!fn)
769 return false;
770 gimple_call_set_fndecl (stmt, fn);
771 gimple_call_set_arg (stmt, 0, dest);
772 gimple_call_set_arg (stmt, 1, src);
773 fold_stmt (gsi);
774 return true;
775 }
776
777 /* If *src and *dest can't overlap, optimize into memcpy as well. */
778 if (TREE_CODE (src) == ADDR_EXPR
779 && TREE_CODE (dest) == ADDR_EXPR)
780 {
781 tree src_base, dest_base, fn;
782 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
783 HOST_WIDE_INT size = -1;
784 HOST_WIDE_INT maxsize = -1;
ee45a32d 785 bool reverse;
fef5a0d9
RB
786
787 srcvar = TREE_OPERAND (src, 0);
788 src_base = get_ref_base_and_extent (srcvar, &src_offset,
ee45a32d 789 &size, &maxsize, &reverse);
fef5a0d9
RB
790 destvar = TREE_OPERAND (dest, 0);
791 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
ee45a32d 792 &size, &maxsize, &reverse);
fef5a0d9
RB
793 if (tree_fits_uhwi_p (len))
794 maxsize = tree_to_uhwi (len);
795 else
796 maxsize = -1;
797 src_offset /= BITS_PER_UNIT;
798 dest_offset /= BITS_PER_UNIT;
799 if (SSA_VAR_P (src_base)
800 && SSA_VAR_P (dest_base))
801 {
802 if (operand_equal_p (src_base, dest_base, 0)
803 && ranges_overlap_p (src_offset, maxsize,
804 dest_offset, maxsize))
805 return false;
806 }
807 else if (TREE_CODE (src_base) == MEM_REF
808 && TREE_CODE (dest_base) == MEM_REF)
809 {
810 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
811 TREE_OPERAND (dest_base, 0), 0))
812 return false;
813 offset_int off = mem_ref_offset (src_base) + src_offset;
814 if (!wi::fits_shwi_p (off))
815 return false;
816 src_offset = off.to_shwi ();
817
818 off = mem_ref_offset (dest_base) + dest_offset;
819 if (!wi::fits_shwi_p (off))
820 return false;
821 dest_offset = off.to_shwi ();
822 if (ranges_overlap_p (src_offset, maxsize,
823 dest_offset, maxsize))
824 return false;
825 }
826 else
827 return false;
828
829 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
830 if (!fn)
831 return false;
832 gimple_call_set_fndecl (stmt, fn);
833 gimple_call_set_arg (stmt, 0, dest);
834 gimple_call_set_arg (stmt, 1, src);
835 fold_stmt (gsi);
836 return true;
837 }
838
839 /* If the destination and source do not alias optimize into
840 memcpy as well. */
841 if ((is_gimple_min_invariant (dest)
842 || TREE_CODE (dest) == SSA_NAME)
843 && (is_gimple_min_invariant (src)
844 || TREE_CODE (src) == SSA_NAME))
845 {
846 ao_ref destr, srcr;
847 ao_ref_init_from_ptr_and_size (&destr, dest, len);
848 ao_ref_init_from_ptr_and_size (&srcr, src, len);
849 if (!refs_may_alias_p_1 (&destr, &srcr, false))
850 {
851 tree fn;
852 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
853 if (!fn)
854 return false;
855 gimple_call_set_fndecl (stmt, fn);
856 gimple_call_set_arg (stmt, 0, dest);
857 gimple_call_set_arg (stmt, 1, src);
858 fold_stmt (gsi);
859 return true;
860 }
861 }
862
863 return false;
864 }
865
866 if (!tree_fits_shwi_p (len))
867 return false;
868 /* FIXME:
869 This logic lose for arguments like (type *)malloc (sizeof (type)),
870 since we strip the casts of up to VOID return value from malloc.
871 Perhaps we ought to inherit type from non-VOID argument here? */
872 STRIP_NOPS (src);
873 STRIP_NOPS (dest);
874 if (!POINTER_TYPE_P (TREE_TYPE (src))
875 || !POINTER_TYPE_P (TREE_TYPE (dest)))
876 return false;
877 /* In the following try to find a type that is most natural to be
878 used for the memcpy source and destination and that allows
879 the most optimization when memcpy is turned into a plain assignment
880 using that type. In theory we could always use a char[len] type
881 but that only gains us that the destination and source possibly
882 no longer will have their address taken. */
883 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
884 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
885 {
886 tree tem = TREE_OPERAND (src, 0);
887 STRIP_NOPS (tem);
888 if (tem != TREE_OPERAND (src, 0))
889 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
890 }
891 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
892 {
893 tree tem = TREE_OPERAND (dest, 0);
894 STRIP_NOPS (tem);
895 if (tem != TREE_OPERAND (dest, 0))
896 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
897 }
898 srctype = TREE_TYPE (TREE_TYPE (src));
899 if (TREE_CODE (srctype) == ARRAY_TYPE
900 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
901 {
902 srctype = TREE_TYPE (srctype);
903 STRIP_NOPS (src);
904 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
905 }
906 desttype = TREE_TYPE (TREE_TYPE (dest));
907 if (TREE_CODE (desttype) == ARRAY_TYPE
908 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
909 {
910 desttype = TREE_TYPE (desttype);
911 STRIP_NOPS (dest);
912 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
913 }
914 if (TREE_ADDRESSABLE (srctype)
915 || TREE_ADDRESSABLE (desttype))
916 return false;
917
918 /* Make sure we are not copying using a floating-point mode or
919 a type whose size possibly does not match its precision. */
920 if (FLOAT_MODE_P (TYPE_MODE (desttype))
921 || TREE_CODE (desttype) == BOOLEAN_TYPE
922 || TREE_CODE (desttype) == ENUMERAL_TYPE)
923 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
924 if (FLOAT_MODE_P (TYPE_MODE (srctype))
925 || TREE_CODE (srctype) == BOOLEAN_TYPE
926 || TREE_CODE (srctype) == ENUMERAL_TYPE)
927 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
928 if (!srctype)
929 srctype = desttype;
930 if (!desttype)
931 desttype = srctype;
932 if (!srctype)
933 return false;
934
935 src_align = get_pointer_alignment (src);
936 dest_align = get_pointer_alignment (dest);
937 if (dest_align < TYPE_ALIGN (desttype)
938 || src_align < TYPE_ALIGN (srctype))
939 return false;
940
941 destvar = dest;
942 STRIP_NOPS (destvar);
943 if (TREE_CODE (destvar) == ADDR_EXPR
944 && var_decl_component_p (TREE_OPERAND (destvar, 0))
945 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
946 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
947 else
948 destvar = NULL_TREE;
949
950 srcvar = src;
951 STRIP_NOPS (srcvar);
952 if (TREE_CODE (srcvar) == ADDR_EXPR
953 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
954 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
955 {
956 if (!destvar
957 || src_align >= TYPE_ALIGN (desttype))
958 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
959 srcvar, off0);
960 else if (!STRICT_ALIGNMENT)
961 {
962 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
963 src_align);
964 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
965 }
e256dfce 966 else
fef5a0d9
RB
967 srcvar = NULL_TREE;
968 }
969 else
970 srcvar = NULL_TREE;
971
972 if (srcvar == NULL_TREE && destvar == NULL_TREE)
973 return false;
974
975 if (srcvar == NULL_TREE)
976 {
977 STRIP_NOPS (src);
978 if (src_align >= TYPE_ALIGN (desttype))
979 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
980 else
981 {
982 if (STRICT_ALIGNMENT)
983 return false;
984 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
985 src_align);
986 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
987 }
988 }
989 else if (destvar == NULL_TREE)
990 {
991 STRIP_NOPS (dest);
992 if (dest_align >= TYPE_ALIGN (srctype))
993 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
994 else
995 {
996 if (STRICT_ALIGNMENT)
997 return false;
998 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
999 dest_align);
1000 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1001 }
1002 }
1003
355fe088 1004 gimple *new_stmt;
fef5a0d9
RB
1005 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1006 {
1007 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1008 if (gimple_in_ssa_p (cfun))
1009 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1010 else
b731b390 1011 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
fef5a0d9
RB
1012 gimple_assign_set_lhs (new_stmt, srcvar);
1013 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1014 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1015 }
1016 new_stmt = gimple_build_assign (destvar, srcvar);
1017 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1018 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1019 if (gimple_vdef (new_stmt)
1020 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1021 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1022 if (!lhs)
1023 {
f6b4dc28 1024 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
1025 return true;
1026 }
1027 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1028 }
1029
1030done:
74e3c262 1031 gimple_seq stmts = NULL;
fef5a0d9
RB
1032 if (endp == 0 || endp == 3)
1033 len = NULL_TREE;
1034 else if (endp == 2)
74e3c262
RB
1035 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1036 ssize_int (1));
fef5a0d9 1037 if (endp == 2 || endp == 1)
74e3c262
RB
1038 {
1039 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1040 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1041 TREE_TYPE (dest), dest, len);
1042 }
fef5a0d9 1043
74e3c262 1044 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 1045 gimple *repl = gimple_build_assign (lhs, dest);
f6b4dc28 1046 gsi_replace (gsi, repl, false);
fef5a0d9
RB
1047 return true;
1048}
1049
1050/* Fold function call to builtin memset or bzero at *GSI setting the
1051 memory of size LEN to VAL. Return whether a simplification was made. */
1052
1053static bool
1054gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1055{
355fe088 1056 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1057 tree etype;
1058 unsigned HOST_WIDE_INT length, cval;
1059
1060 /* If the LEN parameter is zero, return DEST. */
1061 if (integer_zerop (len))
1062 {
1063 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1064 return true;
1065 }
1066
1067 if (! tree_fits_uhwi_p (len))
1068 return false;
1069
1070 if (TREE_CODE (c) != INTEGER_CST)
1071 return false;
1072
1073 tree dest = gimple_call_arg (stmt, 0);
1074 tree var = dest;
1075 if (TREE_CODE (var) != ADDR_EXPR)
1076 return false;
1077
1078 var = TREE_OPERAND (var, 0);
1079 if (TREE_THIS_VOLATILE (var))
1080 return false;
1081
1082 etype = TREE_TYPE (var);
1083 if (TREE_CODE (etype) == ARRAY_TYPE)
1084 etype = TREE_TYPE (etype);
1085
1086 if (!INTEGRAL_TYPE_P (etype)
1087 && !POINTER_TYPE_P (etype))
1088 return NULL_TREE;
1089
1090 if (! var_decl_component_p (var))
1091 return NULL_TREE;
1092
1093 length = tree_to_uhwi (len);
1094 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1095 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1096 return NULL_TREE;
1097
1098 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1099 return NULL_TREE;
1100
1101 if (integer_zerop (c))
1102 cval = 0;
1103 else
1104 {
1105 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1106 return NULL_TREE;
1107
1108 cval = TREE_INT_CST_LOW (c);
1109 cval &= 0xff;
1110 cval |= cval << 8;
1111 cval |= cval << 16;
1112 cval |= (cval << 31) << 1;
1113 }
1114
1115 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1116 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
fef5a0d9
RB
1117 gimple_set_vuse (store, gimple_vuse (stmt));
1118 tree vdef = gimple_vdef (stmt);
1119 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1120 {
1121 gimple_set_vdef (store, gimple_vdef (stmt));
1122 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1123 }
1124 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1125 if (gimple_call_lhs (stmt))
1126 {
355fe088 1127 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1128 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1129 }
1130 else
1131 {
1132 gimple_stmt_iterator gsi2 = *gsi;
1133 gsi_prev (gsi);
1134 gsi_remove (&gsi2, true);
1135 }
1136
1137 return true;
1138}
1139
1140
1141/* Return the string length, maximum string length or maximum value of
1142 ARG in LENGTH.
1143 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1144 is not NULL and, for TYPE == 0, its value is not equal to the length
1145 we determine or if we are unable to determine the length or value,
1146 return false. VISITED is a bitmap of visited variables.
1147 TYPE is 0 if string length should be returned, 1 for maximum string
1148 length and 2 for maximum value ARG can have. */
1149
1150static bool
dcb7fae2 1151get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
fef5a0d9
RB
1152{
1153 tree var, val;
355fe088 1154 gimple *def_stmt;
fef5a0d9
RB
1155
1156 if (TREE_CODE (arg) != SSA_NAME)
1157 {
1158 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1159 if (TREE_CODE (arg) == ADDR_EXPR
1160 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1161 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1162 {
1163 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1164 if (TREE_CODE (aop0) == INDIRECT_REF
1165 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1166 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1167 length, visited, type);
1168 }
1169
1170 if (type == 2)
1171 {
1172 val = arg;
1173 if (TREE_CODE (val) != INTEGER_CST
1174 || tree_int_cst_sgn (val) < 0)
1175 return false;
1176 }
1177 else
1178 val = c_strlen (arg, 1);
1179 if (!val)
1180 return false;
1181
1182 if (*length)
1183 {
1184 if (type > 0)
1185 {
1186 if (TREE_CODE (*length) != INTEGER_CST
1187 || TREE_CODE (val) != INTEGER_CST)
1188 return false;
1189
1190 if (tree_int_cst_lt (*length, val))
1191 *length = val;
1192 return true;
1193 }
1194 else if (simple_cst_equal (val, *length) != 1)
1195 return false;
1196 }
1197
1198 *length = val;
1199 return true;
1200 }
1201
1202 /* If ARG is registered for SSA update we cannot look at its defining
1203 statement. */
1204 if (name_registered_for_update_p (arg))
1205 return false;
1206
1207 /* If we were already here, break the infinite cycle. */
dcb7fae2
RB
1208 if (!*visited)
1209 *visited = BITMAP_ALLOC (NULL);
1210 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
fef5a0d9
RB
1211 return true;
1212
1213 var = arg;
1214 def_stmt = SSA_NAME_DEF_STMT (var);
1215
1216 switch (gimple_code (def_stmt))
1217 {
1218 case GIMPLE_ASSIGN:
1219 /* The RHS of the statement defining VAR must either have a
1220 constant length or come from another SSA_NAME with a constant
1221 length. */
1222 if (gimple_assign_single_p (def_stmt)
1223 || gimple_assign_unary_nop_p (def_stmt))
1224 {
1225 tree rhs = gimple_assign_rhs1 (def_stmt);
1226 return get_maxval_strlen (rhs, length, visited, type);
1227 }
1228 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1229 {
1230 tree op2 = gimple_assign_rhs2 (def_stmt);
1231 tree op3 = gimple_assign_rhs3 (def_stmt);
1232 return get_maxval_strlen (op2, length, visited, type)
1233 && get_maxval_strlen (op3, length, visited, type);
1234 }
1235 return false;
1236
1237 case GIMPLE_PHI:
1238 {
1239 /* All the arguments of the PHI node must have the same constant
1240 length. */
1241 unsigned i;
1242
1243 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1244 {
1245 tree arg = gimple_phi_arg (def_stmt, i)->def;
1246
1247 /* If this PHI has itself as an argument, we cannot
1248 determine the string length of this argument. However,
1249 if we can find a constant string length for the other
1250 PHI args then we can still be sure that this is a
1251 constant string length. So be optimistic and just
1252 continue with the next argument. */
1253 if (arg == gimple_phi_result (def_stmt))
1254 continue;
1255
1256 if (!get_maxval_strlen (arg, length, visited, type))
1257 return false;
1258 }
1259 }
1260 return true;
1261
1262 default:
1263 return false;
1264 }
1265}
1266
dcb7fae2
RB
1267tree
1268get_maxval_strlen (tree arg, int type)
1269{
1270 bitmap visited = NULL;
1271 tree len = NULL_TREE;
1272 if (!get_maxval_strlen (arg, &len, &visited, type))
1273 len = NULL_TREE;
1274 if (visited)
1275 BITMAP_FREE (visited);
1276
1277 return len;
1278}
1279
fef5a0d9
RB
1280
1281/* Fold function call to builtin strcpy with arguments DEST and SRC.
1282 If LEN is not NULL, it represents the length of the string to be
1283 copied. Return NULL_TREE if no simplification can be made. */
1284
1285static bool
1286gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 1287 tree dest, tree src)
fef5a0d9 1288{
dcb7fae2 1289 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1290 tree fn;
1291
1292 /* If SRC and DEST are the same (and not volatile), return DEST. */
1293 if (operand_equal_p (src, dest, 0))
1294 {
1295 replace_call_with_value (gsi, dest);
1296 return true;
1297 }
1298
1299 if (optimize_function_for_size_p (cfun))
1300 return false;
1301
1302 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1303 if (!fn)
1304 return false;
1305
1579e1f8 1306 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1307 if (!len)
dcb7fae2 1308 return false;
fef5a0d9
RB
1309
1310 len = fold_convert_loc (loc, size_type_node, len);
1311 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1312 len = force_gimple_operand_gsi (gsi, len, true,
1313 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1314 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1315 replace_call_with_call_and_fold (gsi, repl);
1316 return true;
1317}
1318
1319/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1320 If SLEN is not NULL, it represents the length of the source string.
1321 Return NULL_TREE if no simplification can be made. */
1322
1323static bool
dcb7fae2
RB
1324gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1325 tree dest, tree src, tree len)
fef5a0d9 1326{
dcb7fae2 1327 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1328 tree fn;
1329
1330 /* If the LEN parameter is zero, return DEST. */
1331 if (integer_zerop (len))
1332 {
1333 replace_call_with_value (gsi, dest);
1334 return true;
1335 }
1336
1337 /* We can't compare slen with len as constants below if len is not a
1338 constant. */
dcb7fae2 1339 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
1340 return false;
1341
fef5a0d9 1342 /* Now, we must be passed a constant src ptr parameter. */
1579e1f8 1343 tree slen = get_maxval_strlen (src, 0);
dcb7fae2 1344 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
1345 return false;
1346
1347 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1348
1349 /* We do not support simplification of this case, though we do
1350 support it when expanding trees into RTL. */
1351 /* FIXME: generate a call to __builtin_memset. */
1352 if (tree_int_cst_lt (slen, len))
1353 return false;
1354
1355 /* OK transform into builtin memcpy. */
1356 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1357 if (!fn)
1358 return false;
1359
1360 len = fold_convert_loc (loc, size_type_node, len);
1361 len = force_gimple_operand_gsi (gsi, len, true,
1362 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1363 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1364 replace_call_with_call_and_fold (gsi, repl);
1365 return true;
1366}
1367
1368/* Simplify a call to the strcat builtin. DST and SRC are the arguments
1369 to the call.
1370
1371 Return NULL_TREE if no simplification was possible, otherwise return the
1372 simplified form of the call as a tree.
1373
1374 The simplified form may be a constant or other expression which
1375 computes the same value, but in a more efficient manner (including
1376 calls to other builtin functions).
1377
1378 The call may contain arguments which need to be evaluated, but
1379 which are not useful to determine the result of the call. In
1380 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1381 COMPOUND_EXPR will be an argument which must be evaluated.
1382 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1383 COMPOUND_EXPR in the chain will contain the tree for the simplified
1384 form of the builtin function call. */
1385
1386static bool
dcb7fae2 1387gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
fef5a0d9 1388{
355fe088 1389 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 1390 location_t loc = gimple_location (stmt);
fef5a0d9
RB
1391
1392 const char *p = c_getstr (src);
1393
1394 /* If the string length is zero, return the dst parameter. */
1395 if (p && *p == '\0')
1396 {
1397 replace_call_with_value (gsi, dst);
1398 return true;
1399 }
1400
1401 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1402 return false;
1403
1404 /* See if we can store by pieces into (dst + strlen(dst)). */
1405 tree newdst;
1406 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1407 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1408
1409 if (!strlen_fn || !memcpy_fn)
1410 return false;
1411
1412 /* If the length of the source string isn't computable don't
1413 split strcat into strlen and memcpy. */
dcb7fae2 1414 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1415 if (! len)
fef5a0d9
RB
1416 return false;
1417
1418 /* Create strlen (dst). */
1419 gimple_seq stmts = NULL, stmts2;
355fe088 1420 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
fef5a0d9
RB
1421 gimple_set_location (repl, loc);
1422 if (gimple_in_ssa_p (cfun))
b731b390 1423 newdst = make_ssa_name (size_type_node);
fef5a0d9 1424 else
b731b390 1425 newdst = create_tmp_reg (size_type_node);
fef5a0d9
RB
1426 gimple_call_set_lhs (repl, newdst);
1427 gimple_seq_add_stmt_without_update (&stmts, repl);
1428
1429 /* Create (dst p+ strlen (dst)). */
1430 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1431 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1432 gimple_seq_add_seq_without_update (&stmts, stmts2);
1433
1434 len = fold_convert_loc (loc, size_type_node, len);
1435 len = size_binop_loc (loc, PLUS_EXPR, len,
1436 build_int_cst (size_type_node, 1));
1437 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1438 gimple_seq_add_seq_without_update (&stmts, stmts2);
1439
1440 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1441 gimple_seq_add_stmt_without_update (&stmts, repl);
1442 if (gimple_call_lhs (stmt))
1443 {
1444 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1445 gimple_seq_add_stmt_without_update (&stmts, repl);
1446 gsi_replace_with_seq_vops (gsi, stmts);
1447 /* gsi now points at the assignment to the lhs, get a
1448 stmt iterator to the memcpy call.
1449 ??? We can't use gsi_for_stmt as that doesn't work when the
1450 CFG isn't built yet. */
1451 gimple_stmt_iterator gsi2 = *gsi;
1452 gsi_prev (&gsi2);
1453 fold_stmt (&gsi2);
1454 }
1455 else
1456 {
1457 gsi_replace_with_seq_vops (gsi, stmts);
1458 fold_stmt (gsi);
1459 }
1460 return true;
1461}
1462
07f1cf56
RB
1463/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1464 are the arguments to the call. */
1465
1466static bool
1467gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1468{
355fe088 1469 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
1470 tree dest = gimple_call_arg (stmt, 0);
1471 tree src = gimple_call_arg (stmt, 1);
1472 tree size = gimple_call_arg (stmt, 2);
1473 tree fn;
1474 const char *p;
1475
1476
1477 p = c_getstr (src);
1478 /* If the SRC parameter is "", return DEST. */
1479 if (p && *p == '\0')
1480 {
1481 replace_call_with_value (gsi, dest);
1482 return true;
1483 }
1484
1485 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1486 return false;
1487
1488 /* If __builtin_strcat_chk is used, assume strcat is available. */
1489 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1490 if (!fn)
1491 return false;
1492
355fe088 1493 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
1494 replace_call_with_call_and_fold (gsi, repl);
1495 return true;
1496}
1497
ad03a744
RB
1498/* Simplify a call to the strncat builtin. */
1499
1500static bool
1501gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1502{
1503 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1504 tree dst = gimple_call_arg (stmt, 0);
1505 tree src = gimple_call_arg (stmt, 1);
1506 tree len = gimple_call_arg (stmt, 2);
1507
1508 const char *p = c_getstr (src);
1509
1510 /* If the requested length is zero, or the src parameter string
1511 length is zero, return the dst parameter. */
1512 if (integer_zerop (len) || (p && *p == '\0'))
1513 {
1514 replace_call_with_value (gsi, dst);
1515 return true;
1516 }
1517
1518 /* If the requested len is greater than or equal to the string
1519 length, call strcat. */
1520 if (TREE_CODE (len) == INTEGER_CST && p
1521 && compare_tree_int (len, strlen (p)) >= 0)
1522 {
1523 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1524
1525 /* If the replacement _DECL isn't initialized, don't do the
1526 transformation. */
1527 if (!fn)
1528 return false;
1529
1530 gcall *repl = gimple_build_call (fn, 2, dst, src);
1531 replace_call_with_call_and_fold (gsi, repl);
1532 return true;
1533 }
1534
1535 return false;
1536}
1537
745583f9
RB
1538/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1539 LEN, and SIZE. */
1540
1541static bool
1542gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1543{
355fe088 1544 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
1545 tree dest = gimple_call_arg (stmt, 0);
1546 tree src = gimple_call_arg (stmt, 1);
1547 tree len = gimple_call_arg (stmt, 2);
1548 tree size = gimple_call_arg (stmt, 3);
1549 tree fn;
1550 const char *p;
1551
1552 p = c_getstr (src);
1553 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1554 if ((p && *p == '\0')
1555 || integer_zerop (len))
1556 {
1557 replace_call_with_value (gsi, dest);
1558 return true;
1559 }
1560
1561 if (! tree_fits_uhwi_p (size))
1562 return false;
1563
1564 if (! integer_all_onesp (size))
1565 {
1566 tree src_len = c_strlen (src, 1);
1567 if (src_len
1568 && tree_fits_uhwi_p (src_len)
1569 && tree_fits_uhwi_p (len)
1570 && ! tree_int_cst_lt (len, src_len))
1571 {
1572 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1573 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1574 if (!fn)
1575 return false;
1576
355fe088 1577 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
1578 replace_call_with_call_and_fold (gsi, repl);
1579 return true;
1580 }
1581 return false;
1582 }
1583
1584 /* If __builtin_strncat_chk is used, assume strncat is available. */
1585 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1586 if (!fn)
1587 return false;
1588
355fe088 1589 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
1590 replace_call_with_call_and_fold (gsi, repl);
1591 return true;
1592}
1593
fef5a0d9
RB
1594/* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1595 to the call. IGNORE is true if the value returned
1596 by the builtin will be ignored. UNLOCKED is true is true if this
1597 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1598 the known length of the string. Return NULL_TREE if no simplification
1599 was possible. */
1600
1601static bool
1602gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
fef5a0d9 1603 tree arg0, tree arg1,
dcb7fae2 1604 bool unlocked)
fef5a0d9 1605{
355fe088 1606 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 1607
fef5a0d9
RB
1608 /* If we're using an unlocked function, assume the other unlocked
1609 functions exist explicitly. */
1610 tree const fn_fputc = (unlocked
1611 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1612 : builtin_decl_implicit (BUILT_IN_FPUTC));
1613 tree const fn_fwrite = (unlocked
1614 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1615 : builtin_decl_implicit (BUILT_IN_FWRITE));
1616
1617 /* If the return value is used, don't do the transformation. */
dcb7fae2 1618 if (gimple_call_lhs (stmt))
fef5a0d9
RB
1619 return false;
1620
fef5a0d9
RB
1621 /* Get the length of the string passed to fputs. If the length
1622 can't be determined, punt. */
dcb7fae2 1623 tree len = get_maxval_strlen (arg0, 0);
fef5a0d9
RB
1624 if (!len
1625 || TREE_CODE (len) != INTEGER_CST)
1626 return false;
1627
1628 switch (compare_tree_int (len, 1))
1629 {
1630 case -1: /* length is 0, delete the call entirely . */
1631 replace_call_with_value (gsi, integer_zero_node);
1632 return true;
1633
1634 case 0: /* length is 1, call fputc. */
1635 {
1636 const char *p = c_getstr (arg0);
1637 if (p != NULL)
1638 {
1639 if (!fn_fputc)
1640 return false;
1641
355fe088 1642 gimple *repl = gimple_build_call (fn_fputc, 2,
fef5a0d9
RB
1643 build_int_cst
1644 (integer_type_node, p[0]), arg1);
1645 replace_call_with_call_and_fold (gsi, repl);
1646 return true;
1647 }
1648 }
1649 /* FALLTHROUGH */
1650 case 1: /* length is greater than 1, call fwrite. */
1651 {
1652 /* If optimizing for size keep fputs. */
1653 if (optimize_function_for_size_p (cfun))
1654 return false;
1655 /* New argument list transforming fputs(string, stream) to
1656 fwrite(string, 1, len, stream). */
1657 if (!fn_fwrite)
1658 return false;
1659
355fe088 1660 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
fef5a0d9
RB
1661 size_one_node, len, arg1);
1662 replace_call_with_call_and_fold (gsi, repl);
1663 return true;
1664 }
1665 default:
1666 gcc_unreachable ();
1667 }
1668 return false;
1669}
1670
1671/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1672 DEST, SRC, LEN, and SIZE are the arguments to the call.
1673 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1674 code of the builtin. If MAXLEN is not NULL, it is maximum length
1675 passed as third argument. */
1676
1677static bool
1678gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
fef5a0d9 1679 tree dest, tree src, tree len, tree size,
fef5a0d9
RB
1680 enum built_in_function fcode)
1681{
355fe088 1682 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
1683 location_t loc = gimple_location (stmt);
1684 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
1685 tree fn;
1686
1687 /* If SRC and DEST are the same (and not volatile), return DEST
1688 (resp. DEST+LEN for __mempcpy_chk). */
1689 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1690 {
1691 if (fcode != BUILT_IN_MEMPCPY_CHK)
1692 {
1693 replace_call_with_value (gsi, dest);
1694 return true;
1695 }
1696 else
1697 {
74e3c262
RB
1698 gimple_seq stmts = NULL;
1699 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1700 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR, dest, len);
1701 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
fef5a0d9
RB
1702 replace_call_with_value (gsi, temp);
1703 return true;
1704 }
1705 }
1706
1707 if (! tree_fits_uhwi_p (size))
1708 return false;
1709
dcb7fae2 1710 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9
RB
1711 if (! integer_all_onesp (size))
1712 {
1713 if (! tree_fits_uhwi_p (len))
1714 {
1715 /* If LEN is not constant, try MAXLEN too.
1716 For MAXLEN only allow optimizing into non-_ocs function
1717 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1718 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1719 {
1720 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1721 {
1722 /* (void) __mempcpy_chk () can be optimized into
1723 (void) __memcpy_chk (). */
1724 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1725 if (!fn)
1726 return false;
1727
355fe088 1728 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
1729 replace_call_with_call_and_fold (gsi, repl);
1730 return true;
1731 }
1732 return false;
1733 }
1734 }
1735 else
1736 maxlen = len;
1737
1738 if (tree_int_cst_lt (size, maxlen))
1739 return false;
1740 }
1741
1742 fn = NULL_TREE;
1743 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1744 mem{cpy,pcpy,move,set} is available. */
1745 switch (fcode)
1746 {
1747 case BUILT_IN_MEMCPY_CHK:
1748 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1749 break;
1750 case BUILT_IN_MEMPCPY_CHK:
1751 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1752 break;
1753 case BUILT_IN_MEMMOVE_CHK:
1754 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1755 break;
1756 case BUILT_IN_MEMSET_CHK:
1757 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1758 break;
1759 default:
1760 break;
1761 }
1762
1763 if (!fn)
1764 return false;
1765
355fe088 1766 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1767 replace_call_with_call_and_fold (gsi, repl);
1768 return true;
1769}
1770
1771/* Fold a call to the __st[rp]cpy_chk builtin.
1772 DEST, SRC, and SIZE are the arguments to the call.
1773 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1774 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1775 strings passed as second argument. */
1776
1777static bool
1778gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
dcb7fae2 1779 tree dest,
fef5a0d9 1780 tree src, tree size,
fef5a0d9
RB
1781 enum built_in_function fcode)
1782{
355fe088 1783 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
1784 location_t loc = gimple_location (stmt);
1785 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
1786 tree len, fn;
1787
1788 /* If SRC and DEST are the same (and not volatile), return DEST. */
1789 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1790 {
1791 replace_call_with_value (gsi, dest);
1792 return true;
1793 }
1794
1795 if (! tree_fits_uhwi_p (size))
1796 return false;
1797
dcb7fae2 1798 tree maxlen = get_maxval_strlen (src, 1);
fef5a0d9
RB
1799 if (! integer_all_onesp (size))
1800 {
1801 len = c_strlen (src, 1);
1802 if (! len || ! tree_fits_uhwi_p (len))
1803 {
1804 /* If LEN is not constant, try MAXLEN too.
1805 For MAXLEN only allow optimizing into non-_ocs function
1806 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1807 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1808 {
1809 if (fcode == BUILT_IN_STPCPY_CHK)
1810 {
1811 if (! ignore)
1812 return false;
1813
1814 /* If return value of __stpcpy_chk is ignored,
1815 optimize into __strcpy_chk. */
1816 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1817 if (!fn)
1818 return false;
1819
355fe088 1820 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
fef5a0d9
RB
1821 replace_call_with_call_and_fold (gsi, repl);
1822 return true;
1823 }
1824
1825 if (! len || TREE_SIDE_EFFECTS (len))
1826 return false;
1827
1828 /* If c_strlen returned something, but not a constant,
1829 transform __strcpy_chk into __memcpy_chk. */
1830 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1831 if (!fn)
1832 return false;
1833
74e3c262
RB
1834 gimple_seq stmts = NULL;
1835 len = gimple_convert (&stmts, loc, size_type_node, len);
1836 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
1837 build_int_cst (size_type_node, 1));
1838 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 1839 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
1840 replace_call_with_call_and_fold (gsi, repl);
1841 return true;
1842 }
e256dfce 1843 }
fef5a0d9
RB
1844 else
1845 maxlen = len;
1846
1847 if (! tree_int_cst_lt (maxlen, size))
1848 return false;
e256dfce
RG
1849 }
1850
fef5a0d9
RB
1851 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1852 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1853 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1854 if (!fn)
1855 return false;
1856
355fe088 1857 gimple *repl = gimple_build_call (fn, 2, dest, src);
fef5a0d9
RB
1858 replace_call_with_call_and_fold (gsi, repl);
1859 return true;
1860}
1861
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin (BUILT_IN_STRNCPY_CHK or BUILT_IN_STPNCPY_CHK).  The maximum
   length of LEN is derived via get_maxval_strlen, and whether the return
   value is ignored is derived from the call's lhs.  Returns true if the
   statement at *GSI was simplified.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* No lhs means the (stpncpy) return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* Without a constant object size we cannot prove the copy is safe.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  /* SIZE of all-ones means "unknown/unlimited"; the check is a no-op
     and the call can always be converted.  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* The copy may overflow the destination object: keep the check.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  /* Drop the object-size argument: the copy was proven in bounds.  */
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1921
2625bb5d
RB
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Returns true if the statement at *GSI was simplified, false if no
   simplification could be made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Need a compile-time constant source length for the memcpy form.  */
  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Build LEN + 1 (to also copy the terminating NUL) in size_type_node.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* Transfer the virtual operands by hand since the memcpy call, not
     the original stmt, now carries the memory side effects.  */
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1985
fef5a0d9
RB
/* Fold a call to __{,v}snprintf_chk at *GSI.  Returns true if the call
   was converted to plain {,v}snprintf, false if a normal call should be
   emitted instead.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  The maximum value of the length argument is
   derived via get_maxval_strlen.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  /* Without a constant object size we cannot prove the write is safe.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* SIZE of all-ones means "unknown/unlimited"; the check is a no-op.  */
  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* The write may exceed the destination object: keep the check.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs: (dest, len, flag, size, fmt, ...) becomes
     (dest, len, fmt, ...), shifting the varargs down by two slots.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
cbdd87d4 2066
fef5a0d9
RB
/* Fold a call to __{,v}sprintf_chk at *GSI.  Returns true if the call
   was converted to plain {,v}sprintf, false if a normal call should be
   emitted instead.  FCODE is either BUILT_IN_SPRINTF_CHK or
   BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  /* Without a constant object size we cannot prove the write is safe.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  /* For vsprintf_chk with no %, extra va_list args are fine;
	     for sprintf_chk any extra argument would be unused, so only
	     trust the length when exactly the 4 fixed args are given.  */
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* SIZE of all-ones means "unknown/unlimited"; otherwise require the
     known output length to fit strictly below the object size (the NUL
     terminator accounts for the strict inequality).  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs: (dest, flag, size, fmt, ...) becomes
     (dest, fmt, ...), shifting the varargs down by two slots.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2162
35770bb2
RB
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Returns true if the statement at *GSI was simplified (replaced by a
   strcpy, possibly followed by an assignment of the known length to the
   lhs), false if no simplification was possible.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written; here that
	     is the compile-time length of the format string.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* The lhs needs the length of ORIG; without a known length we
	 cannot materialize the return value.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
2287
d7e78447
RB
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Returns true if the statement at *GSI was simplified (replaced by a
   strcpy proven not to overflow DESTSIZE, possibly followed by an
   assignment of the known length to the lhs), false otherwise.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The transform needs a compile-time destination size to prove the
     copy cannot be truncated.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters that would have
	     been written; here that is the format string's length.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      /* Need a constant length for ORIG to compare against DESTLEN.  */
      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
35770bb2 2426
edd7ae68
RB
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Returns true if the call was replaced by fputc/fputs, false if a
   normal call should be emitted instead.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation:
     fputc/fputs do not return the character count fprintf does.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* Extra arguments to a %-free format would be unused — punt
	 (except for the va_list variants, whose ARG is the va_list).  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2527
ad03a744
RB
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Returns true if the call was replaced by putchar/puts, false if a
   normal call should be emitted instead.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation:
     putchar/puts do not return the character count printf does.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* "%s" with a va_list cannot be simplified.  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  /* STR is the constant string being printed via "%s".  */
	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  /* The (size_t)(int) round-trip guards against lengths that
	     would overflow the int used by TREE_STRING_LENGTH.  */
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2689
edd7ae68 2690
fef5a0d9
RB
2691
2692/* Fold a call to __builtin_strlen with known length LEN. */
2693
2694static bool
dcb7fae2 2695gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 2696{
355fe088 2697 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2698 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
fef5a0d9
RB
2699 if (!len)
2700 return false;
2813904b 2701 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
fef5a0d9
RB
2702 replace_call_with_value (gsi, len);
2703 return true;
cbdd87d4
RG
2704}
2705
48126138
NS
2706/* Fold a call to __builtin_acc_on_device. */
2707
2708static bool
2709gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2710{
2711 /* Defer folding until we know which compiler we're in. */
2712 if (symtab->state != EXPANSION)
2713 return false;
2714
2715 unsigned val_host = GOMP_DEVICE_HOST;
2716 unsigned val_dev = GOMP_DEVICE_NONE;
2717
2718#ifdef ACCEL_COMPILER
2719 val_host = GOMP_DEVICE_NOT_HOST;
2720 val_dev = ACCEL_COMPILER_acc_device;
2721#endif
2722
2723 location_t loc = gimple_location (gsi_stmt (*gsi));
2724
2725 tree host_eq = make_ssa_name (boolean_type_node);
2726 gimple *host_ass = gimple_build_assign
2727 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2728 gimple_set_location (host_ass, loc);
2729 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2730
2731 tree dev_eq = make_ssa_name (boolean_type_node);
2732 gimple *dev_ass = gimple_build_assign
2733 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2734 gimple_set_location (dev_ass, loc);
2735 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2736
2737 tree result = make_ssa_name (boolean_type_node);
2738 gimple *result_ass = gimple_build_assign
2739 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2740 gimple_set_location (result_ass, loc);
2741 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2742
2743 replace_call_with_value (gsi, result);
2744
2745 return true;
2746}
cbdd87d4 2747
dcb7fae2
RB
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  Dispatches on the builtin's function code to the dedicated
   gimple_fold_builtin_* helpers, falling back to the generic tree-level
   folder (fold_call_stmt) when no helper applies.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BZERO:
      /* bzero (s, n) is memset (s, 0, n).  */
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_BCOPY:
      /* bcopy's src/dest order is swapped relative to memmove.  */
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
					    gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);
    case BUILT_IN_SNPRINTF:
      return gimple_fold_builtin_snprintf (gsi);
    case BUILT_IN_SPRINTF:
      return gimple_fold_builtin_sprintf (gsi);
    /* The *printf family helpers take at most one trailing argument;
       guard on the argument count and pass NULL_TREE when absent.  */
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* The _chk variants carry an extra flag argument at index 1,
	 so fmt/arg shift up by one.  */
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
2910
451e8dae
NS
2911/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
2912 function calls to constants, where possible. */
2913
2914static tree
2915fold_internal_goacc_dim (const gimple *call)
2916{
2917 int axis = get_oacc_ifn_dim_arg (call);
2918 int size = get_oacc_fn_dim_size (current_function_decl, axis);
2919 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
2920 tree result = NULL_TREE;
2921
2922 /* If the size is 1, or we only want the size and it is not dynamic,
2923 we know the answer. */
2924 if (size == 1 || (!is_pos && size))
2925 {
2926 tree type = TREE_TYPE (gimple_call_lhs (call));
2927 result = build_int_cst (type, size - is_pos);
2928 }
2929
2930 return result;
2931}
2932
1304953e
JJ
2933/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2934 doesn't fit into TYPE. The test for overflow should be regardless of
2935 -fwrapv, and even for unsigned types. */
2936
2937bool
2938arith_overflowed_p (enum tree_code code, const_tree type,
2939 const_tree arg0, const_tree arg1)
2940{
2941 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2942 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2943 widest2_int_cst;
2944 widest2_int warg0 = widest2_int_cst (arg0);
2945 widest2_int warg1 = widest2_int_cst (arg1);
2946 widest2_int wres;
2947 switch (code)
2948 {
2949 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2950 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2951 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2952 default: gcc_unreachable ();
2953 }
2954 signop sign = TYPE_SIGN (type);
2955 if (sign == UNSIGNED && wi::neg_p (wres))
2956 return true;
2957 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2958}
2959
cbdd87d4
RG
2960/* Attempt to fold a call statement referenced by the statement iterator GSI.
2961 The statement may be replaced by another statement, e.g., if the call
2962 simplifies to a constant value. Return true if any changes were made.
2963 It is assumed that the operands have been previously folded. */
2964
e021c122 2965static bool
ceeffab0 2966gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 2967{
538dd0b7 2968 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 2969 tree callee;
e021c122
RG
2970 bool changed = false;
2971 unsigned i;
cbdd87d4 2972
e021c122
RG
2973 /* Fold *& in call arguments. */
2974 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2975 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2976 {
2977 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2978 if (tmp)
2979 {
2980 gimple_call_set_arg (stmt, i, tmp);
2981 changed = true;
2982 }
2983 }
3b45a007
RG
2984
2985 /* Check for virtual calls that became direct calls. */
2986 callee = gimple_call_fn (stmt);
25583c4f 2987 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 2988 {
49c471e3
MJ
2989 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
2990 {
450ad0cd
JH
2991 if (dump_file && virtual_method_call_p (callee)
2992 && !possible_polymorphic_call_target_p
6f8091fc
JH
2993 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
2994 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
2995 {
2996 fprintf (dump_file,
a70e9985 2997 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
2998 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2999 fprintf (dump_file, " to ");
3000 print_generic_expr (dump_file, callee, TDF_SLIM);
3001 fprintf (dump_file, "\n");
3002 }
3003
49c471e3 3004 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
3005 changed = true;
3006 }
a70e9985 3007 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 3008 {
61dd6a2e
JH
3009 bool final;
3010 vec <cgraph_node *>targets
058d0a90 3011 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 3012 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 3013 {
a70e9985 3014 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
3015 if (dump_enabled_p ())
3016 {
807b7d62 3017 location_t loc = gimple_location_safe (stmt);
2b5f0895
XDL
3018 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3019 "folding virtual function call to %s\n",
3020 targets.length () == 1
3021 ? targets[0]->name ()
3022 : "__builtin_unreachable");
3023 }
61dd6a2e 3024 if (targets.length () == 1)
cf3e5a89
JJ
3025 {
3026 gimple_call_set_fndecl (stmt, targets[0]->decl);
3027 changed = true;
a70e9985
JJ
3028 /* If the call becomes noreturn, remove the lhs. */
3029 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
3030 {
3031 if (TREE_CODE (lhs) == SSA_NAME)
3032 {
b731b390 3033 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 3034 tree def = get_or_create_ssa_default_def (cfun, var);
355fe088 3035 gimple *new_stmt = gimple_build_assign (lhs, def);
a70e9985
JJ
3036 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3037 }
3038 gimple_call_set_lhs (stmt, NULL_TREE);
3039 }
0b986c6a 3040 maybe_remove_unused_call_args (cfun, stmt);
cf3e5a89 3041 }
a70e9985 3042 else
cf3e5a89
JJ
3043 {
3044 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
355fe088 3045 gimple *new_stmt = gimple_build_call (fndecl, 0);
cf3e5a89 3046 gimple_set_location (new_stmt, gimple_location (stmt));
a70e9985
JJ
3047 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3048 {
b731b390 3049 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 3050 tree def = get_or_create_ssa_default_def (cfun, var);
eb14a79f
ML
3051
3052 /* To satisfy condition for
3053 cgraph_update_edges_for_call_stmt_node,
3054 we need to preserve GIMPLE_CALL statement
3055 at position of GSI iterator. */
a70e9985 3056 update_call_from_tree (gsi, def);
eb14a79f 3057 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
a70e9985
JJ
3058 }
3059 else
42e52a51
RB
3060 {
3061 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3062 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3063 gsi_replace (gsi, new_stmt, false);
3064 }
cf3e5a89
JJ
3065 return true;
3066 }
e021c122 3067 }
49c471e3 3068 }
e021c122 3069 }
49c471e3 3070
f2d3d07e
RH
3071 /* Check for indirect calls that became direct calls, and then
3072 no longer require a static chain. */
3073 if (gimple_call_chain (stmt))
3074 {
3075 tree fn = gimple_call_fndecl (stmt);
3076 if (fn && !DECL_STATIC_CHAIN (fn))
3077 {
3078 gimple_call_set_chain (stmt, NULL);
3079 changed = true;
3080 }
3081 else
3082 {
3083 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3084 if (tmp)
3085 {
3086 gimple_call_set_chain (stmt, tmp);
3087 changed = true;
3088 }
3089 }
3090 }
3091
e021c122
RG
3092 if (inplace)
3093 return changed;
3094
3095 /* Check for builtins that CCP can handle using information not
3096 available in the generic fold routines. */
fef5a0d9
RB
3097 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3098 {
3099 if (gimple_fold_builtin (gsi))
3100 changed = true;
3101 }
3102 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 3103 {
ea679d55 3104 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 3105 }
368b454d 3106 else if (gimple_call_internal_p (stmt))
ed9c79e1 3107 {
368b454d
JJ
3108 enum tree_code subcode = ERROR_MARK;
3109 tree result = NULL_TREE;
1304953e
JJ
3110 bool cplx_result = false;
3111 tree overflow = NULL_TREE;
368b454d
JJ
3112 switch (gimple_call_internal_fn (stmt))
3113 {
3114 case IFN_BUILTIN_EXPECT:
3115 result = fold_builtin_expect (gimple_location (stmt),
3116 gimple_call_arg (stmt, 0),
3117 gimple_call_arg (stmt, 1),
3118 gimple_call_arg (stmt, 2));
3119 break;
0e82f089
MP
3120 case IFN_UBSAN_OBJECT_SIZE:
3121 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3122 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3123 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3124 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3125 gimple_call_arg (stmt, 2))))
3126 {
f6b4dc28 3127 gsi_replace (gsi, gimple_build_nop (), false);
0e82f089
MP
3128 unlink_stmt_vdef (stmt);
3129 release_defs (stmt);
3130 return true;
3131 }
3132 break;
451e8dae
NS
3133 case IFN_GOACC_DIM_SIZE:
3134 case IFN_GOACC_DIM_POS:
3135 result = fold_internal_goacc_dim (stmt);
3136 break;
368b454d
JJ
3137 case IFN_UBSAN_CHECK_ADD:
3138 subcode = PLUS_EXPR;
3139 break;
3140 case IFN_UBSAN_CHECK_SUB:
3141 subcode = MINUS_EXPR;
3142 break;
3143 case IFN_UBSAN_CHECK_MUL:
3144 subcode = MULT_EXPR;
3145 break;
1304953e
JJ
3146 case IFN_ADD_OVERFLOW:
3147 subcode = PLUS_EXPR;
3148 cplx_result = true;
3149 break;
3150 case IFN_SUB_OVERFLOW:
3151 subcode = MINUS_EXPR;
3152 cplx_result = true;
3153 break;
3154 case IFN_MUL_OVERFLOW:
3155 subcode = MULT_EXPR;
3156 cplx_result = true;
3157 break;
368b454d
JJ
3158 default:
3159 break;
3160 }
3161 if (subcode != ERROR_MARK)
3162 {
3163 tree arg0 = gimple_call_arg (stmt, 0);
3164 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
3165 tree type = TREE_TYPE (arg0);
3166 if (cplx_result)
3167 {
3168 tree lhs = gimple_call_lhs (stmt);
3169 if (lhs == NULL_TREE)
3170 type = NULL_TREE;
3171 else
3172 type = TREE_TYPE (TREE_TYPE (lhs));
3173 }
3174 if (type == NULL_TREE)
3175 ;
368b454d 3176 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
3177 else if (integer_zerop (arg1))
3178 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
3179 /* x = 0 + y; x = 0 * y; */
3180 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 3181 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
3182 /* x = y - y; */
3183 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 3184 result = integer_zero_node;
368b454d 3185 /* x = y * 1; x = 1 * y; */
1304953e
JJ
3186 else if (subcode == MULT_EXPR && integer_onep (arg1))
3187 result = arg0;
3188 else if (subcode == MULT_EXPR && integer_onep (arg0))
3189 result = arg1;
3190 else if (TREE_CODE (arg0) == INTEGER_CST
3191 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 3192 {
1304953e
JJ
3193 if (cplx_result)
3194 result = int_const_binop (subcode, fold_convert (type, arg0),
3195 fold_convert (type, arg1));
3196 else
3197 result = int_const_binop (subcode, arg0, arg1);
3198 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3199 {
3200 if (cplx_result)
3201 overflow = build_one_cst (type);
3202 else
3203 result = NULL_TREE;
3204 }
3205 }
3206 if (result)
3207 {
3208 if (result == integer_zero_node)
3209 result = build_zero_cst (type);
3210 else if (cplx_result && TREE_TYPE (result) != type)
3211 {
3212 if (TREE_CODE (result) == INTEGER_CST)
3213 {
3214 if (arith_overflowed_p (PLUS_EXPR, type, result,
3215 integer_zero_node))
3216 overflow = build_one_cst (type);
3217 }
3218 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3219 && TYPE_UNSIGNED (type))
3220 || (TYPE_PRECISION (type)
3221 < (TYPE_PRECISION (TREE_TYPE (result))
3222 + (TYPE_UNSIGNED (TREE_TYPE (result))
3223 && !TYPE_UNSIGNED (type)))))
3224 result = NULL_TREE;
3225 if (result)
3226 result = fold_convert (type, result);
3227 }
368b454d
JJ
3228 }
3229 }
1304953e 3230
ed9c79e1
JJ
3231 if (result)
3232 {
1304953e
JJ
3233 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3234 result = drop_tree_overflow (result);
3235 if (cplx_result)
3236 {
3237 if (overflow == NULL_TREE)
3238 overflow = build_zero_cst (TREE_TYPE (result));
3239 tree ctype = build_complex_type (TREE_TYPE (result));
3240 if (TREE_CODE (result) == INTEGER_CST
3241 && TREE_CODE (overflow) == INTEGER_CST)
3242 result = build_complex (ctype, result, overflow);
3243 else
3244 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3245 ctype, result, overflow);
3246 }
ed9c79e1
JJ
3247 if (!update_call_from_tree (gsi, result))
3248 gimplify_and_update_call_from_tree (gsi, result);
3249 changed = true;
3250 }
3251 }
3b45a007 3252
e021c122 3253 return changed;
cbdd87d4
RG
3254}
3255
e0ee10ed 3256
89a79e96
RB
3257/* Return true whether NAME has a use on STMT. */
3258
3259static bool
355fe088 3260has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
3261{
3262 imm_use_iterator iter;
3263 use_operand_p use_p;
3264 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3265 if (USE_STMT (use_p) == stmt)
3266 return true;
3267 return false;
3268}
3269
e0ee10ed
RB
3270/* Worker for fold_stmt_1 dispatch to pattern based folding with
3271 gimple_simplify.
3272
3273 Replaces *GSI with the simplification result in RCODE and OPS
3274 and the associated statements in *SEQ. Does the replacement
3275 according to INPLACE and returns true if the operation succeeded. */
3276
3277static bool
3278replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3279 code_helper rcode, tree *ops,
3280 gimple_seq *seq, bool inplace)
3281{
355fe088 3282 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed
RB
3283
3284 /* Play safe and do not allow abnormals to be mentioned in
89a79e96
RB
3285 newly created statements. See also maybe_push_res_to_seq.
3286 As an exception allow such uses if there was a use of the
3287 same SSA name on the old stmt. */
e0ee10ed 3288 if ((TREE_CODE (ops[0]) == SSA_NAME
89a79e96
RB
3289 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3290 && !has_use_on_stmt (ops[0], stmt))
e0ee10ed
RB
3291 || (ops[1]
3292 && TREE_CODE (ops[1]) == SSA_NAME
89a79e96
RB
3293 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3294 && !has_use_on_stmt (ops[1], stmt))
e0ee10ed
RB
3295 || (ops[2]
3296 && TREE_CODE (ops[2]) == SSA_NAME
89a79e96
RB
3297 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3298 && !has_use_on_stmt (ops[2], stmt)))
e0ee10ed
RB
3299 return false;
3300
fec40d06
RS
3301 /* Don't insert new statements when INPLACE is true, even if we could
3302 reuse STMT for the final statement. */
3303 if (inplace && !gimple_seq_empty_p (*seq))
3304 return false;
3305
538dd0b7 3306 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
e0ee10ed
RB
3307 {
3308 gcc_assert (rcode.is_tree_code ());
3309 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3310 /* GIMPLE_CONDs condition may not throw. */
3311 && (!flag_exceptions
3312 || !cfun->can_throw_non_call_exceptions
3313 || !operation_could_trap_p (rcode,
3314 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3315 false, NULL_TREE)))
538dd0b7 3316 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
e0ee10ed 3317 else if (rcode == SSA_NAME)
538dd0b7 3318 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
e0ee10ed
RB
3319 build_zero_cst (TREE_TYPE (ops[0])));
3320 else if (rcode == INTEGER_CST)
3321 {
3322 if (integer_zerop (ops[0]))
538dd0b7 3323 gimple_cond_make_false (cond_stmt);
e0ee10ed 3324 else
538dd0b7 3325 gimple_cond_make_true (cond_stmt);
e0ee10ed
RB
3326 }
3327 else if (!inplace)
3328 {
3329 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3330 ops, seq);
3331 if (!res)
3332 return false;
538dd0b7 3333 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
e0ee10ed
RB
3334 build_zero_cst (TREE_TYPE (res)));
3335 }
3336 else
3337 return false;
3338 if (dump_file && (dump_flags & TDF_DETAILS))
3339 {
3340 fprintf (dump_file, "gimple_simplified to ");
3341 if (!gimple_seq_empty_p (*seq))
3342 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3343 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3344 0, TDF_SLIM);
3345 }
3346 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3347 return true;
3348 }
3349 else if (is_gimple_assign (stmt)
3350 && rcode.is_tree_code ())
3351 {
3352 if (!inplace
f3582e54 3353 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
e0ee10ed
RB
3354 {
3355 maybe_build_generic_op (rcode,
3356 TREE_TYPE (gimple_assign_lhs (stmt)),
3357 &ops[0], ops[1], ops[2]);
00d66391 3358 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
e0ee10ed
RB
3359 if (dump_file && (dump_flags & TDF_DETAILS))
3360 {
3361 fprintf (dump_file, "gimple_simplified to ");
3362 if (!gimple_seq_empty_p (*seq))
3363 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3364 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3365 0, TDF_SLIM);
3366 }
3367 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3368 return true;
3369 }
3370 }
37d486ab
RB
3371 else if (rcode.is_fn_code ()
3372 && gimple_call_builtin_p (stmt, rcode))
3373 {
3374 unsigned i;
3375 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3376 {
3377 gcc_assert (ops[i] != NULL_TREE);
3378 gimple_call_set_arg (stmt, i, ops[i]);
3379 }
3380 if (i < 3)
3381 gcc_assert (ops[i] == NULL_TREE);
fec40d06
RS
3382 if (dump_file && (dump_flags & TDF_DETAILS))
3383 {
3384 fprintf (dump_file, "gimple_simplified to ");
3385 if (!gimple_seq_empty_p (*seq))
3386 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3387 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
3388 }
3389 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
37d486ab
RB
3390 return true;
3391 }
e0ee10ed
RB
3392 else if (!inplace)
3393 {
3394 if (gimple_has_lhs (stmt))
3395 {
3396 tree lhs = gimple_get_lhs (stmt);
de665bbd
RB
3397 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3398 ops, seq, lhs))
3399 return false;
e0ee10ed
RB
3400 if (dump_file && (dump_flags & TDF_DETAILS))
3401 {
3402 fprintf (dump_file, "gimple_simplified to ");
3403 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3404 }
3405 gsi_replace_with_seq_vops (gsi, *seq);
3406 return true;
3407 }
3408 else
3409 gcc_unreachable ();
3410 }
3411
3412 return false;
3413}
3414
040292e7
RB
3415/* Canonicalize MEM_REFs invariant address operand after propagation. */
3416
3417static bool
3418maybe_canonicalize_mem_ref_addr (tree *t)
3419{
3420 bool res = false;
3421
3422 if (TREE_CODE (*t) == ADDR_EXPR)
3423 t = &TREE_OPERAND (*t, 0);
3424
3425 while (handled_component_p (*t))
3426 t = &TREE_OPERAND (*t, 0);
3427
3428 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3429 of invariant addresses into a SSA name MEM_REF address. */
3430 if (TREE_CODE (*t) == MEM_REF
3431 || TREE_CODE (*t) == TARGET_MEM_REF)
3432 {
3433 tree addr = TREE_OPERAND (*t, 0);
3434 if (TREE_CODE (addr) == ADDR_EXPR
3435 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3436 || handled_component_p (TREE_OPERAND (addr, 0))))
3437 {
3438 tree base;
3439 HOST_WIDE_INT coffset;
3440 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3441 &coffset);
3442 if (!base)
3443 gcc_unreachable ();
3444
3445 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3446 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3447 TREE_OPERAND (*t, 1),
3448 size_int (coffset));
3449 res = true;
3450 }
3451 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3452 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3453 }
3454
3455 /* Canonicalize back MEM_REFs to plain reference trees if the object
3456 accessed is a decl that has the same access semantics as the MEM_REF. */
3457 if (TREE_CODE (*t) == MEM_REF
3458 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
f3dccf50
RB
3459 && integer_zerop (TREE_OPERAND (*t, 1))
3460 && MR_DEPENDENCE_CLIQUE (*t) == 0)
040292e7
RB
3461 {
3462 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3463 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3464 if (/* Same volatile qualification. */
3465 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3466 /* Same TBAA behavior with -fstrict-aliasing. */
3467 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3468 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3469 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3470 /* Same alignment. */
3471 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3472 /* We have to look out here to not drop a required conversion
3473 from the rhs to the lhs if *t appears on the lhs or vice-versa
3474 if it appears on the rhs. Thus require strict type
3475 compatibility. */
3476 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3477 {
3478 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3479 res = true;
3480 }
3481 }
3482
3483 /* Canonicalize TARGET_MEM_REF in particular with respect to
3484 the indexes becoming constant. */
3485 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3486 {
3487 tree tem = maybe_fold_tmr (*t);
3488 if (tem)
3489 {
3490 *t = tem;
3491 res = true;
3492 }
3493 }
3494
3495 return res;
3496}
3497
cbdd87d4
RG
3498/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3499 distinguishes both cases. */
3500
3501static bool
e0ee10ed 3502fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
cbdd87d4
RG
3503{
3504 bool changed = false;
355fe088 3505 gimple *stmt = gsi_stmt (*gsi);
cbdd87d4
RG
3506 unsigned i;
3507
040292e7
RB
3508 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3509 after propagation.
3510 ??? This shouldn't be done in generic folding but in the
3511 propagation helpers which also know whether an address was
89a79e96
RB
3512 propagated.
3513 Also canonicalize operand order. */
040292e7
RB
3514 switch (gimple_code (stmt))
3515 {
3516 case GIMPLE_ASSIGN:
3517 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3518 {
3519 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3520 if ((REFERENCE_CLASS_P (*rhs)
3521 || TREE_CODE (*rhs) == ADDR_EXPR)
3522 && maybe_canonicalize_mem_ref_addr (rhs))
3523 changed = true;
3524 tree *lhs = gimple_assign_lhs_ptr (stmt);
3525 if (REFERENCE_CLASS_P (*lhs)
3526 && maybe_canonicalize_mem_ref_addr (lhs))
3527 changed = true;
3528 }
89a79e96
RB
3529 else
3530 {
3531 /* Canonicalize operand order. */
3532 enum tree_code code = gimple_assign_rhs_code (stmt);
3533 if (TREE_CODE_CLASS (code) == tcc_comparison
3534 || commutative_tree_code (code)
3535 || commutative_ternary_tree_code (code))
3536 {
3537 tree rhs1 = gimple_assign_rhs1 (stmt);
3538 tree rhs2 = gimple_assign_rhs2 (stmt);
3539 if (tree_swap_operands_p (rhs1, rhs2, false))
3540 {
3541 gimple_assign_set_rhs1 (stmt, rhs2);
3542 gimple_assign_set_rhs2 (stmt, rhs1);
3543 if (TREE_CODE_CLASS (code) == tcc_comparison)
3544 gimple_assign_set_rhs_code (stmt,
3545 swap_tree_comparison (code));
3546 changed = true;
3547 }
3548 }
3549 }
040292e7
RB
3550 break;
3551 case GIMPLE_CALL:
3552 {
3553 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3554 {
3555 tree *arg = gimple_call_arg_ptr (stmt, i);
3556 if (REFERENCE_CLASS_P (*arg)
3557 && maybe_canonicalize_mem_ref_addr (arg))
3558 changed = true;
3559 }
3560 tree *lhs = gimple_call_lhs_ptr (stmt);
3561 if (*lhs
3562 && REFERENCE_CLASS_P (*lhs)
3563 && maybe_canonicalize_mem_ref_addr (lhs))
3564 changed = true;
3565 break;
3566 }
3567 case GIMPLE_ASM:
3568 {
538dd0b7
DM
3569 gasm *asm_stmt = as_a <gasm *> (stmt);
3570 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
040292e7 3571 {
538dd0b7 3572 tree link = gimple_asm_output_op (asm_stmt, i);
040292e7
RB
3573 tree op = TREE_VALUE (link);
3574 if (REFERENCE_CLASS_P (op)
3575 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3576 changed = true;
3577 }
538dd0b7 3578 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
040292e7 3579 {
538dd0b7 3580 tree link = gimple_asm_input_op (asm_stmt, i);
040292e7
RB
3581 tree op = TREE_VALUE (link);
3582 if ((REFERENCE_CLASS_P (op)
3583 || TREE_CODE (op) == ADDR_EXPR)
3584 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3585 changed = true;
3586 }
3587 }
3588 break;
3589 case GIMPLE_DEBUG:
3590 if (gimple_debug_bind_p (stmt))
3591 {
3592 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3593 if (*val
3594 && (REFERENCE_CLASS_P (*val)
3595 || TREE_CODE (*val) == ADDR_EXPR)
3596 && maybe_canonicalize_mem_ref_addr (val))
3597 changed = true;
3598 }
3599 break;
89a79e96
RB
3600 case GIMPLE_COND:
3601 {
3602 /* Canonicalize operand order. */
3603 tree lhs = gimple_cond_lhs (stmt);
3604 tree rhs = gimple_cond_rhs (stmt);
3605 if (tree_swap_operands_p (lhs, rhs, false))
3606 {
3607 gcond *gc = as_a <gcond *> (stmt);
3608 gimple_cond_set_lhs (gc, rhs);
3609 gimple_cond_set_rhs (gc, lhs);
3610 gimple_cond_set_code (gc,
3611 swap_tree_comparison (gimple_cond_code (gc)));
3612 changed = true;
3613 }
3614 }
040292e7
RB
3615 default:;
3616 }
3617
e0ee10ed
RB
3618 /* Dispatch to pattern-based folding. */
3619 if (!inplace
3620 || is_gimple_assign (stmt)
3621 || gimple_code (stmt) == GIMPLE_COND)
3622 {
3623 gimple_seq seq = NULL;
3624 code_helper rcode;
3625 tree ops[3] = {};
0ff093d8
RB
3626 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3627 valueize, valueize))
e0ee10ed
RB
3628 {
3629 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3630 changed = true;
3631 else
3632 gimple_seq_discard (seq);
3633 }
3634 }
3635
3636 stmt = gsi_stmt (*gsi);
3637
cbdd87d4
RG
3638 /* Fold the main computation performed by the statement. */
3639 switch (gimple_code (stmt))
3640 {
3641 case GIMPLE_ASSIGN:
3642 {
819ec64c
RB
3643 /* Try to canonicalize for boolean-typed X the comparisons
3644 X == 0, X == 1, X != 0, and X != 1. */
3645 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
3646 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5fbcc0ed 3647 {
819ec64c
RB
3648 tree lhs = gimple_assign_lhs (stmt);
3649 tree op1 = gimple_assign_rhs1 (stmt);
3650 tree op2 = gimple_assign_rhs2 (stmt);
3651 tree type = TREE_TYPE (op1);
3652
3653 /* Check whether the comparison operands are of the same boolean
3654 type as the result type is.
3655 Check that second operand is an integer-constant with value
3656 one or zero. */
3657 if (TREE_CODE (op2) == INTEGER_CST
3658 && (integer_zerop (op2) || integer_onep (op2))
3659 && useless_type_conversion_p (TREE_TYPE (lhs), type))
3660 {
3661 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
3662 bool is_logical_not = false;
3663
3664 /* X == 0 and X != 1 is a logical-not.of X
3665 X == 1 and X != 0 is X */
3666 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
3667 || (cmp_code == NE_EXPR && integer_onep (op2)))
3668 is_logical_not = true;
3669
3670 if (is_logical_not == false)
3671 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
3672 /* Only for one-bit precision typed X the transformation
3673 !X -> ~X is valied. */
3674 else if (TYPE_PRECISION (type) == 1)
3675 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
3676 /* Otherwise we use !X -> X ^ 1. */
3677 else
3678 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
3679 build_int_cst (type, 1));
3680 changed = true;
3681 break;
3682 }
5fbcc0ed 3683 }
819ec64c
RB
3684
3685 unsigned old_num_ops = gimple_num_ops (stmt);
3686 tree lhs = gimple_assign_lhs (stmt);
3687 tree new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
3688 if (new_rhs
3689 && !useless_type_conversion_p (TREE_TYPE (lhs),
3690 TREE_TYPE (new_rhs)))
3691 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3692 if (new_rhs
3693 && (!inplace
3694 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3695 {
3696 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3697 changed = true;
3698 }
3699 break;
3700 }
3701
cbdd87d4 3702 case GIMPLE_CALL:
ceeffab0 3703 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
3704 break;
3705
3706 case GIMPLE_ASM:
3707 /* Fold *& in asm operands. */
38384150 3708 {
538dd0b7 3709 gasm *asm_stmt = as_a <gasm *> (stmt);
38384150
JJ
3710 size_t noutputs;
3711 const char **oconstraints;
3712 const char *constraint;
3713 bool allows_mem, allows_reg;
3714
538dd0b7 3715 noutputs = gimple_asm_noutputs (asm_stmt);
38384150
JJ
3716 oconstraints = XALLOCAVEC (const char *, noutputs);
3717
538dd0b7 3718 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
38384150 3719 {
538dd0b7 3720 tree link = gimple_asm_output_op (asm_stmt, i);
38384150
JJ
3721 tree op = TREE_VALUE (link);
3722 oconstraints[i]
3723 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3724 if (REFERENCE_CLASS_P (op)
3725 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3726 {
3727 TREE_VALUE (link) = op;
3728 changed = true;
3729 }
3730 }
538dd0b7 3731 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
38384150 3732 {
538dd0b7 3733 tree link = gimple_asm_input_op (asm_stmt, i);
38384150
JJ
3734 tree op = TREE_VALUE (link);
3735 constraint
3736 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3737 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3738 oconstraints, &allows_mem, &allows_reg);
3739 if (REFERENCE_CLASS_P (op)
3740 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3741 != NULL_TREE)
3742 {
3743 TREE_VALUE (link) = op;
3744 changed = true;
3745 }
3746 }
3747 }
cbdd87d4
RG
3748 break;
3749
bd422c4a
RG
3750 case GIMPLE_DEBUG:
3751 if (gimple_debug_bind_p (stmt))
3752 {
3753 tree val = gimple_debug_bind_get_value (stmt);
3754 if (val
3755 && REFERENCE_CLASS_P (val))
3756 {
3757 tree tem = maybe_fold_reference (val, false);
3758 if (tem)
3759 {
3760 gimple_debug_bind_set_value (stmt, tem);
3761 changed = true;
3762 }
3763 }
3e888a5e
RG
3764 else if (val
3765 && TREE_CODE (val) == ADDR_EXPR)
3766 {
3767 tree ref = TREE_OPERAND (val, 0);
3768 tree tem = maybe_fold_reference (ref, false);
3769 if (tem)
3770 {
3771 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3772 gimple_debug_bind_set_value (stmt, tem);
3773 changed = true;
3774 }
3775 }
bd422c4a
RG
3776 }
3777 break;
3778
cbdd87d4
RG
3779 default:;
3780 }
3781
3782 stmt = gsi_stmt (*gsi);
3783
37376165
RB
3784 /* Fold *& on the lhs. */
3785 if (gimple_has_lhs (stmt))
cbdd87d4
RG
3786 {
3787 tree lhs = gimple_get_lhs (stmt);
3788 if (lhs && REFERENCE_CLASS_P (lhs))
3789 {
3790 tree new_lhs = maybe_fold_reference (lhs, true);
3791 if (new_lhs)
3792 {
3793 gimple_set_lhs (stmt, new_lhs);
3794 changed = true;
3795 }
3796 }
3797 }
3798
3799 return changed;
3800}
3801
e0ee10ed
RB
3802/* Valueziation callback that ends up not following SSA edges. */
3803
3804tree
3805no_follow_ssa_edges (tree)
3806{
3807 return NULL_TREE;
3808}
3809
45cc9f96
RB
3810/* Valueization callback that ends up following single-use SSA edges only. */
3811
3812tree
3813follow_single_use_edges (tree val)
3814{
3815 if (TREE_CODE (val) == SSA_NAME
3816 && !has_single_use (val))
3817 return NULL_TREE;
3818 return val;
3819}
3820
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Fold without looking through SSA definitions of operands.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
3833
/* Overload of the above that folds the statement pointed to by GSI
   using VALUEIZE to look through SSA name definitions of operands.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
3839
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* The 'true' second argument requests in-place folding only.  */
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  /* In-place folding must not have replaced the statement.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
3856
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.
   Constants map to boolean_{true,false}_node (swapped when INVERT),
   an SSA name becomes a comparison against zero, and a comparison
   class expression is rebuilt (inverted via invert_tree_comparison
   when INVERT).  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	/* !name canonicalizes to name == 0.  */
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      /* Already-boolean expressions pass through unchanged.  */
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	/* name canonicalizes to name != 0.  */
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
3903
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	/* name is equivalent to (name != 0) and to (name == 1).  */
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) / (name == 1) means the comparison itself ...  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* ... while (name == 0) / (name != 1) means its inversion.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
3961
3962/* Check to see if two boolean expressions OP1 and OP2 are logically
3963 equivalent. */
3964
3965static bool
3966same_bool_result_p (const_tree op1, const_tree op2)
3967{
3968 /* Simple cases first. */
3969 if (operand_equal_p (op1, op2, 0))
3970 return true;
3971
3972 /* Check the cases where at least one of the operands is a comparison.
3973 These are a bit smarter than operand_equal_p in that they apply some
3974 identifies on SSA_NAMEs. */
98209db3 3975 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
3976 && same_bool_comparison_p (op1, TREE_CODE (op2),
3977 TREE_OPERAND (op2, 0),
3978 TREE_OPERAND (op2, 1)))
3979 return true;
98209db3 3980 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
3981 && same_bool_comparison_p (op2, TREE_CODE (op1),
3982 TREE_OPERAND (op1, 0),
3983 TREE_OPERAND (op1, 1)))
3984 return true;
3985
3986 /* Default case. */
3987 return false;
3988}
3989
/* Forward declarations for some mutually recursive functions.
   The and_*/or_* families call into each other via DeMorgan rewrites
   and reassociation, so they cannot be topologically ordered.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
4010
4011/* Helper function for and_comparisons_1: try to simplify the AND of the
4012 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4013 If INVERT is true, invert the value of the VAR before doing the AND.
4014 Return NULL_EXPR if we can't simplify this to a single expression. */
4015
4016static tree
4017and_var_with_comparison (tree var, bool invert,
4018 enum tree_code code2, tree op2a, tree op2b)
4019{
4020 tree t;
355fe088 4021 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
4022
4023 /* We can only deal with variables whose definitions are assignments. */
4024 if (!is_gimple_assign (stmt))
4025 return NULL_TREE;
4026
4027 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4028 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4029 Then we only have to consider the simpler non-inverted cases. */
4030 if (invert)
4031 t = or_var_with_comparison_1 (stmt,
4032 invert_tree_comparison (code2, false),
4033 op2a, op2b);
4034 else
4035 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4036 return canonicalize_bool (t, invert);
4037}
4038
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) / (op2a == 1) tests op2a for truth ...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ... while (op2a == 0) / (op2a != 1) tests it for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b))) */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t) */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial) */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4201
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* If the second comparison implies the first, it alone is the
	     result; otherwise no merge is known.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	/* x <= c AND x >= c collapses to x == c.  */
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* (NAME == 0) and (NAME != 1) test the inverted value.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4455
4456/* Try to simplify the AND of two comparisons, specified by
4457 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4458 If this can be simplified to a single expression (without requiring
4459 introducing more SSA variables to hold intermediate values),
4460 return the resulting tree. Otherwise return NULL_TREE.
4461 If the result expression is non-null, it has boolean type. */
4462
4463tree
4464maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4465 enum tree_code code2, tree op2a, tree op2b)
4466{
4467 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4468 if (t)
4469 return t;
4470 else
4471 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4472}
4473
4474/* Helper function for or_comparisons_1: try to simplify the OR of the
4475 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4476 If INVERT is true, invert the value of VAR before doing the OR.
4477 Return NULL_EXPR if we can't simplify this to a single expression. */
4478
4479static tree
4480or_var_with_comparison (tree var, bool invert,
4481 enum tree_code code2, tree op2a, tree op2b)
4482{
4483 tree t;
355fe088 4484 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
4485
4486 /* We can only deal with variables whose definitions are assignments. */
4487 if (!is_gimple_assign (stmt))
4488 return NULL_TREE;
4489
4490 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4491 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4492 Then we only have to consider the simpler non-inverted cases. */
4493 if (invert)
4494 t = and_var_with_comparison_1 (stmt,
4495 invert_tree_comparison (code2, false),
4496 op2a, op2b);
4497 else
4498 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4499 return canonicalize_bool (t, invert);
4500}
4501
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) / (op2a == 1) tests op2a for truth ...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ... while (op2a == 0) / (op2a != 1) tests it for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b))) */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial) */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial) */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4665
4666/* Try to simplify the OR of two comparisons defined by
4667 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4668 If this can be done without constructing an intermediate value,
4669 return the resulting tree; otherwise NULL_TREE is returned.
4670 This function is deliberately asymmetric as it recurses on SSA_DEFs
4671 in the first comparison but not the second. */
4672
4673static tree
4674or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4675 enum tree_code code2, tree op2a, tree op2b)
4676{
ae22ac3c 4677 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 4678
e89065a1
SL
4679 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4680 if (operand_equal_p (op1a, op2a, 0)
4681 && operand_equal_p (op1b, op2b, 0))
4682 {
eb9820c0 4683 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4684 tree t = combine_comparisons (UNKNOWN_LOCATION,
4685 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 4686 truth_type, op1a, op1b);
e89065a1
SL
4687 if (t)
4688 return t;
4689 }
4690
4691 /* Likewise the swapped case of the above. */
4692 if (operand_equal_p (op1a, op2b, 0)
4693 && operand_equal_p (op1b, op2a, 0))
4694 {
eb9820c0 4695 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4696 tree t = combine_comparisons (UNKNOWN_LOCATION,
4697 TRUTH_ORIF_EXPR, code1,
4698 swap_tree_comparison (code2),
31ed6226 4699 truth_type, op1a, op1b);
e89065a1
SL
4700 if (t)
4701 return t;
4702 }
4703
4704 /* If both comparisons are of the same value against constants, we might
4705 be able to merge them. */
4706 if (operand_equal_p (op1a, op2a, 0)
4707 && TREE_CODE (op1b) == INTEGER_CST
4708 && TREE_CODE (op2b) == INTEGER_CST)
4709 {
4710 int cmp = tree_int_cst_compare (op1b, op2b);
4711
4712 /* If we have (op1a != op1b), we should either be able to
4713 return that or TRUE, depending on whether the constant op1b
4714 also satisfies the other comparison against op2b. */
4715 if (code1 == NE_EXPR)
4716 {
4717 bool done = true;
4718 bool val;
4719 switch (code2)
4720 {
4721 case EQ_EXPR: val = (cmp == 0); break;
4722 case NE_EXPR: val = (cmp != 0); break;
4723 case LT_EXPR: val = (cmp < 0); break;
4724 case GT_EXPR: val = (cmp > 0); break;
4725 case LE_EXPR: val = (cmp <= 0); break;
4726 case GE_EXPR: val = (cmp >= 0); break;
4727 default: done = false;
4728 }
4729 if (done)
4730 {
4731 if (val)
4732 return boolean_true_node;
4733 else
4734 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4735 }
4736 }
4737 /* Likewise if the second comparison is a != comparison. */
4738 else if (code2 == NE_EXPR)
4739 {
4740 bool done = true;
4741 bool val;
4742 switch (code1)
4743 {
4744 case EQ_EXPR: val = (cmp == 0); break;
4745 case NE_EXPR: val = (cmp != 0); break;
4746 case LT_EXPR: val = (cmp > 0); break;
4747 case GT_EXPR: val = (cmp < 0); break;
4748 case LE_EXPR: val = (cmp >= 0); break;
4749 case GE_EXPR: val = (cmp <= 0); break;
4750 default: done = false;
4751 }
4752 if (done)
4753 {
4754 if (val)
4755 return boolean_true_node;
4756 else
4757 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4758 }
4759 }
4760
4761 /* See if an equality test is redundant with the other comparison. */
4762 else if (code1 == EQ_EXPR)
4763 {
4764 bool val;
4765 switch (code2)
4766 {
4767 case EQ_EXPR: val = (cmp == 0); break;
4768 case NE_EXPR: val = (cmp != 0); break;
4769 case LT_EXPR: val = (cmp < 0); break;
4770 case GT_EXPR: val = (cmp > 0); break;
4771 case LE_EXPR: val = (cmp <= 0); break;
4772 case GE_EXPR: val = (cmp >= 0); break;
4773 default:
4774 val = false;
4775 }
4776 if (val)
4777 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4778 }
4779 else if (code2 == EQ_EXPR)
4780 {
4781 bool val;
4782 switch (code1)
4783 {
4784 case EQ_EXPR: val = (cmp == 0); break;
4785 case NE_EXPR: val = (cmp != 0); break;
4786 case LT_EXPR: val = (cmp > 0); break;
4787 case GT_EXPR: val = (cmp < 0); break;
4788 case LE_EXPR: val = (cmp >= 0); break;
4789 case GE_EXPR: val = (cmp <= 0); break;
4790 default:
4791 val = false;
4792 }
4793 if (val)
4794 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4795 }
4796
4797 /* Chose the less restrictive of two < or <= comparisons. */
4798 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4799 && (code2 == LT_EXPR || code2 == LE_EXPR))
4800 {
4801 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4802 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4803 else
4804 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4805 }
4806
4807 /* Likewise chose the less restrictive of two > or >= comparisons. */
4808 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4809 && (code2 == GT_EXPR || code2 == GE_EXPR))
4810 {
4811 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4812 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4813 else
4814 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4815 }
4816
4817 /* Check for singleton ranges. */
4818 else if (cmp == 0
4819 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4820 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4821 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4822
4823 /* Check for less/greater pairs that don't restrict the range at all. */
4824 else if (cmp >= 0
4825 && (code1 == LT_EXPR || code1 == LE_EXPR)
4826 && (code2 == GT_EXPR || code2 == GE_EXPR))
4827 return boolean_true_node;
4828 else if (cmp <= 0
4829 && (code1 == GT_EXPR || code1 == GE_EXPR)
4830 && (code2 == LT_EXPR || code2 == LE_EXPR))
4831 return boolean_true_node;
4832 }
4833
4834 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4835 NAME's definition is a truth value. See if there are any simplifications
4836 that can be done against the NAME's definition. */
4837 if (TREE_CODE (op1a) == SSA_NAME
4838 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4839 && (integer_zerop (op1b) || integer_onep (op1b)))
4840 {
4841 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4842 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 4843 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
4844 switch (gimple_code (stmt))
4845 {
4846 case GIMPLE_ASSIGN:
4847 /* Try to simplify by copy-propagating the definition. */
4848 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4849
4850 case GIMPLE_PHI:
4851 /* If every argument to the PHI produces the same result when
4852 ORed with the second comparison, we win.
4853 Do not do this unless the type is bool since we need a bool
4854 result here anyway. */
4855 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4856 {
4857 tree result = NULL_TREE;
4858 unsigned i;
4859 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4860 {
4861 tree arg = gimple_phi_arg_def (stmt, i);
4862
4863 /* If this PHI has itself as an argument, ignore it.
4864 If all the other args produce the same result,
4865 we're still OK. */
4866 if (arg == gimple_phi_result (stmt))
4867 continue;
4868 else if (TREE_CODE (arg) == INTEGER_CST)
4869 {
4870 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4871 {
4872 if (!result)
4873 result = boolean_true_node;
4874 else if (!integer_onep (result))
4875 return NULL_TREE;
4876 }
4877 else if (!result)
4878 result = fold_build2 (code2, boolean_type_node,
4879 op2a, op2b);
4880 else if (!same_bool_comparison_p (result,
4881 code2, op2a, op2b))
4882 return NULL_TREE;
4883 }
0e8b84ec
JJ
4884 else if (TREE_CODE (arg) == SSA_NAME
4885 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 4886 {
6c66f733 4887 tree temp;
355fe088 4888 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
4889 /* In simple cases we can look through PHI nodes,
4890 but we have to be careful with loops.
4891 See PR49073. */
4892 if (! dom_info_available_p (CDI_DOMINATORS)
4893 || gimple_bb (def_stmt) == gimple_bb (stmt)
4894 || dominated_by_p (CDI_DOMINATORS,
4895 gimple_bb (def_stmt),
4896 gimple_bb (stmt)))
4897 return NULL_TREE;
4898 temp = or_var_with_comparison (arg, invert, code2,
4899 op2a, op2b);
e89065a1
SL
4900 if (!temp)
4901 return NULL_TREE;
4902 else if (!result)
4903 result = temp;
4904 else if (!same_bool_result_p (result, temp))
4905 return NULL_TREE;
4906 }
4907 else
4908 return NULL_TREE;
4909 }
4910 return result;
4911 }
4912
4913 default:
4914 break;
4915 }
4916 }
4917 return NULL_TREE;
4918}
4919
4920/* Try to simplify the OR of two comparisons, specified by
4921 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4922 If this can be simplified to a single expression (without requiring
4923 introducing more SSA variables to hold intermediate values),
4924 return the resulting tree. Otherwise return NULL_TREE.
4925 If the result expression is non-null, it has boolean type. */
4926
4927tree
4928maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4929 enum tree_code code2, tree op2a, tree op2b)
4930{
4931 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4932 if (t)
4933 return t;
4934 else
4935 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4936}
cfef45c8
RG
4937
4938
4939/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4940
4941 Either NULL_TREE, a simplified but non-constant or a constant
4942 is returned.
4943
4944 ??? This should go into a gimple-fold-inline.h file to be eventually
4945 privatized with the single valueize function used in the various TUs
4946 to avoid the indirect function call overhead. */
4947
4948tree
355fe088 4949gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
d2a85801 4950 tree (*gvalueize) (tree))
cfef45c8 4951{
45cc9f96
RB
4952 code_helper rcode;
4953 tree ops[3] = {};
4954 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4955 edges if there are intermediate VARYING defs. For this reason
4956 do not follow SSA edges here even though SCCVN can technically
4957 just deal fine with that. */
34050b6b 4958 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
45cc9f96 4959 {
34050b6b 4960 tree res = NULL_TREE;
c0f62740 4961 if (gimple_simplified_result_is_gimple_val (rcode, ops))
34050b6b
RB
4962 res = ops[0];
4963 else if (mprts_hook)
4964 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
4965 if (res)
45cc9f96 4966 {
34050b6b
RB
4967 if (dump_file && dump_flags & TDF_DETAILS)
4968 {
4969 fprintf (dump_file, "Match-and-simplified ");
4970 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4971 fprintf (dump_file, " to ");
4972 print_generic_expr (dump_file, res, 0);
4973 fprintf (dump_file, "\n");
4974 }
4975 return res;
45cc9f96 4976 }
45cc9f96
RB
4977 }
4978
cfef45c8
RG
4979 location_t loc = gimple_location (stmt);
4980 switch (gimple_code (stmt))
4981 {
4982 case GIMPLE_ASSIGN:
4983 {
4984 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4985
4986 switch (get_gimple_rhs_class (subcode))
4987 {
4988 case GIMPLE_SINGLE_RHS:
4989 {
4990 tree rhs = gimple_assign_rhs1 (stmt);
4991 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4992
4993 if (TREE_CODE (rhs) == SSA_NAME)
4994 {
4995 /* If the RHS is an SSA_NAME, return its known constant value,
4996 if any. */
4997 return (*valueize) (rhs);
4998 }
4999 /* Handle propagating invariant addresses into address
5000 operations. */
5001 else if (TREE_CODE (rhs) == ADDR_EXPR
5002 && !is_gimple_min_invariant (rhs))
5003 {
d25c4172 5004 HOST_WIDE_INT offset = 0;
cfef45c8
RG
5005 tree base;
5006 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5007 &offset,
5008 valueize);
5009 if (base
5010 && (CONSTANT_CLASS_P (base)
5011 || decl_address_invariant_p (base)))
5012 return build_invariant_address (TREE_TYPE (rhs),
5013 base, offset);
5014 }
5015 else if (TREE_CODE (rhs) == CONSTRUCTOR
5016 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5017 && (CONSTRUCTOR_NELTS (rhs)
5018 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5019 {
5020 unsigned i;
d2a12ae7 5021 tree val, *vec;
cfef45c8 5022
d2a12ae7
RG
5023 vec = XALLOCAVEC (tree,
5024 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
cfef45c8
RG
5025 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5026 {
5027 val = (*valueize) (val);
5028 if (TREE_CODE (val) == INTEGER_CST
5029 || TREE_CODE (val) == REAL_CST
5030 || TREE_CODE (val) == FIXED_CST)
d2a12ae7 5031 vec[i] = val;
cfef45c8
RG
5032 else
5033 return NULL_TREE;
5034 }
5035
d2a12ae7 5036 return build_vector (TREE_TYPE (rhs), vec);
cfef45c8 5037 }
bdf37f7a
JH
5038 if (subcode == OBJ_TYPE_REF)
5039 {
5040 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5041 /* If callee is constant, we can fold away the wrapper. */
5042 if (is_gimple_min_invariant (val))
5043 return val;
5044 }
cfef45c8
RG
5045
5046 if (kind == tcc_reference)
5047 {
5048 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5049 || TREE_CODE (rhs) == REALPART_EXPR
5050 || TREE_CODE (rhs) == IMAGPART_EXPR)
5051 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5052 {
5053 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5054 return fold_unary_loc (EXPR_LOCATION (rhs),
5055 TREE_CODE (rhs),
5056 TREE_TYPE (rhs), val);
5057 }
5058 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5059 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5060 {
5061 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5062 return fold_ternary_loc (EXPR_LOCATION (rhs),
5063 TREE_CODE (rhs),
5064 TREE_TYPE (rhs), val,
5065 TREE_OPERAND (rhs, 1),
5066 TREE_OPERAND (rhs, 2));
5067 }
5068 else if (TREE_CODE (rhs) == MEM_REF
5069 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5070 {
5071 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5072 if (TREE_CODE (val) == ADDR_EXPR
5073 && is_gimple_min_invariant (val))
5074 {
5075 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5076 unshare_expr (val),
5077 TREE_OPERAND (rhs, 1));
5078 if (tem)
5079 rhs = tem;
5080 }
5081 }
5082 return fold_const_aggregate_ref_1 (rhs, valueize);
5083 }
5084 else if (kind == tcc_declaration)
5085 return get_symbol_constant_value (rhs);
5086 return rhs;
5087 }
5088
5089 case GIMPLE_UNARY_RHS:
f3582e54 5090 return NULL_TREE;
cfef45c8
RG
5091
5092 case GIMPLE_BINARY_RHS:
4b1b9e64
RB
5093 /* Translate &x + CST into an invariant form suitable for
5094 further propagation. */
5095 if (subcode == POINTER_PLUS_EXPR)
5096 {
4b1b9e64
RB
5097 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5098 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4b1b9e64
RB
5099 if (TREE_CODE (op0) == ADDR_EXPR
5100 && TREE_CODE (op1) == INTEGER_CST)
5101 {
5102 tree off = fold_convert (ptr_type_node, op1);
5103 return build_fold_addr_expr_loc
5104 (loc,
5105 fold_build2 (MEM_REF,
5106 TREE_TYPE (TREE_TYPE (op0)),
5107 unshare_expr (op0), off));
5108 }
5109 }
59c20dc7
RB
5110 /* Canonicalize bool != 0 and bool == 0 appearing after
5111 valueization. While gimple_simplify handles this
5112 it can get confused by the ~X == 1 -> X == 0 transform
5113 which we cant reduce to a SSA name or a constant
5114 (and we have no way to tell gimple_simplify to not
5115 consider those transforms in the first place). */
5116 else if (subcode == EQ_EXPR
5117 || subcode == NE_EXPR)
5118 {
5119 tree lhs = gimple_assign_lhs (stmt);
5120 tree op0 = gimple_assign_rhs1 (stmt);
5121 if (useless_type_conversion_p (TREE_TYPE (lhs),
5122 TREE_TYPE (op0)))
5123 {
5124 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5125 op0 = (*valueize) (op0);
8861704d
RB
5126 if (TREE_CODE (op0) == INTEGER_CST)
5127 std::swap (op0, op1);
5128 if (TREE_CODE (op1) == INTEGER_CST
5129 && ((subcode == NE_EXPR && integer_zerop (op1))
5130 || (subcode == EQ_EXPR && integer_onep (op1))))
5131 return op0;
59c20dc7
RB
5132 }
5133 }
4b1b9e64 5134 return NULL_TREE;
cfef45c8
RG
5135
5136 case GIMPLE_TERNARY_RHS:
5137 {
5138 /* Handle ternary operators that can appear in GIMPLE form. */
5139 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5140 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5141 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
cfef45c8
RG
5142 return fold_ternary_loc (loc, subcode,
5143 gimple_expr_type (stmt), op0, op1, op2);
5144 }
5145
5146 default:
5147 gcc_unreachable ();
5148 }
5149 }
5150
5151 case GIMPLE_CALL:
5152 {
25583c4f 5153 tree fn;
538dd0b7 5154 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
5155
5156 if (gimple_call_internal_p (stmt))
31e071ae
MP
5157 {
5158 enum tree_code subcode = ERROR_MARK;
5159 switch (gimple_call_internal_fn (stmt))
5160 {
5161 case IFN_UBSAN_CHECK_ADD:
5162 subcode = PLUS_EXPR;
5163 break;
5164 case IFN_UBSAN_CHECK_SUB:
5165 subcode = MINUS_EXPR;
5166 break;
5167 case IFN_UBSAN_CHECK_MUL:
5168 subcode = MULT_EXPR;
5169 break;
5170 default:
5171 return NULL_TREE;
5172 }
368b454d
JJ
5173 tree arg0 = gimple_call_arg (stmt, 0);
5174 tree arg1 = gimple_call_arg (stmt, 1);
5175 tree op0 = (*valueize) (arg0);
5176 tree op1 = (*valueize) (arg1);
31e071ae
MP
5177
5178 if (TREE_CODE (op0) != INTEGER_CST
5179 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
5180 {
5181 switch (subcode)
5182 {
5183 case MULT_EXPR:
5184 /* x * 0 = 0 * x = 0 without overflow. */
5185 if (integer_zerop (op0) || integer_zerop (op1))
5186 return build_zero_cst (TREE_TYPE (arg0));
5187 break;
5188 case MINUS_EXPR:
5189 /* y - y = 0 without overflow. */
5190 if (operand_equal_p (op0, op1, 0))
5191 return build_zero_cst (TREE_TYPE (arg0));
5192 break;
5193 default:
5194 break;
5195 }
5196 }
5197 tree res
5198 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
5199 if (res
5200 && TREE_CODE (res) == INTEGER_CST
5201 && !TREE_OVERFLOW (res))
5202 return res;
5203 return NULL_TREE;
5204 }
25583c4f
RS
5205
5206 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8
RG
5207 if (TREE_CODE (fn) == ADDR_EXPR
5208 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5c944c6c
RB
5209 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5210 && gimple_builtin_call_types_compatible_p (stmt,
5211 TREE_OPERAND (fn, 0)))
cfef45c8
RG
5212 {
5213 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 5214 tree retval;
cfef45c8
RG
5215 unsigned i;
5216 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5217 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 5218 retval = fold_builtin_call_array (loc,
538dd0b7 5219 gimple_call_return_type (call_stmt),
cfef45c8 5220 fn, gimple_call_num_args (stmt), args);
cfef45c8 5221 if (retval)
5c944c6c
RB
5222 {
5223 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5224 STRIP_NOPS (retval);
538dd0b7
DM
5225 retval = fold_convert (gimple_call_return_type (call_stmt),
5226 retval);
5c944c6c 5227 }
cfef45c8
RG
5228 return retval;
5229 }
5230 return NULL_TREE;
5231 }
5232
5233 default:
5234 return NULL_TREE;
5235 }
5236}
5237
5238/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5239 Returns NULL_TREE if folding to a constant is not possible, otherwise
5240 returns a constant according to is_gimple_min_invariant. */
5241
5242tree
355fe088 5243gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
5244{
5245 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5246 if (res && is_gimple_min_invariant (res))
5247 return res;
5248 return NULL_TREE;
5249}
5250
5251
5252/* The following set of functions are supposed to fold references using
5253 their constant initializers. */
5254
cfef45c8
RG
5255/* See if we can find constructor defining value of BASE.
5256 When we know the consructor with constant offset (such as
5257 base is array[40] and we do know constructor of array), then
5258 BIT_OFFSET is adjusted accordingly.
5259
5260 As a special case, return error_mark_node when constructor
5261 is not explicitly available, but it is known to be zero
5262 such as 'static const int a;'. */
5263static tree
5264get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5265 tree (*valueize)(tree))
5266{
5267 HOST_WIDE_INT bit_offset2, size, max_size;
ee45a32d
EB
5268 bool reverse;
5269
cfef45c8
RG
5270 if (TREE_CODE (base) == MEM_REF)
5271 {
5272 if (!integer_zerop (TREE_OPERAND (base, 1)))
5273 {
9541ffee 5274 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
cfef45c8 5275 return NULL_TREE;
807e902e 5276 *bit_offset += (mem_ref_offset (base).to_short_addr ()
cfef45c8
RG
5277 * BITS_PER_UNIT);
5278 }
5279
5280 if (valueize
5281 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5282 base = valueize (TREE_OPERAND (base, 0));
5283 if (!base || TREE_CODE (base) != ADDR_EXPR)
5284 return NULL_TREE;
5285 base = TREE_OPERAND (base, 0);
5286 }
5287
5288 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5289 DECL_INITIAL. If BASE is a nested reference into another
5290 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5291 the inner reference. */
5292 switch (TREE_CODE (base))
5293 {
5294 case VAR_DECL:
cfef45c8 5295 case CONST_DECL:
6a6dac52
JH
5296 {
5297 tree init = ctor_for_folding (base);
5298
688010ba 5299 /* Our semantic is exact opposite of ctor_for_folding;
6a6dac52
JH
5300 NULL means unknown, while error_mark_node is 0. */
5301 if (init == error_mark_node)
5302 return NULL_TREE;
5303 if (!init)
5304 return error_mark_node;
5305 return init;
5306 }
cfef45c8
RG
5307
5308 case ARRAY_REF:
5309 case COMPONENT_REF:
ee45a32d
EB
5310 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
5311 &reverse);
cfef45c8
RG
5312 if (max_size == -1 || size != max_size)
5313 return NULL_TREE;
5314 *bit_offset += bit_offset2;
5315 return get_base_constructor (base, bit_offset, valueize);
5316
5317 case STRING_CST:
5318 case CONSTRUCTOR:
5319 return base;
5320
5321 default:
5322 return NULL_TREE;
5323 }
5324}
5325
cfef45c8
RG
5326/* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5327 SIZE to the memory at bit OFFSET. */
5328
5329static tree
5330fold_array_ctor_reference (tree type, tree ctor,
5331 unsigned HOST_WIDE_INT offset,
c44c2088
JH
5332 unsigned HOST_WIDE_INT size,
5333 tree from_decl)
cfef45c8 5334{
807e902e
KZ
5335 offset_int low_bound;
5336 offset_int elt_size;
807e902e 5337 offset_int access_index;
6a636014 5338 tree domain_type = NULL_TREE;
cfef45c8
RG
5339 HOST_WIDE_INT inner_offset;
5340
5341 /* Compute low bound and elt size. */
eb8f1123
RG
5342 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5343 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
5344 if (domain_type && TYPE_MIN_VALUE (domain_type))
5345 {
5346 /* Static constructors for variably sized objects makes no sense. */
5347 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
807e902e 5348 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
5349 }
5350 else
807e902e 5351 low_bound = 0;
cfef45c8 5352 /* Static constructors for variably sized objects makes no sense. */
c3284718 5353 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
cfef45c8 5354 == INTEGER_CST);
807e902e 5355 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8
RG
5356
5357 /* We can handle only constantly sized accesses that are known to not
5358 be larger than size of array element. */
5359 if (!TYPE_SIZE_UNIT (type)
5360 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
807e902e
KZ
5361 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
5362 || elt_size == 0)
cfef45c8
RG
5363 return NULL_TREE;
5364
5365 /* Compute the array index we look for. */
807e902e
KZ
5366 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5367 elt_size);
27bcd47c 5368 access_index += low_bound;
cfef45c8
RG
5369
5370 /* And offset within the access. */
27bcd47c 5371 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8
RG
5372
5373 /* See if the array field is large enough to span whole access. We do not
5374 care to fold accesses spanning multiple array indexes. */
27bcd47c 5375 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
cfef45c8 5376 return NULL_TREE;
6a636014
AL
5377 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
5378 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
cfef45c8 5379
cfef45c8
RG
5380 /* When memory is not explicitely mentioned in constructor,
5381 it is 0 (or out of range). */
5382 return build_zero_cst (type);
5383}
5384
5385/* CTOR is CONSTRUCTOR of an aggregate or vector.
5386 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5387
5388static tree
5389fold_nonarray_ctor_reference (tree type, tree ctor,
5390 unsigned HOST_WIDE_INT offset,
c44c2088
JH
5391 unsigned HOST_WIDE_INT size,
5392 tree from_decl)
cfef45c8
RG
5393{
5394 unsigned HOST_WIDE_INT cnt;
5395 tree cfield, cval;
5396
5397 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5398 cval)
5399 {
5400 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5401 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5402 tree field_size = DECL_SIZE (cfield);
807e902e
KZ
5403 offset_int bitoffset;
5404 offset_int bitoffset_end, access_end;
cfef45c8
RG
5405
5406 /* Variable sized objects in static constructors makes no sense,
5407 but field_size can be NULL for flexible array members. */
5408 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5409 && TREE_CODE (byte_offset) == INTEGER_CST
5410 && (field_size != NULL_TREE
5411 ? TREE_CODE (field_size) == INTEGER_CST
5412 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5413
5414 /* Compute bit offset of the field. */
807e902e
KZ
5415 bitoffset = (wi::to_offset (field_offset)
5416 + wi::lshift (wi::to_offset (byte_offset),
5417 LOG2_BITS_PER_UNIT));
cfef45c8
RG
5418 /* Compute bit offset where the field ends. */
5419 if (field_size != NULL_TREE)
807e902e 5420 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 5421 else
807e902e 5422 bitoffset_end = 0;
cfef45c8 5423
807e902e 5424 access_end = offset_int (offset) + size;
b8b2b009
JJ
5425
5426 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5427 [BITOFFSET, BITOFFSET_END)? */
807e902e 5428 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 5429 && (field_size == NULL_TREE
807e902e 5430 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 5431 {
807e902e 5432 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8
RG
5433 /* We do have overlap. Now see if field is large enough to
5434 cover the access. Give up for accesses spanning multiple
5435 fields. */
807e902e 5436 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 5437 return NULL_TREE;
807e902e 5438 if (wi::lts_p (offset, bitoffset))
b8b2b009 5439 return NULL_TREE;
cfef45c8 5440 return fold_ctor_reference (type, cval,
27bcd47c 5441 inner_offset.to_uhwi (), size,
c44c2088 5442 from_decl);
cfef45c8
RG
5443 }
5444 }
5445 /* When memory is not explicitely mentioned in constructor, it is 0. */
5446 return build_zero_cst (type);
5447}
5448
5449/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5450 to the memory at bit OFFSET. */
5451
8403c2cf 5452tree
cfef45c8 5453fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
c44c2088 5454 unsigned HOST_WIDE_INT size, tree from_decl)
cfef45c8
RG
5455{
5456 tree ret;
5457
5458 /* We found the field with exact match. */
5459 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5460 && !offset)
9d60be38 5461 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8
RG
5462
5463 /* We are at the end of walk, see if we can view convert the
5464 result. */
5465 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5466 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3d8208ce
TP
5467 && !compare_tree_int (TYPE_SIZE (type), size)
5468 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
cfef45c8 5469 {
9d60be38 5470 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8
RG
5471 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5472 if (ret)
86c5a5c3 5473 STRIP_USELESS_TYPE_CONVERSION (ret);
cfef45c8
RG
5474 return ret;
5475 }
b2505143
RB
5476 /* For constants and byte-aligned/sized reads try to go through
5477 native_encode/interpret. */
5478 if (CONSTANT_CLASS_P (ctor)
5479 && BITS_PER_UNIT == 8
5480 && offset % BITS_PER_UNIT == 0
5481 && size % BITS_PER_UNIT == 0
5482 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5483 {
5484 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5485 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5486 offset / BITS_PER_UNIT) > 0)
5487 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
5488 }
cfef45c8
RG
5489 if (TREE_CODE (ctor) == CONSTRUCTOR)
5490 {
5491
eb8f1123
RG
5492 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5493 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
c44c2088
JH
5494 return fold_array_ctor_reference (type, ctor, offset, size,
5495 from_decl);
cfef45c8 5496 else
c44c2088
JH
5497 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5498 from_decl);
cfef45c8
RG
5499 }
5500
5501 return NULL_TREE;
5502}
5503
5504/* Return the tree representing the element referenced by T if T is an
5505 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5506 names using VALUEIZE. Return NULL_TREE otherwise. */
5507
5508tree
5509fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5510{
5511 tree ctor, idx, base;
5512 HOST_WIDE_INT offset, size, max_size;
5513 tree tem;
ee45a32d 5514 bool reverse;
cfef45c8 5515
f8a7df45
RG
5516 if (TREE_THIS_VOLATILE (t))
5517 return NULL_TREE;
5518
3a65ee74 5519 if (DECL_P (t))
cfef45c8
RG
5520 return get_symbol_constant_value (t);
5521
5522 tem = fold_read_from_constant_string (t);
5523 if (tem)
5524 return tem;
5525
5526 switch (TREE_CODE (t))
5527 {
5528 case ARRAY_REF:
5529 case ARRAY_RANGE_REF:
5530 /* Constant indexes are handled well by get_base_constructor.
5531 Only special case variable offsets.
5532 FIXME: This code can't handle nested references with variable indexes
5533 (they will be handled only by iteration of ccp). Perhaps we can bring
5534 get_ref_base_and_extent here and make it use a valueize callback. */
5535 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5536 && valueize
5537 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
b48e22b2 5538 && TREE_CODE (idx) == INTEGER_CST)
cfef45c8
RG
5539 {
5540 tree low_bound, unit_size;
5541
5542 /* If the resulting bit-offset is constant, track it. */
5543 if ((low_bound = array_ref_low_bound (t),
b48e22b2 5544 TREE_CODE (low_bound) == INTEGER_CST)
cfef45c8 5545 && (unit_size = array_ref_element_size (t),
807e902e 5546 tree_fits_uhwi_p (unit_size)))
cfef45c8 5547 {
807e902e
KZ
5548 offset_int woffset
5549 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5550 TYPE_PRECISION (TREE_TYPE (idx)));
5551
5552 if (wi::fits_shwi_p (woffset))
5553 {
5554 offset = woffset.to_shwi ();
5555 /* TODO: This code seems wrong, multiply then check
5556 to see if it fits. */
5557 offset *= tree_to_uhwi (unit_size);
5558 offset *= BITS_PER_UNIT;
5559
5560 base = TREE_OPERAND (t, 0);
5561 ctor = get_base_constructor (base, &offset, valueize);
5562 /* Empty constructor. Always fold to 0. */
5563 if (ctor == error_mark_node)
5564 return build_zero_cst (TREE_TYPE (t));
5565 /* Out of bound array access. Value is undefined,
5566 but don't fold. */
5567 if (offset < 0)
5568 return NULL_TREE;
5569 /* We can not determine ctor. */
5570 if (!ctor)
5571 return NULL_TREE;
5572 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5573 tree_to_uhwi (unit_size)
5574 * BITS_PER_UNIT,
5575 base);
5576 }
cfef45c8
RG
5577 }
5578 }
5579 /* Fallthru. */
5580
5581 case COMPONENT_REF:
5582 case BIT_FIELD_REF:
5583 case TARGET_MEM_REF:
5584 case MEM_REF:
ee45a32d 5585 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
cfef45c8
RG
5586 ctor = get_base_constructor (base, &offset, valueize);
5587
5588 /* Empty constructor. Always fold to 0. */
5589 if (ctor == error_mark_node)
5590 return build_zero_cst (TREE_TYPE (t));
5591 /* We do not know precise address. */
5592 if (max_size == -1 || max_size != size)
5593 return NULL_TREE;
5594 /* We can not determine ctor. */
5595 if (!ctor)
5596 return NULL_TREE;
5597
5598 /* Out of bound array access. Value is undefined, but don't fold. */
5599 if (offset < 0)
5600 return NULL_TREE;
5601
c44c2088
JH
5602 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5603 base);
cfef45c8
RG
5604
5605 case REALPART_EXPR:
5606 case IMAGPART_EXPR:
5607 {
5608 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5609 if (c && TREE_CODE (c) == COMPLEX_CST)
5610 return fold_build1_loc (EXPR_LOCATION (t),
5611 TREE_CODE (t), TREE_TYPE (t), c);
5612 break;
5613 }
5614
5615 default:
5616 break;
5617 }
5618
5619 return NULL_TREE;
5620}
5621
5622tree
5623fold_const_aggregate_ref (tree t)
5624{
5625 return fold_const_aggregate_ref_1 (t, NULL);
5626}
06bc3ec7 5627
85942f45 5628/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
5629 at OFFSET.
5630 Set CAN_REFER if non-NULL to false if method
5631 is not referable or if the virtual table is ill-formed (such as rewriten
5632 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
81fa35bd
MJ
5633
5634tree
85942f45
JH
5635gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5636 tree v,
ec77d61f
JH
5637 unsigned HOST_WIDE_INT offset,
5638 bool *can_refer)
81fa35bd 5639{
85942f45
JH
5640 tree vtable = v, init, fn;
5641 unsigned HOST_WIDE_INT size;
8c311b50
JH
5642 unsigned HOST_WIDE_INT elt_size, access_index;
5643 tree domain_type;
81fa35bd 5644
ec77d61f
JH
5645 if (can_refer)
5646 *can_refer = true;
5647
9de2f554 5648 /* First of all double check we have virtual table. */
81fa35bd 5649 if (TREE_CODE (v) != VAR_DECL
2aa3da06 5650 || !DECL_VIRTUAL_P (v))
ec77d61f 5651 {
ec77d61f
JH
5652 /* Pass down that we lost track of the target. */
5653 if (can_refer)
5654 *can_refer = false;
5655 return NULL_TREE;
5656 }
9de2f554 5657
2aa3da06
JH
5658 init = ctor_for_folding (v);
5659
9de2f554 5660 /* The virtual tables should always be born with constructors
2aa3da06
JH
5661 and we always should assume that they are avaialble for
5662 folding. At the moment we do not stream them in all cases,
5663 but it should never happen that ctor seem unreachable. */
5664 gcc_assert (init);
5665 if (init == error_mark_node)
5666 {
5667 gcc_assert (in_lto_p);
ec77d61f
JH
5668 /* Pass down that we lost track of the target. */
5669 if (can_refer)
5670 *can_refer = false;
2aa3da06
JH
5671 return NULL_TREE;
5672 }
81fa35bd 5673 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 5674 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 5675 offset *= BITS_PER_UNIT;
81fa35bd 5676 offset += token * size;
9de2f554 5677
8c311b50
JH
5678 /* Lookup the value in the constructor that is assumed to be array.
5679 This is equivalent to
5680 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5681 offset, size, NULL);
5682 but in a constant time. We expect that frontend produced a simple
5683 array without indexed initializers. */
5684
5685 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5686 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5687 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5688 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5689
5690 access_index = offset / BITS_PER_UNIT / elt_size;
5691 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5692
5693 /* This code makes an assumption that there are no
5694 indexed fileds produced by C++ FE, so we can directly index the array. */
5695 if (access_index < CONSTRUCTOR_NELTS (init))
5696 {
5697 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5698 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5699 STRIP_NOPS (fn);
5700 }
5701 else
5702 fn = NULL;
9de2f554
JH
5703
5704 /* For type inconsistent program we may end up looking up virtual method
5705 in virtual table that does not contain TOKEN entries. We may overrun
5706 the virtual table and pick up a constant or RTTI info pointer.
5707 In any case the call is undefined. */
5708 if (!fn
5709 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5710 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5711 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5712 else
5713 {
5714 fn = TREE_OPERAND (fn, 0);
5715
5716 /* When cgraph node is missing and function is not public, we cannot
5717 devirtualize. This can happen in WHOPR when the actual method
5718 ends up in other partition, because we found devirtualization
5719 possibility too late. */
5720 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
5721 {
5722 if (can_refer)
5723 {
5724 *can_refer = false;
5725 return fn;
5726 }
5727 return NULL_TREE;
5728 }
9de2f554 5729 }
81fa35bd 5730
7501ca28
RG
5731 /* Make sure we create a cgraph node for functions we'll reference.
5732 They can be non-existent if the reference comes from an entry
5733 of an external vtable for example. */
d52f5295 5734 cgraph_node::get_create (fn);
7501ca28 5735
81fa35bd
MJ
5736 return fn;
5737}
5738
85942f45
JH
5739/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5740 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5741 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
5742 OBJ_TYPE_REF_OBJECT(REF).
5743 Set CAN_REFER if non-NULL to false if method
5744 is not referable or if the virtual table is ill-formed (such as rewriten
5745 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
5746
5747tree
ec77d61f
JH
5748gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5749 bool *can_refer)
85942f45
JH
5750{
5751 unsigned HOST_WIDE_INT offset;
5752 tree v;
5753
5754 v = BINFO_VTABLE (known_binfo);
5755 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5756 if (!v)
5757 return NULL_TREE;
5758
5759 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
5760 {
5761 if (can_refer)
5762 *can_refer = false;
5763 return NULL_TREE;
5764 }
5765 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
5766}
5767
b184c8f1
AM
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  /* Look through no-op conversions on the pointer operand.  */
  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
       {
	 tree type_domain = TYPE_DOMAIN (optype);
	 tree min_val = size_zero_node;
	 /* Use the array's lower bound as the index when it is known.  */
	 if (type_domain && TYPE_MIN_VALUE (type_domain))
	   min_val = TYPE_MIN_VALUE (type_domain);
	 if (TREE_CODE (min_val) == INTEGER_CST)
	   return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
       }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...>
	 Only valid when the constant offset stays inside the vector.  */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
          if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo
	 The offset must be exactly one element, i.e. the imag part.  */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recurse once to simplify the inner indirection if possible.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
19e51b40
JJ
5887
5888/* Return true if CODE is an operation that when operating on signed
5889 integer types involves undefined behavior on overflow and the
5890 operation can be expressed with unsigned arithmetic. */
5891
5892bool
5893arith_code_with_undefined_signed_overflow (tree_code code)
5894{
5895 switch (code)
5896 {
5897 case PLUS_EXPR:
5898 case MINUS_EXPR:
5899 case MULT_EXPR:
5900 case NEGATE_EXPR:
5901 case POINTER_PLUS_EXPR:
5902 return true;
5903 default:
5904 return false;
5905 }
5906}
5907
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each RHS operand to the unsigned type, accumulating the
     conversion statements in STMTS.  Operand 0 is the LHS, so start at 1.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  /* Give STMT a fresh unsigned LHS; the original LHS is re-created below
     by converting the unsigned result back.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR is not valid on two unsigned operands; it becomes
     a plain PLUS_EXPR after the conversion.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
d4f5cd5e 5944
3d2cf79f 5945
c26de36d
RB
5946/* The valueization hook we use for the gimple_build API simplification.
5947 This makes us match fold_buildN behavior by only combining with
5948 statements in the sequence(s) we are currently building. */
5949
5950static tree
5951gimple_build_valueize (tree op)
5952{
5953 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
5954 return op;
5955 return NULL_TREE;
5956}
5957
3d2cf79f 5958/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 5959 simplifying it first if possible. Returns the built
3d2cf79f
RB
5960 expression value and appends statements possibly defining it
5961 to SEQ. */
5962
5963tree
5964gimple_build (gimple_seq *seq, location_t loc,
c26de36d 5965 enum tree_code code, tree type, tree op0)
3d2cf79f 5966{
c26de36d 5967 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
5968 if (!res)
5969 {
5970 if (gimple_in_ssa_p (cfun))
b731b390 5971 res = make_ssa_name (type);
3d2cf79f 5972 else
b731b390 5973 res = create_tmp_reg (type);
355fe088 5974 gimple *stmt;
3d2cf79f
RB
5975 if (code == REALPART_EXPR
5976 || code == IMAGPART_EXPR
5977 || code == VIEW_CONVERT_EXPR)
0d0e4a03 5978 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 5979 else
0d0e4a03 5980 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
5981 gimple_set_location (stmt, loc);
5982 gimple_seq_add_stmt_without_update (seq, stmt);
5983 }
5984 return res;
5985}
5986
5987/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 5988 simplifying it first if possible. Returns the built
3d2cf79f
RB
5989 expression value and appends statements possibly defining it
5990 to SEQ. */
5991
5992tree
5993gimple_build (gimple_seq *seq, location_t loc,
c26de36d 5994 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 5995{
c26de36d 5996 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
5997 if (!res)
5998 {
5999 if (gimple_in_ssa_p (cfun))
b731b390 6000 res = make_ssa_name (type);
3d2cf79f 6001 else
b731b390 6002 res = create_tmp_reg (type);
355fe088 6003 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
6004 gimple_set_location (stmt, loc);
6005 gimple_seq_add_stmt_without_update (seq, stmt);
6006 }
6007 return res;
6008}
6009
6010/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 6011 simplifying it first if possible. Returns the built
3d2cf79f
RB
6012 expression value and appends statements possibly defining it
6013 to SEQ. */
6014
6015tree
6016gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6017 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
6018{
6019 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 6020 seq, gimple_build_valueize);
3d2cf79f
RB
6021 if (!res)
6022 {
6023 if (gimple_in_ssa_p (cfun))
b731b390 6024 res = make_ssa_name (type);
3d2cf79f 6025 else
b731b390 6026 res = create_tmp_reg (type);
355fe088 6027 gimple *stmt;
3d2cf79f 6028 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
6029 stmt = gimple_build_assign (res, code,
6030 build3 (code, type, op0, op1, op2));
3d2cf79f 6031 else
0d0e4a03 6032 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
6033 gimple_set_location (stmt, loc);
6034 gimple_seq_add_stmt_without_update (seq, stmt);
6035 }
6036 return res;
6037}
6038
6039/* Build the call FN (ARG0) with a result of type TYPE
6040 (or no result if TYPE is void) with location LOC,
c26de36d 6041 simplifying it first if possible. Returns the built
3d2cf79f
RB
6042 expression value (or NULL_TREE if TYPE is void) and appends
6043 statements possibly defining it to SEQ. */
6044
6045tree
6046gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6047 enum built_in_function fn, tree type, tree arg0)
3d2cf79f 6048{
c26de36d 6049 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
6050 if (!res)
6051 {
6052 tree decl = builtin_decl_implicit (fn);
355fe088 6053 gimple *stmt = gimple_build_call (decl, 1, arg0);
3d2cf79f
RB
6054 if (!VOID_TYPE_P (type))
6055 {
6056 if (gimple_in_ssa_p (cfun))
b731b390 6057 res = make_ssa_name (type);
3d2cf79f 6058 else
b731b390 6059 res = create_tmp_reg (type);
3d2cf79f
RB
6060 gimple_call_set_lhs (stmt, res);
6061 }
6062 gimple_set_location (stmt, loc);
6063 gimple_seq_add_stmt_without_update (seq, stmt);
6064 }
6065 return res;
6066}
6067
6068/* Build the call FN (ARG0, ARG1) with a result of type TYPE
6069 (or no result if TYPE is void) with location LOC,
c26de36d 6070 simplifying it first if possible. Returns the built
3d2cf79f
RB
6071 expression value (or NULL_TREE if TYPE is void) and appends
6072 statements possibly defining it to SEQ. */
6073
6074tree
6075gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6076 enum built_in_function fn, tree type, tree arg0, tree arg1)
3d2cf79f 6077{
c26de36d 6078 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
6079 if (!res)
6080 {
6081 tree decl = builtin_decl_implicit (fn);
355fe088 6082 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
3d2cf79f
RB
6083 if (!VOID_TYPE_P (type))
6084 {
6085 if (gimple_in_ssa_p (cfun))
b731b390 6086 res = make_ssa_name (type);
3d2cf79f 6087 else
b731b390 6088 res = create_tmp_reg (type);
3d2cf79f
RB
6089 gimple_call_set_lhs (stmt, res);
6090 }
6091 gimple_set_location (stmt, loc);
6092 gimple_seq_add_stmt_without_update (seq, stmt);
6093 }
6094 return res;
6095}
6096
6097/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6098 (or no result if TYPE is void) with location LOC,
c26de36d 6099 simplifying it first if possible. Returns the built
3d2cf79f
RB
6100 expression value (or NULL_TREE if TYPE is void) and appends
6101 statements possibly defining it to SEQ. */
6102
6103tree
6104gimple_build (gimple_seq *seq, location_t loc,
6105 enum built_in_function fn, tree type,
c26de36d 6106 tree arg0, tree arg1, tree arg2)
3d2cf79f 6107{
c26de36d
RB
6108 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6109 seq, gimple_build_valueize);
3d2cf79f
RB
6110 if (!res)
6111 {
6112 tree decl = builtin_decl_implicit (fn);
355fe088 6113 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
3d2cf79f
RB
6114 if (!VOID_TYPE_P (type))
6115 {
6116 if (gimple_in_ssa_p (cfun))
b731b390 6117 res = make_ssa_name (type);
3d2cf79f 6118 else
b731b390 6119 res = create_tmp_reg (type);
3d2cf79f
RB
6120 gimple_call_set_lhs (stmt, res);
6121 }
6122 gimple_set_location (stmt, loc);
6123 gimple_seq_add_stmt_without_update (seq, stmt);
6124 }
6125 return res;
6126}
6127
6128/* Build the conversion (TYPE) OP with a result of type TYPE
6129 with location LOC if such conversion is neccesary in GIMPLE,
6130 simplifying it first.
6131 Returns the built expression value and appends
6132 statements possibly defining it to SEQ. */
d4f5cd5e
RB
6133
6134tree
6135gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6136{
6137 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6138 return op;
3d2cf79f 6139 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 6140}
68e57f04 6141
74e3c262
RB
6142/* Build the conversion (ptrofftype) OP with a result of a type
6143 compatible with ptrofftype with location LOC if such conversion
6144 is neccesary in GIMPLE, simplifying it first.
6145 Returns the built expression value and appends
6146 statements possibly defining it to SEQ. */
6147
6148tree
6149gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6150{
6151 if (ptrofftype_p (TREE_TYPE (op)))
6152 return op;
6153 return gimple_convert (seq, loc, sizetype, op);
6154}
6155
68e57f04
RS
6156/* Return true if the result of assignment STMT is known to be non-negative.
6157 If the return value is based on the assumption that signed overflow is
6158 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6159 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6160
6161static bool
6162gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6163 int depth)
6164{
6165 enum tree_code code = gimple_assign_rhs_code (stmt);
6166 switch (get_gimple_rhs_class (code))
6167 {
6168 case GIMPLE_UNARY_RHS:
6169 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6170 gimple_expr_type (stmt),
6171 gimple_assign_rhs1 (stmt),
6172 strict_overflow_p, depth);
6173 case GIMPLE_BINARY_RHS:
6174 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6175 gimple_expr_type (stmt),
6176 gimple_assign_rhs1 (stmt),
6177 gimple_assign_rhs2 (stmt),
6178 strict_overflow_p, depth);
6179 case GIMPLE_TERNARY_RHS:
6180 return false;
6181 case GIMPLE_SINGLE_RHS:
6182 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6183 strict_overflow_p, depth);
6184 case GIMPLE_INVALID_RHS:
6185 break;
6186 }
6187 gcc_unreachable ();
6188}
6189
6190/* Return true if return value of call STMT is known to be non-negative.
6191 If the return value is based on the assumption that signed overflow is
6192 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6193 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6194
6195static bool
6196gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6197 int depth)
6198{
6199 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6200 gimple_call_arg (stmt, 0) : NULL_TREE;
6201 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6202 gimple_call_arg (stmt, 1) : NULL_TREE;
6203
6204 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6205 gimple_call_fndecl (stmt),
6206 arg0,
6207 arg1,
6208 strict_overflow_p, depth);
6209}
6210
4534c203
RB
6211/* Return true if return value of call STMT is known to be non-negative.
6212 If the return value is based on the assumption that signed overflow is
6213 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6214 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6215
6216static bool
6217gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6218 int depth)
6219{
6220 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6221 {
6222 tree arg = gimple_phi_arg_def (stmt, i);
6223 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6224 return false;
6225 }
6226 return true;
6227}
6228
68e57f04
RS
6229/* Return true if STMT is known to compute a non-negative value.
6230 If the return value is based on the assumption that signed overflow is
6231 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6232 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6233
6234bool
6235gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6236 int depth)
6237{
6238 switch (gimple_code (stmt))
6239 {
6240 case GIMPLE_ASSIGN:
6241 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6242 depth);
6243 case GIMPLE_CALL:
6244 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6245 depth);
4534c203
RB
6246 case GIMPLE_PHI:
6247 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6248 depth);
68e57f04
RS
6249 default:
6250 return false;
6251 }
6252}
67dbe582
RS
6253
6254/* Return true if the floating-point value computed by assignment STMT
6255 is known to have an integer value. We also allow +Inf, -Inf and NaN
6256 to be considered integer values.
6257
6258 DEPTH is the current nesting depth of the query. */
6259
6260static bool
6261gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6262{
6263 enum tree_code code = gimple_assign_rhs_code (stmt);
6264 switch (get_gimple_rhs_class (code))
6265 {
6266 case GIMPLE_UNARY_RHS:
6267 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6268 gimple_assign_rhs1 (stmt), depth);
6269 case GIMPLE_BINARY_RHS:
6270 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6271 gimple_assign_rhs1 (stmt),
6272 gimple_assign_rhs2 (stmt), depth);
6273 case GIMPLE_TERNARY_RHS:
6274 return false;
6275 case GIMPLE_SINGLE_RHS:
6276 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6277 case GIMPLE_INVALID_RHS:
6278 break;
6279 }
6280 gcc_unreachable ();
6281}
6282
6283/* Return true if the floating-point value computed by call STMT is known
6284 to have an integer value. We also allow +Inf, -Inf and NaN to be
6285 considered integer values.
6286
6287 DEPTH is the current nesting depth of the query. */
6288
6289static bool
6290gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6291{
6292 tree arg0 = (gimple_call_num_args (stmt) > 0
6293 ? gimple_call_arg (stmt, 0)
6294 : NULL_TREE);
6295 tree arg1 = (gimple_call_num_args (stmt) > 1
6296 ? gimple_call_arg (stmt, 1)
6297 : NULL_TREE);
6298 return integer_valued_real_call_p (gimple_call_fndecl (stmt),
6299 arg0, arg1, depth);
6300}
6301
6302/* Return true if the floating-point result of phi STMT is known to have
6303 an integer value. We also allow +Inf, -Inf and NaN to be considered
6304 integer values.
6305
6306 DEPTH is the current nesting depth of the query. */
6307
6308static bool
6309gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6310{
6311 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6312 {
6313 tree arg = gimple_phi_arg_def (stmt, i);
6314 if (!integer_valued_real_single_p (arg, depth + 1))
6315 return false;
6316 }
6317 return true;
6318}
6319
6320/* Return true if the floating-point value computed by STMT is known
6321 to have an integer value. We also allow +Inf, -Inf and NaN to be
6322 considered integer values.
6323
6324 DEPTH is the current nesting depth of the query. */
6325
6326bool
6327gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6328{
6329 switch (gimple_code (stmt))
6330 {
6331 case GIMPLE_ASSIGN:
6332 return gimple_assign_integer_valued_real_p (stmt, depth);
6333 case GIMPLE_CALL:
6334 return gimple_call_integer_valued_real_p (stmt, depth);
6335 case GIMPLE_PHI:
6336 return gimple_phi_integer_valued_real_p (stmt, depth);
6337 default:
6338 return false;
6339 }
6340}