]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
re PR tree-optimization/67781 (wrong code generated on big-endian with -O1 -fexpensiv...
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
818ab71a 2 Copyright (C) 2010-2016 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5
AM
25#include "target.h"
26#include "rtl.h"
cbdd87d4 27#include "tree.h"
c7131fb2 28#include "gimple.h"
957060b5 29#include "predict.h"
c7131fb2 30#include "ssa.h"
957060b5
AM
31#include "cgraph.h"
32#include "gimple-pretty-print.h"
c7131fb2 33#include "fold-const.h"
36566b39
PK
34#include "stmt.h"
35#include "expr.h"
36#include "stor-layout.h"
7ee2468b 37#include "dumpfile.h"
2fb9a547 38#include "gimple-fold.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
442b4905
AM
41#include "tree-into-ssa.h"
42#include "tree-dfa.h"
7a300452 43#include "tree-ssa.h"
cbdd87d4 44#include "tree-ssa-propagate.h"
450ad0cd 45#include "ipa-utils.h"
4484a35a 46#include "tree-ssa-address.h"
862d0b35 47#include "langhooks.h"
19e51b40 48#include "gimplify-me.h"
2b5f0895 49#include "dbgcnt.h"
9b2b7279 50#include "builtins.h"
e0ee10ed
RB
51#include "tree-eh.h"
52#include "gimple-match.h"
48126138 53#include "gomp-constants.h"
f869c12f 54#include "optabs-query.h"
451e8dae 55#include "omp-low.h"
3de2a40e 56#include "ipa-chkp.h"
f869c12f 57
cbdd87d4 58
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (debug-only) origins can never be referenced.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function inlined into another has no standalone body to refer to.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
158
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE if the value cannot be
   referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite &p + CST into &MEM[p, CST] so the whole value becomes a
     single ADDR_EXPR that the code below can handle.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up if the pointed-to decl is not referencable from here.  */
      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      /* Preserve the type of the original expression.  */
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
cbdd87d4
RG
219
220/* If SYM is a constant variable with known value, return the value.
221 NULL_TREE is returned otherwise. */
222
223tree
224get_symbol_constant_value (tree sym)
225{
6a6dac52
JH
226 tree val = ctor_for_folding (sym);
227 if (val != error_mark_node)
cbdd87d4 228 {
cbdd87d4
RG
229 if (val)
230 {
9d60be38 231 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 232 if (val && is_gimple_min_invariant (val))
17f39a39 233 return val;
1389294c
JH
234 else
235 return NULL_TREE;
cbdd87d4
RG
236 }
237 /* Variables declared 'const' without an initializer
238 have zero as the initializer if they may not be
239 overridden at link or run time. */
240 if (!val
b8a8c472 241 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 242 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
243 }
244
245 return NULL_TREE;
246}
247
248
cbdd87d4
RG
249
250/* Subroutine of fold_stmt. We perform several simplifications of the
251 memory reference tree EXPR and make sure to re-gimplify them properly
252 after propagation of constant addresses. IS_LHS is true if the
253 reference is supposed to be an lvalue. */
254
255static tree
256maybe_fold_reference (tree expr, bool is_lhs)
257{
17f39a39 258 tree result;
cbdd87d4 259
f0eddb90
RG
260 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
261 || TREE_CODE (expr) == REALPART_EXPR
262 || TREE_CODE (expr) == IMAGPART_EXPR)
263 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
264 return fold_unary_loc (EXPR_LOCATION (expr),
265 TREE_CODE (expr),
266 TREE_TYPE (expr),
267 TREE_OPERAND (expr, 0));
268 else if (TREE_CODE (expr) == BIT_FIELD_REF
269 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
270 return fold_ternary_loc (EXPR_LOCATION (expr),
271 TREE_CODE (expr),
272 TREE_TYPE (expr),
273 TREE_OPERAND (expr, 0),
274 TREE_OPERAND (expr, 1),
275 TREE_OPERAND (expr, 2));
276
f0eddb90
RG
277 if (!is_lhs
278 && (result = fold_const_aggregate_ref (expr))
279 && is_gimple_min_invariant (result))
280 return result;
cbdd87d4 281
cbdd87d4
RG
282 return NULL_TREE;
283}
284
285
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers are not foldable.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		/* Devirtualize only when the full set of targets is known
		   and it contains at most one method.  */
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p, 0] simplifies to p (modulo type conversion).  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
425
fef5a0d9
RB
426
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* Statements that store to memory: single assignments with a
	 non-register lhs, and calls that can clobber memory.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence inherits the VDEF of the
	     replaced statement; earlier stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace (si_p, stmts, false);
}
498
cbdd87d4
RG
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a temporary, then assign the temporary to
	 the call's original lhs as the last statement of the sequence.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  /* Splice the sequence in, fixing up virtual operands.  */
  gsi_replace_with_seq_vops (si_p, stmts);
}
cbdd87d4 554
fef5a0d9
RB
555
556/* Replace the call at *GSI with the gimple value VAL. */
557
558static void
559replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
560{
355fe088 561 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 562 tree lhs = gimple_call_lhs (stmt);
355fe088 563 gimple *repl;
fef5a0d9 564 if (lhs)
e256dfce 565 {
fef5a0d9
RB
566 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
567 val = fold_convert (TREE_TYPE (lhs), val);
568 repl = gimple_build_assign (lhs, val);
569 }
570 else
571 repl = gimple_build_nop ();
572 tree vdef = gimple_vdef (stmt);
573 if (vdef && TREE_CODE (vdef) == SSA_NAME)
574 {
575 unlink_stmt_vdef (stmt);
576 release_ssa_name (vdef);
577 }
f6b4dc28 578 gsi_replace (gsi, repl, false);
fef5a0d9
RB
579}
580
581/* Replace the call at *GSI with the new call REPL and fold that
582 again. */
583
584static void
355fe088 585replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
fef5a0d9 586{
355fe088 587 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
588 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
589 gimple_set_location (repl, gimple_location (stmt));
590 if (gimple_vdef (stmt)
591 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
592 {
593 gimple_set_vdef (repl, gimple_vdef (stmt));
594 gimple_set_vuse (repl, gimple_vuse (stmt));
595 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
596 }
f6b4dc28 597 gsi_replace (gsi, repl, false);
fef5a0d9
RB
598 fold_stmt (gsi);
599}
600
601/* Return true if VAR is a VAR_DECL or a component thereof. */
602
603static bool
604var_decl_component_p (tree var)
605{
606 tree inner = var;
607 while (handled_component_p (inner))
608 inner = TREE_OPERAND (inner, 0);
609 return SSA_VAR_P (inner);
610}
611
612/* Fold function call to builtin mem{{,p}cpy,move}. Return
86c5a5c3 613 false if no simplification can be made.
fef5a0d9
RB
614 If ENDP is 0, return DEST (like memcpy).
615 If ENDP is 1, return DEST+LEN (like mempcpy).
616 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
617 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
618 (memmove). */
619
620static bool
621gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
622 tree dest, tree src, int endp)
623{
355fe088 624 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
625 tree lhs = gimple_call_lhs (stmt);
626 tree len = gimple_call_arg (stmt, 2);
627 tree destvar, srcvar;
628 location_t loc = gimple_location (stmt);
629
630 /* If the LEN parameter is zero, return DEST. */
631 if (integer_zerop (len))
632 {
355fe088 633 gimple *repl;
fef5a0d9
RB
634 if (gimple_call_lhs (stmt))
635 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
636 else
637 repl = gimple_build_nop ();
638 tree vdef = gimple_vdef (stmt);
639 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 640 {
fef5a0d9
RB
641 unlink_stmt_vdef (stmt);
642 release_ssa_name (vdef);
643 }
f6b4dc28 644 gsi_replace (gsi, repl, false);
fef5a0d9
RB
645 return true;
646 }
647
648 /* If SRC and DEST are the same (and not volatile), return
649 DEST{,+LEN,+LEN-1}. */
650 if (operand_equal_p (src, dest, 0))
651 {
652 unlink_stmt_vdef (stmt);
653 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
654 release_ssa_name (gimple_vdef (stmt));
655 if (!lhs)
656 {
f6b4dc28 657 gsi_replace (gsi, gimple_build_nop (), false);
fef5a0d9
RB
658 return true;
659 }
660 goto done;
661 }
662 else
663 {
664 tree srctype, desttype;
665 unsigned int src_align, dest_align;
666 tree off0;
667
3de2a40e
IE
668 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
669 pointers as wide integer) and also may result in huge function
670 size because of inlined bounds copy. Thus don't inline for
671 functions we want to instrument. */
672 if (flag_check_pointer_bounds
673 && chkp_instrumentable_p (cfun->decl)
674 /* Even if data may contain pointers we can inline if copy
675 less than a pointer size. */
676 && (!tree_fits_uhwi_p (len)
677 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
678 return false;
679
fef5a0d9
RB
680 /* Build accesses at offset zero with a ref-all character type. */
681 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
682 ptr_mode, true), 0);
683
684 /* If we can perform the copy efficiently with first doing all loads
685 and then all stores inline it that way. Currently efficiently
686 means that we can load all the memory into a single integer
687 register which is what MOVE_MAX gives us. */
688 src_align = get_pointer_alignment (src);
689 dest_align = get_pointer_alignment (dest);
690 if (tree_fits_uhwi_p (len)
691 && compare_tree_int (len, MOVE_MAX) <= 0
692 /* ??? Don't transform copies from strings with known length this
693 confuses the tree-ssa-strlen.c. This doesn't handle
694 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
695 reason. */
696 && !c_strlen (src, 2))
697 {
698 unsigned ilen = tree_to_uhwi (len);
699 if (exact_log2 (ilen) != -1)
700 {
701 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
702 if (type
703 && TYPE_MODE (type) != BLKmode
704 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
705 == ilen * 8)
706 /* If the destination pointer is not aligned we must be able
707 to emit an unaligned store. */
708 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
f869c12f
RR
709 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
710 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
711 != CODE_FOR_nothing)))
fef5a0d9
RB
712 {
713 tree srctype = type;
714 tree desttype = type;
715 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
716 srctype = build_aligned_type (type, src_align);
717 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
718 tree tem = fold_const_aggregate_ref (srcmem);
719 if (tem)
720 srcmem = tem;
721 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
722 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
f869c12f
RR
723 src_align)
724 && (optab_handler (movmisalign_optab,
725 TYPE_MODE (type))
726 == CODE_FOR_nothing))
fef5a0d9
RB
727 srcmem = NULL_TREE;
728 if (srcmem)
729 {
355fe088 730 gimple *new_stmt;
fef5a0d9
RB
731 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
732 {
733 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
734 if (gimple_in_ssa_p (cfun))
735 srcmem = make_ssa_name (TREE_TYPE (srcmem),
736 new_stmt);
737 else
b731b390 738 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
fef5a0d9
RB
739 gimple_assign_set_lhs (new_stmt, srcmem);
740 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
741 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
742 }
743 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
744 desttype = build_aligned_type (type, dest_align);
745 new_stmt
746 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
747 dest, off0),
748 srcmem);
749 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
750 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
751 if (gimple_vdef (new_stmt)
752 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
753 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
754 if (!lhs)
755 {
f6b4dc28 756 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
757 return true;
758 }
759 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
760 goto done;
761 }
762 }
763 }
764 }
765
766 if (endp == 3)
767 {
768 /* Both DEST and SRC must be pointer types.
769 ??? This is what old code did. Is the testing for pointer types
770 really mandatory?
771
772 If either SRC is readonly or length is 1, we can use memcpy. */
773 if (!dest_align || !src_align)
774 return false;
775 if (readonly_data_expr (src)
776 || (tree_fits_uhwi_p (len)
777 && (MIN (src_align, dest_align) / BITS_PER_UNIT
778 >= tree_to_uhwi (len))))
779 {
780 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
781 if (!fn)
782 return false;
783 gimple_call_set_fndecl (stmt, fn);
784 gimple_call_set_arg (stmt, 0, dest);
785 gimple_call_set_arg (stmt, 1, src);
786 fold_stmt (gsi);
787 return true;
788 }
789
790 /* If *src and *dest can't overlap, optimize into memcpy as well. */
791 if (TREE_CODE (src) == ADDR_EXPR
792 && TREE_CODE (dest) == ADDR_EXPR)
793 {
794 tree src_base, dest_base, fn;
795 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
796 HOST_WIDE_INT size = -1;
797 HOST_WIDE_INT maxsize = -1;
ee45a32d 798 bool reverse;
fef5a0d9
RB
799
800 srcvar = TREE_OPERAND (src, 0);
801 src_base = get_ref_base_and_extent (srcvar, &src_offset,
ee45a32d 802 &size, &maxsize, &reverse);
fef5a0d9
RB
803 destvar = TREE_OPERAND (dest, 0);
804 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
ee45a32d 805 &size, &maxsize, &reverse);
fef5a0d9
RB
806 if (tree_fits_uhwi_p (len))
807 maxsize = tree_to_uhwi (len);
808 else
809 maxsize = -1;
810 src_offset /= BITS_PER_UNIT;
811 dest_offset /= BITS_PER_UNIT;
812 if (SSA_VAR_P (src_base)
813 && SSA_VAR_P (dest_base))
814 {
815 if (operand_equal_p (src_base, dest_base, 0)
816 && ranges_overlap_p (src_offset, maxsize,
817 dest_offset, maxsize))
818 return false;
819 }
820 else if (TREE_CODE (src_base) == MEM_REF
821 && TREE_CODE (dest_base) == MEM_REF)
822 {
823 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
824 TREE_OPERAND (dest_base, 0), 0))
825 return false;
826 offset_int off = mem_ref_offset (src_base) + src_offset;
827 if (!wi::fits_shwi_p (off))
828 return false;
829 src_offset = off.to_shwi ();
830
831 off = mem_ref_offset (dest_base) + dest_offset;
832 if (!wi::fits_shwi_p (off))
833 return false;
834 dest_offset = off.to_shwi ();
835 if (ranges_overlap_p (src_offset, maxsize,
836 dest_offset, maxsize))
837 return false;
838 }
839 else
840 return false;
841
842 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
843 if (!fn)
844 return false;
845 gimple_call_set_fndecl (stmt, fn);
846 gimple_call_set_arg (stmt, 0, dest);
847 gimple_call_set_arg (stmt, 1, src);
848 fold_stmt (gsi);
849 return true;
850 }
851
852 /* If the destination and source do not alias optimize into
853 memcpy as well. */
854 if ((is_gimple_min_invariant (dest)
855 || TREE_CODE (dest) == SSA_NAME)
856 && (is_gimple_min_invariant (src)
857 || TREE_CODE (src) == SSA_NAME))
858 {
859 ao_ref destr, srcr;
860 ao_ref_init_from_ptr_and_size (&destr, dest, len);
861 ao_ref_init_from_ptr_and_size (&srcr, src, len);
862 if (!refs_may_alias_p_1 (&destr, &srcr, false))
863 {
864 tree fn;
865 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
866 if (!fn)
867 return false;
868 gimple_call_set_fndecl (stmt, fn);
869 gimple_call_set_arg (stmt, 0, dest);
870 gimple_call_set_arg (stmt, 1, src);
871 fold_stmt (gsi);
872 return true;
873 }
874 }
875
876 return false;
877 }
878
879 if (!tree_fits_shwi_p (len))
880 return false;
881 /* FIXME:
882 This logic lose for arguments like (type *)malloc (sizeof (type)),
883 since we strip the casts of up to VOID return value from malloc.
884 Perhaps we ought to inherit type from non-VOID argument here? */
885 STRIP_NOPS (src);
886 STRIP_NOPS (dest);
887 if (!POINTER_TYPE_P (TREE_TYPE (src))
888 || !POINTER_TYPE_P (TREE_TYPE (dest)))
889 return false;
890 /* In the following try to find a type that is most natural to be
891 used for the memcpy source and destination and that allows
892 the most optimization when memcpy is turned into a plain assignment
893 using that type. In theory we could always use a char[len] type
894 but that only gains us that the destination and source possibly
895 no longer will have their address taken. */
896 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
897 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
898 {
899 tree tem = TREE_OPERAND (src, 0);
900 STRIP_NOPS (tem);
901 if (tem != TREE_OPERAND (src, 0))
902 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
903 }
904 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
905 {
906 tree tem = TREE_OPERAND (dest, 0);
907 STRIP_NOPS (tem);
908 if (tem != TREE_OPERAND (dest, 0))
909 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
910 }
911 srctype = TREE_TYPE (TREE_TYPE (src));
912 if (TREE_CODE (srctype) == ARRAY_TYPE
913 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
914 {
915 srctype = TREE_TYPE (srctype);
916 STRIP_NOPS (src);
917 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
918 }
919 desttype = TREE_TYPE (TREE_TYPE (dest));
920 if (TREE_CODE (desttype) == ARRAY_TYPE
921 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
922 {
923 desttype = TREE_TYPE (desttype);
924 STRIP_NOPS (dest);
925 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
926 }
927 if (TREE_ADDRESSABLE (srctype)
928 || TREE_ADDRESSABLE (desttype))
929 return false;
930
931 /* Make sure we are not copying using a floating-point mode or
932 a type whose size possibly does not match its precision. */
933 if (FLOAT_MODE_P (TYPE_MODE (desttype))
934 || TREE_CODE (desttype) == BOOLEAN_TYPE
935 || TREE_CODE (desttype) == ENUMERAL_TYPE)
936 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
937 if (FLOAT_MODE_P (TYPE_MODE (srctype))
938 || TREE_CODE (srctype) == BOOLEAN_TYPE
939 || TREE_CODE (srctype) == ENUMERAL_TYPE)
940 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
941 if (!srctype)
942 srctype = desttype;
943 if (!desttype)
944 desttype = srctype;
945 if (!srctype)
946 return false;
947
948 src_align = get_pointer_alignment (src);
949 dest_align = get_pointer_alignment (dest);
950 if (dest_align < TYPE_ALIGN (desttype)
951 || src_align < TYPE_ALIGN (srctype))
952 return false;
953
954 destvar = dest;
955 STRIP_NOPS (destvar);
956 if (TREE_CODE (destvar) == ADDR_EXPR
957 && var_decl_component_p (TREE_OPERAND (destvar, 0))
958 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
959 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
960 else
961 destvar = NULL_TREE;
962
963 srcvar = src;
964 STRIP_NOPS (srcvar);
965 if (TREE_CODE (srcvar) == ADDR_EXPR
966 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
967 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
968 {
969 if (!destvar
970 || src_align >= TYPE_ALIGN (desttype))
971 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
972 srcvar, off0);
973 else if (!STRICT_ALIGNMENT)
974 {
975 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
976 src_align);
977 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
978 }
e256dfce 979 else
fef5a0d9
RB
980 srcvar = NULL_TREE;
981 }
982 else
983 srcvar = NULL_TREE;
984
985 if (srcvar == NULL_TREE && destvar == NULL_TREE)
986 return false;
987
988 if (srcvar == NULL_TREE)
989 {
990 STRIP_NOPS (src);
991 if (src_align >= TYPE_ALIGN (desttype))
992 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
993 else
994 {
995 if (STRICT_ALIGNMENT)
996 return false;
997 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
998 src_align);
999 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1000 }
1001 }
1002 else if (destvar == NULL_TREE)
1003 {
1004 STRIP_NOPS (dest);
1005 if (dest_align >= TYPE_ALIGN (srctype))
1006 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1007 else
1008 {
1009 if (STRICT_ALIGNMENT)
1010 return false;
1011 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1012 dest_align);
1013 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1014 }
1015 }
1016
355fe088 1017 gimple *new_stmt;
fef5a0d9
RB
1018 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1019 {
1020 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1021 if (gimple_in_ssa_p (cfun))
1022 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1023 else
b731b390 1024 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
fef5a0d9
RB
1025 gimple_assign_set_lhs (new_stmt, srcvar);
1026 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1027 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1028 }
1029 new_stmt = gimple_build_assign (destvar, srcvar);
1030 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1031 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1032 if (gimple_vdef (new_stmt)
1033 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1034 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1035 if (!lhs)
1036 {
f6b4dc28 1037 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
1038 return true;
1039 }
1040 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1041 }
1042
1043done:
74e3c262 1044 gimple_seq stmts = NULL;
fef5a0d9
RB
1045 if (endp == 0 || endp == 3)
1046 len = NULL_TREE;
1047 else if (endp == 2)
74e3c262
RB
1048 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1049 ssize_int (1));
fef5a0d9 1050 if (endp == 2 || endp == 1)
74e3c262
RB
1051 {
1052 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1053 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1054 TREE_TYPE (dest), dest, len);
1055 }
fef5a0d9 1056
74e3c262 1057 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 1058 gimple *repl = gimple_build_assign (lhs, dest);
f6b4dc28 1059 gsi_replace (gsi, repl, false);
fef5a0d9
RB
1060 return true;
1061}
1062
1063/* Fold function call to builtin memset or bzero at *GSI setting the
1064 memory of size LEN to VAL. Return whether a simplification was made. */
1065
1066static bool
1067gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1068{
355fe088 1069 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1070 tree etype;
1071 unsigned HOST_WIDE_INT length, cval;
1072
1073 /* If the LEN parameter is zero, return DEST. */
1074 if (integer_zerop (len))
1075 {
1076 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1077 return true;
1078 }
1079
1080 if (! tree_fits_uhwi_p (len))
1081 return false;
1082
1083 if (TREE_CODE (c) != INTEGER_CST)
1084 return false;
1085
1086 tree dest = gimple_call_arg (stmt, 0);
1087 tree var = dest;
1088 if (TREE_CODE (var) != ADDR_EXPR)
1089 return false;
1090
1091 var = TREE_OPERAND (var, 0);
1092 if (TREE_THIS_VOLATILE (var))
1093 return false;
1094
1095 etype = TREE_TYPE (var);
1096 if (TREE_CODE (etype) == ARRAY_TYPE)
1097 etype = TREE_TYPE (etype);
1098
1099 if (!INTEGRAL_TYPE_P (etype)
1100 && !POINTER_TYPE_P (etype))
1101 return NULL_TREE;
1102
1103 if (! var_decl_component_p (var))
1104 return NULL_TREE;
1105
1106 length = tree_to_uhwi (len);
1107 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1108 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1109 return NULL_TREE;
1110
1111 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1112 return NULL_TREE;
1113
1114 if (integer_zerop (c))
1115 cval = 0;
1116 else
1117 {
1118 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1119 return NULL_TREE;
1120
1121 cval = TREE_INT_CST_LOW (c);
1122 cval &= 0xff;
1123 cval |= cval << 8;
1124 cval |= cval << 16;
1125 cval |= (cval << 31) << 1;
1126 }
1127
1128 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1129 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
fef5a0d9
RB
1130 gimple_set_vuse (store, gimple_vuse (stmt));
1131 tree vdef = gimple_vdef (stmt);
1132 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1133 {
1134 gimple_set_vdef (store, gimple_vdef (stmt));
1135 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1136 }
1137 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1138 if (gimple_call_lhs (stmt))
1139 {
355fe088 1140 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1141 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1142 }
1143 else
1144 {
1145 gimple_stmt_iterator gsi2 = *gsi;
1146 gsi_prev (gsi);
1147 gsi_remove (&gsi2, true);
1148 }
1149
1150 return true;
1151}
1152
1153
1154/* Return the string length, maximum string length or maximum value of
1155 ARG in LENGTH.
1156 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1157 is not NULL and, for TYPE == 0, its value is not equal to the length
1158 we determine or if we are unable to determine the length or value,
1159 return false. VISITED is a bitmap of visited variables.
1160 TYPE is 0 if string length should be returned, 1 for maximum string
1161 length and 2 for maximum value ARG can have. */
1162
1163static bool
dcb7fae2 1164get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
fef5a0d9
RB
1165{
1166 tree var, val;
355fe088 1167 gimple *def_stmt;
fef5a0d9
RB
1168
1169 if (TREE_CODE (arg) != SSA_NAME)
1170 {
1171 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1172 if (TREE_CODE (arg) == ADDR_EXPR
1173 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1174 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1175 {
1176 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1177 if (TREE_CODE (aop0) == INDIRECT_REF
1178 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1179 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1180 length, visited, type);
1181 }
1182
1183 if (type == 2)
1184 {
1185 val = arg;
1186 if (TREE_CODE (val) != INTEGER_CST
1187 || tree_int_cst_sgn (val) < 0)
1188 return false;
1189 }
1190 else
1191 val = c_strlen (arg, 1);
1192 if (!val)
1193 return false;
1194
1195 if (*length)
1196 {
1197 if (type > 0)
1198 {
1199 if (TREE_CODE (*length) != INTEGER_CST
1200 || TREE_CODE (val) != INTEGER_CST)
1201 return false;
1202
1203 if (tree_int_cst_lt (*length, val))
1204 *length = val;
1205 return true;
1206 }
1207 else if (simple_cst_equal (val, *length) != 1)
1208 return false;
1209 }
1210
1211 *length = val;
1212 return true;
1213 }
1214
1215 /* If ARG is registered for SSA update we cannot look at its defining
1216 statement. */
1217 if (name_registered_for_update_p (arg))
1218 return false;
1219
1220 /* If we were already here, break the infinite cycle. */
dcb7fae2
RB
1221 if (!*visited)
1222 *visited = BITMAP_ALLOC (NULL);
1223 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
fef5a0d9
RB
1224 return true;
1225
1226 var = arg;
1227 def_stmt = SSA_NAME_DEF_STMT (var);
1228
1229 switch (gimple_code (def_stmt))
1230 {
1231 case GIMPLE_ASSIGN:
1232 /* The RHS of the statement defining VAR must either have a
1233 constant length or come from another SSA_NAME with a constant
1234 length. */
1235 if (gimple_assign_single_p (def_stmt)
1236 || gimple_assign_unary_nop_p (def_stmt))
1237 {
1238 tree rhs = gimple_assign_rhs1 (def_stmt);
1239 return get_maxval_strlen (rhs, length, visited, type);
1240 }
1241 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1242 {
1243 tree op2 = gimple_assign_rhs2 (def_stmt);
1244 tree op3 = gimple_assign_rhs3 (def_stmt);
1245 return get_maxval_strlen (op2, length, visited, type)
1246 && get_maxval_strlen (op3, length, visited, type);
1247 }
1248 return false;
1249
1250 case GIMPLE_PHI:
1251 {
1252 /* All the arguments of the PHI node must have the same constant
1253 length. */
1254 unsigned i;
1255
1256 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1257 {
1258 tree arg = gimple_phi_arg (def_stmt, i)->def;
1259
1260 /* If this PHI has itself as an argument, we cannot
1261 determine the string length of this argument. However,
1262 if we can find a constant string length for the other
1263 PHI args then we can still be sure that this is a
1264 constant string length. So be optimistic and just
1265 continue with the next argument. */
1266 if (arg == gimple_phi_result (def_stmt))
1267 continue;
1268
1269 if (!get_maxval_strlen (arg, length, visited, type))
1270 return false;
1271 }
1272 }
1273 return true;
1274
1275 default:
1276 return false;
1277 }
1278}
1279
dcb7fae2
RB
1280tree
1281get_maxval_strlen (tree arg, int type)
1282{
1283 bitmap visited = NULL;
1284 tree len = NULL_TREE;
1285 if (!get_maxval_strlen (arg, &len, &visited, type))
1286 len = NULL_TREE;
1287 if (visited)
1288 BITMAP_FREE (visited);
1289
1290 return len;
1291}
1292
fef5a0d9
RB
1293
1294/* Fold function call to builtin strcpy with arguments DEST and SRC.
1295 If LEN is not NULL, it represents the length of the string to be
1296 copied. Return NULL_TREE if no simplification can be made. */
1297
1298static bool
1299gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 1300 tree dest, tree src)
fef5a0d9 1301{
dcb7fae2 1302 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1303 tree fn;
1304
1305 /* If SRC and DEST are the same (and not volatile), return DEST. */
1306 if (operand_equal_p (src, dest, 0))
1307 {
1308 replace_call_with_value (gsi, dest);
1309 return true;
1310 }
1311
1312 if (optimize_function_for_size_p (cfun))
1313 return false;
1314
1315 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1316 if (!fn)
1317 return false;
1318
1579e1f8 1319 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1320 if (!len)
dcb7fae2 1321 return false;
fef5a0d9
RB
1322
1323 len = fold_convert_loc (loc, size_type_node, len);
1324 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1325 len = force_gimple_operand_gsi (gsi, len, true,
1326 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1327 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1328 replace_call_with_call_and_fold (gsi, repl);
1329 return true;
1330}
1331
1332/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1333 If SLEN is not NULL, it represents the length of the source string.
1334 Return NULL_TREE if no simplification can be made. */
1335
1336static bool
dcb7fae2
RB
1337gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1338 tree dest, tree src, tree len)
fef5a0d9 1339{
dcb7fae2 1340 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1341 tree fn;
1342
1343 /* If the LEN parameter is zero, return DEST. */
1344 if (integer_zerop (len))
1345 {
1346 replace_call_with_value (gsi, dest);
1347 return true;
1348 }
1349
1350 /* We can't compare slen with len as constants below if len is not a
1351 constant. */
dcb7fae2 1352 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
1353 return false;
1354
fef5a0d9 1355 /* Now, we must be passed a constant src ptr parameter. */
1579e1f8 1356 tree slen = get_maxval_strlen (src, 0);
dcb7fae2 1357 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
1358 return false;
1359
1360 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1361
1362 /* We do not support simplification of this case, though we do
1363 support it when expanding trees into RTL. */
1364 /* FIXME: generate a call to __builtin_memset. */
1365 if (tree_int_cst_lt (slen, len))
1366 return false;
1367
1368 /* OK transform into builtin memcpy. */
1369 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1370 if (!fn)
1371 return false;
1372
1373 len = fold_convert_loc (loc, size_type_node, len);
1374 len = force_gimple_operand_gsi (gsi, len, true,
1375 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1376 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1377 replace_call_with_call_and_fold (gsi, repl);
1378 return true;
1379}
1380
1381/* Simplify a call to the strcat builtin. DST and SRC are the arguments
1382 to the call.
1383
1384 Return NULL_TREE if no simplification was possible, otherwise return the
1385 simplified form of the call as a tree.
1386
1387 The simplified form may be a constant or other expression which
1388 computes the same value, but in a more efficient manner (including
1389 calls to other builtin functions).
1390
1391 The call may contain arguments which need to be evaluated, but
1392 which are not useful to determine the result of the call. In
1393 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1394 COMPOUND_EXPR will be an argument which must be evaluated.
1395 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1396 COMPOUND_EXPR in the chain will contain the tree for the simplified
1397 form of the builtin function call. */
1398
1399static bool
dcb7fae2 1400gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
fef5a0d9 1401{
355fe088 1402 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 1403 location_t loc = gimple_location (stmt);
fef5a0d9
RB
1404
1405 const char *p = c_getstr (src);
1406
1407 /* If the string length is zero, return the dst parameter. */
1408 if (p && *p == '\0')
1409 {
1410 replace_call_with_value (gsi, dst);
1411 return true;
1412 }
1413
1414 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1415 return false;
1416
1417 /* See if we can store by pieces into (dst + strlen(dst)). */
1418 tree newdst;
1419 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1420 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1421
1422 if (!strlen_fn || !memcpy_fn)
1423 return false;
1424
1425 /* If the length of the source string isn't computable don't
1426 split strcat into strlen and memcpy. */
dcb7fae2 1427 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1428 if (! len)
fef5a0d9
RB
1429 return false;
1430
1431 /* Create strlen (dst). */
1432 gimple_seq stmts = NULL, stmts2;
355fe088 1433 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
fef5a0d9
RB
1434 gimple_set_location (repl, loc);
1435 if (gimple_in_ssa_p (cfun))
b731b390 1436 newdst = make_ssa_name (size_type_node);
fef5a0d9 1437 else
b731b390 1438 newdst = create_tmp_reg (size_type_node);
fef5a0d9
RB
1439 gimple_call_set_lhs (repl, newdst);
1440 gimple_seq_add_stmt_without_update (&stmts, repl);
1441
1442 /* Create (dst p+ strlen (dst)). */
1443 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1444 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1445 gimple_seq_add_seq_without_update (&stmts, stmts2);
1446
1447 len = fold_convert_loc (loc, size_type_node, len);
1448 len = size_binop_loc (loc, PLUS_EXPR, len,
1449 build_int_cst (size_type_node, 1));
1450 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1451 gimple_seq_add_seq_without_update (&stmts, stmts2);
1452
1453 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1454 gimple_seq_add_stmt_without_update (&stmts, repl);
1455 if (gimple_call_lhs (stmt))
1456 {
1457 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1458 gimple_seq_add_stmt_without_update (&stmts, repl);
1459 gsi_replace_with_seq_vops (gsi, stmts);
1460 /* gsi now points at the assignment to the lhs, get a
1461 stmt iterator to the memcpy call.
1462 ??? We can't use gsi_for_stmt as that doesn't work when the
1463 CFG isn't built yet. */
1464 gimple_stmt_iterator gsi2 = *gsi;
1465 gsi_prev (&gsi2);
1466 fold_stmt (&gsi2);
1467 }
1468 else
1469 {
1470 gsi_replace_with_seq_vops (gsi, stmts);
1471 fold_stmt (gsi);
1472 }
1473 return true;
1474}
1475
07f1cf56
RB
1476/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1477 are the arguments to the call. */
1478
1479static bool
1480gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1481{
355fe088 1482 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
1483 tree dest = gimple_call_arg (stmt, 0);
1484 tree src = gimple_call_arg (stmt, 1);
1485 tree size = gimple_call_arg (stmt, 2);
1486 tree fn;
1487 const char *p;
1488
1489
1490 p = c_getstr (src);
1491 /* If the SRC parameter is "", return DEST. */
1492 if (p && *p == '\0')
1493 {
1494 replace_call_with_value (gsi, dest);
1495 return true;
1496 }
1497
1498 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1499 return false;
1500
1501 /* If __builtin_strcat_chk is used, assume strcat is available. */
1502 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1503 if (!fn)
1504 return false;
1505
355fe088 1506 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
1507 replace_call_with_call_and_fold (gsi, repl);
1508 return true;
1509}
1510
ad03a744
RB
1511/* Simplify a call to the strncat builtin. */
1512
1513static bool
1514gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1515{
1516 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1517 tree dst = gimple_call_arg (stmt, 0);
1518 tree src = gimple_call_arg (stmt, 1);
1519 tree len = gimple_call_arg (stmt, 2);
1520
1521 const char *p = c_getstr (src);
1522
1523 /* If the requested length is zero, or the src parameter string
1524 length is zero, return the dst parameter. */
1525 if (integer_zerop (len) || (p && *p == '\0'))
1526 {
1527 replace_call_with_value (gsi, dst);
1528 return true;
1529 }
1530
1531 /* If the requested len is greater than or equal to the string
1532 length, call strcat. */
1533 if (TREE_CODE (len) == INTEGER_CST && p
1534 && compare_tree_int (len, strlen (p)) >= 0)
1535 {
1536 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1537
1538 /* If the replacement _DECL isn't initialized, don't do the
1539 transformation. */
1540 if (!fn)
1541 return false;
1542
1543 gcall *repl = gimple_build_call (fn, 2, dst, src);
1544 replace_call_with_call_and_fold (gsi, repl);
1545 return true;
1546 }
1547
1548 return false;
1549}
1550
745583f9
RB
1551/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1552 LEN, and SIZE. */
1553
1554static bool
1555gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1556{
355fe088 1557 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
1558 tree dest = gimple_call_arg (stmt, 0);
1559 tree src = gimple_call_arg (stmt, 1);
1560 tree len = gimple_call_arg (stmt, 2);
1561 tree size = gimple_call_arg (stmt, 3);
1562 tree fn;
1563 const char *p;
1564
1565 p = c_getstr (src);
1566 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1567 if ((p && *p == '\0')
1568 || integer_zerop (len))
1569 {
1570 replace_call_with_value (gsi, dest);
1571 return true;
1572 }
1573
1574 if (! tree_fits_uhwi_p (size))
1575 return false;
1576
1577 if (! integer_all_onesp (size))
1578 {
1579 tree src_len = c_strlen (src, 1);
1580 if (src_len
1581 && tree_fits_uhwi_p (src_len)
1582 && tree_fits_uhwi_p (len)
1583 && ! tree_int_cst_lt (len, src_len))
1584 {
1585 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1586 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1587 if (!fn)
1588 return false;
1589
355fe088 1590 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
1591 replace_call_with_call_and_fold (gsi, repl);
1592 return true;
1593 }
1594 return false;
1595 }
1596
1597 /* If __builtin_strncat_chk is used, assume strncat is available. */
1598 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1599 if (!fn)
1600 return false;
1601
355fe088 1602 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
1603 replace_call_with_call_and_fold (gsi, repl);
1604 return true;
1605}
1606
fef5a0d9
RB
1607/* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1608 to the call. IGNORE is true if the value returned
1609 by the builtin will be ignored. UNLOCKED is true is true if this
1610 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1611 the known length of the string. Return NULL_TREE if no simplification
1612 was possible. */
1613
1614static bool
1615gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
fef5a0d9 1616 tree arg0, tree arg1,
dcb7fae2 1617 bool unlocked)
fef5a0d9 1618{
355fe088 1619 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 1620
fef5a0d9
RB
1621 /* If we're using an unlocked function, assume the other unlocked
1622 functions exist explicitly. */
1623 tree const fn_fputc = (unlocked
1624 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1625 : builtin_decl_implicit (BUILT_IN_FPUTC));
1626 tree const fn_fwrite = (unlocked
1627 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1628 : builtin_decl_implicit (BUILT_IN_FWRITE));
1629
1630 /* If the return value is used, don't do the transformation. */
dcb7fae2 1631 if (gimple_call_lhs (stmt))
fef5a0d9
RB
1632 return false;
1633
fef5a0d9
RB
1634 /* Get the length of the string passed to fputs. If the length
1635 can't be determined, punt. */
dcb7fae2 1636 tree len = get_maxval_strlen (arg0, 0);
fef5a0d9
RB
1637 if (!len
1638 || TREE_CODE (len) != INTEGER_CST)
1639 return false;
1640
1641 switch (compare_tree_int (len, 1))
1642 {
1643 case -1: /* length is 0, delete the call entirely . */
1644 replace_call_with_value (gsi, integer_zero_node);
1645 return true;
1646
1647 case 0: /* length is 1, call fputc. */
1648 {
1649 const char *p = c_getstr (arg0);
1650 if (p != NULL)
1651 {
1652 if (!fn_fputc)
1653 return false;
1654
355fe088 1655 gimple *repl = gimple_build_call (fn_fputc, 2,
fef5a0d9
RB
1656 build_int_cst
1657 (integer_type_node, p[0]), arg1);
1658 replace_call_with_call_and_fold (gsi, repl);
1659 return true;
1660 }
1661 }
1662 /* FALLTHROUGH */
1663 case 1: /* length is greater than 1, call fwrite. */
1664 {
1665 /* If optimizing for size keep fputs. */
1666 if (optimize_function_for_size_p (cfun))
1667 return false;
1668 /* New argument list transforming fputs(string, stream) to
1669 fwrite(string, 1, len, stream). */
1670 if (!fn_fwrite)
1671 return false;
1672
355fe088 1673 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
fef5a0d9
RB
1674 size_one_node, len, arg1);
1675 replace_call_with_call_and_fold (gsi, repl);
1676 return true;
1677 }
1678 default:
1679 gcc_unreachable ();
1680 }
1681 return false;
1682}
1683
1684/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1685 DEST, SRC, LEN, and SIZE are the arguments to the call.
1686 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1687 code of the builtin. If MAXLEN is not NULL, it is maximum length
1688 passed as third argument. */
1689
1690static bool
1691gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
fef5a0d9 1692 tree dest, tree src, tree len, tree size,
fef5a0d9
RB
1693 enum built_in_function fcode)
1694{
355fe088 1695 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
1696 location_t loc = gimple_location (stmt);
1697 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
1698 tree fn;
1699
1700 /* If SRC and DEST are the same (and not volatile), return DEST
1701 (resp. DEST+LEN for __mempcpy_chk). */
1702 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1703 {
1704 if (fcode != BUILT_IN_MEMPCPY_CHK)
1705 {
1706 replace_call_with_value (gsi, dest);
1707 return true;
1708 }
1709 else
1710 {
74e3c262
RB
1711 gimple_seq stmts = NULL;
1712 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1713 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR, dest, len);
1714 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
fef5a0d9
RB
1715 replace_call_with_value (gsi, temp);
1716 return true;
1717 }
1718 }
1719
1720 if (! tree_fits_uhwi_p (size))
1721 return false;
1722
dcb7fae2 1723 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9
RB
1724 if (! integer_all_onesp (size))
1725 {
1726 if (! tree_fits_uhwi_p (len))
1727 {
1728 /* If LEN is not constant, try MAXLEN too.
1729 For MAXLEN only allow optimizing into non-_ocs function
1730 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1731 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1732 {
1733 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1734 {
1735 /* (void) __mempcpy_chk () can be optimized into
1736 (void) __memcpy_chk (). */
1737 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1738 if (!fn)
1739 return false;
1740
355fe088 1741 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
1742 replace_call_with_call_and_fold (gsi, repl);
1743 return true;
1744 }
1745 return false;
1746 }
1747 }
1748 else
1749 maxlen = len;
1750
1751 if (tree_int_cst_lt (size, maxlen))
1752 return false;
1753 }
1754
1755 fn = NULL_TREE;
1756 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1757 mem{cpy,pcpy,move,set} is available. */
1758 switch (fcode)
1759 {
1760 case BUILT_IN_MEMCPY_CHK:
1761 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1762 break;
1763 case BUILT_IN_MEMPCPY_CHK:
1764 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1765 break;
1766 case BUILT_IN_MEMMOVE_CHK:
1767 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1768 break;
1769 case BUILT_IN_MEMSET_CHK:
1770 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1771 break;
1772 default:
1773 break;
1774 }
1775
1776 if (!fn)
1777 return false;
1778
355fe088 1779 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1780 replace_call_with_call_and_fold (gsi, repl);
1781 return true;
1782}
1783
1784/* Fold a call to the __st[rp]cpy_chk builtin.
1785 DEST, SRC, and SIZE are the arguments to the call.
1786 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1787 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1788 strings passed as second argument. */
1789
1790static bool
1791gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
dcb7fae2 1792 tree dest,
fef5a0d9 1793 tree src, tree size,
fef5a0d9
RB
1794 enum built_in_function fcode)
1795{
355fe088 1796 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
1797 location_t loc = gimple_location (stmt);
1798 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
1799 tree len, fn;
1800
1801 /* If SRC and DEST are the same (and not volatile), return DEST. */
1802 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1803 {
1804 replace_call_with_value (gsi, dest);
1805 return true;
1806 }
1807
1808 if (! tree_fits_uhwi_p (size))
1809 return false;
1810
dcb7fae2 1811 tree maxlen = get_maxval_strlen (src, 1);
fef5a0d9
RB
1812 if (! integer_all_onesp (size))
1813 {
1814 len = c_strlen (src, 1);
1815 if (! len || ! tree_fits_uhwi_p (len))
1816 {
1817 /* If LEN is not constant, try MAXLEN too.
1818 For MAXLEN only allow optimizing into non-_ocs function
1819 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1820 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1821 {
1822 if (fcode == BUILT_IN_STPCPY_CHK)
1823 {
1824 if (! ignore)
1825 return false;
1826
1827 /* If return value of __stpcpy_chk is ignored,
1828 optimize into __strcpy_chk. */
1829 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1830 if (!fn)
1831 return false;
1832
355fe088 1833 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
fef5a0d9
RB
1834 replace_call_with_call_and_fold (gsi, repl);
1835 return true;
1836 }
1837
1838 if (! len || TREE_SIDE_EFFECTS (len))
1839 return false;
1840
1841 /* If c_strlen returned something, but not a constant,
1842 transform __strcpy_chk into __memcpy_chk. */
1843 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1844 if (!fn)
1845 return false;
1846
74e3c262
RB
1847 gimple_seq stmts = NULL;
1848 len = gimple_convert (&stmts, loc, size_type_node, len);
1849 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
1850 build_int_cst (size_type_node, 1));
1851 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 1852 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
1853 replace_call_with_call_and_fold (gsi, repl);
1854 return true;
1855 }
e256dfce 1856 }
fef5a0d9
RB
1857 else
1858 maxlen = len;
1859
1860 if (! tree_int_cst_lt (maxlen, size))
1861 return false;
e256dfce
RG
1862 }
1863
fef5a0d9
RB
1864 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1865 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1866 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1867 if (!fn)
1868 return false;
1869
355fe088 1870 gimple *repl = gimple_build_call (fn, 2, dest, src);
fef5a0d9
RB
1871 replace_call_with_call_and_fold (gsi, repl);
1872 return true;
1873}
1874
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin being folded.  Return true if the call at *GSI was replaced by
   a call to the corresponding unchecked st{r,p}ncpy (or, for an ignored
   __stpncpy_chk result, by __strncpy_chk), false if no simplification
   was possible.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
         optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The destination object size must be a known constant to reason
     about the copy below.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  /* An all-ones SIZE presumably encodes "object size unknown" as in the
     other _chk folders here — then the copy can't overflow the checked
     bound and we drop straight to the unchecked function.  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* Give up if the copy length may exceed the object size.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1934
2625bb5d
RB
1935/* Fold function call to builtin stpcpy with arguments DEST and SRC.
1936 Return NULL_TREE if no simplification can be made. */
1937
1938static bool
1939gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
1940{
1941 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1942 location_t loc = gimple_location (stmt);
1943 tree dest = gimple_call_arg (stmt, 0);
1944 tree src = gimple_call_arg (stmt, 1);
1945 tree fn, len, lenp1;
1946
1947 /* If the result is unused, replace stpcpy with strcpy. */
1948 if (gimple_call_lhs (stmt) == NULL_TREE)
1949 {
1950 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
1951 if (!fn)
1952 return false;
1953 gimple_call_set_fndecl (stmt, fn);
1954 fold_stmt (gsi);
1955 return true;
1956 }
1957
1958 len = c_strlen (src, 1);
1959 if (!len
1960 || TREE_CODE (len) != INTEGER_CST)
1961 return false;
1962
1963 if (optimize_function_for_size_p (cfun)
1964 /* If length is zero it's small enough. */
1965 && !integer_zerop (len))
1966 return false;
1967
1968 /* If the source has a known length replace stpcpy with memcpy. */
1969 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1970 if (!fn)
1971 return false;
1972
1973 gimple_seq stmts = NULL;
1974 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
1975 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
1976 tem, build_int_cst (size_type_node, 1));
1977 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1978 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
1979 gimple_set_vuse (repl, gimple_vuse (stmt));
1980 gimple_set_vdef (repl, gimple_vdef (stmt));
1981 if (gimple_vdef (repl)
1982 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
1983 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
1984 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
1985 /* Replace the result with dest + len. */
1986 stmts = NULL;
1987 tem = gimple_convert (&stmts, loc, sizetype, len);
1988 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1989 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
1990 POINTER_PLUS_EXPR, dest, tem);
f6b4dc28 1991 gsi_replace (gsi, ret, false);
2625bb5d
RB
1992 /* Finally fold the memcpy call. */
1993 gimple_stmt_iterator gsi2 = *gsi;
1994 gsi_prev (&gsi2);
1995 fold_stmt (&gsi2);
1996 return true;
1997}
1998
/* Fold a call to __{,v}snprintf_chk at *GSI into plain {,v}snprintf
   when it can be proven safe.  FCODE is either BUILT_IN_SNPRINTF_CHK
   or BUILT_IN_VSNPRINTF_CHK.  The call's arguments are
   (DEST, LEN, FLAG, SIZE, FMT, ...).  Return true if the statement
   was rewritten in place, false if a normal call must be kept.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  /* The checked object size must be a known constant.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* An all-ones SIZE means the object size is unknown; then the check
     cannot fail and we may drop to the unchecked function directly.  */
  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
cbdd87d4 2079
/* Fold a call to __{,v}sprintf_chk at *GSI into plain {,v}sprintf when
   the output length can be proven to fit the destination.  FCODE is
   either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  The call's
   arguments are (DEST, FLAG, SIZE, FMT, ...).  Return true if the
   statement was rewritten in place, false if a normal call must be
   kept.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  /* The checked object size must be a known constant.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* Unless SIZE is all ones (object size unknown), the computed output
     length must be strictly smaller than the destination size.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2175
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if the call at *GSI was simplified (into strcpy, possibly
   followed by an assignment of the known result length to the LHS),
   false if no simplification was possible.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, which is
	     here simply the length of the format string.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* The result length is only needed when the LHS is used.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
2300
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if the call at *GSI was simplified (into strcpy, possibly
   followed by an assignment of the known result length to the LHS),
   false if no simplification was possible.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a known constant to prove the output
     fits without truncation.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters that would have
	     been written; here that is the format string's length.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      /* The source length must be a known constant to compare against
	 DESTLEN below.  */
      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
35770bb2 2439
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk
   builtins.  FP, FMT, and ARG are the arguments to the call.  We don't
   fold calls with more than 3 arguments, and ARG may be null in the
   2-argument case.

   Return true if the call at *GSI was replaced by fputs/fputc, false
   if no simplification was possible.  FCODE is the BUILT_IN_* code of
   the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A trailing argument without a corresponding % directive would
	 be silently dropped — don't fold then.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2540
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if the call at *GSI was replaced by putchar/puts, false
   if no simplification was possible.  FCODE is the BUILT_IN_* code of
   the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  /* "%s" folding needs the string argument itself to be a
	     literal so its contents can be inspected.  */
	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2702
edd7ae68 2703
fef5a0d9
RB
2704
2705/* Fold a call to __builtin_strlen with known length LEN. */
2706
2707static bool
dcb7fae2 2708gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 2709{
355fe088 2710 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2711 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
fef5a0d9
RB
2712 if (!len)
2713 return false;
2813904b 2714 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
fef5a0d9
RB
2715 replace_call_with_value (gsi, len);
2716 return true;
cbdd87d4
RG
2717}
2718
48126138
NS
2719/* Fold a call to __builtin_acc_on_device. */
2720
2721static bool
2722gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2723{
2724 /* Defer folding until we know which compiler we're in. */
2725 if (symtab->state != EXPANSION)
2726 return false;
2727
2728 unsigned val_host = GOMP_DEVICE_HOST;
2729 unsigned val_dev = GOMP_DEVICE_NONE;
2730
2731#ifdef ACCEL_COMPILER
2732 val_host = GOMP_DEVICE_NOT_HOST;
2733 val_dev = ACCEL_COMPILER_acc_device;
2734#endif
2735
2736 location_t loc = gimple_location (gsi_stmt (*gsi));
2737
2738 tree host_eq = make_ssa_name (boolean_type_node);
2739 gimple *host_ass = gimple_build_assign
2740 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2741 gimple_set_location (host_ass, loc);
2742 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2743
2744 tree dev_eq = make_ssa_name (boolean_type_node);
2745 gimple *dev_ass = gimple_build_assign
2746 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2747 gimple_set_location (dev_ass, loc);
2748 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2749
2750 tree result = make_ssa_name (boolean_type_node);
2751 gimple *result_ass = gimple_build_assign
2752 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2753 gimple_set_location (result_ass, loc);
2754 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2755
2756 replace_call_with_value (gsi, result);
2757
2758 return true;
2759}
cbdd87d4 2760
dcb7fae2
RB
2761/* Fold the non-target builtin at *GSI and return whether any simplification
2762 was made. */
cbdd87d4 2763
fef5a0d9 2764static bool
dcb7fae2 2765gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 2766{
538dd0b7 2767 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 2768 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 2769
dcb7fae2
RB
2770 /* Give up for always_inline inline builtins until they are
2771 inlined. */
2772 if (avoid_folding_inline_builtin (callee))
2773 return false;
cbdd87d4 2774
edd7ae68
RB
2775 unsigned n = gimple_call_num_args (stmt);
2776 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2777 switch (fcode)
cbdd87d4 2778 {
dcb7fae2
RB
2779 case BUILT_IN_BZERO:
2780 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2781 gimple_call_arg (stmt, 1));
2782 case BUILT_IN_MEMSET:
2783 return gimple_fold_builtin_memset (gsi,
2784 gimple_call_arg (stmt, 1),
2785 gimple_call_arg (stmt, 2));
2786 case BUILT_IN_BCOPY:
2787 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2788 gimple_call_arg (stmt, 0), 3);
2789 case BUILT_IN_MEMCPY:
2790 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2791 gimple_call_arg (stmt, 1), 0);
2792 case BUILT_IN_MEMPCPY:
2793 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2794 gimple_call_arg (stmt, 1), 1);
2795 case BUILT_IN_MEMMOVE:
2796 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2797 gimple_call_arg (stmt, 1), 3);
2798 case BUILT_IN_SPRINTF_CHK:
2799 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 2800 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
2801 case BUILT_IN_STRCAT_CHK:
2802 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
2803 case BUILT_IN_STRNCAT_CHK:
2804 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 2805 case BUILT_IN_STRLEN:
dcb7fae2 2806 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 2807 case BUILT_IN_STRCPY:
dcb7fae2 2808 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 2809 gimple_call_arg (stmt, 0),
dcb7fae2 2810 gimple_call_arg (stmt, 1));
cbdd87d4 2811 case BUILT_IN_STRNCPY:
dcb7fae2 2812 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
2813 gimple_call_arg (stmt, 0),
2814 gimple_call_arg (stmt, 1),
dcb7fae2 2815 gimple_call_arg (stmt, 2));
9a7eefec 2816 case BUILT_IN_STRCAT:
dcb7fae2
RB
2817 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2818 gimple_call_arg (stmt, 1));
ad03a744
RB
2819 case BUILT_IN_STRNCAT:
2820 return gimple_fold_builtin_strncat (gsi);
cbdd87d4 2821 case BUILT_IN_FPUTS:
dcb7fae2
RB
2822 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2823 gimple_call_arg (stmt, 1), false);
cbdd87d4 2824 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
2825 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2826 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
2827 case BUILT_IN_MEMCPY_CHK:
2828 case BUILT_IN_MEMPCPY_CHK:
2829 case BUILT_IN_MEMMOVE_CHK:
2830 case BUILT_IN_MEMSET_CHK:
dcb7fae2 2831 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
2832 gimple_call_arg (stmt, 0),
2833 gimple_call_arg (stmt, 1),
2834 gimple_call_arg (stmt, 2),
2835 gimple_call_arg (stmt, 3),
edd7ae68 2836 fcode);
2625bb5d
RB
2837 case BUILT_IN_STPCPY:
2838 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
2839 case BUILT_IN_STRCPY_CHK:
2840 case BUILT_IN_STPCPY_CHK:
dcb7fae2 2841 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
2842 gimple_call_arg (stmt, 0),
2843 gimple_call_arg (stmt, 1),
2844 gimple_call_arg (stmt, 2),
edd7ae68 2845 fcode);
cbdd87d4 2846 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 2847 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
2848 return gimple_fold_builtin_stxncpy_chk (gsi,
2849 gimple_call_arg (stmt, 0),
2850 gimple_call_arg (stmt, 1),
2851 gimple_call_arg (stmt, 2),
2852 gimple_call_arg (stmt, 3),
edd7ae68 2853 fcode);
cbdd87d4
RG
2854 case BUILT_IN_SNPRINTF_CHK:
2855 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 2856 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
d7e78447 2857 case BUILT_IN_SNPRINTF:
dcb7fae2 2858 return gimple_fold_builtin_snprintf (gsi);
d7e78447 2859 case BUILT_IN_SPRINTF:
dcb7fae2 2860 return gimple_fold_builtin_sprintf (gsi);
edd7ae68
RB
2861 case BUILT_IN_FPRINTF:
2862 case BUILT_IN_FPRINTF_UNLOCKED:
2863 case BUILT_IN_VFPRINTF:
2864 if (n == 2 || n == 3)
2865 return gimple_fold_builtin_fprintf (gsi,
2866 gimple_call_arg (stmt, 0),
2867 gimple_call_arg (stmt, 1),
2868 n == 3
2869 ? gimple_call_arg (stmt, 2)
2870 : NULL_TREE,
2871 fcode);
2872 break;
2873 case BUILT_IN_FPRINTF_CHK:
2874 case BUILT_IN_VFPRINTF_CHK:
2875 if (n == 3 || n == 4)
2876 return gimple_fold_builtin_fprintf (gsi,
2877 gimple_call_arg (stmt, 0),
2878 gimple_call_arg (stmt, 2),
2879 n == 4
2880 ? gimple_call_arg (stmt, 3)
2881 : NULL_TREE,
2882 fcode);
2883 break;
ad03a744
RB
2884 case BUILT_IN_PRINTF:
2885 case BUILT_IN_PRINTF_UNLOCKED:
2886 case BUILT_IN_VPRINTF:
2887 if (n == 1 || n == 2)
2888 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2889 n == 2
2890 ? gimple_call_arg (stmt, 1)
2891 : NULL_TREE, fcode);
2892 break;
2893 case BUILT_IN_PRINTF_CHK:
2894 case BUILT_IN_VPRINTF_CHK:
2895 if (n == 2 || n == 3)
2896 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2897 n == 3
2898 ? gimple_call_arg (stmt, 2)
2899 : NULL_TREE, fcode);
242a37f1 2900 break;
48126138
NS
2901 case BUILT_IN_ACC_ON_DEVICE:
2902 return gimple_fold_builtin_acc_on_device (gsi,
2903 gimple_call_arg (stmt, 0));
fef5a0d9
RB
2904 default:;
2905 }
2906
2907 /* Try the generic builtin folder. */
2908 bool ignore = (gimple_call_lhs (stmt) == NULL);
2909 tree result = fold_call_stmt (stmt, ignore);
2910 if (result)
2911 {
2912 if (ignore)
2913 STRIP_NOPS (result);
2914 else
2915 result = fold_convert (gimple_call_return_type (stmt), result);
2916 if (!update_call_from_tree (gsi, result))
2917 gimplify_and_update_call_from_tree (gsi, result);
2918 return true;
2919 }
2920
2921 return false;
2922}
2923
451e8dae
NS
2924/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
2925 function calls to constants, where possible. */
2926
2927static tree
2928fold_internal_goacc_dim (const gimple *call)
2929{
2930 int axis = get_oacc_ifn_dim_arg (call);
2931 int size = get_oacc_fn_dim_size (current_function_decl, axis);
2932 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
2933 tree result = NULL_TREE;
2934
2935 /* If the size is 1, or we only want the size and it is not dynamic,
2936 we know the answer. */
2937 if (size == 1 || (!is_pos && size))
2938 {
2939 tree type = TREE_TYPE (gimple_call_lhs (call));
2940 result = build_int_cst (type, size - is_pos);
2941 }
2942
2943 return result;
2944}
2945
1304953e
JJ
2946/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2947 doesn't fit into TYPE. The test for overflow should be regardless of
2948 -fwrapv, and even for unsigned types. */
2949
2950bool
2951arith_overflowed_p (enum tree_code code, const_tree type,
2952 const_tree arg0, const_tree arg1)
2953{
2954 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2955 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2956 widest2_int_cst;
2957 widest2_int warg0 = widest2_int_cst (arg0);
2958 widest2_int warg1 = widest2_int_cst (arg1);
2959 widest2_int wres;
2960 switch (code)
2961 {
2962 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2963 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2964 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2965 default: gcc_unreachable ();
2966 }
2967 signop sign = TYPE_SIGN (type);
2968 if (sign == UNSIGNED && wi::neg_p (wres))
2969 return true;
2970 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2971}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   When INPLACE is true only in-place simplifications (argument/static-chain
   folding and single-target devirtualization) are performed; the statement
   itself is never replaced.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      /* If the OBJ_TYPE_REF expression already names a concrete function,
	 strip the OBJ_TYPE_REF wrapper and call it directly.  */
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* Diagnose devirtualizations that the type-inheritance analysis
	     considers impossible -- dump only, no behavior change.  */
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  /* With a complete target list of at most one entry we can either
	     devirtualize (one target) or prove the call unreachable (none).
	     dbg_cnt allows bisecting miscompiles of this transform.  */
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Give the former lhs a default definition so
			     its remaining uses stay valid.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible target: the call is unreachable; replace
		     it with a call to __builtin_unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    {
		      /* Carry over the virtual operands to the
			 replacement statement.  */
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      gsi_replace (gsi, new_stmt, false);
		    }
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise at least fold *& in the static chain operand.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  /* Everything below may replace the statement, which in-place folding
     must not do.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      /* Machine-dependent builtins are folded by the target hook.  */
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* SUBCODE is the arithmetic operation an internal function maps to;
	 CPLX_RESULT marks the IFN_*_OVERFLOW functions whose result is a
	 complex {value, overflow-flag} pair.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  /* Drop the check when the object size is unknown (all-ones) or
	     provably large enough.  */
	  if (integer_all_onesp (gimple_call_arg (stmt, 2))
	      || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
		  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
		  && tree_int_cst_le (gimple_call_arg (stmt, 1),
				      gimple_call_arg (stmt, 2))))
	    {
	      gsi_replace (gsi, gimple_build_nop (), false);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      return true;
	    }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  /* For the _OVERFLOW functions the arithmetic type is the element
	     type of the complex lhs, not the argument type.  */
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  /* Overflow: for the complex-result IFNs record it in the
		     flag; for the UBSAN checks give up folding so the
		     runtime check survives.  */
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  /* A non-constant result whose type cannot be losslessly
		     converted to TYPE must not be used.  */
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Package {value, overflow-flag} into the complex return
	         value expected from the IFN_*_OVERFLOW functions.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
3268
e0ee10ed 3269
89a79e96
RB
3270/* Return true whether NAME has a use on STMT. */
3271
3272static bool
355fe088 3273has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
3274{
3275 imm_use_iterator iter;
3276 use_operand_p use_p;
3277 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3278 if (USE_STMT (use_p) == stmt)
3279 return true;
3280 return false;
3281}
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  RCODE is either a tree code
   or a combined_fn; OPS holds up to three operands.  Does the
   replacement according to INPLACE and returns true if the operation
   succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt)))
    return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      /* A comparison result can be installed directly as the condition,
	 unless it could throw and GIMPLE_COND may not.  */
      if (TREE_CODE_CLASS ((enum tree_code) rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	/* A bare SSA name becomes NAME != 0.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  /* A constant condition folds the branch entirely.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Anything else must be materialized as a separate statement
	     first, which in-place folding may not do.  */
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      /* In-place folding may not grow the number of operands.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  &ops[0], ops[1], ops[2]);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (rcode.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == rcode)
    {
      /* Same function, simplified arguments: update them in place.  */
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  /* Replace the whole statement, preserving virtual operands.  */
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at a reference operand inside a statement; *T may be
   rewritten in place.  Returns true if *T was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Strip an outer ADDR_EXPR and any handled components so T points
     at the base memory reference.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE is handed to gimple_simplify to
   decide which SSA definitions may be looked through.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2, false))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires swapping the
		     comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs, false))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* NOTE(review): no break here -- this falls through into the empty
	 default label.  Harmless today, but a /* FALLTHRU *​/ or break
	 would silence -Wimplicit-fallthrough; confirm before changing.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* The statement may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding may only shrink the statement, never grow it.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* Inputs may only be folded to memory when the constraint
	       allows memory but not a register.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference under the ADDR_EXPR and rebuild the
		 address with the original type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* The statement may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
3814
e0ee10ed
RB
3815/* Valueziation callback that ends up not following SSA edges. */
3816
3817tree
3818no_follow_ssa_edges (tree)
3819{
3820 return NULL_TREE;
3821}
3822
45cc9f96
RB
3823/* Valueization callback that ends up following single-use SSA edges only. */
3824
3825tree
3826follow_single_use_edges (tree val)
3827{
3828 if (TREE_CODE (val) == SSA_NAME
3829 && !has_single_use (val))
3830 return NULL_TREE;
3831 return val;
3832}
3833
cbdd87d4
RG
3834/* Fold the statement pointed to by GSI. In some cases, this function may
3835 replace the whole statement with a new one. Returns true iff folding
3836 makes any changes.
3837 The statement pointed to by GSI should be in valid gimple form but may
3838 be in unfolded state as resulting from for example constant propagation
3839 which can produce *&x = 0. */
3840
3841bool
3842fold_stmt (gimple_stmt_iterator *gsi)
3843{
e0ee10ed
RB
3844 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3845}
3846
3847bool
3848fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3849{
3850 return fold_stmt_1 (gsi, false, valueize);
cbdd87d4
RG
3851}
3852
59401b92 3853/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
3854 *&x created by constant propagation are handled. The statement cannot
3855 be replaced with a new one. Return true if the statement was
3856 changed, false otherwise.
59401b92 3857 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
3858 be in unfolded state as resulting from for example constant propagation
3859 which can produce *&x = 0. */
3860
3861bool
59401b92 3862fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 3863{
355fe088 3864 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 3865 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 3866 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
3867 return changed;
3868}
3869
e89065a1
SL
3870/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3871 if EXPR is null or we don't know how.
3872 If non-null, the result always has boolean type. */
3873
3874static tree
3875canonicalize_bool (tree expr, bool invert)
3876{
3877 if (!expr)
3878 return NULL_TREE;
3879 else if (invert)
3880 {
3881 if (integer_nonzerop (expr))
3882 return boolean_false_node;
3883 else if (integer_zerop (expr))
3884 return boolean_true_node;
3885 else if (TREE_CODE (expr) == SSA_NAME)
3886 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3887 build_int_cst (TREE_TYPE (expr), 0));
98209db3 3888 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
3889 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3890 boolean_type_node,
3891 TREE_OPERAND (expr, 0),
3892 TREE_OPERAND (expr, 1));
3893 else
3894 return NULL_TREE;
3895 }
3896 else
3897 {
3898 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3899 return expr;
3900 if (integer_nonzerop (expr))
3901 return boolean_true_node;
3902 else if (integer_zerop (expr))
3903 return boolean_false_node;
3904 else if (TREE_CODE (expr) == SSA_NAME)
3905 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3906 build_int_cst (TREE_TYPE (expr), 0));
98209db3 3907 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
3908 return fold_build2 (TREE_CODE (expr),
3909 boolean_type_node,
3910 TREE_OPERAND (expr, 0),
3911 TREE_OPERAND (expr, 1));
3912 else
3913 return NULL_TREE;
3914 }
3915}
3916
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  Returns false when equivalence cannot be proven (which
   does not imply the values differ).  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
                        const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case: EXPR is literally the same comparison.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean NAME is equivalent to NAME != 0 and to NAME == 1.  */
      if (operand_equal_p (expr, op1, 0))
        return ((code == NE_EXPR && integer_zerop (op2))
                || (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise look at EXPR's defining assignment.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
          && gimple_assign_rhs_code (s) == code
          && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
          && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
        return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse with that comparison (inverting it
     for the == 0 / != 1 forms).  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
        {
          enum tree_code c = gimple_assign_rhs_code (s);
          if ((c == NE_EXPR && integer_zerop (op2))
              || (c == EQ_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr, c,
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
          if ((c == EQ_EXPR && integer_zerop (op2))
              || (c == NE_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr,
                                           invert_tree_comparison (c, false),
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
        }
    }
  return false;
}
3974
3975/* Check to see if two boolean expressions OP1 and OP2 are logically
3976 equivalent. */
3977
3978static bool
3979same_bool_result_p (const_tree op1, const_tree op2)
3980{
3981 /* Simple cases first. */
3982 if (operand_equal_p (op1, op2, 0))
3983 return true;
3984
3985 /* Check the cases where at least one of the operands is a comparison.
3986 These are a bit smarter than operand_equal_p in that they apply some
3987 identifies on SSA_NAMEs. */
98209db3 3988 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
3989 && same_bool_comparison_p (op1, TREE_CODE (op2),
3990 TREE_OPERAND (op2, 0),
3991 TREE_OPERAND (op2, 1)))
3992 return true;
98209db3 3993 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
3994 && same_bool_comparison_p (op2, TREE_CODE (op1),
3995 TREE_OPERAND (op1, 0),
3996 TREE_OPERAND (op1, 1)))
3997 return true;
3998
3999 /* Default case. */
4000 return false;
4001}
4002
4003/* Forward declarations for some mutually recursive functions. */
4004
4005static tree
4006and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4007 enum tree_code code2, tree op2a, tree op2b);
4008static tree
4009and_var_with_comparison (tree var, bool invert,
4010 enum tree_code code2, tree op2a, tree op2b);
4011static tree
355fe088 4012and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
4013 enum tree_code code2, tree op2a, tree op2b);
4014static tree
4015or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4016 enum tree_code code2, tree op2a, tree op2b);
4017static tree
4018or_var_with_comparison (tree var, bool invert,
4019 enum tree_code code2, tree op2a, tree op2b);
4020static tree
355fe088 4021or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
4022 enum tree_code code2, tree op2a, tree op2b);
4023
4024/* Helper function for and_comparisons_1: try to simplify the AND of the
4025 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4026 If INVERT is true, invert the value of the VAR before doing the AND.
4027 Return NULL_EXPR if we can't simplify this to a single expression. */
4028
4029static tree
4030and_var_with_comparison (tree var, bool invert,
4031 enum tree_code code2, tree op2a, tree op2b)
4032{
4033 tree t;
355fe088 4034 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
4035
4036 /* We can only deal with variables whose definitions are assignments. */
4037 if (!is_gimple_assign (stmt))
4038 return NULL_TREE;
4039
4040 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4041 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4042 Then we only have to consider the simpler non-inverted cases. */
4043 if (invert)
4044 t = or_var_with_comparison_1 (stmt,
4045 invert_tree_comparison (code2, false),
4046 op2a, op2b);
4047 else
4048 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4049 return canonicalize_bool (t, invert);
4050}
4051
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* Set when (OP2A CODE2 OP2B) is equivalent to OP2A resp. !OP2A.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          /* (op2a != 0) / (op2a == 1) is just op2a.  */
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          /* (op2a == 0) / (op2a != 1) is !op2a.  */
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_false_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
                                  code2,
                                  op2a,
                                  op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
         inner1 AND (inner1 OR inner2) => inner1
         !inner1 AND (inner1 AND inner2) => false
         !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
         Compute the first partial result, (inner1 AND (op2a code op2b)).
         Note: S and T are set as side effects of the condition.  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (t AND inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner2;
              else if (integer_zerop (t))
                return boolean_false_node;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner2 AND (op2a code2 op2b)))  */
          else if (integer_onep (t))
            return boolean_true_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 AND (op2a code op2b)).  */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (inner1 AND t)  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner1;
              else if (integer_zerop (t))
                return boolean_false_node;
              /* If both are the same, we can apply the identity
                 (x AND x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner1 AND (op2a code2 op2b)))
             => (t OR partial)  */
          else
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed OR expression.  The
                     interesting case is when at least one is false.
                     Or, if both are the same, we can apply the identity
                     (x OR x) == x.  */
                  if (integer_zerop (partial))
                    return t;
                  else if (integer_zerop (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
4214
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1, code2,
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp < 0, == 0, > 0 as op1b compares to op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
         return that or FALSE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
              else
                return boolean_false_node;
            }
        }
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
              else
                return boolean_false_node;
            }
        }

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp != 0); break;
            case NE_EXPR: val = (cmp == 0); break;
            case LT_EXPR: val = (cmp >= 0); break;
            case GT_EXPR: val = (cmp <= 0); break;
            case LE_EXPR: val = (cmp > 0); break;
            case GE_EXPR: val = (cmp < 0); break;
            default:
              val = false;
            }
          /* VAL set: the NE test is implied by the second comparison,
             so the second comparison alone is the result.  */
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == NE_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp <= 0); break;
            case GT_EXPR: val = (cmp >= 0); break;
            case LE_EXPR: val = (cmp < 0); break;
            case GE_EXPR: val = (cmp > 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Check for singleton ranges: (x >= c AND x <= c) => x == c.  */
      else if (cmp == 0
               && ((code1 == LE_EXPR && code2 == GE_EXPR)
                   || (code1 == GE_EXPR && code2 == LE_EXPR)))
        return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_false_node;
      else if (cmp >= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT: the first comparison tests NAME for falsehood
         (NAME == 0 or NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ANDed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     we're still OK.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
                        {
                          if (!result)
                            result = boolean_false_node;
                          else if (!integer_zerop (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.
                         See PR49073.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = and_var_with_comparison (arg, invert, code2,
                                                      op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }
          /* Non-boolean PHI: fall through to default and give up.  */

        default:
          break;
        }
    }
  return NULL_TREE;
}
4468
4469/* Try to simplify the AND of two comparisons, specified by
4470 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4471 If this can be simplified to a single expression (without requiring
4472 introducing more SSA variables to hold intermediate values),
4473 return the resulting tree. Otherwise return NULL_TREE.
4474 If the result expression is non-null, it has boolean type. */
4475
4476tree
4477maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4478 enum tree_code code2, tree op2a, tree op2b)
4479{
4480 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4481 if (t)
4482 return t;
4483 else
4484 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4485}
4486
4487/* Helper function for or_comparisons_1: try to simplify the OR of the
4488 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4489 If INVERT is true, invert the value of VAR before doing the OR.
4490 Return NULL_EXPR if we can't simplify this to a single expression. */
4491
4492static tree
4493or_var_with_comparison (tree var, bool invert,
4494 enum tree_code code2, tree op2a, tree op2b)
4495{
4496 tree t;
355fe088 4497 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
4498
4499 /* We can only deal with variables whose definitions are assignments. */
4500 if (!is_gimple_assign (stmt))
4501 return NULL_TREE;
4502
4503 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4504 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4505 Then we only have to consider the simpler non-inverted cases. */
4506 if (invert)
4507 t = and_var_with_comparison_1 (stmt,
4508 invert_tree_comparison (code2, false),
4509 op2a, op2b);
4510 else
4511 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4512 return canonicalize_bool (t, invert);
4513}
4514
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
                          enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* Set when (OP2A CODE2 OP2B) is equivalent to OP2A resp. !OP2A.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          /* (op2a != 0) / (op2a == 1) is just op2a.  */
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          /* (op2a == 0) / (op2a != 1) is !op2a.  */
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_true_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 code2,
                                 op2a,
                                 op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
         inner1 OR (inner1 AND inner2) => inner1
         !inner1 OR (inner1 OR inner2) => true
         !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
        return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
         Compute the first partial result, (inner1 OR (op2a code op2b)).
         Note: S and T are set as side effects of the condition.  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (t OR inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_or)
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (integer_zerop (t))
                return inner2;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner2 OR (op2a code op2b)))  */
          else if (integer_zerop (t))
            return boolean_false_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 OR (op2a code op2b)).  */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (inner1 OR t)
             => (t OR partial)  */
          if (is_or)
            {
              if (integer_zerop (t))
                return inner1;
              else if (integer_onep (t))
                return boolean_true_node;
              /* If both are the same, we can apply the identity
                 (x OR x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner1 OR (op2a code2 op2b)))
             => (t AND partial)  */
          else
            {
              if (integer_zerop (t))
                return boolean_false_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed AND expression.  The
                     interesting case is when at least one is true.
                     Or, if both are the same, we can apply the identity
                     (x AND x) == x.  */
                  if (integer_onep (partial))
                    return t;
                  else if (integer_onep (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
4678
4679/* Try to simplify the OR of two comparisons defined by
4680 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4681 If this can be done without constructing an intermediate value,
4682 return the resulting tree; otherwise NULL_TREE is returned.
4683 This function is deliberately asymmetric as it recurses on SSA_DEFs
4684 in the first comparison but not the second. */
4685
4686static tree
4687or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4688 enum tree_code code2, tree op2a, tree op2b)
4689{
ae22ac3c 4690 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 4691
e89065a1
SL
4692 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4693 if (operand_equal_p (op1a, op2a, 0)
4694 && operand_equal_p (op1b, op2b, 0))
4695 {
eb9820c0 4696 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4697 tree t = combine_comparisons (UNKNOWN_LOCATION,
4698 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 4699 truth_type, op1a, op1b);
e89065a1
SL
4700 if (t)
4701 return t;
4702 }
4703
4704 /* Likewise the swapped case of the above. */
4705 if (operand_equal_p (op1a, op2b, 0)
4706 && operand_equal_p (op1b, op2a, 0))
4707 {
eb9820c0 4708 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4709 tree t = combine_comparisons (UNKNOWN_LOCATION,
4710 TRUTH_ORIF_EXPR, code1,
4711 swap_tree_comparison (code2),
31ed6226 4712 truth_type, op1a, op1b);
e89065a1
SL
4713 if (t)
4714 return t;
4715 }
4716
4717 /* If both comparisons are of the same value against constants, we might
4718 be able to merge them. */
4719 if (operand_equal_p (op1a, op2a, 0)
4720 && TREE_CODE (op1b) == INTEGER_CST
4721 && TREE_CODE (op2b) == INTEGER_CST)
4722 {
4723 int cmp = tree_int_cst_compare (op1b, op2b);
4724
4725 /* If we have (op1a != op1b), we should either be able to
4726 return that or TRUE, depending on whether the constant op1b
4727 also satisfies the other comparison against op2b. */
4728 if (code1 == NE_EXPR)
4729 {
4730 bool done = true;
4731 bool val;
4732 switch (code2)
4733 {
4734 case EQ_EXPR: val = (cmp == 0); break;
4735 case NE_EXPR: val = (cmp != 0); break;
4736 case LT_EXPR: val = (cmp < 0); break;
4737 case GT_EXPR: val = (cmp > 0); break;
4738 case LE_EXPR: val = (cmp <= 0); break;
4739 case GE_EXPR: val = (cmp >= 0); break;
4740 default: done = false;
4741 }
4742 if (done)
4743 {
4744 if (val)
4745 return boolean_true_node;
4746 else
4747 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4748 }
4749 }
4750 /* Likewise if the second comparison is a != comparison. */
4751 else if (code2 == NE_EXPR)
4752 {
4753 bool done = true;
4754 bool val;
4755 switch (code1)
4756 {
4757 case EQ_EXPR: val = (cmp == 0); break;
4758 case NE_EXPR: val = (cmp != 0); break;
4759 case LT_EXPR: val = (cmp > 0); break;
4760 case GT_EXPR: val = (cmp < 0); break;
4761 case LE_EXPR: val = (cmp >= 0); break;
4762 case GE_EXPR: val = (cmp <= 0); break;
4763 default: done = false;
4764 }
4765 if (done)
4766 {
4767 if (val)
4768 return boolean_true_node;
4769 else
4770 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4771 }
4772 }
4773
4774 /* See if an equality test is redundant with the other comparison. */
4775 else if (code1 == EQ_EXPR)
4776 {
4777 bool val;
4778 switch (code2)
4779 {
4780 case EQ_EXPR: val = (cmp == 0); break;
4781 case NE_EXPR: val = (cmp != 0); break;
4782 case LT_EXPR: val = (cmp < 0); break;
4783 case GT_EXPR: val = (cmp > 0); break;
4784 case LE_EXPR: val = (cmp <= 0); break;
4785 case GE_EXPR: val = (cmp >= 0); break;
4786 default:
4787 val = false;
4788 }
4789 if (val)
4790 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4791 }
4792 else if (code2 == EQ_EXPR)
4793 {
4794 bool val;
4795 switch (code1)
4796 {
4797 case EQ_EXPR: val = (cmp == 0); break;
4798 case NE_EXPR: val = (cmp != 0); break;
4799 case LT_EXPR: val = (cmp > 0); break;
4800 case GT_EXPR: val = (cmp < 0); break;
4801 case LE_EXPR: val = (cmp >= 0); break;
4802 case GE_EXPR: val = (cmp <= 0); break;
4803 default:
4804 val = false;
4805 }
4806 if (val)
4807 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4808 }
4809
4810 /* Chose the less restrictive of two < or <= comparisons. */
4811 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4812 && (code2 == LT_EXPR || code2 == LE_EXPR))
4813 {
4814 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4815 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4816 else
4817 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4818 }
4819
4820 /* Likewise chose the less restrictive of two > or >= comparisons. */
4821 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4822 && (code2 == GT_EXPR || code2 == GE_EXPR))
4823 {
4824 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4825 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4826 else
4827 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4828 }
4829
4830 /* Check for singleton ranges. */
4831 else if (cmp == 0
4832 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4833 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4834 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4835
4836 /* Check for less/greater pairs that don't restrict the range at all. */
4837 else if (cmp >= 0
4838 && (code1 == LT_EXPR || code1 == LE_EXPR)
4839 && (code2 == GT_EXPR || code2 == GE_EXPR))
4840 return boolean_true_node;
4841 else if (cmp <= 0
4842 && (code1 == GT_EXPR || code1 == GE_EXPR)
4843 && (code2 == LT_EXPR || code2 == LE_EXPR))
4844 return boolean_true_node;
4845 }
4846
4847 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4848 NAME's definition is a truth value. See if there are any simplifications
4849 that can be done against the NAME's definition. */
4850 if (TREE_CODE (op1a) == SSA_NAME
4851 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4852 && (integer_zerop (op1b) || integer_onep (op1b)))
4853 {
4854 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4855 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 4856 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
4857 switch (gimple_code (stmt))
4858 {
4859 case GIMPLE_ASSIGN:
4860 /* Try to simplify by copy-propagating the definition. */
4861 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4862
4863 case GIMPLE_PHI:
4864 /* If every argument to the PHI produces the same result when
4865 ORed with the second comparison, we win.
4866 Do not do this unless the type is bool since we need a bool
4867 result here anyway. */
4868 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4869 {
4870 tree result = NULL_TREE;
4871 unsigned i;
4872 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4873 {
4874 tree arg = gimple_phi_arg_def (stmt, i);
4875
4876 /* If this PHI has itself as an argument, ignore it.
4877 If all the other args produce the same result,
4878 we're still OK. */
4879 if (arg == gimple_phi_result (stmt))
4880 continue;
4881 else if (TREE_CODE (arg) == INTEGER_CST)
4882 {
4883 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4884 {
4885 if (!result)
4886 result = boolean_true_node;
4887 else if (!integer_onep (result))
4888 return NULL_TREE;
4889 }
4890 else if (!result)
4891 result = fold_build2 (code2, boolean_type_node,
4892 op2a, op2b);
4893 else if (!same_bool_comparison_p (result,
4894 code2, op2a, op2b))
4895 return NULL_TREE;
4896 }
0e8b84ec
JJ
4897 else if (TREE_CODE (arg) == SSA_NAME
4898 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 4899 {
6c66f733 4900 tree temp;
355fe088 4901 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
4902 /* In simple cases we can look through PHI nodes,
4903 but we have to be careful with loops.
4904 See PR49073. */
4905 if (! dom_info_available_p (CDI_DOMINATORS)
4906 || gimple_bb (def_stmt) == gimple_bb (stmt)
4907 || dominated_by_p (CDI_DOMINATORS,
4908 gimple_bb (def_stmt),
4909 gimple_bb (stmt)))
4910 return NULL_TREE;
4911 temp = or_var_with_comparison (arg, invert, code2,
4912 op2a, op2b);
e89065a1
SL
4913 if (!temp)
4914 return NULL_TREE;
4915 else if (!result)
4916 result = temp;
4917 else if (!same_bool_result_p (result, temp))
4918 return NULL_TREE;
4919 }
4920 else
4921 return NULL_TREE;
4922 }
4923 return result;
4924 }
4925
4926 default:
4927 break;
4928 }
4929 }
4930 return NULL_TREE;
4931}
4932
4933/* Try to simplify the OR of two comparisons, specified by
4934 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4935 If this can be simplified to a single expression (without requiring
4936 introducing more SSA variables to hold intermediate values),
4937 return the resulting tree. Otherwise return NULL_TREE.
4938 If the result expression is non-null, it has boolean type. */
4939
4940tree
4941maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4942 enum tree_code code2, tree op2a, tree op2b)
4943{
4944 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4945 if (t)
4946 return t;
4947 else
4948 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4949}
cfef45c8
RG
4950
4951
4952/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4953
4954 Either NULL_TREE, a simplified but non-constant or a constant
4955 is returned.
4956
4957 ??? This should go into a gimple-fold-inline.h file to be eventually
4958 privatized with the single valueize function used in the various TUs
4959 to avoid the indirect function call overhead. */
4960
4961tree
355fe088 4962gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
d2a85801 4963 tree (*gvalueize) (tree))
cfef45c8 4964{
45cc9f96
RB
4965 code_helper rcode;
4966 tree ops[3] = {};
4967 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4968 edges if there are intermediate VARYING defs. For this reason
4969 do not follow SSA edges here even though SCCVN can technically
4970 just deal fine with that. */
34050b6b 4971 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
45cc9f96 4972 {
34050b6b 4973 tree res = NULL_TREE;
c0f62740 4974 if (gimple_simplified_result_is_gimple_val (rcode, ops))
34050b6b
RB
4975 res = ops[0];
4976 else if (mprts_hook)
4977 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
4978 if (res)
45cc9f96 4979 {
34050b6b
RB
4980 if (dump_file && dump_flags & TDF_DETAILS)
4981 {
4982 fprintf (dump_file, "Match-and-simplified ");
4983 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4984 fprintf (dump_file, " to ");
4985 print_generic_expr (dump_file, res, 0);
4986 fprintf (dump_file, "\n");
4987 }
4988 return res;
45cc9f96 4989 }
45cc9f96
RB
4990 }
4991
cfef45c8
RG
4992 location_t loc = gimple_location (stmt);
4993 switch (gimple_code (stmt))
4994 {
4995 case GIMPLE_ASSIGN:
4996 {
4997 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4998
4999 switch (get_gimple_rhs_class (subcode))
5000 {
5001 case GIMPLE_SINGLE_RHS:
5002 {
5003 tree rhs = gimple_assign_rhs1 (stmt);
5004 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5005
5006 if (TREE_CODE (rhs) == SSA_NAME)
5007 {
5008 /* If the RHS is an SSA_NAME, return its known constant value,
5009 if any. */
5010 return (*valueize) (rhs);
5011 }
5012 /* Handle propagating invariant addresses into address
5013 operations. */
5014 else if (TREE_CODE (rhs) == ADDR_EXPR
5015 && !is_gimple_min_invariant (rhs))
5016 {
d25c4172 5017 HOST_WIDE_INT offset = 0;
cfef45c8
RG
5018 tree base;
5019 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5020 &offset,
5021 valueize);
5022 if (base
5023 && (CONSTANT_CLASS_P (base)
5024 || decl_address_invariant_p (base)))
5025 return build_invariant_address (TREE_TYPE (rhs),
5026 base, offset);
5027 }
5028 else if (TREE_CODE (rhs) == CONSTRUCTOR
5029 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5030 && (CONSTRUCTOR_NELTS (rhs)
5031 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5032 {
5033 unsigned i;
d2a12ae7 5034 tree val, *vec;
cfef45c8 5035
d2a12ae7
RG
5036 vec = XALLOCAVEC (tree,
5037 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
cfef45c8
RG
5038 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5039 {
5040 val = (*valueize) (val);
5041 if (TREE_CODE (val) == INTEGER_CST
5042 || TREE_CODE (val) == REAL_CST
5043 || TREE_CODE (val) == FIXED_CST)
d2a12ae7 5044 vec[i] = val;
cfef45c8
RG
5045 else
5046 return NULL_TREE;
5047 }
5048
d2a12ae7 5049 return build_vector (TREE_TYPE (rhs), vec);
cfef45c8 5050 }
bdf37f7a
JH
5051 if (subcode == OBJ_TYPE_REF)
5052 {
5053 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5054 /* If callee is constant, we can fold away the wrapper. */
5055 if (is_gimple_min_invariant (val))
5056 return val;
5057 }
cfef45c8
RG
5058
5059 if (kind == tcc_reference)
5060 {
5061 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5062 || TREE_CODE (rhs) == REALPART_EXPR
5063 || TREE_CODE (rhs) == IMAGPART_EXPR)
5064 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5065 {
5066 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5067 return fold_unary_loc (EXPR_LOCATION (rhs),
5068 TREE_CODE (rhs),
5069 TREE_TYPE (rhs), val);
5070 }
5071 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5072 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5073 {
5074 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5075 return fold_ternary_loc (EXPR_LOCATION (rhs),
5076 TREE_CODE (rhs),
5077 TREE_TYPE (rhs), val,
5078 TREE_OPERAND (rhs, 1),
5079 TREE_OPERAND (rhs, 2));
5080 }
5081 else if (TREE_CODE (rhs) == MEM_REF
5082 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5083 {
5084 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5085 if (TREE_CODE (val) == ADDR_EXPR
5086 && is_gimple_min_invariant (val))
5087 {
5088 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5089 unshare_expr (val),
5090 TREE_OPERAND (rhs, 1));
5091 if (tem)
5092 rhs = tem;
5093 }
5094 }
5095 return fold_const_aggregate_ref_1 (rhs, valueize);
5096 }
5097 else if (kind == tcc_declaration)
5098 return get_symbol_constant_value (rhs);
5099 return rhs;
5100 }
5101
5102 case GIMPLE_UNARY_RHS:
f3582e54 5103 return NULL_TREE;
cfef45c8
RG
5104
5105 case GIMPLE_BINARY_RHS:
4b1b9e64
RB
5106 /* Translate &x + CST into an invariant form suitable for
5107 further propagation. */
5108 if (subcode == POINTER_PLUS_EXPR)
5109 {
4b1b9e64
RB
5110 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5111 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4b1b9e64
RB
5112 if (TREE_CODE (op0) == ADDR_EXPR
5113 && TREE_CODE (op1) == INTEGER_CST)
5114 {
5115 tree off = fold_convert (ptr_type_node, op1);
5116 return build_fold_addr_expr_loc
5117 (loc,
5118 fold_build2 (MEM_REF,
5119 TREE_TYPE (TREE_TYPE (op0)),
5120 unshare_expr (op0), off));
5121 }
5122 }
59c20dc7
RB
5123 /* Canonicalize bool != 0 and bool == 0 appearing after
5124 valueization. While gimple_simplify handles this
5125 it can get confused by the ~X == 1 -> X == 0 transform
5126 which we cant reduce to a SSA name or a constant
5127 (and we have no way to tell gimple_simplify to not
5128 consider those transforms in the first place). */
5129 else if (subcode == EQ_EXPR
5130 || subcode == NE_EXPR)
5131 {
5132 tree lhs = gimple_assign_lhs (stmt);
5133 tree op0 = gimple_assign_rhs1 (stmt);
5134 if (useless_type_conversion_p (TREE_TYPE (lhs),
5135 TREE_TYPE (op0)))
5136 {
5137 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5138 op0 = (*valueize) (op0);
8861704d
RB
5139 if (TREE_CODE (op0) == INTEGER_CST)
5140 std::swap (op0, op1);
5141 if (TREE_CODE (op1) == INTEGER_CST
5142 && ((subcode == NE_EXPR && integer_zerop (op1))
5143 || (subcode == EQ_EXPR && integer_onep (op1))))
5144 return op0;
59c20dc7
RB
5145 }
5146 }
4b1b9e64 5147 return NULL_TREE;
cfef45c8
RG
5148
5149 case GIMPLE_TERNARY_RHS:
5150 {
5151 /* Handle ternary operators that can appear in GIMPLE form. */
5152 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5153 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5154 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
cfef45c8
RG
5155 return fold_ternary_loc (loc, subcode,
5156 gimple_expr_type (stmt), op0, op1, op2);
5157 }
5158
5159 default:
5160 gcc_unreachable ();
5161 }
5162 }
5163
5164 case GIMPLE_CALL:
5165 {
25583c4f 5166 tree fn;
538dd0b7 5167 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
5168
5169 if (gimple_call_internal_p (stmt))
31e071ae
MP
5170 {
5171 enum tree_code subcode = ERROR_MARK;
5172 switch (gimple_call_internal_fn (stmt))
5173 {
5174 case IFN_UBSAN_CHECK_ADD:
5175 subcode = PLUS_EXPR;
5176 break;
5177 case IFN_UBSAN_CHECK_SUB:
5178 subcode = MINUS_EXPR;
5179 break;
5180 case IFN_UBSAN_CHECK_MUL:
5181 subcode = MULT_EXPR;
5182 break;
5183 default:
5184 return NULL_TREE;
5185 }
368b454d
JJ
5186 tree arg0 = gimple_call_arg (stmt, 0);
5187 tree arg1 = gimple_call_arg (stmt, 1);
5188 tree op0 = (*valueize) (arg0);
5189 tree op1 = (*valueize) (arg1);
31e071ae
MP
5190
5191 if (TREE_CODE (op0) != INTEGER_CST
5192 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
5193 {
5194 switch (subcode)
5195 {
5196 case MULT_EXPR:
5197 /* x * 0 = 0 * x = 0 without overflow. */
5198 if (integer_zerop (op0) || integer_zerop (op1))
5199 return build_zero_cst (TREE_TYPE (arg0));
5200 break;
5201 case MINUS_EXPR:
5202 /* y - y = 0 without overflow. */
5203 if (operand_equal_p (op0, op1, 0))
5204 return build_zero_cst (TREE_TYPE (arg0));
5205 break;
5206 default:
5207 break;
5208 }
5209 }
5210 tree res
5211 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
5212 if (res
5213 && TREE_CODE (res) == INTEGER_CST
5214 && !TREE_OVERFLOW (res))
5215 return res;
5216 return NULL_TREE;
5217 }
25583c4f
RS
5218
5219 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8
RG
5220 if (TREE_CODE (fn) == ADDR_EXPR
5221 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5c944c6c
RB
5222 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5223 && gimple_builtin_call_types_compatible_p (stmt,
5224 TREE_OPERAND (fn, 0)))
cfef45c8
RG
5225 {
5226 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 5227 tree retval;
cfef45c8
RG
5228 unsigned i;
5229 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5230 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 5231 retval = fold_builtin_call_array (loc,
538dd0b7 5232 gimple_call_return_type (call_stmt),
cfef45c8 5233 fn, gimple_call_num_args (stmt), args);
cfef45c8 5234 if (retval)
5c944c6c
RB
5235 {
5236 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5237 STRIP_NOPS (retval);
538dd0b7
DM
5238 retval = fold_convert (gimple_call_return_type (call_stmt),
5239 retval);
5c944c6c 5240 }
cfef45c8
RG
5241 return retval;
5242 }
5243 return NULL_TREE;
5244 }
5245
5246 default:
5247 return NULL_TREE;
5248 }
5249}
5250
5251/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5252 Returns NULL_TREE if folding to a constant is not possible, otherwise
5253 returns a constant according to is_gimple_min_invariant. */
5254
5255tree
355fe088 5256gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
5257{
5258 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5259 if (res && is_gimple_min_invariant (res))
5260 return res;
5261 return NULL_TREE;
5262}
5263
5264
5265/* The following set of functions are supposed to fold references using
5266 their constant initializers. */
5267
cfef45c8
RG
5268/* See if we can find constructor defining value of BASE.
5269 When we know the consructor with constant offset (such as
5270 base is array[40] and we do know constructor of array), then
5271 BIT_OFFSET is adjusted accordingly.
5272
5273 As a special case, return error_mark_node when constructor
5274 is not explicitly available, but it is known to be zero
5275 such as 'static const int a;'. */
5276static tree
5277get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5278 tree (*valueize)(tree))
5279{
5280 HOST_WIDE_INT bit_offset2, size, max_size;
ee45a32d
EB
5281 bool reverse;
5282
cfef45c8
RG
5283 if (TREE_CODE (base) == MEM_REF)
5284 {
5285 if (!integer_zerop (TREE_OPERAND (base, 1)))
5286 {
9541ffee 5287 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
cfef45c8 5288 return NULL_TREE;
807e902e 5289 *bit_offset += (mem_ref_offset (base).to_short_addr ()
cfef45c8
RG
5290 * BITS_PER_UNIT);
5291 }
5292
5293 if (valueize
5294 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5295 base = valueize (TREE_OPERAND (base, 0));
5296 if (!base || TREE_CODE (base) != ADDR_EXPR)
5297 return NULL_TREE;
5298 base = TREE_OPERAND (base, 0);
5299 }
5300
5301 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5302 DECL_INITIAL. If BASE is a nested reference into another
5303 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5304 the inner reference. */
5305 switch (TREE_CODE (base))
5306 {
5307 case VAR_DECL:
cfef45c8 5308 case CONST_DECL:
6a6dac52
JH
5309 {
5310 tree init = ctor_for_folding (base);
5311
688010ba 5312 /* Our semantic is exact opposite of ctor_for_folding;
6a6dac52
JH
5313 NULL means unknown, while error_mark_node is 0. */
5314 if (init == error_mark_node)
5315 return NULL_TREE;
5316 if (!init)
5317 return error_mark_node;
5318 return init;
5319 }
cfef45c8
RG
5320
5321 case ARRAY_REF:
5322 case COMPONENT_REF:
ee45a32d
EB
5323 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
5324 &reverse);
cfef45c8
RG
5325 if (max_size == -1 || size != max_size)
5326 return NULL_TREE;
5327 *bit_offset += bit_offset2;
5328 return get_base_constructor (base, bit_offset, valueize);
5329
5330 case STRING_CST:
5331 case CONSTRUCTOR:
5332 return base;
5333
5334 default:
5335 return NULL_TREE;
5336 }
5337}
5338
cfef45c8
RG
5339/* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5340 SIZE to the memory at bit OFFSET. */
5341
5342static tree
5343fold_array_ctor_reference (tree type, tree ctor,
5344 unsigned HOST_WIDE_INT offset,
c44c2088
JH
5345 unsigned HOST_WIDE_INT size,
5346 tree from_decl)
cfef45c8 5347{
807e902e
KZ
5348 offset_int low_bound;
5349 offset_int elt_size;
807e902e 5350 offset_int access_index;
6a636014 5351 tree domain_type = NULL_TREE;
cfef45c8
RG
5352 HOST_WIDE_INT inner_offset;
5353
5354 /* Compute low bound and elt size. */
eb8f1123
RG
5355 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5356 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
5357 if (domain_type && TYPE_MIN_VALUE (domain_type))
5358 {
5359 /* Static constructors for variably sized objects makes no sense. */
5360 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
807e902e 5361 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
5362 }
5363 else
807e902e 5364 low_bound = 0;
cfef45c8 5365 /* Static constructors for variably sized objects makes no sense. */
c3284718 5366 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
cfef45c8 5367 == INTEGER_CST);
807e902e 5368 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8
RG
5369
5370 /* We can handle only constantly sized accesses that are known to not
5371 be larger than size of array element. */
5372 if (!TYPE_SIZE_UNIT (type)
5373 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
807e902e
KZ
5374 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
5375 || elt_size == 0)
cfef45c8
RG
5376 return NULL_TREE;
5377
5378 /* Compute the array index we look for. */
807e902e
KZ
5379 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5380 elt_size);
27bcd47c 5381 access_index += low_bound;
cfef45c8
RG
5382
5383 /* And offset within the access. */
27bcd47c 5384 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8
RG
5385
5386 /* See if the array field is large enough to span whole access. We do not
5387 care to fold accesses spanning multiple array indexes. */
27bcd47c 5388 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
cfef45c8 5389 return NULL_TREE;
6a636014
AL
5390 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
5391 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
cfef45c8 5392
cfef45c8
RG
5393 /* When memory is not explicitely mentioned in constructor,
5394 it is 0 (or out of range). */
5395 return build_zero_cst (type);
5396}
5397
5398/* CTOR is CONSTRUCTOR of an aggregate or vector.
5399 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5400
5401static tree
5402fold_nonarray_ctor_reference (tree type, tree ctor,
5403 unsigned HOST_WIDE_INT offset,
c44c2088
JH
5404 unsigned HOST_WIDE_INT size,
5405 tree from_decl)
cfef45c8
RG
5406{
5407 unsigned HOST_WIDE_INT cnt;
5408 tree cfield, cval;
5409
5410 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5411 cval)
5412 {
5413 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5414 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5415 tree field_size = DECL_SIZE (cfield);
807e902e
KZ
5416 offset_int bitoffset;
5417 offset_int bitoffset_end, access_end;
cfef45c8
RG
5418
5419 /* Variable sized objects in static constructors makes no sense,
5420 but field_size can be NULL for flexible array members. */
5421 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5422 && TREE_CODE (byte_offset) == INTEGER_CST
5423 && (field_size != NULL_TREE
5424 ? TREE_CODE (field_size) == INTEGER_CST
5425 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5426
5427 /* Compute bit offset of the field. */
807e902e
KZ
5428 bitoffset = (wi::to_offset (field_offset)
5429 + wi::lshift (wi::to_offset (byte_offset),
5430 LOG2_BITS_PER_UNIT));
cfef45c8
RG
5431 /* Compute bit offset where the field ends. */
5432 if (field_size != NULL_TREE)
807e902e 5433 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 5434 else
807e902e 5435 bitoffset_end = 0;
cfef45c8 5436
807e902e 5437 access_end = offset_int (offset) + size;
b8b2b009
JJ
5438
5439 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5440 [BITOFFSET, BITOFFSET_END)? */
807e902e 5441 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 5442 && (field_size == NULL_TREE
807e902e 5443 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 5444 {
807e902e 5445 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8
RG
5446 /* We do have overlap. Now see if field is large enough to
5447 cover the access. Give up for accesses spanning multiple
5448 fields. */
807e902e 5449 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 5450 return NULL_TREE;
807e902e 5451 if (wi::lts_p (offset, bitoffset))
b8b2b009 5452 return NULL_TREE;
cfef45c8 5453 return fold_ctor_reference (type, cval,
27bcd47c 5454 inner_offset.to_uhwi (), size,
c44c2088 5455 from_decl);
cfef45c8
RG
5456 }
5457 }
5458 /* When memory is not explicitely mentioned in constructor, it is 0. */
5459 return build_zero_cst (type);
5460}
5461
5462/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5463 to the memory at bit OFFSET. */
5464
8403c2cf 5465tree
cfef45c8 5466fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
c44c2088 5467 unsigned HOST_WIDE_INT size, tree from_decl)
cfef45c8
RG
5468{
5469 tree ret;
5470
5471 /* We found the field with exact match. */
5472 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5473 && !offset)
9d60be38 5474 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8
RG
5475
5476 /* We are at the end of walk, see if we can view convert the
5477 result. */
5478 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5479 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3d8208ce
TP
5480 && !compare_tree_int (TYPE_SIZE (type), size)
5481 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
cfef45c8 5482 {
9d60be38 5483 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8
RG
5484 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5485 if (ret)
86c5a5c3 5486 STRIP_USELESS_TYPE_CONVERSION (ret);
cfef45c8
RG
5487 return ret;
5488 }
b2505143
RB
5489 /* For constants and byte-aligned/sized reads try to go through
5490 native_encode/interpret. */
5491 if (CONSTANT_CLASS_P (ctor)
5492 && BITS_PER_UNIT == 8
5493 && offset % BITS_PER_UNIT == 0
5494 && size % BITS_PER_UNIT == 0
5495 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5496 {
5497 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1ff0a84c
JJ
5498 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5499 offset / BITS_PER_UNIT);
5500 if (len > 0)
5501 return native_interpret_expr (type, buf, len);
b2505143 5502 }
cfef45c8
RG
5503 if (TREE_CODE (ctor) == CONSTRUCTOR)
5504 {
5505
eb8f1123
RG
5506 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5507 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
c44c2088
JH
5508 return fold_array_ctor_reference (type, ctor, offset, size,
5509 from_decl);
cfef45c8 5510 else
c44c2088
JH
5511 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5512 from_decl);
cfef45c8
RG
5513 }
5514
5515 return NULL_TREE;
5516}
5517
5518/* Return the tree representing the element referenced by T if T is an
5519 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5520 names using VALUEIZE. Return NULL_TREE otherwise. */
5521
5522tree
5523fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5524{
5525 tree ctor, idx, base;
5526 HOST_WIDE_INT offset, size, max_size;
5527 tree tem;
ee45a32d 5528 bool reverse;
cfef45c8 5529
f8a7df45
RG
5530 if (TREE_THIS_VOLATILE (t))
5531 return NULL_TREE;
5532
3a65ee74 5533 if (DECL_P (t))
cfef45c8
RG
5534 return get_symbol_constant_value (t);
5535
5536 tem = fold_read_from_constant_string (t);
5537 if (tem)
5538 return tem;
5539
5540 switch (TREE_CODE (t))
5541 {
5542 case ARRAY_REF:
5543 case ARRAY_RANGE_REF:
5544 /* Constant indexes are handled well by get_base_constructor.
5545 Only special case variable offsets.
5546 FIXME: This code can't handle nested references with variable indexes
5547 (they will be handled only by iteration of ccp). Perhaps we can bring
5548 get_ref_base_and_extent here and make it use a valueize callback. */
5549 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5550 && valueize
5551 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
b48e22b2 5552 && TREE_CODE (idx) == INTEGER_CST)
cfef45c8
RG
5553 {
5554 tree low_bound, unit_size;
5555
5556 /* If the resulting bit-offset is constant, track it. */
5557 if ((low_bound = array_ref_low_bound (t),
b48e22b2 5558 TREE_CODE (low_bound) == INTEGER_CST)
cfef45c8 5559 && (unit_size = array_ref_element_size (t),
807e902e 5560 tree_fits_uhwi_p (unit_size)))
cfef45c8 5561 {
807e902e
KZ
5562 offset_int woffset
5563 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5564 TYPE_PRECISION (TREE_TYPE (idx)));
5565
5566 if (wi::fits_shwi_p (woffset))
5567 {
5568 offset = woffset.to_shwi ();
5569 /* TODO: This code seems wrong, multiply then check
5570 to see if it fits. */
5571 offset *= tree_to_uhwi (unit_size);
5572 offset *= BITS_PER_UNIT;
5573
5574 base = TREE_OPERAND (t, 0);
5575 ctor = get_base_constructor (base, &offset, valueize);
5576 /* Empty constructor. Always fold to 0. */
5577 if (ctor == error_mark_node)
5578 return build_zero_cst (TREE_TYPE (t));
5579 /* Out of bound array access. Value is undefined,
5580 but don't fold. */
5581 if (offset < 0)
5582 return NULL_TREE;
5583 /* We can not determine ctor. */
5584 if (!ctor)
5585 return NULL_TREE;
5586 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5587 tree_to_uhwi (unit_size)
5588 * BITS_PER_UNIT,
5589 base);
5590 }
cfef45c8
RG
5591 }
5592 }
5593 /* Fallthru. */
5594
5595 case COMPONENT_REF:
5596 case BIT_FIELD_REF:
5597 case TARGET_MEM_REF:
5598 case MEM_REF:
ee45a32d 5599 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
cfef45c8
RG
5600 ctor = get_base_constructor (base, &offset, valueize);
5601
5602 /* Empty constructor. Always fold to 0. */
5603 if (ctor == error_mark_node)
5604 return build_zero_cst (TREE_TYPE (t));
5605 /* We do not know precise address. */
5606 if (max_size == -1 || max_size != size)
5607 return NULL_TREE;
5608 /* We can not determine ctor. */
5609 if (!ctor)
5610 return NULL_TREE;
5611
5612 /* Out of bound array access. Value is undefined, but don't fold. */
5613 if (offset < 0)
5614 return NULL_TREE;
5615
c44c2088
JH
5616 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5617 base);
cfef45c8
RG
5618
5619 case REALPART_EXPR:
5620 case IMAGPART_EXPR:
5621 {
5622 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5623 if (c && TREE_CODE (c) == COMPLEX_CST)
5624 return fold_build1_loc (EXPR_LOCATION (t),
5625 TREE_CODE (t), TREE_TYPE (t), c);
5626 break;
5627 }
5628
5629 default:
5630 break;
5631 }
5632
5633 return NULL_TREE;
5634}
5635
5636tree
5637fold_const_aggregate_ref (tree t)
5638{
5639 return fold_const_aggregate_ref_1 (t, NULL);
5640}
06bc3ec7 5641
85942f45 5642/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
5643 at OFFSET.
5644 Set CAN_REFER if non-NULL to false if method
5645 is not referable or if the virtual table is ill-formed (such as rewriten
5646 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
81fa35bd
MJ
5647
tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  /* Be optimistic; every failure path below clears this again.  */
  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* A missing vtable constructor is only legitimate in LTO mode,
	 where the ctor may live in another partition.  */
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Turn the byte OFFSET plus the TOKEN-th entry into a bit offset
     into the vtable array.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
5752
85942f45
JH
5753/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5754 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5755 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
5756 OBJ_TYPE_REF_OBJECT(REF).
5757 Set CAN_REFER if non-NULL to false if method
5758 is not referable or if the virtual table is ill-formed (such as rewriten
5759 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
5760
5761tree
ec77d61f
JH
5762gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5763 bool *can_refer)
85942f45
JH
5764{
5765 unsigned HOST_WIDE_INT offset;
5766 tree v;
5767
5768 v = BINFO_VTABLE (known_binfo);
5769 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5770 if (!v)
5771 return NULL_TREE;
5772
5773 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
5774 {
5775 if (can_refer)
5776 *can_refer = false;
5777 return NULL_TREE;
5778 }
5779 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
5780}
5781
b184c8f1
AM
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view. */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
       {
	 /* Use the array's lower bound (not necessarily zero in all
	    languages) as the index of the first element.  */
	 tree type_domain = TYPE_DOMAIN (optype);
	 tree min_val = size_zero_node;
	 if (type_domain && TYPE_MIN_VALUE (type_domain))
	   min_val = TYPE_MIN_VALUE (type_domain);
	 if (TREE_CODE (min_val) == INTEGER_CST)
	   return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
       }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
	  /* Only fold when the access stays within the vector.  */
          if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.
	 The offset is retyped to PTYPE, the type MEM_REF expects for
	 its second operand.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively simplify the inner indirection first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
19e51b40
JJ
5901
5902/* Return true if CODE is an operation that when operating on signed
5903 integer types involves undefined behavior on overflow and the
5904 operation can be expressed with unsigned arithmetic. */
5905
5906bool
5907arith_code_with_undefined_signed_overflow (tree_code code)
5908{
5909 switch (code)
5910 {
5911 case PLUS_EXPR:
5912 case MINUS_EXPR:
5913 case MULT_EXPR:
5914 case NEGATE_EXPR:
5915 case POINTER_PLUS_EXPR:
5916 return true;
5917 default:
5918 return false;
5919 }
5920}
5921
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each rhs operand (ops 1..n-1; op 0 is the lhs) to the
     unsigned type, collecting the conversion stmts in STMTS.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  /* Give STMT a fresh unsigned lhs; the original LHS is defined by the
     final conversion appended below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR is invalid on non-pointer types, so it becomes a
     plain unsigned addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
d4f5cd5e 5958
3d2cf79f 5959
c26de36d
RB
5960/* The valueization hook we use for the gimple_build API simplification.
5961 This makes us match fold_buildN behavior by only combining with
5962 statements in the sequence(s) we are currently building. */
5963
5964static tree
5965gimple_build_valueize (tree op)
5966{
5967 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
5968 return op;
5969 return NULL_TREE;
5970}
5971
3d2cf79f 5972/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 5973 simplifying it first if possible. Returns the built
3d2cf79f
RB
5974 expression value and appends statements possibly defining it
5975 to SEQ. */
5976
5977tree
5978gimple_build (gimple_seq *seq, location_t loc,
c26de36d 5979 enum tree_code code, tree type, tree op0)
3d2cf79f 5980{
c26de36d 5981 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
5982 if (!res)
5983 {
5984 if (gimple_in_ssa_p (cfun))
b731b390 5985 res = make_ssa_name (type);
3d2cf79f 5986 else
b731b390 5987 res = create_tmp_reg (type);
355fe088 5988 gimple *stmt;
3d2cf79f
RB
5989 if (code == REALPART_EXPR
5990 || code == IMAGPART_EXPR
5991 || code == VIEW_CONVERT_EXPR)
0d0e4a03 5992 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 5993 else
0d0e4a03 5994 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
5995 gimple_set_location (stmt, loc);
5996 gimple_seq_add_stmt_without_update (seq, stmt);
5997 }
5998 return res;
5999}
6000
6001/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 6002 simplifying it first if possible. Returns the built
3d2cf79f
RB
6003 expression value and appends statements possibly defining it
6004 to SEQ. */
6005
6006tree
6007gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6008 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 6009{
c26de36d 6010 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
6011 if (!res)
6012 {
6013 if (gimple_in_ssa_p (cfun))
b731b390 6014 res = make_ssa_name (type);
3d2cf79f 6015 else
b731b390 6016 res = create_tmp_reg (type);
355fe088 6017 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
6018 gimple_set_location (stmt, loc);
6019 gimple_seq_add_stmt_without_update (seq, stmt);
6020 }
6021 return res;
6022}
6023
6024/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 6025 simplifying it first if possible. Returns the built
3d2cf79f
RB
6026 expression value and appends statements possibly defining it
6027 to SEQ. */
6028
6029tree
6030gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6031 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
6032{
6033 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 6034 seq, gimple_build_valueize);
3d2cf79f
RB
6035 if (!res)
6036 {
6037 if (gimple_in_ssa_p (cfun))
b731b390 6038 res = make_ssa_name (type);
3d2cf79f 6039 else
b731b390 6040 res = create_tmp_reg (type);
355fe088 6041 gimple *stmt;
3d2cf79f 6042 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
6043 stmt = gimple_build_assign (res, code,
6044 build3 (code, type, op0, op1, op2));
3d2cf79f 6045 else
0d0e4a03 6046 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
6047 gimple_set_location (stmt, loc);
6048 gimple_seq_add_stmt_without_update (seq, stmt);
6049 }
6050 return res;
6051}
6052
6053/* Build the call FN (ARG0) with a result of type TYPE
6054 (or no result if TYPE is void) with location LOC,
c26de36d 6055 simplifying it first if possible. Returns the built
3d2cf79f
RB
6056 expression value (or NULL_TREE if TYPE is void) and appends
6057 statements possibly defining it to SEQ. */
6058
6059tree
6060gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6061 enum built_in_function fn, tree type, tree arg0)
3d2cf79f 6062{
c26de36d 6063 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
6064 if (!res)
6065 {
6066 tree decl = builtin_decl_implicit (fn);
355fe088 6067 gimple *stmt = gimple_build_call (decl, 1, arg0);
3d2cf79f
RB
6068 if (!VOID_TYPE_P (type))
6069 {
6070 if (gimple_in_ssa_p (cfun))
b731b390 6071 res = make_ssa_name (type);
3d2cf79f 6072 else
b731b390 6073 res = create_tmp_reg (type);
3d2cf79f
RB
6074 gimple_call_set_lhs (stmt, res);
6075 }
6076 gimple_set_location (stmt, loc);
6077 gimple_seq_add_stmt_without_update (seq, stmt);
6078 }
6079 return res;
6080}
6081
6082/* Build the call FN (ARG0, ARG1) with a result of type TYPE
6083 (or no result if TYPE is void) with location LOC,
c26de36d 6084 simplifying it first if possible. Returns the built
3d2cf79f
RB
6085 expression value (or NULL_TREE if TYPE is void) and appends
6086 statements possibly defining it to SEQ. */
6087
6088tree
6089gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6090 enum built_in_function fn, tree type, tree arg0, tree arg1)
3d2cf79f 6091{
c26de36d 6092 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
6093 if (!res)
6094 {
6095 tree decl = builtin_decl_implicit (fn);
355fe088 6096 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
3d2cf79f
RB
6097 if (!VOID_TYPE_P (type))
6098 {
6099 if (gimple_in_ssa_p (cfun))
b731b390 6100 res = make_ssa_name (type);
3d2cf79f 6101 else
b731b390 6102 res = create_tmp_reg (type);
3d2cf79f
RB
6103 gimple_call_set_lhs (stmt, res);
6104 }
6105 gimple_set_location (stmt, loc);
6106 gimple_seq_add_stmt_without_update (seq, stmt);
6107 }
6108 return res;
6109}
6110
6111/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6112 (or no result if TYPE is void) with location LOC,
c26de36d 6113 simplifying it first if possible. Returns the built
3d2cf79f
RB
6114 expression value (or NULL_TREE if TYPE is void) and appends
6115 statements possibly defining it to SEQ. */
6116
6117tree
6118gimple_build (gimple_seq *seq, location_t loc,
6119 enum built_in_function fn, tree type,
c26de36d 6120 tree arg0, tree arg1, tree arg2)
3d2cf79f 6121{
c26de36d
RB
6122 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6123 seq, gimple_build_valueize);
3d2cf79f
RB
6124 if (!res)
6125 {
6126 tree decl = builtin_decl_implicit (fn);
355fe088 6127 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
3d2cf79f
RB
6128 if (!VOID_TYPE_P (type))
6129 {
6130 if (gimple_in_ssa_p (cfun))
b731b390 6131 res = make_ssa_name (type);
3d2cf79f 6132 else
b731b390 6133 res = create_tmp_reg (type);
3d2cf79f
RB
6134 gimple_call_set_lhs (stmt, res);
6135 }
6136 gimple_set_location (stmt, loc);
6137 gimple_seq_add_stmt_without_update (seq, stmt);
6138 }
6139 return res;
6140}
6141
6142/* Build the conversion (TYPE) OP with a result of type TYPE
6143 with location LOC if such conversion is neccesary in GIMPLE,
6144 simplifying it first.
6145 Returns the built expression value and appends
6146 statements possibly defining it to SEQ. */
d4f5cd5e
RB
6147
6148tree
6149gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6150{
6151 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6152 return op;
3d2cf79f 6153 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 6154}
68e57f04 6155
74e3c262
RB
6156/* Build the conversion (ptrofftype) OP with a result of a type
6157 compatible with ptrofftype with location LOC if such conversion
6158 is neccesary in GIMPLE, simplifying it first.
6159 Returns the built expression value and appends
6160 statements possibly defining it to SEQ. */
6161
6162tree
6163gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6164{
6165 if (ptrofftype_p (TREE_TYPE (op)))
6166 return op;
6167 return gimple_convert (seq, loc, sizetype, op);
6168}
6169
68e57f04
RS
6170/* Return true if the result of assignment STMT is known to be non-negative.
6171 If the return value is based on the assumption that signed overflow is
6172 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6173 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6174
6175static bool
6176gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6177 int depth)
6178{
6179 enum tree_code code = gimple_assign_rhs_code (stmt);
6180 switch (get_gimple_rhs_class (code))
6181 {
6182 case GIMPLE_UNARY_RHS:
6183 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6184 gimple_expr_type (stmt),
6185 gimple_assign_rhs1 (stmt),
6186 strict_overflow_p, depth);
6187 case GIMPLE_BINARY_RHS:
6188 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6189 gimple_expr_type (stmt),
6190 gimple_assign_rhs1 (stmt),
6191 gimple_assign_rhs2 (stmt),
6192 strict_overflow_p, depth);
6193 case GIMPLE_TERNARY_RHS:
6194 return false;
6195 case GIMPLE_SINGLE_RHS:
6196 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6197 strict_overflow_p, depth);
6198 case GIMPLE_INVALID_RHS:
6199 break;
6200 }
6201 gcc_unreachable ();
6202}
6203
6204/* Return true if return value of call STMT is known to be non-negative.
6205 If the return value is based on the assumption that signed overflow is
6206 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6207 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6208
6209static bool
6210gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6211 int depth)
6212{
6213 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6214 gimple_call_arg (stmt, 0) : NULL_TREE;
6215 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6216 gimple_call_arg (stmt, 1) : NULL_TREE;
6217
6218 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
1d9da71f 6219 gimple_call_combined_fn (stmt),
68e57f04
RS
6220 arg0,
6221 arg1,
6222 strict_overflow_p, depth);
6223}
6224
4534c203
RB
6225/* Return true if return value of call STMT is known to be non-negative.
6226 If the return value is based on the assumption that signed overflow is
6227 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6228 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6229
6230static bool
6231gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6232 int depth)
6233{
6234 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6235 {
6236 tree arg = gimple_phi_arg_def (stmt, i);
6237 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6238 return false;
6239 }
6240 return true;
6241}
6242
68e57f04
RS
6243/* Return true if STMT is known to compute a non-negative value.
6244 If the return value is based on the assumption that signed overflow is
6245 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6246 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6247
6248bool
6249gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6250 int depth)
6251{
6252 switch (gimple_code (stmt))
6253 {
6254 case GIMPLE_ASSIGN:
6255 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6256 depth);
6257 case GIMPLE_CALL:
6258 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6259 depth);
4534c203
RB
6260 case GIMPLE_PHI:
6261 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6262 depth);
68e57f04
RS
6263 default:
6264 return false;
6265 }
6266}
67dbe582
RS
6267
6268/* Return true if the floating-point value computed by assignment STMT
6269 is known to have an integer value. We also allow +Inf, -Inf and NaN
5a00b0aa 6270 to be considered integer values. Return false for signaling NaN.
67dbe582
RS
6271
6272 DEPTH is the current nesting depth of the query. */
6273
6274static bool
6275gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6276{
6277 enum tree_code code = gimple_assign_rhs_code (stmt);
6278 switch (get_gimple_rhs_class (code))
6279 {
6280 case GIMPLE_UNARY_RHS:
6281 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6282 gimple_assign_rhs1 (stmt), depth);
6283 case GIMPLE_BINARY_RHS:
6284 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6285 gimple_assign_rhs1 (stmt),
6286 gimple_assign_rhs2 (stmt), depth);
6287 case GIMPLE_TERNARY_RHS:
6288 return false;
6289 case GIMPLE_SINGLE_RHS:
6290 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6291 case GIMPLE_INVALID_RHS:
6292 break;
6293 }
6294 gcc_unreachable ();
6295}
6296
6297/* Return true if the floating-point value computed by call STMT is known
6298 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 6299 considered integer values. Return false for signaling NaN.
67dbe582
RS
6300
6301 DEPTH is the current nesting depth of the query. */
6302
6303static bool
6304gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6305{
6306 tree arg0 = (gimple_call_num_args (stmt) > 0
6307 ? gimple_call_arg (stmt, 0)
6308 : NULL_TREE);
6309 tree arg1 = (gimple_call_num_args (stmt) > 1
6310 ? gimple_call_arg (stmt, 1)
6311 : NULL_TREE);
1d9da71f 6312 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
6313 arg0, arg1, depth);
6314}
6315
6316/* Return true if the floating-point result of phi STMT is known to have
6317 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 6318 integer values. Return false for signaling NaN.
67dbe582
RS
6319
6320 DEPTH is the current nesting depth of the query. */
6321
6322static bool
6323gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6324{
6325 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6326 {
6327 tree arg = gimple_phi_arg_def (stmt, i);
6328 if (!integer_valued_real_single_p (arg, depth + 1))
6329 return false;
6330 }
6331 return true;
6332}
6333
6334/* Return true if the floating-point value computed by STMT is known
6335 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 6336 considered integer values. Return false for signaling NaN.
67dbe582
RS
6337
6338 DEPTH is the current nesting depth of the query. */
6339
6340bool
6341gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6342{
6343 switch (gimple_code (stmt))
6344 {
6345 case GIMPLE_ASSIGN:
6346 return gimple_assign_integer_valued_real_p (stmt, depth);
6347 case GIMPLE_CALL:
6348 return gimple_call_integer_valued_real_p (stmt, depth);
6349 case GIMPLE_PHI:
6350 return gimple_phi_integer_valued_real_p (stmt, depth);
6351 default:
6352 return false;
6353 }
6354}