]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
Daily bump.
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
818ab71a 2 Copyright (C) 2010-2016 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5
AM
25#include "target.h"
26#include "rtl.h"
cbdd87d4 27#include "tree.h"
c7131fb2 28#include "gimple.h"
957060b5 29#include "predict.h"
c7131fb2 30#include "ssa.h"
957060b5
AM
31#include "cgraph.h"
32#include "gimple-pretty-print.h"
c7131fb2 33#include "fold-const.h"
36566b39
PK
34#include "stmt.h"
35#include "expr.h"
36#include "stor-layout.h"
7ee2468b 37#include "dumpfile.h"
2fb9a547 38#include "gimple-fold.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
442b4905
AM
41#include "tree-into-ssa.h"
42#include "tree-dfa.h"
7a300452 43#include "tree-ssa.h"
cbdd87d4 44#include "tree-ssa-propagate.h"
450ad0cd 45#include "ipa-utils.h"
4484a35a 46#include "tree-ssa-address.h"
862d0b35 47#include "langhooks.h"
19e51b40 48#include "gimplify-me.h"
2b5f0895 49#include "dbgcnt.h"
9b2b7279 50#include "builtins.h"
e0ee10ed
RB
51#include "tree-eh.h"
52#include "gimple-match.h"
48126138 53#include "gomp-constants.h"
f869c12f 54#include "optabs-query.h"
451e8dae 55#include "omp-low.h"
3de2a40e 56#include "ipa-chkp.h"
abd3a68c 57#include "tree-cfg.h"
f869c12f 58
cbdd87d4 59
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (debug-only) origins never have a body to refer to.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function inlined into another no longer has a standalone body.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
159
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE if CVAL cannot be used
   (e.g. its base is not referable from the current unit).  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite PTR p+ CST into &MEM[ptr + CST] so the address is a single
     invariant ADDR_EXPR.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal with its backing decl in place.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      /* Taking the variable's address here; record that fact.  */
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      /* Preserve the type the caller saw before STRIP_NOPS.  */
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
cbdd87d4
RG
222
223/* If SYM is a constant variable with known value, return the value.
224 NULL_TREE is returned otherwise. */
225
226tree
227get_symbol_constant_value (tree sym)
228{
6a6dac52
JH
229 tree val = ctor_for_folding (sym);
230 if (val != error_mark_node)
cbdd87d4 231 {
cbdd87d4
RG
232 if (val)
233 {
9d60be38 234 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 235 if (val && is_gimple_min_invariant (val))
17f39a39 236 return val;
1389294c
JH
237 else
238 return NULL_TREE;
cbdd87d4
RG
239 }
240 /* Variables declared 'const' without an initializer
241 have zero as the initializer if they may not be
242 overridden at link or run time. */
243 if (!val
b8a8c472 244 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 245 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
246 }
247
248 return NULL_TREE;
249}
250
251
cbdd87d4
RG
252
253/* Subroutine of fold_stmt. We perform several simplifications of the
254 memory reference tree EXPR and make sure to re-gimplify them properly
255 after propagation of constant addresses. IS_LHS is true if the
256 reference is supposed to be an lvalue. */
257
258static tree
259maybe_fold_reference (tree expr, bool is_lhs)
260{
17f39a39 261 tree result;
cbdd87d4 262
f0eddb90
RG
263 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
264 || TREE_CODE (expr) == REALPART_EXPR
265 || TREE_CODE (expr) == IMAGPART_EXPR)
266 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
267 return fold_unary_loc (EXPR_LOCATION (expr),
268 TREE_CODE (expr),
269 TREE_TYPE (expr),
270 TREE_OPERAND (expr, 0));
271 else if (TREE_CODE (expr) == BIT_FIELD_REF
272 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
273 return fold_ternary_loc (EXPR_LOCATION (expr),
274 TREE_CODE (expr),
275 TREE_TYPE (expr),
276 TREE_OPERAND (expr, 0),
277 TREE_OPERAND (expr, 1),
278 TREE_OPERAND (expr, 2));
279
f0eddb90
RG
280 if (!is_lhs
281 && (result = fold_const_aggregate_ref (expr))
282 && is_gimple_min_invariant (result))
283 return result;
cbdd87d4 284
cbdd87d4
RG
285 return NULL_TREE;
286}
287
288
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers are not real values; leave them alone.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	/* Try to resolve an OBJ_TYPE_REF (virtual call target) to a
	   direct function address via devirtualization.  */
	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		/* Only fold when the target list is complete and at most
		   one candidate remains (subject to the debug counter).  */
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	/* &REF — fold the referenced lvalue and rebuild the address.  */
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      /* &MEM[p, 0] is just p.  */
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
428
fef5a0d9
RB
429
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF if it stores to memory or is a call
	 with side effects on memory.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence (first seen walking backward)
	     reuses the replaced statement's VDEF; earlier stores get
	     fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace (si_p, stmts, false);
}
501
cbdd87d4
RG
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  /* Gimplification below must happen inside a context so temporaries
     are created correctly; popped before replacing the statement.  */
  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* Result is ignored: gimplify EXPR purely for its side effects.  */
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Result is used: force EXPR to a gimple operand and assign it to
	 the original call's lhs at the end of the sequence.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
cbdd87d4 557
fef5a0d9
RB
558
559/* Replace the call at *GSI with the gimple value VAL. */
560
561static void
562replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
563{
355fe088 564 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 565 tree lhs = gimple_call_lhs (stmt);
355fe088 566 gimple *repl;
fef5a0d9 567 if (lhs)
e256dfce 568 {
fef5a0d9
RB
569 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
570 val = fold_convert (TREE_TYPE (lhs), val);
571 repl = gimple_build_assign (lhs, val);
572 }
573 else
574 repl = gimple_build_nop ();
575 tree vdef = gimple_vdef (stmt);
576 if (vdef && TREE_CODE (vdef) == SSA_NAME)
577 {
578 unlink_stmt_vdef (stmt);
579 release_ssa_name (vdef);
580 }
f6b4dc28 581 gsi_replace (gsi, repl, false);
fef5a0d9
RB
582}
583
584/* Replace the call at *GSI with the new call REPL and fold that
585 again. */
586
587static void
355fe088 588replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
fef5a0d9 589{
355fe088 590 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
591 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
592 gimple_set_location (repl, gimple_location (stmt));
593 if (gimple_vdef (stmt)
594 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
595 {
596 gimple_set_vdef (repl, gimple_vdef (stmt));
597 gimple_set_vuse (repl, gimple_vuse (stmt));
598 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
599 }
f6b4dc28 600 gsi_replace (gsi, repl, false);
fef5a0d9
RB
601 fold_stmt (gsi);
602}
603
604/* Return true if VAR is a VAR_DECL or a component thereof. */
605
606static bool
607var_decl_component_p (tree var)
608{
609 tree inner = var;
610 while (handled_component_p (inner))
611 inner = TREE_OPERAND (inner, 0);
612 return SSA_VAR_P (inner);
613}
614
615/* Fold function call to builtin mem{{,p}cpy,move}. Return
86c5a5c3 616 false if no simplification can be made.
fef5a0d9
RB
617 If ENDP is 0, return DEST (like memcpy).
618 If ENDP is 1, return DEST+LEN (like mempcpy).
619 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
620 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
621 (memmove). */
622
623static bool
624gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
625 tree dest, tree src, int endp)
626{
355fe088 627 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
628 tree lhs = gimple_call_lhs (stmt);
629 tree len = gimple_call_arg (stmt, 2);
630 tree destvar, srcvar;
631 location_t loc = gimple_location (stmt);
632
633 /* If the LEN parameter is zero, return DEST. */
634 if (integer_zerop (len))
635 {
355fe088 636 gimple *repl;
fef5a0d9
RB
637 if (gimple_call_lhs (stmt))
638 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
639 else
640 repl = gimple_build_nop ();
641 tree vdef = gimple_vdef (stmt);
642 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 643 {
fef5a0d9
RB
644 unlink_stmt_vdef (stmt);
645 release_ssa_name (vdef);
646 }
f6b4dc28 647 gsi_replace (gsi, repl, false);
fef5a0d9
RB
648 return true;
649 }
650
651 /* If SRC and DEST are the same (and not volatile), return
652 DEST{,+LEN,+LEN-1}. */
653 if (operand_equal_p (src, dest, 0))
654 {
655 unlink_stmt_vdef (stmt);
656 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
657 release_ssa_name (gimple_vdef (stmt));
658 if (!lhs)
659 {
f6b4dc28 660 gsi_replace (gsi, gimple_build_nop (), false);
fef5a0d9
RB
661 return true;
662 }
663 goto done;
664 }
665 else
666 {
667 tree srctype, desttype;
668 unsigned int src_align, dest_align;
669 tree off0;
670
3de2a40e
IE
671 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
672 pointers as wide integer) and also may result in huge function
673 size because of inlined bounds copy. Thus don't inline for
674 functions we want to instrument. */
675 if (flag_check_pointer_bounds
676 && chkp_instrumentable_p (cfun->decl)
677 /* Even if data may contain pointers we can inline if copy
678 less than a pointer size. */
679 && (!tree_fits_uhwi_p (len)
680 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
681 return false;
682
fef5a0d9
RB
683 /* Build accesses at offset zero with a ref-all character type. */
684 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
685 ptr_mode, true), 0);
686
687 /* If we can perform the copy efficiently with first doing all loads
688 and then all stores inline it that way. Currently efficiently
689 means that we can load all the memory into a single integer
690 register which is what MOVE_MAX gives us. */
691 src_align = get_pointer_alignment (src);
692 dest_align = get_pointer_alignment (dest);
693 if (tree_fits_uhwi_p (len)
694 && compare_tree_int (len, MOVE_MAX) <= 0
695 /* ??? Don't transform copies from strings with known length this
696 confuses the tree-ssa-strlen.c. This doesn't handle
697 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
698 reason. */
699 && !c_strlen (src, 2))
700 {
701 unsigned ilen = tree_to_uhwi (len);
146ec50f 702 if (pow2p_hwi (ilen))
fef5a0d9
RB
703 {
704 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
705 if (type
706 && TYPE_MODE (type) != BLKmode
707 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
708 == ilen * 8)
709 /* If the destination pointer is not aligned we must be able
710 to emit an unaligned store. */
711 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
f869c12f
RR
712 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
713 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
714 != CODE_FOR_nothing)))
fef5a0d9
RB
715 {
716 tree srctype = type;
717 tree desttype = type;
718 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
719 srctype = build_aligned_type (type, src_align);
720 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
721 tree tem = fold_const_aggregate_ref (srcmem);
722 if (tem)
723 srcmem = tem;
724 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
725 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
f869c12f
RR
726 src_align)
727 && (optab_handler (movmisalign_optab,
728 TYPE_MODE (type))
729 == CODE_FOR_nothing))
fef5a0d9
RB
730 srcmem = NULL_TREE;
731 if (srcmem)
732 {
355fe088 733 gimple *new_stmt;
fef5a0d9
RB
734 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
735 {
736 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
737 if (gimple_in_ssa_p (cfun))
738 srcmem = make_ssa_name (TREE_TYPE (srcmem),
739 new_stmt);
740 else
b731b390 741 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
fef5a0d9
RB
742 gimple_assign_set_lhs (new_stmt, srcmem);
743 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
744 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
745 }
746 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
747 desttype = build_aligned_type (type, dest_align);
748 new_stmt
749 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
750 dest, off0),
751 srcmem);
752 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
753 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
754 if (gimple_vdef (new_stmt)
755 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
756 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
757 if (!lhs)
758 {
f6b4dc28 759 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
760 return true;
761 }
762 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
763 goto done;
764 }
765 }
766 }
767 }
768
769 if (endp == 3)
770 {
771 /* Both DEST and SRC must be pointer types.
772 ??? This is what old code did. Is the testing for pointer types
773 really mandatory?
774
775 If either SRC is readonly or length is 1, we can use memcpy. */
776 if (!dest_align || !src_align)
777 return false;
778 if (readonly_data_expr (src)
779 || (tree_fits_uhwi_p (len)
780 && (MIN (src_align, dest_align) / BITS_PER_UNIT
781 >= tree_to_uhwi (len))))
782 {
783 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
784 if (!fn)
785 return false;
786 gimple_call_set_fndecl (stmt, fn);
787 gimple_call_set_arg (stmt, 0, dest);
788 gimple_call_set_arg (stmt, 1, src);
789 fold_stmt (gsi);
790 return true;
791 }
792
793 /* If *src and *dest can't overlap, optimize into memcpy as well. */
794 if (TREE_CODE (src) == ADDR_EXPR
795 && TREE_CODE (dest) == ADDR_EXPR)
796 {
797 tree src_base, dest_base, fn;
798 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
4fda19ef 799 HOST_WIDE_INT maxsize;
fef5a0d9
RB
800
801 srcvar = TREE_OPERAND (src, 0);
4fda19ef
JJ
802 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
803 if (src_base == NULL)
804 src_base = srcvar;
fef5a0d9 805 destvar = TREE_OPERAND (dest, 0);
4fda19ef
JJ
806 dest_base = get_addr_base_and_unit_offset (destvar,
807 &dest_offset);
808 if (dest_base == NULL)
809 dest_base = destvar;
fef5a0d9
RB
810 if (tree_fits_uhwi_p (len))
811 maxsize = tree_to_uhwi (len);
812 else
813 maxsize = -1;
fef5a0d9
RB
814 if (SSA_VAR_P (src_base)
815 && SSA_VAR_P (dest_base))
816 {
817 if (operand_equal_p (src_base, dest_base, 0)
818 && ranges_overlap_p (src_offset, maxsize,
819 dest_offset, maxsize))
820 return false;
821 }
822 else if (TREE_CODE (src_base) == MEM_REF
823 && TREE_CODE (dest_base) == MEM_REF)
824 {
825 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
826 TREE_OPERAND (dest_base, 0), 0))
827 return false;
828 offset_int off = mem_ref_offset (src_base) + src_offset;
829 if (!wi::fits_shwi_p (off))
830 return false;
831 src_offset = off.to_shwi ();
832
833 off = mem_ref_offset (dest_base) + dest_offset;
834 if (!wi::fits_shwi_p (off))
835 return false;
836 dest_offset = off.to_shwi ();
837 if (ranges_overlap_p (src_offset, maxsize,
838 dest_offset, maxsize))
839 return false;
840 }
841 else
842 return false;
843
844 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
845 if (!fn)
846 return false;
847 gimple_call_set_fndecl (stmt, fn);
848 gimple_call_set_arg (stmt, 0, dest);
849 gimple_call_set_arg (stmt, 1, src);
850 fold_stmt (gsi);
851 return true;
852 }
853
854 /* If the destination and source do not alias optimize into
855 memcpy as well. */
856 if ((is_gimple_min_invariant (dest)
857 || TREE_CODE (dest) == SSA_NAME)
858 && (is_gimple_min_invariant (src)
859 || TREE_CODE (src) == SSA_NAME))
860 {
861 ao_ref destr, srcr;
862 ao_ref_init_from_ptr_and_size (&destr, dest, len);
863 ao_ref_init_from_ptr_and_size (&srcr, src, len);
864 if (!refs_may_alias_p_1 (&destr, &srcr, false))
865 {
866 tree fn;
867 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
868 if (!fn)
869 return false;
870 gimple_call_set_fndecl (stmt, fn);
871 gimple_call_set_arg (stmt, 0, dest);
872 gimple_call_set_arg (stmt, 1, src);
873 fold_stmt (gsi);
874 return true;
875 }
876 }
877
878 return false;
879 }
880
881 if (!tree_fits_shwi_p (len))
882 return false;
883 /* FIXME:
884 This logic lose for arguments like (type *)malloc (sizeof (type)),
885 since we strip the casts of up to VOID return value from malloc.
886 Perhaps we ought to inherit type from non-VOID argument here? */
887 STRIP_NOPS (src);
888 STRIP_NOPS (dest);
889 if (!POINTER_TYPE_P (TREE_TYPE (src))
890 || !POINTER_TYPE_P (TREE_TYPE (dest)))
891 return false;
892 /* In the following try to find a type that is most natural to be
893 used for the memcpy source and destination and that allows
894 the most optimization when memcpy is turned into a plain assignment
895 using that type. In theory we could always use a char[len] type
896 but that only gains us that the destination and source possibly
897 no longer will have their address taken. */
898 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
899 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
900 {
901 tree tem = TREE_OPERAND (src, 0);
902 STRIP_NOPS (tem);
903 if (tem != TREE_OPERAND (src, 0))
904 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
905 }
906 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
907 {
908 tree tem = TREE_OPERAND (dest, 0);
909 STRIP_NOPS (tem);
910 if (tem != TREE_OPERAND (dest, 0))
911 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
912 }
913 srctype = TREE_TYPE (TREE_TYPE (src));
914 if (TREE_CODE (srctype) == ARRAY_TYPE
915 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
916 {
917 srctype = TREE_TYPE (srctype);
918 STRIP_NOPS (src);
919 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
920 }
921 desttype = TREE_TYPE (TREE_TYPE (dest));
922 if (TREE_CODE (desttype) == ARRAY_TYPE
923 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
924 {
925 desttype = TREE_TYPE (desttype);
926 STRIP_NOPS (dest);
927 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
928 }
929 if (TREE_ADDRESSABLE (srctype)
930 || TREE_ADDRESSABLE (desttype))
931 return false;
932
933 /* Make sure we are not copying using a floating-point mode or
934 a type whose size possibly does not match its precision. */
935 if (FLOAT_MODE_P (TYPE_MODE (desttype))
936 || TREE_CODE (desttype) == BOOLEAN_TYPE
937 || TREE_CODE (desttype) == ENUMERAL_TYPE)
938 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
939 if (FLOAT_MODE_P (TYPE_MODE (srctype))
940 || TREE_CODE (srctype) == BOOLEAN_TYPE
941 || TREE_CODE (srctype) == ENUMERAL_TYPE)
942 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
943 if (!srctype)
944 srctype = desttype;
945 if (!desttype)
946 desttype = srctype;
947 if (!srctype)
948 return false;
949
950 src_align = get_pointer_alignment (src);
951 dest_align = get_pointer_alignment (dest);
952 if (dest_align < TYPE_ALIGN (desttype)
953 || src_align < TYPE_ALIGN (srctype))
954 return false;
955
956 destvar = dest;
957 STRIP_NOPS (destvar);
958 if (TREE_CODE (destvar) == ADDR_EXPR
959 && var_decl_component_p (TREE_OPERAND (destvar, 0))
960 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
961 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
962 else
963 destvar = NULL_TREE;
964
965 srcvar = src;
966 STRIP_NOPS (srcvar);
967 if (TREE_CODE (srcvar) == ADDR_EXPR
968 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
969 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
970 {
971 if (!destvar
972 || src_align >= TYPE_ALIGN (desttype))
973 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
974 srcvar, off0);
975 else if (!STRICT_ALIGNMENT)
976 {
977 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
978 src_align);
979 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
980 }
e256dfce 981 else
fef5a0d9
RB
982 srcvar = NULL_TREE;
983 }
984 else
985 srcvar = NULL_TREE;
986
987 if (srcvar == NULL_TREE && destvar == NULL_TREE)
988 return false;
989
990 if (srcvar == NULL_TREE)
991 {
992 STRIP_NOPS (src);
993 if (src_align >= TYPE_ALIGN (desttype))
994 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
995 else
996 {
997 if (STRICT_ALIGNMENT)
998 return false;
999 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1000 src_align);
1001 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1002 }
1003 }
1004 else if (destvar == NULL_TREE)
1005 {
1006 STRIP_NOPS (dest);
1007 if (dest_align >= TYPE_ALIGN (srctype))
1008 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1009 else
1010 {
1011 if (STRICT_ALIGNMENT)
1012 return false;
1013 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1014 dest_align);
1015 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1016 }
1017 }
1018
355fe088 1019 gimple *new_stmt;
fef5a0d9
RB
1020 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1021 {
921b13d0
RB
1022 tree tem = fold_const_aggregate_ref (srcvar);
1023 if (tem)
1024 srcvar = tem;
1025 if (! is_gimple_min_invariant (srcvar))
1026 {
1027 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1028 if (gimple_in_ssa_p (cfun))
1029 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1030 else
1031 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1032 gimple_assign_set_lhs (new_stmt, srcvar);
1033 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1034 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1035 }
fef5a0d9
RB
1036 }
1037 new_stmt = gimple_build_assign (destvar, srcvar);
1038 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1039 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1040 if (gimple_vdef (new_stmt)
1041 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1042 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1043 if (!lhs)
1044 {
f6b4dc28 1045 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
1046 return true;
1047 }
1048 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1049 }
1050
1051done:
74e3c262 1052 gimple_seq stmts = NULL;
fef5a0d9
RB
1053 if (endp == 0 || endp == 3)
1054 len = NULL_TREE;
1055 else if (endp == 2)
74e3c262
RB
1056 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1057 ssize_int (1));
fef5a0d9 1058 if (endp == 2 || endp == 1)
74e3c262
RB
1059 {
1060 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1061 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1062 TREE_TYPE (dest), dest, len);
1063 }
fef5a0d9 1064
74e3c262 1065 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 1066 gimple *repl = gimple_build_assign (lhs, dest);
f6b4dc28 1067 gsi_replace (gsi, repl, false);
fef5a0d9
RB
1068 return true;
1069}
1070
1071/* Fold function call to builtin memset or bzero at *GSI setting the
1072 memory of size LEN to VAL. Return whether a simplification was made. */
1073
1074static bool
1075gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1076{
355fe088 1077 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1078 tree etype;
1079 unsigned HOST_WIDE_INT length, cval;
1080
1081 /* If the LEN parameter is zero, return DEST. */
1082 if (integer_zerop (len))
1083 {
1084 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1085 return true;
1086 }
1087
1088 if (! tree_fits_uhwi_p (len))
1089 return false;
1090
1091 if (TREE_CODE (c) != INTEGER_CST)
1092 return false;
1093
1094 tree dest = gimple_call_arg (stmt, 0);
1095 tree var = dest;
1096 if (TREE_CODE (var) != ADDR_EXPR)
1097 return false;
1098
1099 var = TREE_OPERAND (var, 0);
1100 if (TREE_THIS_VOLATILE (var))
1101 return false;
1102
1103 etype = TREE_TYPE (var);
1104 if (TREE_CODE (etype) == ARRAY_TYPE)
1105 etype = TREE_TYPE (etype);
1106
1107 if (!INTEGRAL_TYPE_P (etype)
1108 && !POINTER_TYPE_P (etype))
1109 return NULL_TREE;
1110
1111 if (! var_decl_component_p (var))
1112 return NULL_TREE;
1113
1114 length = tree_to_uhwi (len);
1115 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1116 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1117 return NULL_TREE;
1118
1119 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1120 return NULL_TREE;
1121
1122 if (integer_zerop (c))
1123 cval = 0;
1124 else
1125 {
1126 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1127 return NULL_TREE;
1128
1129 cval = TREE_INT_CST_LOW (c);
1130 cval &= 0xff;
1131 cval |= cval << 8;
1132 cval |= cval << 16;
1133 cval |= (cval << 31) << 1;
1134 }
1135
1136 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1137 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
fef5a0d9
RB
1138 gimple_set_vuse (store, gimple_vuse (stmt));
1139 tree vdef = gimple_vdef (stmt);
1140 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1141 {
1142 gimple_set_vdef (store, gimple_vdef (stmt));
1143 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1144 }
1145 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1146 if (gimple_call_lhs (stmt))
1147 {
355fe088 1148 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1149 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1150 }
1151 else
1152 {
1153 gimple_stmt_iterator gsi2 = *gsi;
1154 gsi_prev (gsi);
1155 gsi_remove (&gsi2, true);
1156 }
1157
1158 return true;
1159}
1160
1161
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple *def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* For maximum value, ARG itself must be a non-negative
	     integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* For a maximum, merge by keeping the larger of the two
		 constants.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* For an exact length (TYPE == 0) all sources must agree.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  The visited
     bitmap is allocated lazily on the first SSA name encountered.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Both arms of the conditional must yield a usable value.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_maxval_strlen (op2, length, visited, type)
		 && get_maxval_strlen (op3, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
1287
dcb7fae2
RB
1288tree
1289get_maxval_strlen (tree arg, int type)
1290{
1291 bitmap visited = NULL;
1292 tree len = NULL_TREE;
1293 if (!get_maxval_strlen (arg, &len, &visited, type))
1294 len = NULL_TREE;
1295 if (visited)
1296 BITMAP_FREE (visited);
1297
1298 return len;
1299}
1300
fef5a0d9
RB
1301
/* Fold function call to builtin strcpy with arguments DEST and SRC
   at *GSI.  When SRC == DEST the call is replaced by DEST; otherwise,
   when the source length is known, the call is turned into a memcpy
   of strlen (SRC) + 1 bytes.  Return true iff the statement was
   simplified.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The memcpy form is a size pessimization; keep strcpy for -Os.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  /* Copy strlen (SRC) + 1 bytes to include the terminating NUL.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1339
/* Fold function call to builtin strncpy with arguments DEST, SRC,
   and LEN at *GSI.  The call is replaced by DEST when LEN is zero,
   or turned into a memcpy when LEN is constant and the source is
   known to be at least LEN - 1 characters long (so no zero padding
   would be required).  Return true iff the statement was
   simplified.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* Account for the terminating NUL in the source.  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1388
/* Simplify a call to the strcat builtin at *GSI.  DST and SRC are the
   arguments to the call.

   When SRC is the empty string the call is replaced by DST.  When the
   source length is known and we are optimizing for speed, the call is
   split into strlen (DST) followed by a memcpy of SRC (including its
   terminating NUL) to DST + strlen (DST).  Return true iff the
   statement was simplified.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The split form is larger; only worthwhile when optimizing this
     block for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (SRC) + 1 bytes so the terminating NUL is included.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
         stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1483
07f1cf56
RB
1484/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1485 are the arguments to the call. */
1486
1487static bool
1488gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1489{
355fe088 1490 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
1491 tree dest = gimple_call_arg (stmt, 0);
1492 tree src = gimple_call_arg (stmt, 1);
1493 tree size = gimple_call_arg (stmt, 2);
1494 tree fn;
1495 const char *p;
1496
1497
1498 p = c_getstr (src);
1499 /* If the SRC parameter is "", return DEST. */
1500 if (p && *p == '\0')
1501 {
1502 replace_call_with_value (gsi, dest);
1503 return true;
1504 }
1505
1506 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1507 return false;
1508
1509 /* If __builtin_strcat_chk is used, assume strcat is available. */
1510 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1511 if (!fn)
1512 return false;
1513
355fe088 1514 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
1515 replace_call_with_call_and_fold (gsi, repl);
1516 return true;
1517}
1518
ad03a744
RB
/* Simplify a call to the strncat builtin at *GSI.  The call is
   replaced by DST when nothing would be appended, or by a plain
   strcat when the bound LEN is known to be >= the source length.
   Return true iff the statement was simplified.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* If the requested len is greater than or equal to the string
     length, call strcat.  */
  if (TREE_CODE (len) == INTEGER_CST && p
      && compare_tree_int (len, strlen (p)) >= 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

      /* If the replacement _DECL isn't initialized, don't do the
	 transformation.  */
      if (!fn)
	return false;

      gcall *repl = gimple_build_call (fn, 2, dst, src);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
1558
745583f9
RB
/* Fold a call to the __strncat_chk builtin at *GSI with arguments
   DEST, SRC, LEN, and SIZE.  The call is lowered to __strcat_chk when
   LEN provably covers the whole source, or to plain strncat when SIZE
   is unknown ((size_t)-1).  Return true iff the statement was
   simplified.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1614
fef5a0d9
RB
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Depending on the known length of the string, the
   call is deleted, turned into fputc, or turned into fwrite; the
   transformation is only done when the call's return value is unused.
   Return true iff the statement at *GSI was simplified.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
1691
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin at *GSI.
   DEST, SRC, LEN, and SIZE are the arguments to the call.  FCODE is
   the BUILT_IN_* code of the builtin.  The object-size check is
   dropped (lowering the call to the unchecked variant) only when SIZE
   is unknown ((size_t)-1) or provably >= the number of bytes written.
   Return true iff the statement was simplified.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound on LEN derived from its use-def chain.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1792
/* Fold a call to the __st[rp]cpy_chk builtin at *GSI.  DEST, SRC, and
   SIZE are the arguments to the call.  FCODE is the BUILT_IN_* code
   of the builtin.  The check is dropped (lowering to st[rp]cpy, or to
   __memcpy_chk / __strcpy_chk in the intermediate cases) only when it
   can be proven not to fail.  Return true iff the statement was
   simplified.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound on the length of SRC.  */
  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy LEN + 1 bytes to include the terminating NUL.  */
	      gimple_seq stmts = NULL;
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1883
/* Fold a call to the __st{r,p}ncpy_chk builtin at *GSI.  DEST, SRC,
   LEN, and SIZE are the arguments to the call.  FCODE is the
   BUILT_IN_* code of the builtin.  The check is dropped (lowering to
   st{r,p}ncpy) only when SIZE is unknown ((size_t)-1) or provably
   >= the bound LEN.  Return true iff the statement was simplified.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound on LEN derived from its use-def chain.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1943
2625bb5d
RB
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return false if no simplification can be made, true otherwise.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* The transformation below requires a compile-time constant source
     length.  */
  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Emit memcpy (dest, src, strlen (src) + 1) to copy the NUL too.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* Transfer the virtual operands by hand since the memcpy call is
     inserted manually rather than via a replace helper.  */
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2007
fef5a0d9
RB
/* Fold a call to the __{,v}snprintf_chk builtin at *GSI into a plain
   {,v}snprintf call when that is provably safe.  FCODE is either
   BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  Return true if the
   call was simplified, false if it should be emitted as-is.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  /* SIZE of all-ones means "unknown object size"; otherwise prove that
     LEN (or its upper bound) fits in the destination.  */
  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  /* Shift the variadic tail left over the dropped FLAG and SIZE slots.  */
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
cbdd87d4 2088
fef5a0d9
RB
/* Fold a call to the __{,v}sprintf_chk builtin at *GSI into a plain
   {,v}sprintf call when the output length is provably within bounds.
   FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.
   Return true if the call was simplified, false otherwise.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* SIZE of all-ones means "unknown object size"; otherwise the computed
     output length must be strictly smaller than the destination.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  /* Shift the variadic tail left over the dropped FLAG and SIZE slots.  */
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2184
35770bb2
RB
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if a simplification was applied at *GSI, false otherwise.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, which here
	     is the constant length of the format string.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* A used return value requires a known length of ORIG to
	 materialize it.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
2309
d7e78447
RB
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if a simplification was applied at *GSI, false otherwise.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a known constant for the bounds
     reasoning below.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters that would have
	     been written, which here is the format length.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      /* The source length must be a known constant to prove it fits.  */
      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
35770bb2 2448
edd7ae68
RB
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if the call at *GSI was simplified (into fputs/fputc or
   removed entirely), false otherwise.  FCODE is the BUILT_IN_* code of
   the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A trailing ARG with no % directive means side effects we must
	 not discard -- punt (va_list variants are exempt).  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2549
ad03a744
RB
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if the call at *GSI was simplified (into puts/putchar or
   removed entirely), false otherwise.  FCODE is the BUILT_IN_* code of
   the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* "%s" needs a literal string argument; va_list variants
	     cannot be handled here.  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2711
edd7ae68 2712
fef5a0d9
RB
2713
/* Fold a call to __builtin_strlen at *GSI when the argument's length is
   known.  Replaces the call with the constant (or SSA) length value.
   Return true on success, false if the length cannot be determined.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  /* The length may be a non-trivial expression; gimplify it into the IL
     before the call so it can stand in for the call's value.  */
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
2727
48126138
NS
/* Fold a call to __builtin_acc_on_device with argument ARG0.  Expands the
   call into (arg0 == val_host) | (arg0 == val_dev), where the compared
   values depend on whether this is the host or the accelerator compiler.
   Return true if folded, false if folding must be deferred.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  /* Host-compiler defaults; overridden below when building the
     accelerator-side compiler.  */
  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  /* host_eq = (arg0 == val_host)  */
  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  /* dev_eq = (arg0 == val_dev)  */
  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  /* result = host_eq | dev_eq  */
  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}
cbdd87d4 2769
dcb7fae2
RB
2770/* Fold the non-target builtin at *GSI and return whether any simplification
2771 was made. */
cbdd87d4 2772
fef5a0d9 2773static bool
dcb7fae2 2774gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 2775{
538dd0b7 2776 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 2777 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 2778
dcb7fae2
RB
2779 /* Give up for always_inline inline builtins until they are
2780 inlined. */
2781 if (avoid_folding_inline_builtin (callee))
2782 return false;
cbdd87d4 2783
edd7ae68
RB
2784 unsigned n = gimple_call_num_args (stmt);
2785 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2786 switch (fcode)
cbdd87d4 2787 {
dcb7fae2
RB
2788 case BUILT_IN_BZERO:
2789 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2790 gimple_call_arg (stmt, 1));
2791 case BUILT_IN_MEMSET:
2792 return gimple_fold_builtin_memset (gsi,
2793 gimple_call_arg (stmt, 1),
2794 gimple_call_arg (stmt, 2));
2795 case BUILT_IN_BCOPY:
2796 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2797 gimple_call_arg (stmt, 0), 3);
2798 case BUILT_IN_MEMCPY:
2799 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2800 gimple_call_arg (stmt, 1), 0);
2801 case BUILT_IN_MEMPCPY:
2802 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2803 gimple_call_arg (stmt, 1), 1);
2804 case BUILT_IN_MEMMOVE:
2805 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2806 gimple_call_arg (stmt, 1), 3);
2807 case BUILT_IN_SPRINTF_CHK:
2808 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 2809 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
2810 case BUILT_IN_STRCAT_CHK:
2811 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
2812 case BUILT_IN_STRNCAT_CHK:
2813 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 2814 case BUILT_IN_STRLEN:
dcb7fae2 2815 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 2816 case BUILT_IN_STRCPY:
dcb7fae2 2817 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 2818 gimple_call_arg (stmt, 0),
dcb7fae2 2819 gimple_call_arg (stmt, 1));
cbdd87d4 2820 case BUILT_IN_STRNCPY:
dcb7fae2 2821 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
2822 gimple_call_arg (stmt, 0),
2823 gimple_call_arg (stmt, 1),
dcb7fae2 2824 gimple_call_arg (stmt, 2));
9a7eefec 2825 case BUILT_IN_STRCAT:
dcb7fae2
RB
2826 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2827 gimple_call_arg (stmt, 1));
ad03a744
RB
2828 case BUILT_IN_STRNCAT:
2829 return gimple_fold_builtin_strncat (gsi);
cbdd87d4 2830 case BUILT_IN_FPUTS:
dcb7fae2
RB
2831 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2832 gimple_call_arg (stmt, 1), false);
cbdd87d4 2833 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
2834 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2835 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
2836 case BUILT_IN_MEMCPY_CHK:
2837 case BUILT_IN_MEMPCPY_CHK:
2838 case BUILT_IN_MEMMOVE_CHK:
2839 case BUILT_IN_MEMSET_CHK:
dcb7fae2 2840 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
2841 gimple_call_arg (stmt, 0),
2842 gimple_call_arg (stmt, 1),
2843 gimple_call_arg (stmt, 2),
2844 gimple_call_arg (stmt, 3),
edd7ae68 2845 fcode);
2625bb5d
RB
2846 case BUILT_IN_STPCPY:
2847 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
2848 case BUILT_IN_STRCPY_CHK:
2849 case BUILT_IN_STPCPY_CHK:
dcb7fae2 2850 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
2851 gimple_call_arg (stmt, 0),
2852 gimple_call_arg (stmt, 1),
2853 gimple_call_arg (stmt, 2),
edd7ae68 2854 fcode);
cbdd87d4 2855 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 2856 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
2857 return gimple_fold_builtin_stxncpy_chk (gsi,
2858 gimple_call_arg (stmt, 0),
2859 gimple_call_arg (stmt, 1),
2860 gimple_call_arg (stmt, 2),
2861 gimple_call_arg (stmt, 3),
edd7ae68 2862 fcode);
cbdd87d4
RG
2863 case BUILT_IN_SNPRINTF_CHK:
2864 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 2865 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
d7e78447 2866 case BUILT_IN_SNPRINTF:
dcb7fae2 2867 return gimple_fold_builtin_snprintf (gsi);
d7e78447 2868 case BUILT_IN_SPRINTF:
dcb7fae2 2869 return gimple_fold_builtin_sprintf (gsi);
edd7ae68
RB
2870 case BUILT_IN_FPRINTF:
2871 case BUILT_IN_FPRINTF_UNLOCKED:
2872 case BUILT_IN_VFPRINTF:
2873 if (n == 2 || n == 3)
2874 return gimple_fold_builtin_fprintf (gsi,
2875 gimple_call_arg (stmt, 0),
2876 gimple_call_arg (stmt, 1),
2877 n == 3
2878 ? gimple_call_arg (stmt, 2)
2879 : NULL_TREE,
2880 fcode);
2881 break;
2882 case BUILT_IN_FPRINTF_CHK:
2883 case BUILT_IN_VFPRINTF_CHK:
2884 if (n == 3 || n == 4)
2885 return gimple_fold_builtin_fprintf (gsi,
2886 gimple_call_arg (stmt, 0),
2887 gimple_call_arg (stmt, 2),
2888 n == 4
2889 ? gimple_call_arg (stmt, 3)
2890 : NULL_TREE,
2891 fcode);
2892 break;
ad03a744
RB
2893 case BUILT_IN_PRINTF:
2894 case BUILT_IN_PRINTF_UNLOCKED:
2895 case BUILT_IN_VPRINTF:
2896 if (n == 1 || n == 2)
2897 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2898 n == 2
2899 ? gimple_call_arg (stmt, 1)
2900 : NULL_TREE, fcode);
2901 break;
2902 case BUILT_IN_PRINTF_CHK:
2903 case BUILT_IN_VPRINTF_CHK:
2904 if (n == 2 || n == 3)
2905 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2906 n == 3
2907 ? gimple_call_arg (stmt, 2)
2908 : NULL_TREE, fcode);
242a37f1 2909 break;
48126138
NS
2910 case BUILT_IN_ACC_ON_DEVICE:
2911 return gimple_fold_builtin_acc_on_device (gsi,
2912 gimple_call_arg (stmt, 0));
fef5a0d9
RB
2913 default:;
2914 }
2915
2916 /* Try the generic builtin folder. */
2917 bool ignore = (gimple_call_lhs (stmt) == NULL);
2918 tree result = fold_call_stmt (stmt, ignore);
2919 if (result)
2920 {
2921 if (ignore)
2922 STRIP_NOPS (result);
2923 else
2924 result = fold_convert (gimple_call_return_type (stmt), result);
2925 if (!update_call_from_tree (gsi, result))
2926 gimplify_and_update_call_from_tree (gsi, result);
2927 return true;
2928 }
2929
2930 return false;
2931}
2932
451e8dae
NS
2933/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
2934 function calls to constants, where possible. */
2935
2936static tree
2937fold_internal_goacc_dim (const gimple *call)
2938{
2939 int axis = get_oacc_ifn_dim_arg (call);
2940 int size = get_oacc_fn_dim_size (current_function_decl, axis);
2941 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
2942 tree result = NULL_TREE;
2943
2944 /* If the size is 1, or we only want the size and it is not dynamic,
2945 we know the answer. */
2946 if (size == 1 || (!is_pos && size))
2947 {
2948 tree type = TREE_TYPE (gimple_call_lhs (call));
2949 result = build_int_cst (type, size - is_pos);
2950 }
2951
2952 return result;
2953}
2954
849a76a5
JJ
2955/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
2956 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
2957 &var where var is only addressable because of such calls. */
2958
2959bool
2960optimize_atomic_compare_exchange_p (gimple *stmt)
2961{
2962 if (gimple_call_num_args (stmt) != 6
2963 || !flag_inline_atomics
2964 || !optimize
2965 || (flag_sanitize & (SANITIZE_THREAD | SANITIZE_ADDRESS)) != 0
2966 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
2967 || !gimple_vdef (stmt)
2968 || !gimple_vuse (stmt))
2969 return false;
2970
2971 tree fndecl = gimple_call_fndecl (stmt);
2972 switch (DECL_FUNCTION_CODE (fndecl))
2973 {
2974 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
2975 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
2976 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
2977 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
2978 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
2979 break;
2980 default:
2981 return false;
2982 }
2983
2984 tree expected = gimple_call_arg (stmt, 1);
2985 if (TREE_CODE (expected) != ADDR_EXPR
1de3c940
JJ
2986 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
2987 return false;
2988
2989 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
2990 if (!is_gimple_reg_type (etype)
849a76a5 2991 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
1de3c940
JJ
2992 || TREE_THIS_VOLATILE (etype)
2993 || VECTOR_TYPE_P (etype)
2994 || TREE_CODE (etype) == COMPLEX_TYPE
2995 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
2996 might not preserve all the bits. See PR71716. */
2997 || SCALAR_FLOAT_TYPE_P (etype)
2998 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
849a76a5
JJ
2999 return false;
3000
3001 tree weak = gimple_call_arg (stmt, 3);
3002 if (!integer_zerop (weak) && !integer_onep (weak))
3003 return false;
3004
3005 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3006 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3007 machine_mode mode = TYPE_MODE (itype);
3008
3009 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3010 == CODE_FOR_nothing
3011 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3012 return false;
3013
1de3c940 3014 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
849a76a5
JJ
3015 return false;
3016
3017 return true;
3018}
3019
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  /* The builtin's third parameter type is the underlying integer type
     (uintN_t) of the atomic operand; the IFN returns a complex of it.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  /* Strip the ADDR_EXPR from &e to get at the expected variable itself.  */
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  /* Load the current value of the expected variable into an SSA name.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  /* Remember where that load was inserted; the iterator is restored to
     it before returning so callers can refold from there.  */
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      /* Reinterpret the bits as uintN_t when the types differ.  */
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode the weak flag in bit 8 and the operand size in the low byte,
     matching the w * 256 + N encoding documented above.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  /* The internal call takes over the virtual operands of the original
     builtin call.  */
  gimple_set_vdef (g, gimple_vdef (stmt));
  gimple_set_vuse (g, gimple_vuse (stmt));
  SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
  if (gimple_call_lhs (stmt))
    {
      /* r = (_Bool) IMAGPART_EXPR <t>;  the NOP_EXPR assignment below
	 replaces the original call via gsi_replace.  */
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (gimple_call_lhs (stmt), NOP_EXPR,
			       gimple_assign_lhs (g));
    }
  gsi_replace (gsi, g, true);
  /* e = REALPART_EXPR <t>;  possibly view-converted back to e's type.  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  /* Leave the iterator at the initial load of EXPECTED.  */
  *gsi = gsiret;
}
3087
1304953e
JJ
3088/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3089 doesn't fit into TYPE. The test for overflow should be regardless of
3090 -fwrapv, and even for unsigned types. */
3091
3092bool
3093arith_overflowed_p (enum tree_code code, const_tree type,
3094 const_tree arg0, const_tree arg1)
3095{
3096 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3097 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3098 widest2_int_cst;
3099 widest2_int warg0 = widest2_int_cst (arg0);
3100 widest2_int warg1 = widest2_int_cst (arg1);
3101 widest2_int wres;
3102 switch (code)
3103 {
3104 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3105 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3106 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3107 default: gcc_unreachable ();
3108 }
3109 signop sign = TYPE_SIGN (type);
3110 if (sign == UNSIGNED && wi::neg_p (wres))
3111 return true;
3112 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3113}
3114
cbdd87d4
RG
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already resolves to a known function; warn
	     in the dump when that target contradicts the type-inheritance
	     analysis, then strip the OBJ_TYPE_REF wrapper.  */
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Try speculative devirtualization: if type analysis proves the
	     complete set of possible targets, a unique target becomes a
	     direct call and an empty set becomes __builtin_unreachable.  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA web valid: give the orphaned lhs
			     a default definition.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible targets: the call site is unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    {
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      gsi_replace (gsi, new_stmt, false);
		    }
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise at least fold *& in the chain operand.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  /* Everything below may replace or insert statements, which INPLACE
     callers cannot tolerate.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      /* Machine-dependent builtins are folded by the target hook.  */
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* Fold internal functions: __builtin_expect lowering, UBSan checks
	 and the arithmetic-with-overflow IFNs with constant or trivial
	 operands.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  /* Drop the check when the object size is unknown (all-ones) or
	     provably large enough.  */
	  if (integer_all_onesp (gimple_call_arg (stmt, 2))
	      || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
		  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
		  && tree_int_cst_le (gimple_call_arg (stmt, 1),
				      gimple_call_arg (stmt, 2))))
	    {
	      gsi_replace (gsi, gimple_build_nop (), false);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      return true;
	    }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      /* For the *_OVERFLOW IFNs the result type is the element
		 type of the complex lhs; without a lhs there is nothing
		 to fold into.  */
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    /* The conversion could change the value; give up.  */
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Package value and overflow flag into the complex result
		 expected from the *_OVERFLOW IFNs.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
3423
e0ee10ed 3424
89a79e96
RB
3425/* Return true whether NAME has a use on STMT. */
3426
3427static bool
355fe088 3428has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
3429{
3430 imm_use_iterator iter;
3431 use_operand_p use_p;
3432 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3433 if (USE_STMT (use_p) == stmt)
3434 return true;
3435 return false;
3436}
3437
e0ee10ed
RB
3438/* Worker for fold_stmt_1 dispatch to pattern based folding with
3439 gimple_simplify.
3440
3441 Replaces *GSI with the simplification result in RCODE and OPS
3442 and the associated statements in *SEQ. Does the replacement
3443 according to INPLACE and returns true if the operation succeeded. */
3444
3445static bool
3446replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3447 code_helper rcode, tree *ops,
3448 gimple_seq *seq, bool inplace)
3449{
355fe088 3450 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed
RB
3451
3452 /* Play safe and do not allow abnormals to be mentioned in
89a79e96
RB
3453 newly created statements. See also maybe_push_res_to_seq.
3454 As an exception allow such uses if there was a use of the
3455 same SSA name on the old stmt. */
e0ee10ed 3456 if ((TREE_CODE (ops[0]) == SSA_NAME
89a79e96
RB
3457 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3458 && !has_use_on_stmt (ops[0], stmt))
e0ee10ed
RB
3459 || (ops[1]
3460 && TREE_CODE (ops[1]) == SSA_NAME
89a79e96
RB
3461 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3462 && !has_use_on_stmt (ops[1], stmt))
e0ee10ed
RB
3463 || (ops[2]
3464 && TREE_CODE (ops[2]) == SSA_NAME
89a79e96 3465 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
e0237993
JJ
3466 && !has_use_on_stmt (ops[2], stmt))
3467 || (COMPARISON_CLASS_P (ops[0])
3468 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
3469 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
3470 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
3471 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
3472 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
3473 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
e0ee10ed
RB
3474 return false;
3475
fec40d06
RS
3476 /* Don't insert new statements when INPLACE is true, even if we could
3477 reuse STMT for the final statement. */
3478 if (inplace && !gimple_seq_empty_p (*seq))
3479 return false;
3480
538dd0b7 3481 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
e0ee10ed
RB
3482 {
3483 gcc_assert (rcode.is_tree_code ());
3484 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3485 /* GIMPLE_CONDs condition may not throw. */
3486 && (!flag_exceptions
3487 || !cfun->can_throw_non_call_exceptions
3488 || !operation_could_trap_p (rcode,
3489 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3490 false, NULL_TREE)))
538dd0b7 3491 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
e0ee10ed 3492 else if (rcode == SSA_NAME)
538dd0b7 3493 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
e0ee10ed
RB
3494 build_zero_cst (TREE_TYPE (ops[0])));
3495 else if (rcode == INTEGER_CST)
3496 {
3497 if (integer_zerop (ops[0]))
538dd0b7 3498 gimple_cond_make_false (cond_stmt);
e0ee10ed 3499 else
538dd0b7 3500 gimple_cond_make_true (cond_stmt);
e0ee10ed
RB
3501 }
3502 else if (!inplace)
3503 {
3504 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3505 ops, seq);
3506 if (!res)
3507 return false;
538dd0b7 3508 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
e0ee10ed
RB
3509 build_zero_cst (TREE_TYPE (res)));
3510 }
3511 else
3512 return false;
3513 if (dump_file && (dump_flags & TDF_DETAILS))
3514 {
3515 fprintf (dump_file, "gimple_simplified to ");
3516 if (!gimple_seq_empty_p (*seq))
3517 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3518 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3519 0, TDF_SLIM);
3520 }
3521 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3522 return true;
3523 }
3524 else if (is_gimple_assign (stmt)
3525 && rcode.is_tree_code ())
3526 {
3527 if (!inplace
f3582e54 3528 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
e0ee10ed
RB
3529 {
3530 maybe_build_generic_op (rcode,
545cd7ec 3531 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
00d66391 3532 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
e0ee10ed
RB
3533 if (dump_file && (dump_flags & TDF_DETAILS))
3534 {
3535 fprintf (dump_file, "gimple_simplified to ");
3536 if (!gimple_seq_empty_p (*seq))
3537 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3538 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3539 0, TDF_SLIM);
3540 }
3541 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3542 return true;
3543 }
3544 }
37d486ab 3545 else if (rcode.is_fn_code ()
c9e926ce 3546 && gimple_call_combined_fn (stmt) == rcode)
37d486ab
RB
3547 {
3548 unsigned i;
3549 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3550 {
3551 gcc_assert (ops[i] != NULL_TREE);
3552 gimple_call_set_arg (stmt, i, ops[i]);
3553 }
3554 if (i < 3)
3555 gcc_assert (ops[i] == NULL_TREE);
fec40d06
RS
3556 if (dump_file && (dump_flags & TDF_DETAILS))
3557 {
3558 fprintf (dump_file, "gimple_simplified to ");
3559 if (!gimple_seq_empty_p (*seq))
3560 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3561 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
3562 }
3563 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
37d486ab
RB
3564 return true;
3565 }
e0ee10ed
RB
3566 else if (!inplace)
3567 {
3568 if (gimple_has_lhs (stmt))
3569 {
3570 tree lhs = gimple_get_lhs (stmt);
de665bbd
RB
3571 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3572 ops, seq, lhs))
3573 return false;
e0ee10ed
RB
3574 if (dump_file && (dump_flags & TDF_DETAILS))
3575 {
3576 fprintf (dump_file, "gimple_simplified to ");
3577 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3578 }
3579 gsi_replace_with_seq_vops (gsi, *seq);
3580 return true;
3581 }
3582 else
3583 gcc_unreachable ();
3584 }
3585
3586 return false;
3587}
3588
040292e7
RB
3589/* Canonicalize MEM_REFs invariant address operand after propagation. */
3590
3591static bool
3592maybe_canonicalize_mem_ref_addr (tree *t)
3593{
3594 bool res = false;
3595
3596 if (TREE_CODE (*t) == ADDR_EXPR)
3597 t = &TREE_OPERAND (*t, 0);
3598
f17a223d
RB
3599 /* The C and C++ frontends use an ARRAY_REF for indexing with their
3600 generic vector extension. The actual vector referenced is
3601 view-converted to an array type for this purpose. If the index
3602 is constant the canonical representation in the middle-end is a
3603 BIT_FIELD_REF so re-write the former to the latter here. */
3604 if (TREE_CODE (*t) == ARRAY_REF
3605 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
3606 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
3607 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
3608 {
3609 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
3610 if (VECTOR_TYPE_P (vtype))
3611 {
3612 tree low = array_ref_low_bound (*t);
3613 if (TREE_CODE (low) == INTEGER_CST)
3614 {
3615 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
3616 {
3617 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
3618 wi::to_widest (low));
3619 idx = wi::mul (idx, wi::to_widest
3620 (TYPE_SIZE (TREE_TYPE (*t))));
3621 widest_int ext
3622 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
3623 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
3624 {
3625 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
3626 TREE_TYPE (*t),
3627 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
3628 TYPE_SIZE (TREE_TYPE (*t)),
3629 wide_int_to_tree (sizetype, idx));
3630 res = true;
3631 }
3632 }
3633 }
3634 }
3635 }
3636
040292e7
RB
3637 while (handled_component_p (*t))
3638 t = &TREE_OPERAND (*t, 0);
3639
3640 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3641 of invariant addresses into a SSA name MEM_REF address. */
3642 if (TREE_CODE (*t) == MEM_REF
3643 || TREE_CODE (*t) == TARGET_MEM_REF)
3644 {
3645 tree addr = TREE_OPERAND (*t, 0);
3646 if (TREE_CODE (addr) == ADDR_EXPR
3647 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3648 || handled_component_p (TREE_OPERAND (addr, 0))))
3649 {
3650 tree base;
3651 HOST_WIDE_INT coffset;
3652 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3653 &coffset);
3654 if (!base)
3655 gcc_unreachable ();
3656
3657 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3658 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3659 TREE_OPERAND (*t, 1),
3660 size_int (coffset));
3661 res = true;
3662 }
3663 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3664 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3665 }
3666
3667 /* Canonicalize back MEM_REFs to plain reference trees if the object
3668 accessed is a decl that has the same access semantics as the MEM_REF. */
3669 if (TREE_CODE (*t) == MEM_REF
3670 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
f3dccf50
RB
3671 && integer_zerop (TREE_OPERAND (*t, 1))
3672 && MR_DEPENDENCE_CLIQUE (*t) == 0)
040292e7
RB
3673 {
3674 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3675 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3676 if (/* Same volatile qualification. */
3677 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3678 /* Same TBAA behavior with -fstrict-aliasing. */
3679 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3680 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3681 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3682 /* Same alignment. */
3683 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3684 /* We have to look out here to not drop a required conversion
3685 from the rhs to the lhs if *t appears on the lhs or vice-versa
3686 if it appears on the rhs. Thus require strict type
3687 compatibility. */
3688 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3689 {
3690 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3691 res = true;
3692 }
3693 }
3694
3695 /* Canonicalize TARGET_MEM_REF in particular with respect to
3696 the indexes becoming constant. */
3697 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3698 {
3699 tree tem = maybe_fold_tmr (*t);
3700 if (tem)
3701 {
3702 *t = tem;
3703 res = true;
3704 }
3705 }
3706
3707 return res;
3708}
3709
cbdd87d4
RG
3710/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3711 distinguishes both cases. */
3712
3713static bool
e0ee10ed 3714fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
cbdd87d4
RG
3715{
3716 bool changed = false;
355fe088 3717 gimple *stmt = gsi_stmt (*gsi);
a8b85ce9 3718 bool nowarning = gimple_no_warning_p (stmt);
cbdd87d4 3719 unsigned i;
a8b85ce9 3720 fold_defer_overflow_warnings ();
cbdd87d4 3721
040292e7
RB
3722 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3723 after propagation.
3724 ??? This shouldn't be done in generic folding but in the
3725 propagation helpers which also know whether an address was
89a79e96
RB
3726 propagated.
3727 Also canonicalize operand order. */
040292e7
RB
3728 switch (gimple_code (stmt))
3729 {
3730 case GIMPLE_ASSIGN:
3731 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3732 {
3733 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3734 if ((REFERENCE_CLASS_P (*rhs)
3735 || TREE_CODE (*rhs) == ADDR_EXPR)
3736 && maybe_canonicalize_mem_ref_addr (rhs))
3737 changed = true;
3738 tree *lhs = gimple_assign_lhs_ptr (stmt);
3739 if (REFERENCE_CLASS_P (*lhs)
3740 && maybe_canonicalize_mem_ref_addr (lhs))
3741 changed = true;
3742 }
89a79e96
RB
3743 else
3744 {
3745 /* Canonicalize operand order. */
3746 enum tree_code code = gimple_assign_rhs_code (stmt);
3747 if (TREE_CODE_CLASS (code) == tcc_comparison
3748 || commutative_tree_code (code)
3749 || commutative_ternary_tree_code (code))
3750 {
3751 tree rhs1 = gimple_assign_rhs1 (stmt);
3752 tree rhs2 = gimple_assign_rhs2 (stmt);
3753 if (tree_swap_operands_p (rhs1, rhs2, false))
3754 {
3755 gimple_assign_set_rhs1 (stmt, rhs2);
3756 gimple_assign_set_rhs2 (stmt, rhs1);
3757 if (TREE_CODE_CLASS (code) == tcc_comparison)
3758 gimple_assign_set_rhs_code (stmt,
3759 swap_tree_comparison (code));
3760 changed = true;
3761 }
3762 }
3763 }
040292e7
RB
3764 break;
3765 case GIMPLE_CALL:
3766 {
3767 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3768 {
3769 tree *arg = gimple_call_arg_ptr (stmt, i);
3770 if (REFERENCE_CLASS_P (*arg)
3771 && maybe_canonicalize_mem_ref_addr (arg))
3772 changed = true;
3773 }
3774 tree *lhs = gimple_call_lhs_ptr (stmt);
3775 if (*lhs
3776 && REFERENCE_CLASS_P (*lhs)
3777 && maybe_canonicalize_mem_ref_addr (lhs))
3778 changed = true;
3779 break;
3780 }
3781 case GIMPLE_ASM:
3782 {
538dd0b7
DM
3783 gasm *asm_stmt = as_a <gasm *> (stmt);
3784 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
040292e7 3785 {
538dd0b7 3786 tree link = gimple_asm_output_op (asm_stmt, i);
040292e7
RB
3787 tree op = TREE_VALUE (link);
3788 if (REFERENCE_CLASS_P (op)
3789 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3790 changed = true;
3791 }
538dd0b7 3792 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
040292e7 3793 {
538dd0b7 3794 tree link = gimple_asm_input_op (asm_stmt, i);
040292e7
RB
3795 tree op = TREE_VALUE (link);
3796 if ((REFERENCE_CLASS_P (op)
3797 || TREE_CODE (op) == ADDR_EXPR)
3798 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3799 changed = true;
3800 }
3801 }
3802 break;
3803 case GIMPLE_DEBUG:
3804 if (gimple_debug_bind_p (stmt))
3805 {
3806 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3807 if (*val
3808 && (REFERENCE_CLASS_P (*val)
3809 || TREE_CODE (*val) == ADDR_EXPR)
3810 && maybe_canonicalize_mem_ref_addr (val))
3811 changed = true;
3812 }
3813 break;
89a79e96
RB
3814 case GIMPLE_COND:
3815 {
3816 /* Canonicalize operand order. */
3817 tree lhs = gimple_cond_lhs (stmt);
3818 tree rhs = gimple_cond_rhs (stmt);
3819 if (tree_swap_operands_p (lhs, rhs, false))
3820 {
3821 gcond *gc = as_a <gcond *> (stmt);
3822 gimple_cond_set_lhs (gc, rhs);
3823 gimple_cond_set_rhs (gc, lhs);
3824 gimple_cond_set_code (gc,
3825 swap_tree_comparison (gimple_cond_code (gc)));
3826 changed = true;
3827 }
3828 }
040292e7
RB
3829 default:;
3830 }
3831
e0ee10ed
RB
3832 /* Dispatch to pattern-based folding. */
3833 if (!inplace
3834 || is_gimple_assign (stmt)
3835 || gimple_code (stmt) == GIMPLE_COND)
3836 {
3837 gimple_seq seq = NULL;
3838 code_helper rcode;
3839 tree ops[3] = {};
0ff093d8
RB
3840 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3841 valueize, valueize))
e0ee10ed
RB
3842 {
3843 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3844 changed = true;
3845 else
3846 gimple_seq_discard (seq);
3847 }
3848 }
3849
3850 stmt = gsi_stmt (*gsi);
3851
cbdd87d4
RG
3852 /* Fold the main computation performed by the statement. */
3853 switch (gimple_code (stmt))
3854 {
3855 case GIMPLE_ASSIGN:
3856 {
819ec64c
RB
3857 /* Try to canonicalize for boolean-typed X the comparisons
3858 X == 0, X == 1, X != 0, and X != 1. */
3859 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
3860 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5fbcc0ed 3861 {
819ec64c
RB
3862 tree lhs = gimple_assign_lhs (stmt);
3863 tree op1 = gimple_assign_rhs1 (stmt);
3864 tree op2 = gimple_assign_rhs2 (stmt);
3865 tree type = TREE_TYPE (op1);
3866
3867 /* Check whether the comparison operands are of the same boolean
3868 type as the result type is.
3869 Check that second operand is an integer-constant with value
3870 one or zero. */
3871 if (TREE_CODE (op2) == INTEGER_CST
3872 && (integer_zerop (op2) || integer_onep (op2))
3873 && useless_type_conversion_p (TREE_TYPE (lhs), type))
3874 {
3875 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
3876 bool is_logical_not = false;
3877
3878 /* X == 0 and X != 1 is a logical-not.of X
3879 X == 1 and X != 0 is X */
3880 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
3881 || (cmp_code == NE_EXPR && integer_onep (op2)))
3882 is_logical_not = true;
3883
3884 if (is_logical_not == false)
3885 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
3886 /* Only for one-bit precision typed X the transformation
3887 !X -> ~X is valied. */
3888 else if (TYPE_PRECISION (type) == 1)
3889 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
3890 /* Otherwise we use !X -> X ^ 1. */
3891 else
3892 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
3893 build_int_cst (type, 1));
3894 changed = true;
3895 break;
3896 }
5fbcc0ed 3897 }
819ec64c
RB
3898
3899 unsigned old_num_ops = gimple_num_ops (stmt);
3900 tree lhs = gimple_assign_lhs (stmt);
3901 tree new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
3902 if (new_rhs
3903 && !useless_type_conversion_p (TREE_TYPE (lhs),
3904 TREE_TYPE (new_rhs)))
3905 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3906 if (new_rhs
3907 && (!inplace
3908 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3909 {
3910 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3911 changed = true;
3912 }
3913 break;
3914 }
3915
cbdd87d4 3916 case GIMPLE_CALL:
ceeffab0 3917 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
3918 break;
3919
3920 case GIMPLE_ASM:
3921 /* Fold *& in asm operands. */
38384150 3922 {
538dd0b7 3923 gasm *asm_stmt = as_a <gasm *> (stmt);
38384150
JJ
3924 size_t noutputs;
3925 const char **oconstraints;
3926 const char *constraint;
3927 bool allows_mem, allows_reg;
3928
538dd0b7 3929 noutputs = gimple_asm_noutputs (asm_stmt);
38384150
JJ
3930 oconstraints = XALLOCAVEC (const char *, noutputs);
3931
538dd0b7 3932 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
38384150 3933 {
538dd0b7 3934 tree link = gimple_asm_output_op (asm_stmt, i);
38384150
JJ
3935 tree op = TREE_VALUE (link);
3936 oconstraints[i]
3937 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3938 if (REFERENCE_CLASS_P (op)
3939 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3940 {
3941 TREE_VALUE (link) = op;
3942 changed = true;
3943 }
3944 }
538dd0b7 3945 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
38384150 3946 {
538dd0b7 3947 tree link = gimple_asm_input_op (asm_stmt, i);
38384150
JJ
3948 tree op = TREE_VALUE (link);
3949 constraint
3950 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3951 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3952 oconstraints, &allows_mem, &allows_reg);
3953 if (REFERENCE_CLASS_P (op)
3954 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3955 != NULL_TREE)
3956 {
3957 TREE_VALUE (link) = op;
3958 changed = true;
3959 }
3960 }
3961 }
cbdd87d4
RG
3962 break;
3963
bd422c4a
RG
3964 case GIMPLE_DEBUG:
3965 if (gimple_debug_bind_p (stmt))
3966 {
3967 tree val = gimple_debug_bind_get_value (stmt);
3968 if (val
3969 && REFERENCE_CLASS_P (val))
3970 {
3971 tree tem = maybe_fold_reference (val, false);
3972 if (tem)
3973 {
3974 gimple_debug_bind_set_value (stmt, tem);
3975 changed = true;
3976 }
3977 }
3e888a5e
RG
3978 else if (val
3979 && TREE_CODE (val) == ADDR_EXPR)
3980 {
3981 tree ref = TREE_OPERAND (val, 0);
3982 tree tem = maybe_fold_reference (ref, false);
3983 if (tem)
3984 {
3985 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3986 gimple_debug_bind_set_value (stmt, tem);
3987 changed = true;
3988 }
3989 }
bd422c4a
RG
3990 }
3991 break;
3992
cbdd87d4
RG
3993 default:;
3994 }
3995
3996 stmt = gsi_stmt (*gsi);
3997
37376165
RB
3998 /* Fold *& on the lhs. */
3999 if (gimple_has_lhs (stmt))
cbdd87d4
RG
4000 {
4001 tree lhs = gimple_get_lhs (stmt);
4002 if (lhs && REFERENCE_CLASS_P (lhs))
4003 {
4004 tree new_lhs = maybe_fold_reference (lhs, true);
4005 if (new_lhs)
4006 {
4007 gimple_set_lhs (stmt, new_lhs);
4008 changed = true;
4009 }
4010 }
4011 }
4012
a8b85ce9 4013 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
cbdd87d4
RG
4014 return changed;
4015}
4016
e0ee10ed
RB
4017/* Valueziation callback that ends up not following SSA edges. */
4018
4019tree
4020no_follow_ssa_edges (tree)
4021{
4022 return NULL_TREE;
4023}
4024
45cc9f96
RB
4025/* Valueization callback that ends up following single-use SSA edges only. */
4026
4027tree
4028follow_single_use_edges (tree val)
4029{
4030 if (TREE_CODE (val) == SSA_NAME
4031 && !has_single_use (val))
4032 return NULL_TREE;
4033 return val;
4034}
4035
cbdd87d4
RG
4036/* Fold the statement pointed to by GSI. In some cases, this function may
4037 replace the whole statement with a new one. Returns true iff folding
4038 makes any changes.
4039 The statement pointed to by GSI should be in valid gimple form but may
4040 be in unfolded state as resulting from for example constant propagation
4041 which can produce *&x = 0. */
4042
4043bool
4044fold_stmt (gimple_stmt_iterator *gsi)
4045{
e0ee10ed
RB
4046 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4047}
4048
4049bool
4050fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4051{
4052 return fold_stmt_1 (gsi, false, valueize);
cbdd87d4
RG
4053}
4054
59401b92 4055/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
4056 *&x created by constant propagation are handled. The statement cannot
4057 be replaced with a new one. Return true if the statement was
4058 changed, false otherwise.
59401b92 4059 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
4060 be in unfolded state as resulting from for example constant propagation
4061 which can produce *&x = 0. */
4062
4063bool
59401b92 4064fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 4065{
355fe088 4066 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 4067 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 4068 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
4069 return changed;
4070}
4071
e89065a1
SL
4072/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4073 if EXPR is null or we don't know how.
4074 If non-null, the result always has boolean type. */
4075
4076static tree
4077canonicalize_bool (tree expr, bool invert)
4078{
4079 if (!expr)
4080 return NULL_TREE;
4081 else if (invert)
4082 {
4083 if (integer_nonzerop (expr))
4084 return boolean_false_node;
4085 else if (integer_zerop (expr))
4086 return boolean_true_node;
4087 else if (TREE_CODE (expr) == SSA_NAME)
4088 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4089 build_int_cst (TREE_TYPE (expr), 0));
98209db3 4090 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
4091 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4092 boolean_type_node,
4093 TREE_OPERAND (expr, 0),
4094 TREE_OPERAND (expr, 1));
4095 else
4096 return NULL_TREE;
4097 }
4098 else
4099 {
4100 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4101 return expr;
4102 if (integer_nonzerop (expr))
4103 return boolean_true_node;
4104 else if (integer_zerop (expr))
4105 return boolean_false_node;
4106 else if (TREE_CODE (expr) == SSA_NAME)
4107 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4108 build_int_cst (TREE_TYPE (expr), 0));
98209db3 4109 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
4110 return fold_build2 (TREE_CODE (expr),
4111 boolean_type_node,
4112 TREE_OPERAND (expr, 0),
4113 TREE_OPERAND (expr, 1));
4114 else
4115 return NULL_TREE;
4116 }
4117}
4118
4119/* Check to see if a boolean expression EXPR is logically equivalent to the
4120 comparison (OP1 CODE OP2). Check for various identities involving
4121 SSA_NAMEs. */
4122
4123static bool
4124same_bool_comparison_p (const_tree expr, enum tree_code code,
4125 const_tree op1, const_tree op2)
4126{
355fe088 4127 gimple *s;
e89065a1
SL
4128
4129 /* The obvious case. */
4130 if (TREE_CODE (expr) == code
4131 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
4132 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
4133 return true;
4134
4135 /* Check for comparing (name, name != 0) and the case where expr
4136 is an SSA_NAME with a definition matching the comparison. */
4137 if (TREE_CODE (expr) == SSA_NAME
4138 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4139 {
4140 if (operand_equal_p (expr, op1, 0))
4141 return ((code == NE_EXPR && integer_zerop (op2))
4142 || (code == EQ_EXPR && integer_nonzerop (op2)));
4143 s = SSA_NAME_DEF_STMT (expr);
4144 if (is_gimple_assign (s)
4145 && gimple_assign_rhs_code (s) == code
4146 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
4147 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
4148 return true;
4149 }
4150
4151 /* If op1 is of the form (name != 0) or (name == 0), and the definition
4152 of name is a comparison, recurse. */
4153 if (TREE_CODE (op1) == SSA_NAME
4154 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
4155 {
4156 s = SSA_NAME_DEF_STMT (op1);
4157 if (is_gimple_assign (s)
4158 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
4159 {
4160 enum tree_code c = gimple_assign_rhs_code (s);
4161 if ((c == NE_EXPR && integer_zerop (op2))
4162 || (c == EQ_EXPR && integer_nonzerop (op2)))
4163 return same_bool_comparison_p (expr, c,
4164 gimple_assign_rhs1 (s),
4165 gimple_assign_rhs2 (s));
4166 if ((c == EQ_EXPR && integer_zerop (op2))
4167 || (c == NE_EXPR && integer_nonzerop (op2)))
4168 return same_bool_comparison_p (expr,
4169 invert_tree_comparison (c, false),
4170 gimple_assign_rhs1 (s),
4171 gimple_assign_rhs2 (s));
4172 }
4173 }
4174 return false;
4175}
4176
4177/* Check to see if two boolean expressions OP1 and OP2 are logically
4178 equivalent. */
4179
4180static bool
4181same_bool_result_p (const_tree op1, const_tree op2)
4182{
4183 /* Simple cases first. */
4184 if (operand_equal_p (op1, op2, 0))
4185 return true;
4186
4187 /* Check the cases where at least one of the operands is a comparison.
4188 These are a bit smarter than operand_equal_p in that they apply some
4189 identifies on SSA_NAMEs. */
98209db3 4190 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
4191 && same_bool_comparison_p (op1, TREE_CODE (op2),
4192 TREE_OPERAND (op2, 0),
4193 TREE_OPERAND (op2, 1)))
4194 return true;
98209db3 4195 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
4196 && same_bool_comparison_p (op2, TREE_CODE (op1),
4197 TREE_OPERAND (op1, 0),
4198 TREE_OPERAND (op1, 1)))
4199 return true;
4200
4201 /* Default case. */
4202 return false;
4203}
4204
4205/* Forward declarations for some mutually recursive functions. */
4206
4207static tree
4208and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4209 enum tree_code code2, tree op2a, tree op2b);
4210static tree
4211and_var_with_comparison (tree var, bool invert,
4212 enum tree_code code2, tree op2a, tree op2b);
4213static tree
355fe088 4214and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
4215 enum tree_code code2, tree op2a, tree op2b);
4216static tree
4217or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4218 enum tree_code code2, tree op2a, tree op2b);
4219static tree
4220or_var_with_comparison (tree var, bool invert,
4221 enum tree_code code2, tree op2a, tree op2b);
4222static tree
355fe088 4223or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
4224 enum tree_code code2, tree op2a, tree op2b);
4225
4226/* Helper function for and_comparisons_1: try to simplify the AND of the
4227 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4228 If INVERT is true, invert the value of the VAR before doing the AND.
4229 Return NULL_EXPR if we can't simplify this to a single expression. */
4230
4231static tree
4232and_var_with_comparison (tree var, bool invert,
4233 enum tree_code code2, tree op2a, tree op2b)
4234{
4235 tree t;
355fe088 4236 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
4237
4238 /* We can only deal with variables whose definitions are assignments. */
4239 if (!is_gimple_assign (stmt))
4240 return NULL_TREE;
4241
4242 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4243 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4244 Then we only have to consider the simpler non-inverted cases. */
4245 if (invert)
4246 t = or_var_with_comparison_1 (stmt,
4247 invert_tree_comparison (code2, false),
4248 op2a, op2b);
4249 else
4250 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4251 return canonicalize_bool (t, invert);
4252}
4253
4254/* Try to simplify the AND of the ssa variable defined by the assignment
4255 STMT with the comparison specified by (OP2A CODE2 OP2B).
4256 Return NULL_EXPR if we can't simplify this to a single expression. */
4257
4258static tree
355fe088 4259and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
4260 enum tree_code code2, tree op2a, tree op2b)
4261{
4262 tree var = gimple_assign_lhs (stmt);
4263 tree true_test_var = NULL_TREE;
4264 tree false_test_var = NULL_TREE;
4265 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4266
4267 /* Check for identities like (var AND (var == 0)) => false. */
4268 if (TREE_CODE (op2a) == SSA_NAME
4269 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4270 {
4271 if ((code2 == NE_EXPR && integer_zerop (op2b))
4272 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4273 {
4274 true_test_var = op2a;
4275 if (var == true_test_var)
4276 return var;
4277 }
4278 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4279 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4280 {
4281 false_test_var = op2a;
4282 if (var == false_test_var)
4283 return boolean_false_node;
4284 }
4285 }
4286
4287 /* If the definition is a comparison, recurse on it. */
4288 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4289 {
4290 tree t = and_comparisons_1 (innercode,
4291 gimple_assign_rhs1 (stmt),
4292 gimple_assign_rhs2 (stmt),
4293 code2,
4294 op2a,
4295 op2b);
4296 if (t)
4297 return t;
4298 }
4299
4300 /* If the definition is an AND or OR expression, we may be able to
4301 simplify by reassociating. */
eb9820c0
KT
4302 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4303 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
4304 {
4305 tree inner1 = gimple_assign_rhs1 (stmt);
4306 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 4307 gimple *s;
e89065a1
SL
4308 tree t;
4309 tree partial = NULL_TREE;
eb9820c0 4310 bool is_and = (innercode == BIT_AND_EXPR);
e89065a1
SL
4311
4312 /* Check for boolean identities that don't require recursive examination
4313 of inner1/inner2:
4314 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4315 inner1 AND (inner1 OR inner2) => inner1
4316 !inner1 AND (inner1 AND inner2) => false
4317 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4318 Likewise for similar cases involving inner2. */
4319 if (inner1 == true_test_var)
4320 return (is_and ? var : inner1);
4321 else if (inner2 == true_test_var)
4322 return (is_and ? var : inner2);
4323 else if (inner1 == false_test_var)
4324 return (is_and
4325 ? boolean_false_node
4326 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4327 else if (inner2 == false_test_var)
4328 return (is_and
4329 ? boolean_false_node
4330 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4331
4332 /* Next, redistribute/reassociate the AND across the inner tests.
4333 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4334 if (TREE_CODE (inner1) == SSA_NAME
4335 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4336 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4337 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4338 gimple_assign_rhs1 (s),
4339 gimple_assign_rhs2 (s),
4340 code2, op2a, op2b)))
4341 {
4342 /* Handle the AND case, where we are reassociating:
4343 (inner1 AND inner2) AND (op2a code2 op2b)
4344 => (t AND inner2)
4345 If the partial result t is a constant, we win. Otherwise
4346 continue on to try reassociating with the other inner test. */
4347 if (is_and)
4348 {
4349 if (integer_onep (t))
4350 return inner2;
4351 else if (integer_zerop (t))
4352 return boolean_false_node;
4353 }
4354
4355 /* Handle the OR case, where we are redistributing:
4356 (inner1 OR inner2) AND (op2a code2 op2b)
4357 => (t OR (inner2 AND (op2a code2 op2b))) */
8236c8eb
JJ
4358 else if (integer_onep (t))
4359 return boolean_true_node;
4360
4361 /* Save partial result for later. */
4362 partial = t;
e89065a1
SL
4363 }
4364
4365 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4366 if (TREE_CODE (inner2) == SSA_NAME
4367 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4368 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4369 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4370 gimple_assign_rhs1 (s),
4371 gimple_assign_rhs2 (s),
4372 code2, op2a, op2b)))
4373 {
4374 /* Handle the AND case, where we are reassociating:
4375 (inner1 AND inner2) AND (op2a code2 op2b)
4376 => (inner1 AND t) */
4377 if (is_and)
4378 {
4379 if (integer_onep (t))
4380 return inner1;
4381 else if (integer_zerop (t))
4382 return boolean_false_node;
8236c8eb
JJ
4383 /* If both are the same, we can apply the identity
4384 (x AND x) == x. */
4385 else if (partial && same_bool_result_p (t, partial))
4386 return t;
e89065a1
SL
4387 }
4388
4389 /* Handle the OR case. where we are redistributing:
4390 (inner1 OR inner2) AND (op2a code2 op2b)
4391 => (t OR (inner1 AND (op2a code2 op2b)))
4392 => (t OR partial) */
4393 else
4394 {
4395 if (integer_onep (t))
4396 return boolean_true_node;
4397 else if (partial)
4398 {
4399 /* We already got a simplification for the other
4400 operand to the redistributed OR expression. The
4401 interesting case is when at least one is false.
4402 Or, if both are the same, we can apply the identity
4403 (x OR x) == x. */
4404 if (integer_zerop (partial))
4405 return t;
4406 else if (integer_zerop (t))
4407 return partial;
4408 else if (same_bool_result_p (t, partial))
4409 return t;
4410 }
4411 }
4412 }
4413 }
4414 return NULL_TREE;
4415}
4416
4417/* Try to simplify the AND of two comparisons defined by
4418 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4419 If this can be done without constructing an intermediate value,
4420 return the resulting tree; otherwise NULL_TREE is returned.
4421 This function is deliberately asymmetric as it recurses on SSA_DEFs
4422 in the first comparison but not the second. */
4423
4424static tree
4425and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4426 enum tree_code code2, tree op2a, tree op2b)
4427{
ae22ac3c 4428 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 4429
e89065a1
SL
4430 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4431 if (operand_equal_p (op1a, op2a, 0)
4432 && operand_equal_p (op1b, op2b, 0))
4433 {
eb9820c0 4434 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4435 tree t = combine_comparisons (UNKNOWN_LOCATION,
4436 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 4437 truth_type, op1a, op1b);
e89065a1
SL
4438 if (t)
4439 return t;
4440 }
4441
4442 /* Likewise the swapped case of the above. */
4443 if (operand_equal_p (op1a, op2b, 0)
4444 && operand_equal_p (op1b, op2a, 0))
4445 {
eb9820c0 4446 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4447 tree t = combine_comparisons (UNKNOWN_LOCATION,
4448 TRUTH_ANDIF_EXPR, code1,
4449 swap_tree_comparison (code2),
31ed6226 4450 truth_type, op1a, op1b);
e89065a1
SL
4451 if (t)
4452 return t;
4453 }
4454
4455 /* If both comparisons are of the same value against constants, we might
4456 be able to merge them. */
4457 if (operand_equal_p (op1a, op2a, 0)
4458 && TREE_CODE (op1b) == INTEGER_CST
4459 && TREE_CODE (op2b) == INTEGER_CST)
4460 {
4461 int cmp = tree_int_cst_compare (op1b, op2b);
4462
4463 /* If we have (op1a == op1b), we should either be able to
4464 return that or FALSE, depending on whether the constant op1b
4465 also satisfies the other comparison against op2b. */
4466 if (code1 == EQ_EXPR)
4467 {
4468 bool done = true;
4469 bool val;
4470 switch (code2)
4471 {
4472 case EQ_EXPR: val = (cmp == 0); break;
4473 case NE_EXPR: val = (cmp != 0); break;
4474 case LT_EXPR: val = (cmp < 0); break;
4475 case GT_EXPR: val = (cmp > 0); break;
4476 case LE_EXPR: val = (cmp <= 0); break;
4477 case GE_EXPR: val = (cmp >= 0); break;
4478 default: done = false;
4479 }
4480 if (done)
4481 {
4482 if (val)
4483 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4484 else
4485 return boolean_false_node;
4486 }
4487 }
4488 /* Likewise if the second comparison is an == comparison. */
4489 else if (code2 == EQ_EXPR)
4490 {
4491 bool done = true;
4492 bool val;
4493 switch (code1)
4494 {
4495 case EQ_EXPR: val = (cmp == 0); break;
4496 case NE_EXPR: val = (cmp != 0); break;
4497 case LT_EXPR: val = (cmp > 0); break;
4498 case GT_EXPR: val = (cmp < 0); break;
4499 case LE_EXPR: val = (cmp >= 0); break;
4500 case GE_EXPR: val = (cmp <= 0); break;
4501 default: done = false;
4502 }
4503 if (done)
4504 {
4505 if (val)
4506 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4507 else
4508 return boolean_false_node;
4509 }
4510 }
4511
4512 /* Same business with inequality tests. */
4513 else if (code1 == NE_EXPR)
4514 {
4515 bool val;
4516 switch (code2)
4517 {
4518 case EQ_EXPR: val = (cmp != 0); break;
4519 case NE_EXPR: val = (cmp == 0); break;
4520 case LT_EXPR: val = (cmp >= 0); break;
4521 case GT_EXPR: val = (cmp <= 0); break;
4522 case LE_EXPR: val = (cmp > 0); break;
4523 case GE_EXPR: val = (cmp < 0); break;
4524 default:
4525 val = false;
4526 }
4527 if (val)
4528 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4529 }
4530 else if (code2 == NE_EXPR)
4531 {
4532 bool val;
4533 switch (code1)
4534 {
4535 case EQ_EXPR: val = (cmp == 0); break;
4536 case NE_EXPR: val = (cmp != 0); break;
4537 case LT_EXPR: val = (cmp <= 0); break;
4538 case GT_EXPR: val = (cmp >= 0); break;
4539 case LE_EXPR: val = (cmp < 0); break;
4540 case GE_EXPR: val = (cmp > 0); break;
4541 default:
4542 val = false;
4543 }
4544 if (val)
4545 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4546 }
4547
4548 /* Chose the more restrictive of two < or <= comparisons. */
4549 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4550 && (code2 == LT_EXPR || code2 == LE_EXPR))
4551 {
4552 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4553 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4554 else
4555 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4556 }
4557
4558 /* Likewise chose the more restrictive of two > or >= comparisons. */
4559 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4560 && (code2 == GT_EXPR || code2 == GE_EXPR))
4561 {
4562 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4563 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4564 else
4565 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4566 }
4567
4568 /* Check for singleton ranges. */
4569 else if (cmp == 0
4570 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4571 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4572 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4573
4574 /* Check for disjoint ranges. */
4575 else if (cmp <= 0
4576 && (code1 == LT_EXPR || code1 == LE_EXPR)
4577 && (code2 == GT_EXPR || code2 == GE_EXPR))
4578 return boolean_false_node;
4579 else if (cmp >= 0
4580 && (code1 == GT_EXPR || code1 == GE_EXPR)
4581 && (code2 == LT_EXPR || code2 == LE_EXPR))
4582 return boolean_false_node;
4583 }
4584
4585 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4586 NAME's definition is a truth value. See if there are any simplifications
4587 that can be done against the NAME's definition. */
4588 if (TREE_CODE (op1a) == SSA_NAME
4589 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4590 && (integer_zerop (op1b) || integer_onep (op1b)))
4591 {
4592 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4593 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 4594 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
4595 switch (gimple_code (stmt))
4596 {
4597 case GIMPLE_ASSIGN:
4598 /* Try to simplify by copy-propagating the definition. */
4599 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4600
4601 case GIMPLE_PHI:
4602 /* If every argument to the PHI produces the same result when
4603 ANDed with the second comparison, we win.
4604 Do not do this unless the type is bool since we need a bool
4605 result here anyway. */
4606 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4607 {
4608 tree result = NULL_TREE;
4609 unsigned i;
4610 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4611 {
4612 tree arg = gimple_phi_arg_def (stmt, i);
4613
4614 /* If this PHI has itself as an argument, ignore it.
4615 If all the other args produce the same result,
4616 we're still OK. */
4617 if (arg == gimple_phi_result (stmt))
4618 continue;
4619 else if (TREE_CODE (arg) == INTEGER_CST)
4620 {
4621 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4622 {
4623 if (!result)
4624 result = boolean_false_node;
4625 else if (!integer_zerop (result))
4626 return NULL_TREE;
4627 }
4628 else if (!result)
4629 result = fold_build2 (code2, boolean_type_node,
4630 op2a, op2b);
4631 else if (!same_bool_comparison_p (result,
4632 code2, op2a, op2b))
4633 return NULL_TREE;
4634 }
0e8b84ec
JJ
4635 else if (TREE_CODE (arg) == SSA_NAME
4636 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 4637 {
6c66f733 4638 tree temp;
355fe088 4639 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
4640 /* In simple cases we can look through PHI nodes,
4641 but we have to be careful with loops.
4642 See PR49073. */
4643 if (! dom_info_available_p (CDI_DOMINATORS)
4644 || gimple_bb (def_stmt) == gimple_bb (stmt)
4645 || dominated_by_p (CDI_DOMINATORS,
4646 gimple_bb (def_stmt),
4647 gimple_bb (stmt)))
4648 return NULL_TREE;
4649 temp = and_var_with_comparison (arg, invert, code2,
4650 op2a, op2b);
e89065a1
SL
4651 if (!temp)
4652 return NULL_TREE;
4653 else if (!result)
4654 result = temp;
4655 else if (!same_bool_result_p (result, temp))
4656 return NULL_TREE;
4657 }
4658 else
4659 return NULL_TREE;
4660 }
4661 return result;
4662 }
4663
4664 default:
4665 break;
4666 }
4667 }
4668 return NULL_TREE;
4669}
4670
4671/* Try to simplify the AND of two comparisons, specified by
4672 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4673 If this can be simplified to a single expression (without requiring
4674 introducing more SSA variables to hold intermediate values),
4675 return the resulting tree. Otherwise return NULL_TREE.
4676 If the result expression is non-null, it has boolean type. */
4677
4678tree
4679maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4680 enum tree_code code2, tree op2a, tree op2b)
4681{
4682 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4683 if (t)
4684 return t;
4685 else
4686 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4687}
4688
4689/* Helper function for or_comparisons_1: try to simplify the OR of the
4690 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4691 If INVERT is true, invert the value of VAR before doing the OR.
4692 Return NULL_EXPR if we can't simplify this to a single expression. */
4693
4694static tree
4695or_var_with_comparison (tree var, bool invert,
4696 enum tree_code code2, tree op2a, tree op2b)
4697{
4698 tree t;
355fe088 4699 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
4700
4701 /* We can only deal with variables whose definitions are assignments. */
4702 if (!is_gimple_assign (stmt))
4703 return NULL_TREE;
4704
4705 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4706 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4707 Then we only have to consider the simpler non-inverted cases. */
4708 if (invert)
4709 t = and_var_with_comparison_1 (stmt,
4710 invert_tree_comparison (code2, false),
4711 op2a, op2b);
4712 else
4713 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4714 return canonicalize_bool (t, invert);
4715}
4716
4717/* Try to simplify the OR of the ssa variable defined by the assignment
4718 STMT with the comparison specified by (OP2A CODE2 OP2B).
4719 Return NULL_EXPR if we can't simplify this to a single expression. */
4720
4721static tree
355fe088 4722or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
4723 enum tree_code code2, tree op2a, tree op2b)
4724{
4725 tree var = gimple_assign_lhs (stmt);
4726 tree true_test_var = NULL_TREE;
4727 tree false_test_var = NULL_TREE;
4728 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4729
4730 /* Check for identities like (var OR (var != 0)) => true . */
4731 if (TREE_CODE (op2a) == SSA_NAME
4732 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4733 {
4734 if ((code2 == NE_EXPR && integer_zerop (op2b))
4735 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4736 {
4737 true_test_var = op2a;
4738 if (var == true_test_var)
4739 return var;
4740 }
4741 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4742 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4743 {
4744 false_test_var = op2a;
4745 if (var == false_test_var)
4746 return boolean_true_node;
4747 }
4748 }
4749
4750 /* If the definition is a comparison, recurse on it. */
4751 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4752 {
4753 tree t = or_comparisons_1 (innercode,
4754 gimple_assign_rhs1 (stmt),
4755 gimple_assign_rhs2 (stmt),
4756 code2,
4757 op2a,
4758 op2b);
4759 if (t)
4760 return t;
4761 }
4762
4763 /* If the definition is an AND or OR expression, we may be able to
4764 simplify by reassociating. */
eb9820c0
KT
4765 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4766 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
4767 {
4768 tree inner1 = gimple_assign_rhs1 (stmt);
4769 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 4770 gimple *s;
e89065a1
SL
4771 tree t;
4772 tree partial = NULL_TREE;
eb9820c0 4773 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
4774
4775 /* Check for boolean identities that don't require recursive examination
4776 of inner1/inner2:
4777 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4778 inner1 OR (inner1 AND inner2) => inner1
4779 !inner1 OR (inner1 OR inner2) => true
4780 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4781 */
4782 if (inner1 == true_test_var)
4783 return (is_or ? var : inner1);
4784 else if (inner2 == true_test_var)
4785 return (is_or ? var : inner2);
4786 else if (inner1 == false_test_var)
4787 return (is_or
4788 ? boolean_true_node
4789 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4790 else if (inner2 == false_test_var)
4791 return (is_or
4792 ? boolean_true_node
4793 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4794
4795 /* Next, redistribute/reassociate the OR across the inner tests.
4796 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4797 if (TREE_CODE (inner1) == SSA_NAME
4798 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4799 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4800 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4801 gimple_assign_rhs1 (s),
4802 gimple_assign_rhs2 (s),
4803 code2, op2a, op2b)))
4804 {
4805 /* Handle the OR case, where we are reassociating:
4806 (inner1 OR inner2) OR (op2a code2 op2b)
4807 => (t OR inner2)
4808 If the partial result t is a constant, we win. Otherwise
4809 continue on to try reassociating with the other inner test. */
8236c8eb 4810 if (is_or)
e89065a1
SL
4811 {
4812 if (integer_onep (t))
4813 return boolean_true_node;
4814 else if (integer_zerop (t))
4815 return inner2;
4816 }
4817
4818 /* Handle the AND case, where we are redistributing:
4819 (inner1 AND inner2) OR (op2a code2 op2b)
4820 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
4821 else if (integer_zerop (t))
4822 return boolean_false_node;
4823
4824 /* Save partial result for later. */
4825 partial = t;
e89065a1
SL
4826 }
4827
4828 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4829 if (TREE_CODE (inner2) == SSA_NAME
4830 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4831 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4832 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4833 gimple_assign_rhs1 (s),
4834 gimple_assign_rhs2 (s),
4835 code2, op2a, op2b)))
4836 {
4837 /* Handle the OR case, where we are reassociating:
4838 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
4839 => (inner1 OR t)
4840 => (t OR partial) */
4841 if (is_or)
e89065a1
SL
4842 {
4843 if (integer_zerop (t))
4844 return inner1;
4845 else if (integer_onep (t))
4846 return boolean_true_node;
8236c8eb
JJ
4847 /* If both are the same, we can apply the identity
4848 (x OR x) == x. */
4849 else if (partial && same_bool_result_p (t, partial))
4850 return t;
e89065a1
SL
4851 }
4852
4853 /* Handle the AND case, where we are redistributing:
4854 (inner1 AND inner2) OR (op2a code2 op2b)
4855 => (t AND (inner1 OR (op2a code2 op2b)))
4856 => (t AND partial) */
4857 else
4858 {
4859 if (integer_zerop (t))
4860 return boolean_false_node;
4861 else if (partial)
4862 {
4863 /* We already got a simplification for the other
4864 operand to the redistributed AND expression. The
4865 interesting case is when at least one is true.
4866 Or, if both are the same, we can apply the identity
8236c8eb 4867 (x AND x) == x. */
e89065a1
SL
4868 if (integer_onep (partial))
4869 return t;
4870 else if (integer_onep (t))
4871 return partial;
4872 else if (same_bool_result_p (t, partial))
8236c8eb 4873 return t;
e89065a1
SL
4874 }
4875 }
4876 }
4877 }
4878 return NULL_TREE;
4879}
4880
4881/* Try to simplify the OR of two comparisons defined by
4882 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4883 If this can be done without constructing an intermediate value,
4884 return the resulting tree; otherwise NULL_TREE is returned.
4885 This function is deliberately asymmetric as it recurses on SSA_DEFs
4886 in the first comparison but not the second. */
4887
4888static tree
4889or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4890 enum tree_code code2, tree op2a, tree op2b)
4891{
ae22ac3c 4892 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 4893
e89065a1
SL
4894 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4895 if (operand_equal_p (op1a, op2a, 0)
4896 && operand_equal_p (op1b, op2b, 0))
4897 {
eb9820c0 4898 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4899 tree t = combine_comparisons (UNKNOWN_LOCATION,
4900 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 4901 truth_type, op1a, op1b);
e89065a1
SL
4902 if (t)
4903 return t;
4904 }
4905
4906 /* Likewise the swapped case of the above. */
4907 if (operand_equal_p (op1a, op2b, 0)
4908 && operand_equal_p (op1b, op2a, 0))
4909 {
eb9820c0 4910 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4911 tree t = combine_comparisons (UNKNOWN_LOCATION,
4912 TRUTH_ORIF_EXPR, code1,
4913 swap_tree_comparison (code2),
31ed6226 4914 truth_type, op1a, op1b);
e89065a1
SL
4915 if (t)
4916 return t;
4917 }
4918
4919 /* If both comparisons are of the same value against constants, we might
4920 be able to merge them. */
4921 if (operand_equal_p (op1a, op2a, 0)
4922 && TREE_CODE (op1b) == INTEGER_CST
4923 && TREE_CODE (op2b) == INTEGER_CST)
4924 {
4925 int cmp = tree_int_cst_compare (op1b, op2b);
4926
4927 /* If we have (op1a != op1b), we should either be able to
4928 return that or TRUE, depending on whether the constant op1b
4929 also satisfies the other comparison against op2b. */
4930 if (code1 == NE_EXPR)
4931 {
4932 bool done = true;
4933 bool val;
4934 switch (code2)
4935 {
4936 case EQ_EXPR: val = (cmp == 0); break;
4937 case NE_EXPR: val = (cmp != 0); break;
4938 case LT_EXPR: val = (cmp < 0); break;
4939 case GT_EXPR: val = (cmp > 0); break;
4940 case LE_EXPR: val = (cmp <= 0); break;
4941 case GE_EXPR: val = (cmp >= 0); break;
4942 default: done = false;
4943 }
4944 if (done)
4945 {
4946 if (val)
4947 return boolean_true_node;
4948 else
4949 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4950 }
4951 }
4952 /* Likewise if the second comparison is a != comparison. */
4953 else if (code2 == NE_EXPR)
4954 {
4955 bool done = true;
4956 bool val;
4957 switch (code1)
4958 {
4959 case EQ_EXPR: val = (cmp == 0); break;
4960 case NE_EXPR: val = (cmp != 0); break;
4961 case LT_EXPR: val = (cmp > 0); break;
4962 case GT_EXPR: val = (cmp < 0); break;
4963 case LE_EXPR: val = (cmp >= 0); break;
4964 case GE_EXPR: val = (cmp <= 0); break;
4965 default: done = false;
4966 }
4967 if (done)
4968 {
4969 if (val)
4970 return boolean_true_node;
4971 else
4972 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4973 }
4974 }
4975
4976 /* See if an equality test is redundant with the other comparison. */
4977 else if (code1 == EQ_EXPR)
4978 {
4979 bool val;
4980 switch (code2)
4981 {
4982 case EQ_EXPR: val = (cmp == 0); break;
4983 case NE_EXPR: val = (cmp != 0); break;
4984 case LT_EXPR: val = (cmp < 0); break;
4985 case GT_EXPR: val = (cmp > 0); break;
4986 case LE_EXPR: val = (cmp <= 0); break;
4987 case GE_EXPR: val = (cmp >= 0); break;
4988 default:
4989 val = false;
4990 }
4991 if (val)
4992 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4993 }
4994 else if (code2 == EQ_EXPR)
4995 {
4996 bool val;
4997 switch (code1)
4998 {
4999 case EQ_EXPR: val = (cmp == 0); break;
5000 case NE_EXPR: val = (cmp != 0); break;
5001 case LT_EXPR: val = (cmp > 0); break;
5002 case GT_EXPR: val = (cmp < 0); break;
5003 case LE_EXPR: val = (cmp >= 0); break;
5004 case GE_EXPR: val = (cmp <= 0); break;
5005 default:
5006 val = false;
5007 }
5008 if (val)
5009 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5010 }
5011
5012 /* Chose the less restrictive of two < or <= comparisons. */
5013 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5014 && (code2 == LT_EXPR || code2 == LE_EXPR))
5015 {
5016 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5017 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5018 else
5019 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5020 }
5021
5022 /* Likewise chose the less restrictive of two > or >= comparisons. */
5023 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5024 && (code2 == GT_EXPR || code2 == GE_EXPR))
5025 {
5026 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5027 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5028 else
5029 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5030 }
5031
5032 /* Check for singleton ranges. */
5033 else if (cmp == 0
5034 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5035 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5036 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5037
5038 /* Check for less/greater pairs that don't restrict the range at all. */
5039 else if (cmp >= 0
5040 && (code1 == LT_EXPR || code1 == LE_EXPR)
5041 && (code2 == GT_EXPR || code2 == GE_EXPR))
5042 return boolean_true_node;
5043 else if (cmp <= 0
5044 && (code1 == GT_EXPR || code1 == GE_EXPR)
5045 && (code2 == LT_EXPR || code2 == LE_EXPR))
5046 return boolean_true_node;
5047 }
5048
5049 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5050 NAME's definition is a truth value. See if there are any simplifications
5051 that can be done against the NAME's definition. */
5052 if (TREE_CODE (op1a) == SSA_NAME
5053 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5054 && (integer_zerop (op1b) || integer_onep (op1b)))
5055 {
5056 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5057 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 5058 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
5059 switch (gimple_code (stmt))
5060 {
5061 case GIMPLE_ASSIGN:
5062 /* Try to simplify by copy-propagating the definition. */
5063 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5064
5065 case GIMPLE_PHI:
5066 /* If every argument to the PHI produces the same result when
5067 ORed with the second comparison, we win.
5068 Do not do this unless the type is bool since we need a bool
5069 result here anyway. */
5070 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5071 {
5072 tree result = NULL_TREE;
5073 unsigned i;
5074 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5075 {
5076 tree arg = gimple_phi_arg_def (stmt, i);
5077
5078 /* If this PHI has itself as an argument, ignore it.
5079 If all the other args produce the same result,
5080 we're still OK. */
5081 if (arg == gimple_phi_result (stmt))
5082 continue;
5083 else if (TREE_CODE (arg) == INTEGER_CST)
5084 {
5085 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5086 {
5087 if (!result)
5088 result = boolean_true_node;
5089 else if (!integer_onep (result))
5090 return NULL_TREE;
5091 }
5092 else if (!result)
5093 result = fold_build2 (code2, boolean_type_node,
5094 op2a, op2b);
5095 else if (!same_bool_comparison_p (result,
5096 code2, op2a, op2b))
5097 return NULL_TREE;
5098 }
0e8b84ec
JJ
5099 else if (TREE_CODE (arg) == SSA_NAME
5100 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 5101 {
6c66f733 5102 tree temp;
355fe088 5103 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
5104 /* In simple cases we can look through PHI nodes,
5105 but we have to be careful with loops.
5106 See PR49073. */
5107 if (! dom_info_available_p (CDI_DOMINATORS)
5108 || gimple_bb (def_stmt) == gimple_bb (stmt)
5109 || dominated_by_p (CDI_DOMINATORS,
5110 gimple_bb (def_stmt),
5111 gimple_bb (stmt)))
5112 return NULL_TREE;
5113 temp = or_var_with_comparison (arg, invert, code2,
5114 op2a, op2b);
e89065a1
SL
5115 if (!temp)
5116 return NULL_TREE;
5117 else if (!result)
5118 result = temp;
5119 else if (!same_bool_result_p (result, temp))
5120 return NULL_TREE;
5121 }
5122 else
5123 return NULL_TREE;
5124 }
5125 return result;
5126 }
5127
5128 default:
5129 break;
5130 }
5131 }
5132 return NULL_TREE;
5133}
5134
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */
5142tree
5143maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5144 enum tree_code code2, tree op2a, tree op2b)
5145{
5146 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5147 if (t)
5148 return t;
5149 else
5150 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5151}
cfef45c8
RG
5152
5153
5154/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5155
5156 Either NULL_TREE, a simplified but non-constant or a constant
5157 is returned.
5158
5159 ??? This should go into a gimple-fold-inline.h file to be eventually
5160 privatized with the single valueize function used in the various TUs
5161 to avoid the indirect function call overhead. */
5162
5163tree
355fe088 5164gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
d2a85801 5165 tree (*gvalueize) (tree))
cfef45c8 5166{
45cc9f96
RB
5167 code_helper rcode;
5168 tree ops[3] = {};
5169 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5170 edges if there are intermediate VARYING defs. For this reason
5171 do not follow SSA edges here even though SCCVN can technically
5172 just deal fine with that. */
34050b6b 5173 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
45cc9f96 5174 {
34050b6b 5175 tree res = NULL_TREE;
c0f62740 5176 if (gimple_simplified_result_is_gimple_val (rcode, ops))
34050b6b
RB
5177 res = ops[0];
5178 else if (mprts_hook)
5179 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5180 if (res)
45cc9f96 5181 {
34050b6b
RB
5182 if (dump_file && dump_flags & TDF_DETAILS)
5183 {
5184 fprintf (dump_file, "Match-and-simplified ");
5185 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
5186 fprintf (dump_file, " to ");
5187 print_generic_expr (dump_file, res, 0);
5188 fprintf (dump_file, "\n");
5189 }
5190 return res;
45cc9f96 5191 }
45cc9f96
RB
5192 }
5193
cfef45c8
RG
5194 location_t loc = gimple_location (stmt);
5195 switch (gimple_code (stmt))
5196 {
5197 case GIMPLE_ASSIGN:
5198 {
5199 enum tree_code subcode = gimple_assign_rhs_code (stmt);
5200
5201 switch (get_gimple_rhs_class (subcode))
5202 {
5203 case GIMPLE_SINGLE_RHS:
5204 {
5205 tree rhs = gimple_assign_rhs1 (stmt);
5206 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5207
5208 if (TREE_CODE (rhs) == SSA_NAME)
5209 {
5210 /* If the RHS is an SSA_NAME, return its known constant value,
5211 if any. */
5212 return (*valueize) (rhs);
5213 }
5214 /* Handle propagating invariant addresses into address
5215 operations. */
5216 else if (TREE_CODE (rhs) == ADDR_EXPR
5217 && !is_gimple_min_invariant (rhs))
5218 {
d25c4172 5219 HOST_WIDE_INT offset = 0;
cfef45c8
RG
5220 tree base;
5221 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5222 &offset,
5223 valueize);
5224 if (base
5225 && (CONSTANT_CLASS_P (base)
5226 || decl_address_invariant_p (base)))
5227 return build_invariant_address (TREE_TYPE (rhs),
5228 base, offset);
5229 }
5230 else if (TREE_CODE (rhs) == CONSTRUCTOR
5231 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5232 && (CONSTRUCTOR_NELTS (rhs)
5233 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5234 {
5235 unsigned i;
d2a12ae7 5236 tree val, *vec;
cfef45c8 5237
d2a12ae7
RG
5238 vec = XALLOCAVEC (tree,
5239 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
cfef45c8
RG
5240 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5241 {
5242 val = (*valueize) (val);
5243 if (TREE_CODE (val) == INTEGER_CST
5244 || TREE_CODE (val) == REAL_CST
5245 || TREE_CODE (val) == FIXED_CST)
d2a12ae7 5246 vec[i] = val;
cfef45c8
RG
5247 else
5248 return NULL_TREE;
5249 }
5250
d2a12ae7 5251 return build_vector (TREE_TYPE (rhs), vec);
cfef45c8 5252 }
bdf37f7a
JH
5253 if (subcode == OBJ_TYPE_REF)
5254 {
5255 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5256 /* If callee is constant, we can fold away the wrapper. */
5257 if (is_gimple_min_invariant (val))
5258 return val;
5259 }
cfef45c8
RG
5260
5261 if (kind == tcc_reference)
5262 {
5263 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5264 || TREE_CODE (rhs) == REALPART_EXPR
5265 || TREE_CODE (rhs) == IMAGPART_EXPR)
5266 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5267 {
5268 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5269 return fold_unary_loc (EXPR_LOCATION (rhs),
5270 TREE_CODE (rhs),
5271 TREE_TYPE (rhs), val);
5272 }
5273 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5274 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5275 {
5276 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5277 return fold_ternary_loc (EXPR_LOCATION (rhs),
5278 TREE_CODE (rhs),
5279 TREE_TYPE (rhs), val,
5280 TREE_OPERAND (rhs, 1),
5281 TREE_OPERAND (rhs, 2));
5282 }
5283 else if (TREE_CODE (rhs) == MEM_REF
5284 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5285 {
5286 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5287 if (TREE_CODE (val) == ADDR_EXPR
5288 && is_gimple_min_invariant (val))
5289 {
5290 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5291 unshare_expr (val),
5292 TREE_OPERAND (rhs, 1));
5293 if (tem)
5294 rhs = tem;
5295 }
5296 }
5297 return fold_const_aggregate_ref_1 (rhs, valueize);
5298 }
5299 else if (kind == tcc_declaration)
5300 return get_symbol_constant_value (rhs);
5301 return rhs;
5302 }
5303
5304 case GIMPLE_UNARY_RHS:
f3582e54 5305 return NULL_TREE;
cfef45c8
RG
5306
5307 case GIMPLE_BINARY_RHS:
4b1b9e64
RB
5308 /* Translate &x + CST into an invariant form suitable for
5309 further propagation. */
5310 if (subcode == POINTER_PLUS_EXPR)
5311 {
4b1b9e64
RB
5312 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5313 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4b1b9e64
RB
5314 if (TREE_CODE (op0) == ADDR_EXPR
5315 && TREE_CODE (op1) == INTEGER_CST)
5316 {
5317 tree off = fold_convert (ptr_type_node, op1);
5318 return build_fold_addr_expr_loc
5319 (loc,
5320 fold_build2 (MEM_REF,
5321 TREE_TYPE (TREE_TYPE (op0)),
5322 unshare_expr (op0), off));
5323 }
5324 }
59c20dc7
RB
5325 /* Canonicalize bool != 0 and bool == 0 appearing after
5326 valueization. While gimple_simplify handles this
5327 it can get confused by the ~X == 1 -> X == 0 transform
5328 which we cant reduce to a SSA name or a constant
5329 (and we have no way to tell gimple_simplify to not
5330 consider those transforms in the first place). */
5331 else if (subcode == EQ_EXPR
5332 || subcode == NE_EXPR)
5333 {
5334 tree lhs = gimple_assign_lhs (stmt);
5335 tree op0 = gimple_assign_rhs1 (stmt);
5336 if (useless_type_conversion_p (TREE_TYPE (lhs),
5337 TREE_TYPE (op0)))
5338 {
5339 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5340 op0 = (*valueize) (op0);
8861704d
RB
5341 if (TREE_CODE (op0) == INTEGER_CST)
5342 std::swap (op0, op1);
5343 if (TREE_CODE (op1) == INTEGER_CST
5344 && ((subcode == NE_EXPR && integer_zerop (op1))
5345 || (subcode == EQ_EXPR && integer_onep (op1))))
5346 return op0;
59c20dc7
RB
5347 }
5348 }
4b1b9e64 5349 return NULL_TREE;
cfef45c8
RG
5350
5351 case GIMPLE_TERNARY_RHS:
5352 {
5353 /* Handle ternary operators that can appear in GIMPLE form. */
5354 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5355 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5356 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
cfef45c8
RG
5357 return fold_ternary_loc (loc, subcode,
5358 gimple_expr_type (stmt), op0, op1, op2);
5359 }
5360
5361 default:
5362 gcc_unreachable ();
5363 }
5364 }
5365
5366 case GIMPLE_CALL:
5367 {
25583c4f 5368 tree fn;
538dd0b7 5369 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
5370
5371 if (gimple_call_internal_p (stmt))
31e071ae
MP
5372 {
5373 enum tree_code subcode = ERROR_MARK;
5374 switch (gimple_call_internal_fn (stmt))
5375 {
5376 case IFN_UBSAN_CHECK_ADD:
5377 subcode = PLUS_EXPR;
5378 break;
5379 case IFN_UBSAN_CHECK_SUB:
5380 subcode = MINUS_EXPR;
5381 break;
5382 case IFN_UBSAN_CHECK_MUL:
5383 subcode = MULT_EXPR;
5384 break;
68fa96d6
ML
5385 case IFN_BUILTIN_EXPECT:
5386 {
5387 tree arg0 = gimple_call_arg (stmt, 0);
5388 tree op0 = (*valueize) (arg0);
5389 if (TREE_CODE (op0) == INTEGER_CST)
5390 return op0;
5391 return NULL_TREE;
5392 }
31e071ae
MP
5393 default:
5394 return NULL_TREE;
5395 }
368b454d
JJ
5396 tree arg0 = gimple_call_arg (stmt, 0);
5397 tree arg1 = gimple_call_arg (stmt, 1);
5398 tree op0 = (*valueize) (arg0);
5399 tree op1 = (*valueize) (arg1);
31e071ae
MP
5400
5401 if (TREE_CODE (op0) != INTEGER_CST
5402 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
5403 {
5404 switch (subcode)
5405 {
5406 case MULT_EXPR:
5407 /* x * 0 = 0 * x = 0 without overflow. */
5408 if (integer_zerop (op0) || integer_zerop (op1))
5409 return build_zero_cst (TREE_TYPE (arg0));
5410 break;
5411 case MINUS_EXPR:
5412 /* y - y = 0 without overflow. */
5413 if (operand_equal_p (op0, op1, 0))
5414 return build_zero_cst (TREE_TYPE (arg0));
5415 break;
5416 default:
5417 break;
5418 }
5419 }
5420 tree res
5421 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
5422 if (res
5423 && TREE_CODE (res) == INTEGER_CST
5424 && !TREE_OVERFLOW (res))
5425 return res;
5426 return NULL_TREE;
5427 }
25583c4f
RS
5428
5429 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8
RG
5430 if (TREE_CODE (fn) == ADDR_EXPR
5431 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5c944c6c
RB
5432 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5433 && gimple_builtin_call_types_compatible_p (stmt,
5434 TREE_OPERAND (fn, 0)))
cfef45c8
RG
5435 {
5436 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 5437 tree retval;
cfef45c8
RG
5438 unsigned i;
5439 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5440 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 5441 retval = fold_builtin_call_array (loc,
538dd0b7 5442 gimple_call_return_type (call_stmt),
cfef45c8 5443 fn, gimple_call_num_args (stmt), args);
cfef45c8 5444 if (retval)
5c944c6c
RB
5445 {
5446 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5447 STRIP_NOPS (retval);
538dd0b7
DM
5448 retval = fold_convert (gimple_call_return_type (call_stmt),
5449 retval);
5c944c6c 5450 }
cfef45c8
RG
5451 return retval;
5452 }
5453 return NULL_TREE;
5454 }
5455
5456 default:
5457 return NULL_TREE;
5458 }
5459}
5460
5461/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5462 Returns NULL_TREE if folding to a constant is not possible, otherwise
5463 returns a constant according to is_gimple_min_invariant. */
5464
5465tree
355fe088 5466gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
5467{
5468 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5469 if (res && is_gimple_min_invariant (res))
5470 return res;
5471 return NULL_TREE;
5472}
5473
5474
5475/* The following set of functions are supposed to fold references using
5476 their constant initializers. */
5477
cfef45c8
RG
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
5486static tree
5487get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5488 tree (*valueize)(tree))
5489{
5490 HOST_WIDE_INT bit_offset2, size, max_size;
ee45a32d
EB
5491 bool reverse;
5492
cfef45c8
RG
5493 if (TREE_CODE (base) == MEM_REF)
5494 {
5495 if (!integer_zerop (TREE_OPERAND (base, 1)))
5496 {
9541ffee 5497 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
cfef45c8 5498 return NULL_TREE;
807e902e 5499 *bit_offset += (mem_ref_offset (base).to_short_addr ()
cfef45c8
RG
5500 * BITS_PER_UNIT);
5501 }
5502
5503 if (valueize
5504 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5505 base = valueize (TREE_OPERAND (base, 0));
5506 if (!base || TREE_CODE (base) != ADDR_EXPR)
5507 return NULL_TREE;
5508 base = TREE_OPERAND (base, 0);
5509 }
13e88953
RB
5510 else if (valueize
5511 && TREE_CODE (base) == SSA_NAME)
5512 base = valueize (base);
cfef45c8
RG
5513
5514 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5515 DECL_INITIAL. If BASE is a nested reference into another
5516 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5517 the inner reference. */
5518 switch (TREE_CODE (base))
5519 {
5520 case VAR_DECL:
cfef45c8 5521 case CONST_DECL:
6a6dac52
JH
5522 {
5523 tree init = ctor_for_folding (base);
5524
688010ba 5525 /* Our semantic is exact opposite of ctor_for_folding;
6a6dac52
JH
5526 NULL means unknown, while error_mark_node is 0. */
5527 if (init == error_mark_node)
5528 return NULL_TREE;
5529 if (!init)
5530 return error_mark_node;
5531 return init;
5532 }
cfef45c8 5533
13e88953
RB
5534 case VIEW_CONVERT_EXPR:
5535 return get_base_constructor (TREE_OPERAND (base, 0),
5536 bit_offset, valueize);
5537
cfef45c8
RG
5538 case ARRAY_REF:
5539 case COMPONENT_REF:
ee45a32d
EB
5540 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
5541 &reverse);
cfef45c8
RG
5542 if (max_size == -1 || size != max_size)
5543 return NULL_TREE;
5544 *bit_offset += bit_offset2;
5545 return get_base_constructor (base, bit_offset, valueize);
5546
cfef45c8
RG
5547 case CONSTRUCTOR:
5548 return base;
5549
5550 default:
13e88953
RB
5551 if (CONSTANT_CLASS_P (base))
5552 return base;
5553
cfef45c8
RG
5554 return NULL_TREE;
5555 }
5556}
5557
cfef45c8
RG
5558/* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5559 SIZE to the memory at bit OFFSET. */
5560
5561static tree
5562fold_array_ctor_reference (tree type, tree ctor,
5563 unsigned HOST_WIDE_INT offset,
c44c2088
JH
5564 unsigned HOST_WIDE_INT size,
5565 tree from_decl)
cfef45c8 5566{
807e902e
KZ
5567 offset_int low_bound;
5568 offset_int elt_size;
807e902e 5569 offset_int access_index;
6a636014 5570 tree domain_type = NULL_TREE;
cfef45c8
RG
5571 HOST_WIDE_INT inner_offset;
5572
5573 /* Compute low bound and elt size. */
eb8f1123
RG
5574 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5575 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
5576 if (domain_type && TYPE_MIN_VALUE (domain_type))
5577 {
5578 /* Static constructors for variably sized objects makes no sense. */
5579 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
807e902e 5580 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
5581 }
5582 else
807e902e 5583 low_bound = 0;
cfef45c8 5584 /* Static constructors for variably sized objects makes no sense. */
c3284718 5585 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
cfef45c8 5586 == INTEGER_CST);
807e902e 5587 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8
RG
5588
5589 /* We can handle only constantly sized accesses that are known to not
5590 be larger than size of array element. */
5591 if (!TYPE_SIZE_UNIT (type)
5592 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
032c80e9 5593 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
807e902e 5594 || elt_size == 0)
cfef45c8
RG
5595 return NULL_TREE;
5596
5597 /* Compute the array index we look for. */
807e902e
KZ
5598 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5599 elt_size);
27bcd47c 5600 access_index += low_bound;
cfef45c8
RG
5601
5602 /* And offset within the access. */
27bcd47c 5603 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8
RG
5604
5605 /* See if the array field is large enough to span whole access. We do not
5606 care to fold accesses spanning multiple array indexes. */
27bcd47c 5607 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
cfef45c8 5608 return NULL_TREE;
6a636014
AL
5609 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
5610 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
cfef45c8 5611
cfef45c8
RG
5612 /* When memory is not explicitely mentioned in constructor,
5613 it is 0 (or out of range). */
5614 return build_zero_cst (type);
5615}
5616
5617/* CTOR is CONSTRUCTOR of an aggregate or vector.
5618 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5619
5620static tree
5621fold_nonarray_ctor_reference (tree type, tree ctor,
5622 unsigned HOST_WIDE_INT offset,
c44c2088
JH
5623 unsigned HOST_WIDE_INT size,
5624 tree from_decl)
cfef45c8
RG
5625{
5626 unsigned HOST_WIDE_INT cnt;
5627 tree cfield, cval;
5628
5629 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5630 cval)
5631 {
5632 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5633 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5634 tree field_size = DECL_SIZE (cfield);
807e902e
KZ
5635 offset_int bitoffset;
5636 offset_int bitoffset_end, access_end;
cfef45c8
RG
5637
5638 /* Variable sized objects in static constructors makes no sense,
5639 but field_size can be NULL for flexible array members. */
5640 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5641 && TREE_CODE (byte_offset) == INTEGER_CST
5642 && (field_size != NULL_TREE
5643 ? TREE_CODE (field_size) == INTEGER_CST
5644 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5645
5646 /* Compute bit offset of the field. */
807e902e 5647 bitoffset = (wi::to_offset (field_offset)
8de73453 5648 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
cfef45c8
RG
5649 /* Compute bit offset where the field ends. */
5650 if (field_size != NULL_TREE)
807e902e 5651 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 5652 else
807e902e 5653 bitoffset_end = 0;
cfef45c8 5654
807e902e 5655 access_end = offset_int (offset) + size;
b8b2b009
JJ
5656
5657 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5658 [BITOFFSET, BITOFFSET_END)? */
807e902e 5659 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 5660 && (field_size == NULL_TREE
807e902e 5661 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 5662 {
807e902e 5663 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8
RG
5664 /* We do have overlap. Now see if field is large enough to
5665 cover the access. Give up for accesses spanning multiple
5666 fields. */
807e902e 5667 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 5668 return NULL_TREE;
032c80e9 5669 if (offset < bitoffset)
b8b2b009 5670 return NULL_TREE;
cfef45c8 5671 return fold_ctor_reference (type, cval,
27bcd47c 5672 inner_offset.to_uhwi (), size,
c44c2088 5673 from_decl);
cfef45c8
RG
5674 }
5675 }
5676 /* When memory is not explicitely mentioned in constructor, it is 0. */
5677 return build_zero_cst (type);
5678}
5679
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  FROM_DECL, if non-NULL, is the declaration
   the constructor originates from and is passed through to
   canonicalize_constructor_val for reference validation.
   Return the folded constant value, or NULL_TREE if folding failed.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  /* Aggregate constructors dispatch on the shape of the constructed
     type; arrays and vectors are indexed, everything else walks
     fields.  */
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {

      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
5735
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;
  bool reverse;

  /* Volatile references must not be folded away.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Zero-based element index, sign-extended to the index
		 type's precision.  */
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex-valued operand first, then extract the
	   requested part if it turned out constant.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
5853
/* Fold a reference T into a constant aggregate to a constant value,
   or return NULL_TREE.  Convenience wrapper around
   fold_const_aggregate_ref_1 with no SSA name valueization.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
06bc3ec7 5859
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  /* Compute the bit offset of entry TOKEN within the table.  */
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
5970
85942f45
JH
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* Decompose the vtable pointer expression into the vtable VAR_DECL
     and a constant offset into it.  */
  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
5999
b184c8f1
AM
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ... */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold accesses that stay within the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively simplify the dereference of the array pointer.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
19e51b40
JJ
6119
6120/* Return true if CODE is an operation that when operating on signed
6121 integer types involves undefined behavior on overflow and the
6122 operation can be expressed with unsigned arithmetic. */
6123
6124bool
6125arith_code_with_undefined_signed_overflow (tree_code code)
6126{
6127 switch (code)
6128 {
6129 case PLUS_EXPR:
6130 case MINUS_EXPR:
6131 case MULT_EXPR:
6132 case NEGATE_EXPR:
6133 case POINTER_PLUS_EXPR:
6134 return true;
6135 default:
6136 return false;
6137 }
6138}
6139
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each operand of STMT to the unsigned type, accumulating the
     conversion statements in STMTS.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* Pointer arithmetic becomes plain unsigned addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original LHS type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
d4f5cd5e 6176
3d2cf79f 6177
c26de36d
RB
6178/* The valueization hook we use for the gimple_build API simplification.
6179 This makes us match fold_buildN behavior by only combining with
6180 statements in the sequence(s) we are currently building. */
6181
6182static tree
6183gimple_build_valueize (tree op)
6184{
6185 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6186 return op;
6187 return NULL_TREE;
6188}
6189
3d2cf79f 6190/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 6191 simplifying it first if possible. Returns the built
3d2cf79f
RB
6192 expression value and appends statements possibly defining it
6193 to SEQ. */
6194
6195tree
6196gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6197 enum tree_code code, tree type, tree op0)
3d2cf79f 6198{
c26de36d 6199 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
6200 if (!res)
6201 {
6202 if (gimple_in_ssa_p (cfun))
b731b390 6203 res = make_ssa_name (type);
3d2cf79f 6204 else
b731b390 6205 res = create_tmp_reg (type);
355fe088 6206 gimple *stmt;
3d2cf79f
RB
6207 if (code == REALPART_EXPR
6208 || code == IMAGPART_EXPR
6209 || code == VIEW_CONVERT_EXPR)
0d0e4a03 6210 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 6211 else
0d0e4a03 6212 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
6213 gimple_set_location (stmt, loc);
6214 gimple_seq_add_stmt_without_update (seq, stmt);
6215 }
6216 return res;
6217}
6218
6219/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 6220 simplifying it first if possible. Returns the built
3d2cf79f
RB
6221 expression value and appends statements possibly defining it
6222 to SEQ. */
6223
6224tree
6225gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6226 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 6227{
c26de36d 6228 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
6229 if (!res)
6230 {
6231 if (gimple_in_ssa_p (cfun))
b731b390 6232 res = make_ssa_name (type);
3d2cf79f 6233 else
b731b390 6234 res = create_tmp_reg (type);
355fe088 6235 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
6236 gimple_set_location (stmt, loc);
6237 gimple_seq_add_stmt_without_update (seq, stmt);
6238 }
6239 return res;
6240}
6241
6242/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 6243 simplifying it first if possible. Returns the built
3d2cf79f
RB
6244 expression value and appends statements possibly defining it
6245 to SEQ. */
6246
6247tree
6248gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6249 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
6250{
6251 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 6252 seq, gimple_build_valueize);
3d2cf79f
RB
6253 if (!res)
6254 {
6255 if (gimple_in_ssa_p (cfun))
b731b390 6256 res = make_ssa_name (type);
3d2cf79f 6257 else
b731b390 6258 res = create_tmp_reg (type);
355fe088 6259 gimple *stmt;
3d2cf79f 6260 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
6261 stmt = gimple_build_assign (res, code,
6262 build3 (code, type, op0, op1, op2));
3d2cf79f 6263 else
0d0e4a03 6264 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
6265 gimple_set_location (stmt, loc);
6266 gimple_seq_add_stmt_without_update (seq, stmt);
6267 }
6268 return res;
6269}
6270
6271/* Build the call FN (ARG0) with a result of type TYPE
6272 (or no result if TYPE is void) with location LOC,
c26de36d 6273 simplifying it first if possible. Returns the built
3d2cf79f
RB
6274 expression value (or NULL_TREE if TYPE is void) and appends
6275 statements possibly defining it to SEQ. */
6276
6277tree
6278gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6279 enum built_in_function fn, tree type, tree arg0)
3d2cf79f 6280{
c26de36d 6281 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
6282 if (!res)
6283 {
6284 tree decl = builtin_decl_implicit (fn);
355fe088 6285 gimple *stmt = gimple_build_call (decl, 1, arg0);
3d2cf79f
RB
6286 if (!VOID_TYPE_P (type))
6287 {
6288 if (gimple_in_ssa_p (cfun))
b731b390 6289 res = make_ssa_name (type);
3d2cf79f 6290 else
b731b390 6291 res = create_tmp_reg (type);
3d2cf79f
RB
6292 gimple_call_set_lhs (stmt, res);
6293 }
6294 gimple_set_location (stmt, loc);
6295 gimple_seq_add_stmt_without_update (seq, stmt);
6296 }
6297 return res;
6298}
6299
6300/* Build the call FN (ARG0, ARG1) with a result of type TYPE
6301 (or no result if TYPE is void) with location LOC,
c26de36d 6302 simplifying it first if possible. Returns the built
3d2cf79f
RB
6303 expression value (or NULL_TREE if TYPE is void) and appends
6304 statements possibly defining it to SEQ. */
6305
6306tree
6307gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6308 enum built_in_function fn, tree type, tree arg0, tree arg1)
3d2cf79f 6309{
c26de36d 6310 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
6311 if (!res)
6312 {
6313 tree decl = builtin_decl_implicit (fn);
355fe088 6314 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
3d2cf79f
RB
6315 if (!VOID_TYPE_P (type))
6316 {
6317 if (gimple_in_ssa_p (cfun))
b731b390 6318 res = make_ssa_name (type);
3d2cf79f 6319 else
b731b390 6320 res = create_tmp_reg (type);
3d2cf79f
RB
6321 gimple_call_set_lhs (stmt, res);
6322 }
6323 gimple_set_location (stmt, loc);
6324 gimple_seq_add_stmt_without_update (seq, stmt);
6325 }
6326 return res;
6327}
6328
6329/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6330 (or no result if TYPE is void) with location LOC,
c26de36d 6331 simplifying it first if possible. Returns the built
3d2cf79f
RB
6332 expression value (or NULL_TREE if TYPE is void) and appends
6333 statements possibly defining it to SEQ. */
6334
6335tree
6336gimple_build (gimple_seq *seq, location_t loc,
6337 enum built_in_function fn, tree type,
c26de36d 6338 tree arg0, tree arg1, tree arg2)
3d2cf79f 6339{
c26de36d
RB
6340 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6341 seq, gimple_build_valueize);
3d2cf79f
RB
6342 if (!res)
6343 {
6344 tree decl = builtin_decl_implicit (fn);
355fe088 6345 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
3d2cf79f
RB
6346 if (!VOID_TYPE_P (type))
6347 {
6348 if (gimple_in_ssa_p (cfun))
b731b390 6349 res = make_ssa_name (type);
3d2cf79f 6350 else
b731b390 6351 res = create_tmp_reg (type);
3d2cf79f
RB
6352 gimple_call_set_lhs (stmt, res);
6353 }
6354 gimple_set_location (stmt, loc);
6355 gimple_seq_add_stmt_without_update (seq, stmt);
6356 }
6357 return res;
6358}
6359
6360/* Build the conversion (TYPE) OP with a result of type TYPE
6361 with location LOC if such conversion is neccesary in GIMPLE,
6362 simplifying it first.
6363 Returns the built expression value and appends
6364 statements possibly defining it to SEQ. */
d4f5cd5e
RB
6365
6366tree
6367gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6368{
6369 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6370 return op;
3d2cf79f 6371 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 6372}
68e57f04 6373
74e3c262
RB
6374/* Build the conversion (ptrofftype) OP with a result of a type
6375 compatible with ptrofftype with location LOC if such conversion
6376 is neccesary in GIMPLE, simplifying it first.
6377 Returns the built expression value and appends
6378 statements possibly defining it to SEQ. */
6379
6380tree
6381gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6382{
6383 if (ptrofftype_p (TREE_TYPE (op)))
6384 return op;
6385 return gimple_convert (seq, loc, sizetype, op);
6386}
6387
68e57f04
RS
6388/* Return true if the result of assignment STMT is known to be non-negative.
6389 If the return value is based on the assumption that signed overflow is
6390 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6391 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6392
6393static bool
6394gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6395 int depth)
6396{
6397 enum tree_code code = gimple_assign_rhs_code (stmt);
6398 switch (get_gimple_rhs_class (code))
6399 {
6400 case GIMPLE_UNARY_RHS:
6401 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6402 gimple_expr_type (stmt),
6403 gimple_assign_rhs1 (stmt),
6404 strict_overflow_p, depth);
6405 case GIMPLE_BINARY_RHS:
6406 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6407 gimple_expr_type (stmt),
6408 gimple_assign_rhs1 (stmt),
6409 gimple_assign_rhs2 (stmt),
6410 strict_overflow_p, depth);
6411 case GIMPLE_TERNARY_RHS:
6412 return false;
6413 case GIMPLE_SINGLE_RHS:
6414 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6415 strict_overflow_p, depth);
6416 case GIMPLE_INVALID_RHS:
6417 break;
6418 }
6419 gcc_unreachable ();
6420}
6421
6422/* Return true if return value of call STMT is known to be non-negative.
6423 If the return value is based on the assumption that signed overflow is
6424 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6425 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6426
6427static bool
6428gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6429 int depth)
6430{
6431 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6432 gimple_call_arg (stmt, 0) : NULL_TREE;
6433 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6434 gimple_call_arg (stmt, 1) : NULL_TREE;
6435
6436 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
1d9da71f 6437 gimple_call_combined_fn (stmt),
68e57f04
RS
6438 arg0,
6439 arg1,
6440 strict_overflow_p, depth);
6441}
6442
4534c203
RB
6443/* Return true if return value of call STMT is known to be non-negative.
6444 If the return value is based on the assumption that signed overflow is
6445 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6446 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6447
6448static bool
6449gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6450 int depth)
6451{
6452 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6453 {
6454 tree arg = gimple_phi_arg_def (stmt, i);
6455 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6456 return false;
6457 }
6458 return true;
6459}
6460
68e57f04
RS
6461/* Return true if STMT is known to compute a non-negative value.
6462 If the return value is based on the assumption that signed overflow is
6463 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6464 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6465
6466bool
6467gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6468 int depth)
6469{
6470 switch (gimple_code (stmt))
6471 {
6472 case GIMPLE_ASSIGN:
6473 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6474 depth);
6475 case GIMPLE_CALL:
6476 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6477 depth);
4534c203
RB
6478 case GIMPLE_PHI:
6479 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6480 depth);
68e57f04
RS
6481 default:
6482 return false;
6483 }
6484}
67dbe582
RS
6485
6486/* Return true if the floating-point value computed by assignment STMT
6487 is known to have an integer value. We also allow +Inf, -Inf and NaN
5a00b0aa 6488 to be considered integer values. Return false for signaling NaN.
67dbe582
RS
6489
6490 DEPTH is the current nesting depth of the query. */
6491
6492static bool
6493gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6494{
6495 enum tree_code code = gimple_assign_rhs_code (stmt);
6496 switch (get_gimple_rhs_class (code))
6497 {
6498 case GIMPLE_UNARY_RHS:
6499 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6500 gimple_assign_rhs1 (stmt), depth);
6501 case GIMPLE_BINARY_RHS:
6502 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6503 gimple_assign_rhs1 (stmt),
6504 gimple_assign_rhs2 (stmt), depth);
6505 case GIMPLE_TERNARY_RHS:
6506 return false;
6507 case GIMPLE_SINGLE_RHS:
6508 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6509 case GIMPLE_INVALID_RHS:
6510 break;
6511 }
6512 gcc_unreachable ();
6513}
6514
6515/* Return true if the floating-point value computed by call STMT is known
6516 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 6517 considered integer values. Return false for signaling NaN.
67dbe582
RS
6518
6519 DEPTH is the current nesting depth of the query. */
6520
6521static bool
6522gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6523{
6524 tree arg0 = (gimple_call_num_args (stmt) > 0
6525 ? gimple_call_arg (stmt, 0)
6526 : NULL_TREE);
6527 tree arg1 = (gimple_call_num_args (stmt) > 1
6528 ? gimple_call_arg (stmt, 1)
6529 : NULL_TREE);
1d9da71f 6530 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
6531 arg0, arg1, depth);
6532}
6533
6534/* Return true if the floating-point result of phi STMT is known to have
6535 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 6536 integer values. Return false for signaling NaN.
67dbe582
RS
6537
6538 DEPTH is the current nesting depth of the query. */
6539
6540static bool
6541gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6542{
6543 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6544 {
6545 tree arg = gimple_phi_arg_def (stmt, i);
6546 if (!integer_valued_real_single_p (arg, depth + 1))
6547 return false;
6548 }
6549 return true;
6550}
6551
6552/* Return true if the floating-point value computed by STMT is known
6553 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 6554 considered integer values. Return false for signaling NaN.
67dbe582
RS
6555
6556 DEPTH is the current nesting depth of the query. */
6557
6558bool
6559gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6560{
6561 switch (gimple_code (stmt))
6562 {
6563 case GIMPLE_ASSIGN:
6564 return gimple_assign_integer_valued_real_p (stmt, depth);
6565 case GIMPLE_CALL:
6566 return gimple_call_integer_valued_real_p (stmt, depth);
6567 case GIMPLE_PHI:
6568 return gimple_phi_integer_valued_real_p (stmt, depth);
6569 default:
6570 return false;
6571 }
6572}