]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
genmatch.c (capture_info::capture_info): Add missing COND_EXPR handling.
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
23a5b65a 2 Copyright (C) 2010-2014 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
d8a2d370
DN
26#include "stringpool.h"
27#include "expr.h"
28#include "stmt.h"
29#include "stor-layout.h"
cbdd87d4 30#include "flags.h"
83685514
AM
31#include "hashtab.h"
32#include "hash-set.h"
33#include "vec.h"
34#include "machmode.h"
35#include "hard-reg-set.h"
36#include "input.h"
cbdd87d4 37#include "function.h"
7ee2468b 38#include "dumpfile.h"
442b4905 39#include "bitmap.h"
60393bbc
AM
40#include "predict.h"
41#include "dominance.h"
2fb9a547
AM
42#include "basic-block.h"
43#include "tree-ssa-alias.h"
44#include "internal-fn.h"
45#include "gimple-fold.h"
46#include "gimple-expr.h"
47#include "is-a.h"
18f429e2 48#include "gimple.h"
45b0be94 49#include "gimplify.h"
5be5c238 50#include "gimple-iterator.h"
442b4905
AM
51#include "gimple-ssa.h"
52#include "tree-ssanames.h"
53#include "tree-into-ssa.h"
54#include "tree-dfa.h"
7a300452 55#include "tree-ssa.h"
cbdd87d4 56#include "tree-ssa-propagate.h"
cbdd87d4 57#include "target.h"
c582198b
AM
58#include "hash-map.h"
59#include "plugin-api.h"
60#include "ipa-ref.h"
61#include "cgraph.h"
450ad0cd
JH
62#include "ipa-utils.h"
63#include "gimple-pretty-print.h"
4484a35a 64#include "tree-ssa-address.h"
862d0b35 65#include "langhooks.h"
19e51b40 66#include "gimplify-me.h"
2b5f0895 67#include "dbgcnt.h"
9b2b7279 68#include "builtins.h"
fef5a0d9 69#include "output.h"
e0ee10ed
RB
70#include "tree-eh.h"
71#include "gimple-match.h"
45cc9f96
RB
72#include "tree-phinodes.h"
73#include "ssa-iterators.h"
cbdd87d4 74
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract decls (e.g. abstract origins of cloned functions) never have
     a body to refer to.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function inlined into another has no standalone body to refer
	 to.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
174
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE when the value cannot be
   legitimately referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite PTR p+ CST into &MEM[ptr + CST] so the result is an ADDR_EXPR
     handled by the code below.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal by its underlying decl.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      /* Taking the address of a variable makes it addressable.  */
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      /* Preserve the type of the original expression.  */
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Constants with TREE_OVERFLOW are not valid GIMPLE invariants.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
cbdd87d4
RG
235
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  /* ctor_for_folding returns error_mark_node when the initializer must
     not be used for folding at all.  */
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  /* Unshare before canonicalizing: canonicalization may modify
	     the tree in place.  */
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
264
265
cbdd87d4
RG
266
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  Returns the simplified
   replacement tree or NULL_TREE when no simplification was possible.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  /* Unary/ternary wrappers around a constant operand fold directly.  */
  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  /* A load from a constant aggregate can be replaced by its value; an
     lvalue obviously cannot.  */
  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
301
302
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  /* Dispatch on the shape of the right-hand side.  */
  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers mark end-of-life; there is nothing to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		/* Devirtualize only when the target list is complete and
		   at most one target remains; gate on the debug counter.  */
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }

	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p + 0] simplifies to p (converted).  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));

	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      /* Defer overflow warnings until we know the fold result is
		 actually used.  */
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
541
542/* Attempt to fold a conditional statement. Return true if any changes were
543 made. We only attempt to fold the condition expression, and do not perform
544 any transformation that would require alteration of the cfg. It is
545 assumed that the operands have been previously folded. */
546
547static bool
548fold_gimple_cond (gimple stmt)
549{
550 tree result = fold_binary_loc (gimple_location (stmt),
551 gimple_cond_code (stmt),
552 boolean_type_node,
553 gimple_cond_lhs (stmt),
554 gimple_cond_rhs (stmt));
555
556 if (result)
557 {
558 STRIP_USELESS_TYPE_CONVERSION (result);
559 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
560 {
561 gimple_cond_set_condition_from_tree (stmt, result);
562 return true;
563 }
564 }
565
566 return false;
567}
568
fef5a0d9
RB
569
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence inherits the VDEF of the
	     replaced statement; earlier stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
641
cbdd87d4
RG
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a temporary and assign that to the call's
	 original lhs as the last statement of the sequence.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
cbdd87d4 697
fef5a0d9
RB
698
/* Replace the call at *GSI with the gimple value VAL.  If the call has
   a lhs it becomes an assignment of VAL (converted to the lhs type if
   needed); otherwise the call is replaced by a GIMPLE_NOP.  The call's
   virtual definition, if any, is released.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  /* The replacement performs no store; drop the old VDEF.  */
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, true);
}
723
/* Replace the call at *GSI with the new call REPL and fold that
   again.  REPL inherits the original call's lhs, location and virtual
   operands.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      /* Transfer the virtual operand chain to the replacement call.  */
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, true);
  fold_stmt (gsi);
}
743
744/* Return true if VAR is a VAR_DECL or a component thereof. */
745
746static bool
747var_decl_component_p (tree var)
748{
749 tree inner = var;
750 while (handled_component_p (inner))
751 inner = TREE_OPERAND (inner, 0);
752 return SSA_VAR_P (inner);
753}
754
755/* Fold function call to builtin mem{{,p}cpy,move}. Return
756 NULL_TREE if no simplification can be made.
757 If ENDP is 0, return DEST (like memcpy).
758 If ENDP is 1, return DEST+LEN (like mempcpy).
759 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
760 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
761 (memmove). */
762
763static bool
764gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
765 tree dest, tree src, int endp)
766{
767 gimple stmt = gsi_stmt (*gsi);
768 tree lhs = gimple_call_lhs (stmt);
769 tree len = gimple_call_arg (stmt, 2);
770 tree destvar, srcvar;
771 location_t loc = gimple_location (stmt);
772
773 /* If the LEN parameter is zero, return DEST. */
774 if (integer_zerop (len))
775 {
776 gimple repl;
777 if (gimple_call_lhs (stmt))
778 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
779 else
780 repl = gimple_build_nop ();
781 tree vdef = gimple_vdef (stmt);
782 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 783 {
fef5a0d9
RB
784 unlink_stmt_vdef (stmt);
785 release_ssa_name (vdef);
786 }
787 gsi_replace (gsi, repl, true);
788 return true;
789 }
790
791 /* If SRC and DEST are the same (and not volatile), return
792 DEST{,+LEN,+LEN-1}. */
793 if (operand_equal_p (src, dest, 0))
794 {
795 unlink_stmt_vdef (stmt);
796 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
797 release_ssa_name (gimple_vdef (stmt));
798 if (!lhs)
799 {
800 gsi_replace (gsi, gimple_build_nop (), true);
801 return true;
802 }
803 goto done;
804 }
805 else
806 {
807 tree srctype, desttype;
808 unsigned int src_align, dest_align;
809 tree off0;
810
811 /* Build accesses at offset zero with a ref-all character type. */
812 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
813 ptr_mode, true), 0);
814
815 /* If we can perform the copy efficiently with first doing all loads
816 and then all stores inline it that way. Currently efficiently
817 means that we can load all the memory into a single integer
818 register which is what MOVE_MAX gives us. */
819 src_align = get_pointer_alignment (src);
820 dest_align = get_pointer_alignment (dest);
821 if (tree_fits_uhwi_p (len)
822 && compare_tree_int (len, MOVE_MAX) <= 0
823 /* ??? Don't transform copies from strings with known length this
824 confuses the tree-ssa-strlen.c. This doesn't handle
825 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
826 reason. */
827 && !c_strlen (src, 2))
828 {
829 unsigned ilen = tree_to_uhwi (len);
830 if (exact_log2 (ilen) != -1)
831 {
832 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
833 if (type
834 && TYPE_MODE (type) != BLKmode
835 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
836 == ilen * 8)
837 /* If the destination pointer is not aligned we must be able
838 to emit an unaligned store. */
839 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
840 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
841 {
842 tree srctype = type;
843 tree desttype = type;
844 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
845 srctype = build_aligned_type (type, src_align);
846 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
847 tree tem = fold_const_aggregate_ref (srcmem);
848 if (tem)
849 srcmem = tem;
850 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
851 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
852 src_align))
853 srcmem = NULL_TREE;
854 if (srcmem)
855 {
856 gimple new_stmt;
857 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
858 {
859 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
860 if (gimple_in_ssa_p (cfun))
861 srcmem = make_ssa_name (TREE_TYPE (srcmem),
862 new_stmt);
863 else
864 srcmem = create_tmp_reg (TREE_TYPE (srcmem),
865 NULL);
866 gimple_assign_set_lhs (new_stmt, srcmem);
867 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
868 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
869 }
870 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
871 desttype = build_aligned_type (type, dest_align);
872 new_stmt
873 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
874 dest, off0),
875 srcmem);
876 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
877 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
878 if (gimple_vdef (new_stmt)
879 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
880 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
881 if (!lhs)
882 {
883 gsi_replace (gsi, new_stmt, true);
884 return true;
885 }
886 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
887 goto done;
888 }
889 }
890 }
891 }
892
893 if (endp == 3)
894 {
895 /* Both DEST and SRC must be pointer types.
896 ??? This is what old code did. Is the testing for pointer types
897 really mandatory?
898
899 If either SRC is readonly or length is 1, we can use memcpy. */
900 if (!dest_align || !src_align)
901 return false;
902 if (readonly_data_expr (src)
903 || (tree_fits_uhwi_p (len)
904 && (MIN (src_align, dest_align) / BITS_PER_UNIT
905 >= tree_to_uhwi (len))))
906 {
907 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
908 if (!fn)
909 return false;
910 gimple_call_set_fndecl (stmt, fn);
911 gimple_call_set_arg (stmt, 0, dest);
912 gimple_call_set_arg (stmt, 1, src);
913 fold_stmt (gsi);
914 return true;
915 }
916
917 /* If *src and *dest can't overlap, optimize into memcpy as well. */
918 if (TREE_CODE (src) == ADDR_EXPR
919 && TREE_CODE (dest) == ADDR_EXPR)
920 {
921 tree src_base, dest_base, fn;
922 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
923 HOST_WIDE_INT size = -1;
924 HOST_WIDE_INT maxsize = -1;
925
926 srcvar = TREE_OPERAND (src, 0);
927 src_base = get_ref_base_and_extent (srcvar, &src_offset,
928 &size, &maxsize);
929 destvar = TREE_OPERAND (dest, 0);
930 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
931 &size, &maxsize);
932 if (tree_fits_uhwi_p (len))
933 maxsize = tree_to_uhwi (len);
934 else
935 maxsize = -1;
936 src_offset /= BITS_PER_UNIT;
937 dest_offset /= BITS_PER_UNIT;
938 if (SSA_VAR_P (src_base)
939 && SSA_VAR_P (dest_base))
940 {
941 if (operand_equal_p (src_base, dest_base, 0)
942 && ranges_overlap_p (src_offset, maxsize,
943 dest_offset, maxsize))
944 return false;
945 }
946 else if (TREE_CODE (src_base) == MEM_REF
947 && TREE_CODE (dest_base) == MEM_REF)
948 {
949 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
950 TREE_OPERAND (dest_base, 0), 0))
951 return false;
952 offset_int off = mem_ref_offset (src_base) + src_offset;
953 if (!wi::fits_shwi_p (off))
954 return false;
955 src_offset = off.to_shwi ();
956
957 off = mem_ref_offset (dest_base) + dest_offset;
958 if (!wi::fits_shwi_p (off))
959 return false;
960 dest_offset = off.to_shwi ();
961 if (ranges_overlap_p (src_offset, maxsize,
962 dest_offset, maxsize))
963 return false;
964 }
965 else
966 return false;
967
968 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
969 if (!fn)
970 return false;
971 gimple_call_set_fndecl (stmt, fn);
972 gimple_call_set_arg (stmt, 0, dest);
973 gimple_call_set_arg (stmt, 1, src);
974 fold_stmt (gsi);
975 return true;
976 }
977
978 /* If the destination and source do not alias optimize into
979 memcpy as well. */
980 if ((is_gimple_min_invariant (dest)
981 || TREE_CODE (dest) == SSA_NAME)
982 && (is_gimple_min_invariant (src)
983 || TREE_CODE (src) == SSA_NAME))
984 {
985 ao_ref destr, srcr;
986 ao_ref_init_from_ptr_and_size (&destr, dest, len);
987 ao_ref_init_from_ptr_and_size (&srcr, src, len);
988 if (!refs_may_alias_p_1 (&destr, &srcr, false))
989 {
990 tree fn;
991 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
992 if (!fn)
993 return false;
994 gimple_call_set_fndecl (stmt, fn);
995 gimple_call_set_arg (stmt, 0, dest);
996 gimple_call_set_arg (stmt, 1, src);
997 fold_stmt (gsi);
998 return true;
999 }
1000 }
1001
1002 return false;
1003 }
1004
1005 if (!tree_fits_shwi_p (len))
1006 return false;
1007 /* FIXME:
1008 This logic lose for arguments like (type *)malloc (sizeof (type)),
1009 since we strip the casts of up to VOID return value from malloc.
1010 Perhaps we ought to inherit type from non-VOID argument here? */
1011 STRIP_NOPS (src);
1012 STRIP_NOPS (dest);
1013 if (!POINTER_TYPE_P (TREE_TYPE (src))
1014 || !POINTER_TYPE_P (TREE_TYPE (dest)))
1015 return false;
1016 /* In the following try to find a type that is most natural to be
1017 used for the memcpy source and destination and that allows
1018 the most optimization when memcpy is turned into a plain assignment
1019 using that type. In theory we could always use a char[len] type
1020 but that only gains us that the destination and source possibly
1021 no longer will have their address taken. */
1022 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
1023 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
1024 {
1025 tree tem = TREE_OPERAND (src, 0);
1026 STRIP_NOPS (tem);
1027 if (tem != TREE_OPERAND (src, 0))
1028 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
1029 }
1030 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
1031 {
1032 tree tem = TREE_OPERAND (dest, 0);
1033 STRIP_NOPS (tem);
1034 if (tem != TREE_OPERAND (dest, 0))
1035 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
1036 }
1037 srctype = TREE_TYPE (TREE_TYPE (src));
1038 if (TREE_CODE (srctype) == ARRAY_TYPE
1039 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1040 {
1041 srctype = TREE_TYPE (srctype);
1042 STRIP_NOPS (src);
1043 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
1044 }
1045 desttype = TREE_TYPE (TREE_TYPE (dest));
1046 if (TREE_CODE (desttype) == ARRAY_TYPE
1047 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1048 {
1049 desttype = TREE_TYPE (desttype);
1050 STRIP_NOPS (dest);
1051 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
1052 }
1053 if (TREE_ADDRESSABLE (srctype)
1054 || TREE_ADDRESSABLE (desttype))
1055 return false;
1056
1057 /* Make sure we are not copying using a floating-point mode or
1058 a type whose size possibly does not match its precision. */
1059 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1060 || TREE_CODE (desttype) == BOOLEAN_TYPE
1061 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1062 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1063 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1064 || TREE_CODE (srctype) == BOOLEAN_TYPE
1065 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1066 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1067 if (!srctype)
1068 srctype = desttype;
1069 if (!desttype)
1070 desttype = srctype;
1071 if (!srctype)
1072 return false;
1073
1074 src_align = get_pointer_alignment (src);
1075 dest_align = get_pointer_alignment (dest);
1076 if (dest_align < TYPE_ALIGN (desttype)
1077 || src_align < TYPE_ALIGN (srctype))
1078 return false;
1079
1080 destvar = dest;
1081 STRIP_NOPS (destvar);
1082 if (TREE_CODE (destvar) == ADDR_EXPR
1083 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1084 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1085 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1086 else
1087 destvar = NULL_TREE;
1088
1089 srcvar = src;
1090 STRIP_NOPS (srcvar);
1091 if (TREE_CODE (srcvar) == ADDR_EXPR
1092 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1093 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1094 {
1095 if (!destvar
1096 || src_align >= TYPE_ALIGN (desttype))
1097 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1098 srcvar, off0);
1099 else if (!STRICT_ALIGNMENT)
1100 {
1101 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1102 src_align);
1103 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1104 }
e256dfce 1105 else
fef5a0d9
RB
1106 srcvar = NULL_TREE;
1107 }
1108 else
1109 srcvar = NULL_TREE;
1110
1111 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1112 return false;
1113
1114 if (srcvar == NULL_TREE)
1115 {
1116 STRIP_NOPS (src);
1117 if (src_align >= TYPE_ALIGN (desttype))
1118 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1119 else
1120 {
1121 if (STRICT_ALIGNMENT)
1122 return false;
1123 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1124 src_align);
1125 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1126 }
1127 }
1128 else if (destvar == NULL_TREE)
1129 {
1130 STRIP_NOPS (dest);
1131 if (dest_align >= TYPE_ALIGN (srctype))
1132 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1133 else
1134 {
1135 if (STRICT_ALIGNMENT)
1136 return false;
1137 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1138 dest_align);
1139 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1140 }
1141 }
1142
1143 gimple new_stmt;
1144 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1145 {
1146 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1147 if (gimple_in_ssa_p (cfun))
1148 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1149 else
1150 srcvar = create_tmp_reg (TREE_TYPE (srcvar), NULL);
1151 gimple_assign_set_lhs (new_stmt, srcvar);
1152 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1153 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1154 }
1155 new_stmt = gimple_build_assign (destvar, srcvar);
1156 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1157 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1158 if (gimple_vdef (new_stmt)
1159 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1160 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1161 if (!lhs)
1162 {
1163 gsi_replace (gsi, new_stmt, true);
1164 return true;
1165 }
1166 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1167 }
1168
1169done:
1170 if (endp == 0 || endp == 3)
1171 len = NULL_TREE;
1172 else if (endp == 2)
1173 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1174 ssize_int (1));
1175 if (endp == 2 || endp == 1)
1176 dest = fold_build_pointer_plus_loc (loc, dest, len);
1177
1178 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1179 GSI_SAME_STMT);
1180 gimple repl = gimple_build_assign (lhs, dest);
1181 gsi_replace (gsi, repl, true);
1182 return true;
1183}
1184
1185/* Fold function call to builtin memset or bzero at *GSI setting the
1186 memory of size LEN to VAL. Return whether a simplification was made. */
1187
1188static bool
1189gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1190{
1191 gimple stmt = gsi_stmt (*gsi);
1192 tree etype;
1193 unsigned HOST_WIDE_INT length, cval;
1194
1195 /* If the LEN parameter is zero, return DEST. */
1196 if (integer_zerop (len))
1197 {
1198 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1199 return true;
1200 }
1201
1202 if (! tree_fits_uhwi_p (len))
1203 return false;
1204
1205 if (TREE_CODE (c) != INTEGER_CST)
1206 return false;
1207
1208 tree dest = gimple_call_arg (stmt, 0);
1209 tree var = dest;
1210 if (TREE_CODE (var) != ADDR_EXPR)
1211 return false;
1212
1213 var = TREE_OPERAND (var, 0);
1214 if (TREE_THIS_VOLATILE (var))
1215 return false;
1216
1217 etype = TREE_TYPE (var);
1218 if (TREE_CODE (etype) == ARRAY_TYPE)
1219 etype = TREE_TYPE (etype);
1220
1221 if (!INTEGRAL_TYPE_P (etype)
1222 && !POINTER_TYPE_P (etype))
1223 return NULL_TREE;
1224
1225 if (! var_decl_component_p (var))
1226 return NULL_TREE;
1227
1228 length = tree_to_uhwi (len);
1229 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1230 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1231 return NULL_TREE;
1232
1233 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1234 return NULL_TREE;
1235
1236 if (integer_zerop (c))
1237 cval = 0;
1238 else
1239 {
1240 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1241 return NULL_TREE;
1242
1243 cval = TREE_INT_CST_LOW (c);
1244 cval &= 0xff;
1245 cval |= cval << 8;
1246 cval |= cval << 16;
1247 cval |= (cval << 31) << 1;
1248 }
1249
1250 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1251 gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1252 gimple_set_vuse (store, gimple_vuse (stmt));
1253 tree vdef = gimple_vdef (stmt);
1254 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1255 {
1256 gimple_set_vdef (store, gimple_vdef (stmt));
1257 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1258 }
1259 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1260 if (gimple_call_lhs (stmt))
1261 {
1262 gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1263 gsi_replace (gsi, asgn, true);
1264 }
1265 else
1266 {
1267 gimple_stmt_iterator gsi2 = *gsi;
1268 gsi_prev (gsi);
1269 gsi_remove (&gsi2, true);
1270 }
1271
1272 return true;
1273}
1274
1275
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables, allocated
   lazily on first use and owned by the outermost caller.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    /* Recurse on the pointer feeding the dereference.  */
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* TYPE == 2: ARG itself is the value; require a non-negative
	     integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* For maximum queries, keep the larger of the two
		 candidate lengths.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* For exact length queries (TYPE == 0) all sources must
	     agree on the same constant.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
        }
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Both arms of the conditional must yield a usable length.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_maxval_strlen (op2, length, visited, type)
		 && get_maxval_strlen (op3, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
      }
      return true;

    default:
      return false;
    }
}
1401
dcb7fae2
RB
1402tree
1403get_maxval_strlen (tree arg, int type)
1404{
1405 bitmap visited = NULL;
1406 tree len = NULL_TREE;
1407 if (!get_maxval_strlen (arg, &len, &visited, type))
1408 len = NULL_TREE;
1409 if (visited)
1410 BITMAP_FREE (visited);
1411
1412 return len;
1413}
1414
/* Fold a call to builtin strcpy at *GSI with arguments DEST and SRC.
   When the length of SRC is a known constant the call is turned into
   a memcpy of length + 1 bytes.  Return true if a simplification was
   made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The memcpy form is only a win when optimizing for speed.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Type 0: we need the exact string length of SRC.  */
  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  /* Copy LEN + 1 bytes to include the terminating NUL.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1453
/* Fold a call to builtin strncpy at *GSI with arguments DEST, SRC,
   and LEN.  The call becomes a memcpy when LEN is a constant no
   smaller than strlen (SRC) + 1, so no zero-padding is needed.
   Return true if a simplification was made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* SLEN + 1 covers the terminating NUL.  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1502
/* Simplify a call to the strcat builtin at *GSI.  DST and SRC are the
   arguments to the call.

   When the length of SRC is a known constant and we are optimizing
   for speed, the call is split into strlen (DST) followed by a
   memcpy to DST + strlen (DST), replacing the original statement.
   Return true if a simplification was made.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node, NULL);
  else
    newdst = create_tmp_reg (size_type_node, NULL);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy LEN + 1 bytes of SRC, including the terminating NUL.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; materialize that for the call's LHS.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1597
07f1cf56
RB
1598/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1599 are the arguments to the call. */
1600
1601static bool
1602gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1603{
1604 gimple stmt = gsi_stmt (*gsi);
1605 tree dest = gimple_call_arg (stmt, 0);
1606 tree src = gimple_call_arg (stmt, 1);
1607 tree size = gimple_call_arg (stmt, 2);
1608 tree fn;
1609 const char *p;
1610
1611
1612 p = c_getstr (src);
1613 /* If the SRC parameter is "", return DEST. */
1614 if (p && *p == '\0')
1615 {
1616 replace_call_with_value (gsi, dest);
1617 return true;
1618 }
1619
1620 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1621 return false;
1622
1623 /* If __builtin_strcat_chk is used, assume strcat is available. */
1624 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1625 if (!fn)
1626 return false;
1627
1628 gimple repl = gimple_build_call (fn, 2, dest, src);
1629 replace_call_with_call_and_fold (gsi, repl);
1630 return true;
1631}
1632
/* Fold a call to the __strncat_chk builtin at *GSI with arguments
   DEST, SRC, LEN, and object-size check value SIZE.  Return true if
   a simplification was made.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1688
/* Fold a call to the fputs builtin at *GSI.  ARG0 and ARG1 are the
   string and stream arguments to the call.  UNLOCKED is true if this
   is actually a call to fputs_unlocked.  Depending on the known
   length of ARG0 the call is deleted, turned into fputc, or turned
   into fwrite.  Return true if a simplification was made.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					     (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
1765
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin at *GSI.
   DEST, SRC, LEN, and the object-size check value SIZE are the
   arguments to the call; FCODE is the BUILT_IN_* code of the builtin.
   The checked variant is replaced by the plain one when SIZE shows
   the access cannot overflow.  Return true if a simplification was
   made.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* Type 2: the maximum value LEN can have.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1865
/* Fold a call to the __st[rp]cpy_chk builtin at *GSI.
   DEST, SRC, and the object-size check value SIZE are the arguments
   to the call; FCODE is the BUILT_IN_* code of the builtin.  The
   checked variant is replaced by the plain one (or by a cheaper
   checked variant) when SIZE shows the copy cannot overflow.
   Return true if a simplification was made.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* Type 1: the maximum string length of SRC.  */
  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* LEN + 1 covers the terminating NUL.  */
	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
					      true, GSI_SAME_STMT);
	      gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1956
/* Fold a call to the __st{r,p}ncpy_chk builtin at *GSI.  DEST, SRC,
   LEN, and the object-size check value SIZE are the arguments to the
   call; FCODE is the BUILT_IN_* code of the builtin.  The checked
   variant is replaced by the plain one when SIZE shows the copy
   cannot overflow.  Return true if a simplification was made.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* Type 2: the maximum value LEN can have.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2016
fef5a0d9
RB
2017/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2018 NULL_TREE if a normal call should be emitted rather than expanding
2019 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2020 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2021 passed as second argument. */
cbdd87d4
RG
2022
2023static bool
fef5a0d9 2024gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
dcb7fae2 2025 enum built_in_function fcode)
cbdd87d4 2026{
fef5a0d9
RB
2027 gimple stmt = gsi_stmt (*gsi);
2028 tree dest, size, len, fn, fmt, flag;
2029 const char *fmt_str;
cbdd87d4 2030
fef5a0d9
RB
2031 /* Verify the required arguments in the original call. */
2032 if (gimple_call_num_args (stmt) < 5)
2033 return false;
cbdd87d4 2034
fef5a0d9
RB
2035 dest = gimple_call_arg (stmt, 0);
2036 len = gimple_call_arg (stmt, 1);
2037 flag = gimple_call_arg (stmt, 2);
2038 size = gimple_call_arg (stmt, 3);
2039 fmt = gimple_call_arg (stmt, 4);
2040
2041 if (! tree_fits_uhwi_p (size))
2042 return false;
2043
2044 if (! integer_all_onesp (size))
2045 {
dcb7fae2 2046 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9 2047 if (! tree_fits_uhwi_p (len))
cbdd87d4 2048 {
fef5a0d9
RB
2049 /* If LEN is not constant, try MAXLEN too.
2050 For MAXLEN only allow optimizing into non-_ocs function
2051 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2052 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
cbdd87d4
RG
2053 return false;
2054 }
2055 else
fef5a0d9 2056 maxlen = len;
cbdd87d4 2057
fef5a0d9
RB
2058 if (tree_int_cst_lt (size, maxlen))
2059 return false;
2060 }
cbdd87d4 2061
fef5a0d9
RB
2062 if (!init_target_chars ())
2063 return false;
cbdd87d4 2064
fef5a0d9
RB
2065 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2066 or if format doesn't contain % chars or is "%s". */
2067 if (! integer_zerop (flag))
2068 {
2069 fmt_str = c_getstr (fmt);
2070 if (fmt_str == NULL)
2071 return false;
2072 if (strchr (fmt_str, target_percent) != NULL
2073 && strcmp (fmt_str, target_percent_s))
2074 return false;
cbdd87d4
RG
2075 }
2076
fef5a0d9
RB
2077 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2078 available. */
2079 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2080 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2081 if (!fn)
491e0b9b
RG
2082 return false;
2083
fef5a0d9
RB
2084 /* Replace the called function and the first 5 argument by 3 retaining
2085 trailing varargs. */
2086 gimple_call_set_fndecl (stmt, fn);
2087 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2088 gimple_call_set_arg (stmt, 0, dest);
2089 gimple_call_set_arg (stmt, 1, len);
2090 gimple_call_set_arg (stmt, 2, fmt);
2091 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2092 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2093 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2094 fold_stmt (gsi);
2095 return true;
2096}
cbdd87d4 2097
fef5a0d9
RB
2098/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2099 Return NULL_TREE if a normal call should be emitted rather than
2100 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2101 or BUILT_IN_VSPRINTF_CHK. */
cbdd87d4 2102
fef5a0d9
RB
2103static bool
2104gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2105 enum built_in_function fcode)
2106{
2107 gimple stmt = gsi_stmt (*gsi);
2108 tree dest, size, len, fn, fmt, flag;
2109 const char *fmt_str;
2110 unsigned nargs = gimple_call_num_args (stmt);
cbdd87d4 2111
fef5a0d9
RB
2112 /* Verify the required arguments in the original call. */
2113 if (nargs < 4)
2114 return false;
2115 dest = gimple_call_arg (stmt, 0);
2116 flag = gimple_call_arg (stmt, 1);
2117 size = gimple_call_arg (stmt, 2);
2118 fmt = gimple_call_arg (stmt, 3);
2119
2120 if (! tree_fits_uhwi_p (size))
2121 return false;
2122
2123 len = NULL_TREE;
2124
2125 if (!init_target_chars ())
2126 return false;
2127
2128 /* Check whether the format is a literal string constant. */
2129 fmt_str = c_getstr (fmt);
2130 if (fmt_str != NULL)
2131 {
2132 /* If the format doesn't contain % args or %%, we know the size. */
2133 if (strchr (fmt_str, target_percent) == 0)
cbdd87d4 2134 {
fef5a0d9
RB
2135 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2136 len = build_int_cstu (size_type_node, strlen (fmt_str));
2137 }
2138 /* If the format is "%s" and first ... argument is a string literal,
2139 we know the size too. */
2140 else if (fcode == BUILT_IN_SPRINTF_CHK
2141 && strcmp (fmt_str, target_percent_s) == 0)
2142 {
2143 tree arg;
cbdd87d4 2144
fef5a0d9
RB
2145 if (nargs == 5)
2146 {
2147 arg = gimple_call_arg (stmt, 4);
2148 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2149 {
2150 len = c_strlen (arg, 1);
2151 if (! len || ! tree_fits_uhwi_p (len))
2152 len = NULL_TREE;
2153 }
2154 }
2155 }
2156 }
cbdd87d4 2157
fef5a0d9
RB
2158 if (! integer_all_onesp (size))
2159 {
2160 if (! len || ! tree_int_cst_lt (len, size))
2161 return false;
2162 }
cbdd87d4 2163
fef5a0d9
RB
2164 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2165 or if format doesn't contain % chars or is "%s". */
2166 if (! integer_zerop (flag))
2167 {
2168 if (fmt_str == NULL)
2169 return false;
2170 if (strchr (fmt_str, target_percent) != NULL
2171 && strcmp (fmt_str, target_percent_s))
2172 return false;
2173 }
cbdd87d4 2174
fef5a0d9
RB
2175 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2176 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2177 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2178 if (!fn)
2179 return false;
2180
2181 /* Replace the called function and the first 4 argument by 2 retaining
2182 trailing varargs. */
2183 gimple_call_set_fndecl (stmt, fn);
2184 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2185 gimple_call_set_arg (stmt, 0, dest);
2186 gimple_call_set_arg (stmt, 1, fmt);
2187 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2188 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2189 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2190 fold_stmt (gsi);
2191 return true;
2192}
2193
35770bb2
RB
2194/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2195 ORIG may be null if this is a 2-argument call. We don't attempt to
2196 simplify calls with more than 3 arguments.
2197
2198 Return NULL_TREE if no simplification was possible, otherwise return the
2199 simplified form of the call as a tree. If IGNORED is true, it means that
2200 the caller does not use the returned value of the function. */
2201
2202static bool
dcb7fae2 2203gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
35770bb2
RB
2204{
2205 gimple stmt = gsi_stmt (*gsi);
2206 tree dest = gimple_call_arg (stmt, 0);
2207 tree fmt = gimple_call_arg (stmt, 1);
2208 tree orig = NULL_TREE;
2209 const char *fmt_str = NULL;
2210
2211 /* Verify the required arguments in the original call. We deal with two
2212 types of sprintf() calls: 'sprintf (str, fmt)' and
2213 'sprintf (dest, "%s", orig)'. */
2214 if (gimple_call_num_args (stmt) > 3)
2215 return false;
2216
2217 if (gimple_call_num_args (stmt) == 3)
2218 orig = gimple_call_arg (stmt, 2);
2219
2220 /* Check whether the format is a literal string constant. */
2221 fmt_str = c_getstr (fmt);
2222 if (fmt_str == NULL)
2223 return false;
2224
2225 if (!init_target_chars ())
2226 return false;
2227
2228 /* If the format doesn't contain % args or %%, use strcpy. */
2229 if (strchr (fmt_str, target_percent) == NULL)
2230 {
2231 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2232
2233 if (!fn)
2234 return false;
2235
2236 /* Don't optimize sprintf (buf, "abc", ptr++). */
2237 if (orig)
2238 return false;
2239
2240 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2241 'format' is known to contain no % formats. */
2242 gimple_seq stmts = NULL;
2243 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2244 gimple_seq_add_stmt_without_update (&stmts, repl);
2245 if (gimple_call_lhs (stmt))
2246 {
2247 repl = gimple_build_assign (gimple_call_lhs (stmt),
2248 build_int_cst (integer_type_node,
2249 strlen (fmt_str)));
2250 gimple_seq_add_stmt_without_update (&stmts, repl);
2251 gsi_replace_with_seq_vops (gsi, stmts);
2252 /* gsi now points at the assignment to the lhs, get a
2253 stmt iterator to the memcpy call.
2254 ??? We can't use gsi_for_stmt as that doesn't work when the
2255 CFG isn't built yet. */
2256 gimple_stmt_iterator gsi2 = *gsi;
2257 gsi_prev (&gsi2);
2258 fold_stmt (&gsi2);
2259 }
2260 else
2261 {
2262 gsi_replace_with_seq_vops (gsi, stmts);
2263 fold_stmt (gsi);
2264 }
2265 return true;
2266 }
2267
2268 /* If the format is "%s", use strcpy if the result isn't used. */
2269 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2270 {
2271 tree fn;
2272 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2273
2274 if (!fn)
2275 return false;
2276
2277 /* Don't crash on sprintf (str1, "%s"). */
2278 if (!orig)
2279 return false;
2280
dcb7fae2
RB
2281 tree orig_len = NULL_TREE;
2282 if (gimple_call_lhs (stmt))
35770bb2 2283 {
dcb7fae2 2284 orig_len = get_maxval_strlen (orig, 0);
d7e78447 2285 if (!orig_len)
35770bb2
RB
2286 return false;
2287 }
2288
2289 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2290 gimple_seq stmts = NULL;
2291 gimple repl = gimple_build_call (fn, 2, dest, orig);
2292 gimple_seq_add_stmt_without_update (&stmts, repl);
2293 if (gimple_call_lhs (stmt))
2294 {
d7e78447
RB
2295 if (!useless_type_conversion_p (integer_type_node,
2296 TREE_TYPE (orig_len)))
2297 orig_len = fold_convert (integer_type_node, orig_len);
2298 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
35770bb2
RB
2299 gimple_seq_add_stmt_without_update (&stmts, repl);
2300 gsi_replace_with_seq_vops (gsi, stmts);
2301 /* gsi now points at the assignment to the lhs, get a
2302 stmt iterator to the memcpy call.
2303 ??? We can't use gsi_for_stmt as that doesn't work when the
2304 CFG isn't built yet. */
2305 gimple_stmt_iterator gsi2 = *gsi;
2306 gsi_prev (&gsi2);
2307 fold_stmt (&gsi2);
2308 }
2309 else
2310 {
2311 gsi_replace_with_seq_vops (gsi, stmts);
2312 fold_stmt (gsi);
2313 }
2314 return true;
2315 }
2316 return false;
2317}
2318
d7e78447
RB
2319/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2320 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2321 attempt to simplify calls with more than 4 arguments.
35770bb2 2322
d7e78447
RB
2323 Return NULL_TREE if no simplification was possible, otherwise return the
2324 simplified form of the call as a tree. If IGNORED is true, it means that
2325 the caller does not use the returned value of the function. */
2326
2327static bool
dcb7fae2 2328gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
d7e78447
RB
2329{
2330 gimple stmt = gsi_stmt (*gsi);
2331 tree dest = gimple_call_arg (stmt, 0);
2332 tree destsize = gimple_call_arg (stmt, 1);
2333 tree fmt = gimple_call_arg (stmt, 2);
2334 tree orig = NULL_TREE;
2335 const char *fmt_str = NULL;
2336
2337 if (gimple_call_num_args (stmt) > 4)
2338 return false;
2339
2340 if (gimple_call_num_args (stmt) == 4)
2341 orig = gimple_call_arg (stmt, 3);
2342
2343 if (!tree_fits_uhwi_p (destsize))
2344 return false;
2345 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2346
2347 /* Check whether the format is a literal string constant. */
2348 fmt_str = c_getstr (fmt);
2349 if (fmt_str == NULL)
2350 return false;
2351
2352 if (!init_target_chars ())
2353 return false;
2354
2355 /* If the format doesn't contain % args or %%, use strcpy. */
2356 if (strchr (fmt_str, target_percent) == NULL)
2357 {
2358 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2359 if (!fn)
2360 return false;
2361
2362 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2363 if (orig)
2364 return false;
2365
2366 /* We could expand this as
2367 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2368 or to
2369 memcpy (str, fmt_with_nul_at_cstm1, cst);
2370 but in the former case that might increase code size
2371 and in the latter case grow .rodata section too much.
2372 So punt for now. */
2373 size_t len = strlen (fmt_str);
2374 if (len >= destlen)
2375 return false;
2376
2377 gimple_seq stmts = NULL;
2378 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2379 gimple_seq_add_stmt_without_update (&stmts, repl);
2380 if (gimple_call_lhs (stmt))
2381 {
2382 repl = gimple_build_assign (gimple_call_lhs (stmt),
2383 build_int_cst (integer_type_node, len));
2384 gimple_seq_add_stmt_without_update (&stmts, repl);
2385 gsi_replace_with_seq_vops (gsi, stmts);
2386 /* gsi now points at the assignment to the lhs, get a
2387 stmt iterator to the memcpy call.
2388 ??? We can't use gsi_for_stmt as that doesn't work when the
2389 CFG isn't built yet. */
2390 gimple_stmt_iterator gsi2 = *gsi;
2391 gsi_prev (&gsi2);
2392 fold_stmt (&gsi2);
2393 }
2394 else
2395 {
2396 gsi_replace_with_seq_vops (gsi, stmts);
2397 fold_stmt (gsi);
2398 }
2399 return true;
2400 }
2401
2402 /* If the format is "%s", use strcpy if the result isn't used. */
2403 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2404 {
2405 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2406 if (!fn)
2407 return false;
2408
2409 /* Don't crash on snprintf (str1, cst, "%s"). */
2410 if (!orig)
2411 return false;
2412
dcb7fae2 2413 tree orig_len = get_maxval_strlen (orig, 0);
d7e78447 2414 if (!orig_len)
dcb7fae2 2415 return false;
d7e78447
RB
2416
2417 /* We could expand this as
2418 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2419 or to
2420 memcpy (str1, str2_with_nul_at_cstm1, cst);
2421 but in the former case that might increase code size
2422 and in the latter case grow .rodata section too much.
2423 So punt for now. */
2424 if (compare_tree_int (orig_len, destlen) >= 0)
2425 return false;
2426
2427 /* Convert snprintf (str1, cst, "%s", str2) into
2428 strcpy (str1, str2) if strlen (str2) < cst. */
2429 gimple_seq stmts = NULL;
2430 gimple repl = gimple_build_call (fn, 2, dest, orig);
2431 gimple_seq_add_stmt_without_update (&stmts, repl);
2432 if (gimple_call_lhs (stmt))
2433 {
2434 if (!useless_type_conversion_p (integer_type_node,
2435 TREE_TYPE (orig_len)))
2436 orig_len = fold_convert (integer_type_node, orig_len);
2437 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2438 gimple_seq_add_stmt_without_update (&stmts, repl);
2439 gsi_replace_with_seq_vops (gsi, stmts);
2440 /* gsi now points at the assignment to the lhs, get a
2441 stmt iterator to the memcpy call.
2442 ??? We can't use gsi_for_stmt as that doesn't work when the
2443 CFG isn't built yet. */
2444 gimple_stmt_iterator gsi2 = *gsi;
2445 gsi_prev (&gsi2);
2446 fold_stmt (&gsi2);
2447 }
2448 else
2449 {
2450 gsi_replace_with_seq_vops (gsi, stmts);
2451 fold_stmt (gsi);
2452 }
2453 return true;
2454 }
2455 return false;
2456}
35770bb2 2457
fef5a0d9
RB
2458
2459/* Fold a call to __builtin_strlen with known length LEN. */
2460
2461static bool
dcb7fae2 2462gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 2463{
dcb7fae2
RB
2464 gimple stmt = gsi_stmt (*gsi);
2465 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
fef5a0d9
RB
2466 if (!len)
2467 return false;
2813904b 2468 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
fef5a0d9
RB
2469 replace_call_with_value (gsi, len);
2470 return true;
cbdd87d4
RG
2471}
2472
2473
dcb7fae2
RB
2474/* Fold the non-target builtin at *GSI and return whether any simplification
2475 was made. */
cbdd87d4 2476
fef5a0d9 2477static bool
dcb7fae2 2478gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 2479{
fef5a0d9 2480 gimple stmt = gsi_stmt (*gsi);
fef5a0d9 2481 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 2482
dcb7fae2
RB
2483 /* Give up for always_inline inline builtins until they are
2484 inlined. */
2485 if (avoid_folding_inline_builtin (callee))
2486 return false;
cbdd87d4 2487
cbdd87d4
RG
2488 switch (DECL_FUNCTION_CODE (callee))
2489 {
dcb7fae2
RB
2490 case BUILT_IN_BZERO:
2491 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2492 gimple_call_arg (stmt, 1));
2493 case BUILT_IN_MEMSET:
2494 return gimple_fold_builtin_memset (gsi,
2495 gimple_call_arg (stmt, 1),
2496 gimple_call_arg (stmt, 2));
2497 case BUILT_IN_BCOPY:
2498 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2499 gimple_call_arg (stmt, 0), 3);
2500 case BUILT_IN_MEMCPY:
2501 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2502 gimple_call_arg (stmt, 1), 0);
2503 case BUILT_IN_MEMPCPY:
2504 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2505 gimple_call_arg (stmt, 1), 1);
2506 case BUILT_IN_MEMMOVE:
2507 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2508 gimple_call_arg (stmt, 1), 3);
2509 case BUILT_IN_SPRINTF_CHK:
2510 case BUILT_IN_VSPRINTF_CHK:
2511 return gimple_fold_builtin_sprintf_chk (gsi, DECL_FUNCTION_CODE (callee));
2512 case BUILT_IN_STRCAT_CHK:
2513 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
2514 case BUILT_IN_STRNCAT_CHK:
2515 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 2516 case BUILT_IN_STRLEN:
dcb7fae2 2517 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 2518 case BUILT_IN_STRCPY:
dcb7fae2 2519 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 2520 gimple_call_arg (stmt, 0),
dcb7fae2 2521 gimple_call_arg (stmt, 1));
cbdd87d4 2522 case BUILT_IN_STRNCPY:
dcb7fae2 2523 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
2524 gimple_call_arg (stmt, 0),
2525 gimple_call_arg (stmt, 1),
dcb7fae2 2526 gimple_call_arg (stmt, 2));
9a7eefec 2527 case BUILT_IN_STRCAT:
dcb7fae2
RB
2528 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2529 gimple_call_arg (stmt, 1));
cbdd87d4 2530 case BUILT_IN_FPUTS:
dcb7fae2
RB
2531 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2532 gimple_call_arg (stmt, 1), false);
cbdd87d4 2533 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
2534 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2535 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
2536 case BUILT_IN_MEMCPY_CHK:
2537 case BUILT_IN_MEMPCPY_CHK:
2538 case BUILT_IN_MEMMOVE_CHK:
2539 case BUILT_IN_MEMSET_CHK:
dcb7fae2 2540 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
2541 gimple_call_arg (stmt, 0),
2542 gimple_call_arg (stmt, 1),
2543 gimple_call_arg (stmt, 2),
2544 gimple_call_arg (stmt, 3),
fef5a0d9 2545 DECL_FUNCTION_CODE (callee));
cbdd87d4
RG
2546 case BUILT_IN_STRCPY_CHK:
2547 case BUILT_IN_STPCPY_CHK:
dcb7fae2 2548 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
2549 gimple_call_arg (stmt, 0),
2550 gimple_call_arg (stmt, 1),
2551 gimple_call_arg (stmt, 2),
fef5a0d9 2552 DECL_FUNCTION_CODE (callee));
cbdd87d4 2553 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 2554 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
2555 return gimple_fold_builtin_stxncpy_chk (gsi,
2556 gimple_call_arg (stmt, 0),
2557 gimple_call_arg (stmt, 1),
2558 gimple_call_arg (stmt, 2),
2559 gimple_call_arg (stmt, 3),
fef5a0d9 2560 DECL_FUNCTION_CODE (callee));
cbdd87d4
RG
2561 case BUILT_IN_SNPRINTF_CHK:
2562 case BUILT_IN_VSNPRINTF_CHK:
dcb7fae2 2563 return gimple_fold_builtin_snprintf_chk (gsi,
fef5a0d9 2564 DECL_FUNCTION_CODE (callee));
d7e78447 2565 case BUILT_IN_SNPRINTF:
dcb7fae2 2566 return gimple_fold_builtin_snprintf (gsi);
d7e78447 2567 case BUILT_IN_SPRINTF:
dcb7fae2 2568 return gimple_fold_builtin_sprintf (gsi);
fef5a0d9
RB
2569 default:;
2570 }
2571
2572 /* Try the generic builtin folder. */
2573 bool ignore = (gimple_call_lhs (stmt) == NULL);
2574 tree result = fold_call_stmt (stmt, ignore);
2575 if (result)
2576 {
2577 if (ignore)
2578 STRIP_NOPS (result);
2579 else
2580 result = fold_convert (gimple_call_return_type (stmt), result);
2581 if (!update_call_from_tree (gsi, result))
2582 gimplify_and_update_call_from_tree (gsi, result);
2583 return true;
2584 }
2585
2586 return false;
2587}
2588
cbdd87d4
RG
2589/* Attempt to fold a call statement referenced by the statement iterator GSI.
2590 The statement may be replaced by another statement, e.g., if the call
2591 simplifies to a constant value. Return true if any changes were made.
2592 It is assumed that the operands have been previously folded. */
2593
e021c122 2594static bool
ceeffab0 2595gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4
RG
2596{
2597 gimple stmt = gsi_stmt (*gsi);
3b45a007 2598 tree callee;
e021c122
RG
2599 bool changed = false;
2600 unsigned i;
cbdd87d4 2601
e021c122
RG
2602 /* Fold *& in call arguments. */
2603 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2604 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2605 {
2606 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2607 if (tmp)
2608 {
2609 gimple_call_set_arg (stmt, i, tmp);
2610 changed = true;
2611 }
2612 }
3b45a007
RG
2613
2614 /* Check for virtual calls that became direct calls. */
2615 callee = gimple_call_fn (stmt);
25583c4f 2616 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 2617 {
49c471e3
MJ
2618 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
2619 {
450ad0cd
JH
2620 if (dump_file && virtual_method_call_p (callee)
2621 && !possible_polymorphic_call_target_p
6f8091fc
JH
2622 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
2623 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
2624 {
2625 fprintf (dump_file,
a70e9985 2626 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
2627 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2628 fprintf (dump_file, " to ");
2629 print_generic_expr (dump_file, callee, TDF_SLIM);
2630 fprintf (dump_file, "\n");
2631 }
2632
49c471e3 2633 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
2634 changed = true;
2635 }
a70e9985 2636 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 2637 {
61dd6a2e
JH
2638 bool final;
2639 vec <cgraph_node *>targets
058d0a90 2640 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 2641 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 2642 {
a70e9985 2643 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
2644 if (dump_enabled_p ())
2645 {
807b7d62 2646 location_t loc = gimple_location_safe (stmt);
2b5f0895
XDL
2647 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2648 "folding virtual function call to %s\n",
2649 targets.length () == 1
2650 ? targets[0]->name ()
2651 : "__builtin_unreachable");
2652 }
61dd6a2e 2653 if (targets.length () == 1)
cf3e5a89
JJ
2654 {
2655 gimple_call_set_fndecl (stmt, targets[0]->decl);
2656 changed = true;
a70e9985
JJ
2657 /* If the call becomes noreturn, remove the lhs. */
2658 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
2659 {
2660 if (TREE_CODE (lhs) == SSA_NAME)
2661 {
2662 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
2663 tree def = get_or_create_ssa_default_def (cfun, var);
2664 gimple new_stmt = gimple_build_assign (lhs, def);
2665 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
2666 }
2667 gimple_call_set_lhs (stmt, NULL_TREE);
2668 }
cf3e5a89 2669 }
a70e9985 2670 else
cf3e5a89
JJ
2671 {
2672 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2673 gimple new_stmt = gimple_build_call (fndecl, 0);
2674 gimple_set_location (new_stmt, gimple_location (stmt));
a70e9985
JJ
2675 if (lhs && TREE_CODE (lhs) == SSA_NAME)
2676 {
2677 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
2678 tree def = get_or_create_ssa_default_def (cfun, var);
eb14a79f
ML
2679
2680 /* To satisfy condition for
2681 cgraph_update_edges_for_call_stmt_node,
2682 we need to preserve GIMPLE_CALL statement
2683 at position of GSI iterator. */
a70e9985 2684 update_call_from_tree (gsi, def);
eb14a79f 2685 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
a70e9985
JJ
2686 }
2687 else
42e52a51
RB
2688 {
2689 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2690 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2691 gsi_replace (gsi, new_stmt, false);
2692 }
cf3e5a89
JJ
2693 return true;
2694 }
e021c122 2695 }
49c471e3 2696 }
e021c122 2697 }
49c471e3 2698
e021c122
RG
2699 if (inplace)
2700 return changed;
2701
2702 /* Check for builtins that CCP can handle using information not
2703 available in the generic fold routines. */
fef5a0d9
RB
2704 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2705 {
2706 if (gimple_fold_builtin (gsi))
2707 changed = true;
2708 }
2709 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 2710 {
ea679d55 2711 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 2712 }
368b454d 2713 else if (gimple_call_internal_p (stmt))
ed9c79e1 2714 {
368b454d
JJ
2715 enum tree_code subcode = ERROR_MARK;
2716 tree result = NULL_TREE;
2717 switch (gimple_call_internal_fn (stmt))
2718 {
2719 case IFN_BUILTIN_EXPECT:
2720 result = fold_builtin_expect (gimple_location (stmt),
2721 gimple_call_arg (stmt, 0),
2722 gimple_call_arg (stmt, 1),
2723 gimple_call_arg (stmt, 2));
2724 break;
0e82f089
MP
2725 case IFN_UBSAN_OBJECT_SIZE:
2726 if (integer_all_onesp (gimple_call_arg (stmt, 2))
2727 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
2728 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
2729 && tree_int_cst_le (gimple_call_arg (stmt, 1),
2730 gimple_call_arg (stmt, 2))))
2731 {
2732 gsi_replace (gsi, gimple_build_nop (), true);
2733 unlink_stmt_vdef (stmt);
2734 release_defs (stmt);
2735 return true;
2736 }
2737 break;
368b454d
JJ
2738 case IFN_UBSAN_CHECK_ADD:
2739 subcode = PLUS_EXPR;
2740 break;
2741 case IFN_UBSAN_CHECK_SUB:
2742 subcode = MINUS_EXPR;
2743 break;
2744 case IFN_UBSAN_CHECK_MUL:
2745 subcode = MULT_EXPR;
2746 break;
2747 default:
2748 break;
2749 }
2750 if (subcode != ERROR_MARK)
2751 {
2752 tree arg0 = gimple_call_arg (stmt, 0);
2753 tree arg1 = gimple_call_arg (stmt, 1);
2754 /* x = y + 0; x = y - 0; x = y * 0; */
2755 if (integer_zerop (arg1))
2756 result = subcode == MULT_EXPR
2757 ? build_zero_cst (TREE_TYPE (arg0))
2758 : arg0;
2759 /* x = 0 + y; x = 0 * y; */
2760 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
2761 result = subcode == MULT_EXPR
2762 ? build_zero_cst (TREE_TYPE (arg0))
2763 : arg1;
2764 /* x = y - y; */
2765 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
2766 result = build_zero_cst (TREE_TYPE (arg0));
2767 /* x = y * 1; x = 1 * y; */
2768 else if (subcode == MULT_EXPR)
2769 {
2770 if (integer_onep (arg1))
2771 result = arg0;
2772 else if (integer_onep (arg0))
2773 result = arg1;
2774 }
2775 }
ed9c79e1
JJ
2776 if (result)
2777 {
2778 if (!update_call_from_tree (gsi, result))
2779 gimplify_and_update_call_from_tree (gsi, result);
2780 changed = true;
2781 }
2782 }
3b45a007 2783
e021c122 2784 return changed;
cbdd87d4
RG
2785}
2786
e0ee10ed
RB
2787
2788/* Worker for fold_stmt_1 dispatch to pattern based folding with
2789 gimple_simplify.
2790
2791 Replaces *GSI with the simplification result in RCODE and OPS
2792 and the associated statements in *SEQ. Does the replacement
2793 according to INPLACE and returns true if the operation succeeded. */
2794
2795static bool
2796replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
2797 code_helper rcode, tree *ops,
2798 gimple_seq *seq, bool inplace)
2799{
2800 gimple stmt = gsi_stmt (*gsi);
2801
2802 /* Play safe and do not allow abnormals to be mentioned in
2803 newly created statements. See also maybe_push_res_to_seq. */
2804 if ((TREE_CODE (ops[0]) == SSA_NAME
2805 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
2806 || (ops[1]
2807 && TREE_CODE (ops[1]) == SSA_NAME
2808 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
2809 || (ops[2]
2810 && TREE_CODE (ops[2]) == SSA_NAME
2811 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
2812 return false;
2813
2814 if (gimple_code (stmt) == GIMPLE_COND)
2815 {
2816 gcc_assert (rcode.is_tree_code ());
2817 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
2818 /* GIMPLE_CONDs condition may not throw. */
2819 && (!flag_exceptions
2820 || !cfun->can_throw_non_call_exceptions
2821 || !operation_could_trap_p (rcode,
2822 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
2823 false, NULL_TREE)))
2824 gimple_cond_set_condition (stmt, rcode, ops[0], ops[1]);
2825 else if (rcode == SSA_NAME)
2826 gimple_cond_set_condition (stmt, NE_EXPR, ops[0],
2827 build_zero_cst (TREE_TYPE (ops[0])));
2828 else if (rcode == INTEGER_CST)
2829 {
2830 if (integer_zerop (ops[0]))
2831 gimple_cond_make_false (stmt);
2832 else
2833 gimple_cond_make_true (stmt);
2834 }
2835 else if (!inplace)
2836 {
2837 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
2838 ops, seq);
2839 if (!res)
2840 return false;
2841 gimple_cond_set_condition (stmt, NE_EXPR, res,
2842 build_zero_cst (TREE_TYPE (res)));
2843 }
2844 else
2845 return false;
2846 if (dump_file && (dump_flags & TDF_DETAILS))
2847 {
2848 fprintf (dump_file, "gimple_simplified to ");
2849 if (!gimple_seq_empty_p (*seq))
2850 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
2851 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
2852 0, TDF_SLIM);
2853 }
2854 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
2855 return true;
2856 }
2857 else if (is_gimple_assign (stmt)
2858 && rcode.is_tree_code ())
2859 {
2860 if (!inplace
f3582e54 2861 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
e0ee10ed
RB
2862 {
2863 maybe_build_generic_op (rcode,
2864 TREE_TYPE (gimple_assign_lhs (stmt)),
2865 &ops[0], ops[1], ops[2]);
2866 gimple_assign_set_rhs_with_ops_1 (gsi, rcode,
2867 ops[0], ops[1], ops[2]);
2868 if (dump_file && (dump_flags & TDF_DETAILS))
2869 {
2870 fprintf (dump_file, "gimple_simplified to ");
2871 if (!gimple_seq_empty_p (*seq))
2872 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
2873 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
2874 0, TDF_SLIM);
2875 }
2876 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
2877 return true;
2878 }
2879 }
2880 else if (!inplace)
2881 {
2882 if (gimple_has_lhs (stmt))
2883 {
2884 tree lhs = gimple_get_lhs (stmt);
2885 maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
2886 ops, seq, lhs);
2887 if (dump_file && (dump_flags & TDF_DETAILS))
2888 {
2889 fprintf (dump_file, "gimple_simplified to ");
2890 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
2891 }
2892 gsi_replace_with_seq_vops (gsi, *seq);
2893 return true;
2894 }
2895 else
2896 gcc_unreachable ();
2897 }
2898
2899 return false;
2900}
2901
040292e7
RB
/* Canonicalize MEM_REFs invariant address operand after propagation.

   T points at a tree that may be (or contain, behind an ADDR_EXPR and
   handled components) a MEM_REF or TARGET_MEM_REF.  Rewrites *T in place
   into canonical form and returns true iff anything was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Strip an outer ADDR_EXPR so we canonicalize the address operand.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* Walk down component refs (COMPONENT_REF, ARRAY_REF, ...) to the
     innermost base object.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  /* Fold the component path into a constant byte offset and fold
	     that offset into the MEM_REF's offset operand instead.  */
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      /* After canonicalization the address operand must be valid for a
	 MEM_REF (or a debug expr decl).  */
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1)))
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
2983
cbdd87d4
RG
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.

   GSI points at the statement to fold; VALUEIZE is the callback used by
   the pattern-based simplifier to look through SSA edges.  Returns true
   iff the statement was changed.  When INPLACE is true the statement
   itself is never replaced by a new one.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize every memory argument and the call LHS.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize asm output and input operands.  */
	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  NOTE: in the inplace case no
     side sequence may be built, hence the NULL seq argument.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* Re-fetch the statement, it may have been replaced above.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In the inplace case only install the new RHS if it does not
	   need more operand slots than the old one.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* Inputs may only be folded to memory form when the
	       constraint allows memory but not a register.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference behind &ref and rebuild the ADDR_EXPR
		 with the original type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* Re-fetch again; the folding above may have replaced the stmt.  */
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
3223
e0ee10ed
RB
3224/* Valueziation callback that ends up not following SSA edges. */
3225
3226tree
3227no_follow_ssa_edges (tree)
3228{
3229 return NULL_TREE;
3230}
3231
45cc9f96
RB
3232/* Valueization callback that ends up following single-use SSA edges only. */
3233
3234tree
3235follow_single_use_edges (tree val)
3236{
3237 if (TREE_CODE (val) == SSA_NAME
3238 && !has_single_use (val))
3239 return NULL_TREE;
3240 return val;
3241}
3242
cbdd87d4
RG
3243/* Fold the statement pointed to by GSI. In some cases, this function may
3244 replace the whole statement with a new one. Returns true iff folding
3245 makes any changes.
3246 The statement pointed to by GSI should be in valid gimple form but may
3247 be in unfolded state as resulting from for example constant propagation
3248 which can produce *&x = 0. */
3249
3250bool
3251fold_stmt (gimple_stmt_iterator *gsi)
3252{
e0ee10ed
RB
3253 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3254}
3255
3256bool
3257fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3258{
3259 return fold_stmt_1 (gsi, false, valueize);
cbdd87d4
RG
3260}
3261
59401b92 3262/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
3263 *&x created by constant propagation are handled. The statement cannot
3264 be replaced with a new one. Return true if the statement was
3265 changed, false otherwise.
59401b92 3266 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
3267 be in unfolded state as resulting from for example constant propagation
3268 which can produce *&x = 0. */
3269
3270bool
59401b92 3271fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 3272{
59401b92 3273 gimple stmt = gsi_stmt (*gsi);
e0ee10ed 3274 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 3275 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
3276 return changed;
3277}
3278
e89065a1
SL
3279/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3280 if EXPR is null or we don't know how.
3281 If non-null, the result always has boolean type. */
3282
3283static tree
3284canonicalize_bool (tree expr, bool invert)
3285{
3286 if (!expr)
3287 return NULL_TREE;
3288 else if (invert)
3289 {
3290 if (integer_nonzerop (expr))
3291 return boolean_false_node;
3292 else if (integer_zerop (expr))
3293 return boolean_true_node;
3294 else if (TREE_CODE (expr) == SSA_NAME)
3295 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3296 build_int_cst (TREE_TYPE (expr), 0));
3297 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3298 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3299 boolean_type_node,
3300 TREE_OPERAND (expr, 0),
3301 TREE_OPERAND (expr, 1));
3302 else
3303 return NULL_TREE;
3304 }
3305 else
3306 {
3307 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3308 return expr;
3309 if (integer_nonzerop (expr))
3310 return boolean_true_node;
3311 else if (integer_zerop (expr))
3312 return boolean_false_node;
3313 else if (TREE_CODE (expr) == SSA_NAME)
3314 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3315 build_int_cst (TREE_TYPE (expr), 0));
3316 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3317 return fold_build2 (TREE_CODE (expr),
3318 boolean_type_node,
3319 TREE_OPERAND (expr, 0),
3320 TREE_OPERAND (expr, 1));
3321 else
3322 return NULL_TREE;
3323 }
3324}
3325
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case: EXPR is literally the same comparison.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean name is equivalent to name != 0 and name == 1.  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise look at EXPR's defining assignment.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) / (name == 1): compare EXPR against the
	     definition directly.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) / (name != 1): compare against the inverted
	     definition.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
3383
3384/* Check to see if two boolean expressions OP1 and OP2 are logically
3385 equivalent. */
3386
3387static bool
3388same_bool_result_p (const_tree op1, const_tree op2)
3389{
3390 /* Simple cases first. */
3391 if (operand_equal_p (op1, op2, 0))
3392 return true;
3393
3394 /* Check the cases where at least one of the operands is a comparison.
3395 These are a bit smarter than operand_equal_p in that they apply some
3396 identifies on SSA_NAMEs. */
3397 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
3398 && same_bool_comparison_p (op1, TREE_CODE (op2),
3399 TREE_OPERAND (op2, 0),
3400 TREE_OPERAND (op2, 1)))
3401 return true;
3402 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
3403 && same_bool_comparison_p (op2, TREE_CODE (op1),
3404 TREE_OPERAND (op1, 0),
3405 TREE_OPERAND (op1, 1)))
3406 return true;
3407
3408 /* Default case. */
3409 return false;
3410}
3411
3412/* Forward declarations for some mutually recursive functions. */
3413
3414static tree
3415and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3416 enum tree_code code2, tree op2a, tree op2b);
3417static tree
3418and_var_with_comparison (tree var, bool invert,
3419 enum tree_code code2, tree op2a, tree op2b);
3420static tree
3421and_var_with_comparison_1 (gimple stmt,
3422 enum tree_code code2, tree op2a, tree op2b);
3423static tree
3424or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3425 enum tree_code code2, tree op2a, tree op2b);
3426static tree
3427or_var_with_comparison (tree var, bool invert,
3428 enum tree_code code2, tree op2a, tree op2b);
3429static tree
3430or_var_with_comparison_1 (gimple stmt,
3431 enum tree_code code2, tree op2a, tree op2b);
3432
3433/* Helper function for and_comparisons_1: try to simplify the AND of the
3434 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3435 If INVERT is true, invert the value of the VAR before doing the AND.
3436 Return NULL_EXPR if we can't simplify this to a single expression. */
3437
3438static tree
3439and_var_with_comparison (tree var, bool invert,
3440 enum tree_code code2, tree op2a, tree op2b)
3441{
3442 tree t;
3443 gimple stmt = SSA_NAME_DEF_STMT (var);
3444
3445 /* We can only deal with variables whose definitions are assignments. */
3446 if (!is_gimple_assign (stmt))
3447 return NULL_TREE;
3448
3449 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3450 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3451 Then we only have to consider the simpler non-inverted cases. */
3452 if (invert)
3453 t = or_var_with_comparison_1 (stmt,
3454 invert_tree_comparison (code2, false),
3455 op2a, op2b);
3456 else
3457 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
3458 return canonicalize_bool (t, invert);
3459}
3460
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) / (op2a == 1) tests op2a for truth ...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ... while (op2a == 0) / (op2a != 1) tests it for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
3623
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP orders the two constant bounds: sign of op1b - op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* When the NE test is implied by the other test, the other
	     test alone is the result.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true for the falsehood tests (NAME == 0) / (NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* Non-boolean PHIs fall through to the default.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
3877
3878/* Try to simplify the AND of two comparisons, specified by
3879 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
3880 If this can be simplified to a single expression (without requiring
3881 introducing more SSA variables to hold intermediate values),
3882 return the resulting tree. Otherwise return NULL_TREE.
3883 If the result expression is non-null, it has boolean type. */
3884
3885tree
3886maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
3887 enum tree_code code2, tree op2a, tree op2b)
3888{
3889 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
3890 if (t)
3891 return t;
3892 else
3893 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
3894}
3895
3896/* Helper function for or_comparisons_1: try to simplify the OR of the
3897 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3898 If INVERT is true, invert the value of VAR before doing the OR.
3899 Return NULL_EXPR if we can't simplify this to a single expression. */
3900
3901static tree
3902or_var_with_comparison (tree var, bool invert,
3903 enum tree_code code2, tree op2a, tree op2b)
3904{
3905 tree t;
3906 gimple stmt = SSA_NAME_DEF_STMT (var);
3907
3908 /* We can only deal with variables whose definitions are assignments. */
3909 if (!is_gimple_assign (stmt))
3910 return NULL_TREE;
3911
3912 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3913 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
3914 Then we only have to consider the simpler non-inverted cases. */
3915 if (invert)
3916 t = and_var_with_comparison_1 (stmt,
3917 invert_tree_comparison (code2, false),
3918 op2a, op2b);
3919 else
3920 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
3921 return canonicalize_bool (t, invert);
3922}
3923
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* TRUE_TEST_VAR / FALSE_TEST_VAR record OP2A when the second comparison
     is equivalent to testing OP2A for true resp. false; they feed the
     boolean identities below.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      /* PARTIAL holds the simplification of the first inner operand
	 ORed with (op2a code2 op2b), if we found one, so the second
	 half of the function can combine it with the other operand.  */
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  /* No simplification found; caller falls back to other strategies.  */
  return NULL_TREE;
}
4087
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP orders the two constants: <0, 0 or >0 for op1b <, == or > op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise chose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for false.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4341
4342/* Try to simplify the OR of two comparisons, specified by
4343 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4344 If this can be simplified to a single expression (without requiring
4345 introducing more SSA variables to hold intermediate values),
4346 return the resulting tree. Otherwise return NULL_TREE.
4347 If the result expression is non-null, it has boolean type. */
4348
4349tree
4350maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4351 enum tree_code code2, tree op2a, tree op2b)
4352{
4353 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4354 if (t)
4355 return t;
4356 else
4357 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4358}
cfef45c8
RG
4359
4360
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  /* First give the generic match-and-simplify machinery a chance; accept
     its answer only when it produced a leaf value (or an ADDR_EXPR),
     i.e. something that needs no extra SSA temporaries.  */
  code_helper rcode;
  tree ops[3] = {};
  /* ??? The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, no_follow_ssa_edges)
      && rcode.is_tree_code ()
      && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
	  || ((tree_code) rcode) == ADDR_EXPR)
      && is_gimple_val (ops[0]))
    {
      tree res = ops[0];
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Match-and-simplified ");
	  print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, res, 0);
	  fprintf (dump_file, "\n");
	}
      return res;
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      /* A vector CONSTRUCTOR folds only when every element
		 valueizes to a literal constant.  */
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      /* Replace the SSA base with its invariant address so
			 fold_const_aggregate_ref_1 below can see through
			 the dereference.  */
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr_loc
			   (loc,
			    fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      /* Fold embedded expressions in ternary codes.  */
	      if ((subcode == COND_EXPR
		   || subcode == VEC_COND_EXPR)
		  && COMPARISON_CLASS_P (op0))
		{
		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
					      TREE_TYPE (op0), op00, op01);
		  if (tem)
		    op0 = tem;
		}

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;

	if (gimple_call_internal_p (stmt))
	  {
	    /* Only the UBSAN overflow-checking internal calls are folded;
	       map each to its plain arithmetic tree code.  */
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    /* Reject overflowed results: the call exists precisely to
	       detect overflow, so it cannot be folded away then.  */
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (stmt), retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
4648
4649/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4650 Returns NULL_TREE if folding to a constant is not possible, otherwise
4651 returns a constant according to is_gimple_min_invariant. */
4652
4653tree
4654gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
4655{
4656 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
4657 if (res && is_gimple_min_invariant (res))
4658 return res;
4659 return NULL_TREE;
4660}
4661
4662
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

static tree fold_ctor_reference (tree type, tree ctor,
				 unsigned HOST_WIDE_INT offset,
				 unsigned HOST_WIDE_INT size, tree);

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   BIT_OFFSET is both read and written: offsets discovered while
   looking through MEM_REFs and nested references are added to the
   value the caller passed in.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant byte offset into *BIT_OFFSET and
	 strip the dereference down to the underlying decl.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only constant-sized, non-variable extents can be resolved.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
4737
cfef45c8
RG
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  /* Truncate/sign-extend the computed index to the precision and
     signedness of the array's index type so comparisons against
     constructor indices below are well-defined.  */
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  /* Start one below LOW_BOUND so the "next index" increment in the loop
     yields LOW_BOUND for the first constructor element without a
     explicit index.  */
  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
4837
4838/* CTOR is CONSTRUCTOR of an aggregate or vector.
4839 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
4840
4841static tree
4842fold_nonarray_ctor_reference (tree type, tree ctor,
4843 unsigned HOST_WIDE_INT offset,
c44c2088
JH
4844 unsigned HOST_WIDE_INT size,
4845 tree from_decl)
cfef45c8
RG
4846{
4847 unsigned HOST_WIDE_INT cnt;
4848 tree cfield, cval;
4849
4850 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
4851 cval)
4852 {
4853 tree byte_offset = DECL_FIELD_OFFSET (cfield);
4854 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
4855 tree field_size = DECL_SIZE (cfield);
807e902e
KZ
4856 offset_int bitoffset;
4857 offset_int bitoffset_end, access_end;
cfef45c8
RG
4858
4859 /* Variable sized objects in static constructors makes no sense,
4860 but field_size can be NULL for flexible array members. */
4861 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
4862 && TREE_CODE (byte_offset) == INTEGER_CST
4863 && (field_size != NULL_TREE
4864 ? TREE_CODE (field_size) == INTEGER_CST
4865 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
4866
4867 /* Compute bit offset of the field. */
807e902e
KZ
4868 bitoffset = (wi::to_offset (field_offset)
4869 + wi::lshift (wi::to_offset (byte_offset),
4870 LOG2_BITS_PER_UNIT));
cfef45c8
RG
4871 /* Compute bit offset where the field ends. */
4872 if (field_size != NULL_TREE)
807e902e 4873 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 4874 else
807e902e 4875 bitoffset_end = 0;
cfef45c8 4876
807e902e 4877 access_end = offset_int (offset) + size;
b8b2b009
JJ
4878
4879 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
4880 [BITOFFSET, BITOFFSET_END)? */
807e902e 4881 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 4882 && (field_size == NULL_TREE
807e902e 4883 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 4884 {
807e902e 4885 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8
RG
4886 /* We do have overlap. Now see if field is large enough to
4887 cover the access. Give up for accesses spanning multiple
4888 fields. */
807e902e 4889 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 4890 return NULL_TREE;
807e902e 4891 if (wi::lts_p (offset, bitoffset))
b8b2b009 4892 return NULL_TREE;
cfef45c8 4893 return fold_ctor_reference (type, cval,
27bcd47c 4894 inner_offset.to_uhwi (), size,
c44c2088 4895 from_decl);
cfef45c8
RG
4896 }
4897 }
4898 /* When memory is not explicitely mentioned in constructor, it is 0. */
4899 return build_zero_cst (type);
4900}
4901
4902/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
4903 to the memory at bit OFFSET. */
4904
4905static tree
4906fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
c44c2088 4907 unsigned HOST_WIDE_INT size, tree from_decl)
cfef45c8
RG
4908{
4909 tree ret;
4910
4911 /* We found the field with exact match. */
4912 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
4913 && !offset)
9d60be38 4914 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8
RG
4915
4916 /* We are at the end of walk, see if we can view convert the
4917 result. */
4918 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
4919 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3d8208ce
TP
4920 && !compare_tree_int (TYPE_SIZE (type), size)
4921 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
cfef45c8 4922 {
9d60be38 4923 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8
RG
4924 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
4925 if (ret)
4926 STRIP_NOPS (ret);
4927 return ret;
4928 }
b2505143
RB
4929 /* For constants and byte-aligned/sized reads try to go through
4930 native_encode/interpret. */
4931 if (CONSTANT_CLASS_P (ctor)
4932 && BITS_PER_UNIT == 8
4933 && offset % BITS_PER_UNIT == 0
4934 && size % BITS_PER_UNIT == 0
4935 && size <= MAX_BITSIZE_MODE_ANY_MODE)
4936 {
4937 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
4938 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
4939 offset / BITS_PER_UNIT) > 0)
4940 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
4941 }
cfef45c8
RG
4942 if (TREE_CODE (ctor) == CONSTRUCTOR)
4943 {
4944
eb8f1123
RG
4945 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
4946 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
c44c2088
JH
4947 return fold_array_ctor_reference (type, ctor, offset, size,
4948 from_decl);
cfef45c8 4949 else
c44c2088
JH
4950 return fold_nonarray_ctor_reference (type, ctor, offset, size,
4951 from_decl);
cfef45c8
RG
4952 }
4953
4954 return NULL_TREE;
4955}
4956
4957/* Return the tree representing the element referenced by T if T is an
4958 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
4959 names using VALUEIZE. Return NULL_TREE otherwise. */
4960
4961tree
4962fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
4963{
4964 tree ctor, idx, base;
4965 HOST_WIDE_INT offset, size, max_size;
4966 tree tem;
4967
f8a7df45
RG
4968 if (TREE_THIS_VOLATILE (t))
4969 return NULL_TREE;
4970
cfef45c8
RG
4971 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4972 return get_symbol_constant_value (t);
4973
4974 tem = fold_read_from_constant_string (t);
4975 if (tem)
4976 return tem;
4977
4978 switch (TREE_CODE (t))
4979 {
4980 case ARRAY_REF:
4981 case ARRAY_RANGE_REF:
4982 /* Constant indexes are handled well by get_base_constructor.
4983 Only special case variable offsets.
4984 FIXME: This code can't handle nested references with variable indexes
4985 (they will be handled only by iteration of ccp). Perhaps we can bring
4986 get_ref_base_and_extent here and make it use a valueize callback. */
4987 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
4988 && valueize
4989 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
b48e22b2 4990 && TREE_CODE (idx) == INTEGER_CST)
cfef45c8
RG
4991 {
4992 tree low_bound, unit_size;
4993
4994 /* If the resulting bit-offset is constant, track it. */
4995 if ((low_bound = array_ref_low_bound (t),
b48e22b2 4996 TREE_CODE (low_bound) == INTEGER_CST)
cfef45c8 4997 && (unit_size = array_ref_element_size (t),
807e902e 4998 tree_fits_uhwi_p (unit_size)))
cfef45c8 4999 {
807e902e
KZ
5000 offset_int woffset
5001 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5002 TYPE_PRECISION (TREE_TYPE (idx)));
5003
5004 if (wi::fits_shwi_p (woffset))
5005 {
5006 offset = woffset.to_shwi ();
5007 /* TODO: This code seems wrong, multiply then check
5008 to see if it fits. */
5009 offset *= tree_to_uhwi (unit_size);
5010 offset *= BITS_PER_UNIT;
5011
5012 base = TREE_OPERAND (t, 0);
5013 ctor = get_base_constructor (base, &offset, valueize);
5014 /* Empty constructor. Always fold to 0. */
5015 if (ctor == error_mark_node)
5016 return build_zero_cst (TREE_TYPE (t));
5017 /* Out of bound array access. Value is undefined,
5018 but don't fold. */
5019 if (offset < 0)
5020 return NULL_TREE;
5021 /* We can not determine ctor. */
5022 if (!ctor)
5023 return NULL_TREE;
5024 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5025 tree_to_uhwi (unit_size)
5026 * BITS_PER_UNIT,
5027 base);
5028 }
cfef45c8
RG
5029 }
5030 }
5031 /* Fallthru. */
5032
5033 case COMPONENT_REF:
5034 case BIT_FIELD_REF:
5035 case TARGET_MEM_REF:
5036 case MEM_REF:
5037 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
5038 ctor = get_base_constructor (base, &offset, valueize);
5039
5040 /* Empty constructor. Always fold to 0. */
5041 if (ctor == error_mark_node)
5042 return build_zero_cst (TREE_TYPE (t));
5043 /* We do not know precise address. */
5044 if (max_size == -1 || max_size != size)
5045 return NULL_TREE;
5046 /* We can not determine ctor. */
5047 if (!ctor)
5048 return NULL_TREE;
5049
5050 /* Out of bound array access. Value is undefined, but don't fold. */
5051 if (offset < 0)
5052 return NULL_TREE;
5053
c44c2088
JH
5054 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5055 base);
cfef45c8
RG
5056
5057 case REALPART_EXPR:
5058 case IMAGPART_EXPR:
5059 {
5060 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5061 if (c && TREE_CODE (c) == COMPLEX_CST)
5062 return fold_build1_loc (EXPR_LOCATION (t),
5063 TREE_CODE (t), TREE_TYPE (t), c);
5064 break;
5065 }
5066
5067 default:
5068 break;
5069 }
5070
5071 return NULL_TREE;
5072}
5073
5074tree
5075fold_const_aggregate_ref (tree t)
5076{
5077 return fold_const_aggregate_ref_1 (t, NULL);
5078}
06bc3ec7 5079
85942f45 5080/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
5081 at OFFSET.
5082 Set CAN_REFER if non-NULL to false if method
5083 is not referable or if the virtual table is ill-formed (such as rewriten
5084 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
81fa35bd
MJ
5085
5086tree
85942f45
JH
5087gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5088 tree v,
ec77d61f
JH
5089 unsigned HOST_WIDE_INT offset,
5090 bool *can_refer)
81fa35bd 5091{
85942f45
JH
5092 tree vtable = v, init, fn;
5093 unsigned HOST_WIDE_INT size;
8c311b50
JH
5094 unsigned HOST_WIDE_INT elt_size, access_index;
5095 tree domain_type;
81fa35bd 5096
ec77d61f
JH
5097 if (can_refer)
5098 *can_refer = true;
5099
9de2f554 5100 /* First of all double check we have virtual table. */
81fa35bd 5101 if (TREE_CODE (v) != VAR_DECL
2aa3da06 5102 || !DECL_VIRTUAL_P (v))
ec77d61f
JH
5103 {
5104 gcc_assert (in_lto_p);
5105 /* Pass down that we lost track of the target. */
5106 if (can_refer)
5107 *can_refer = false;
5108 return NULL_TREE;
5109 }
9de2f554 5110
2aa3da06
JH
5111 init = ctor_for_folding (v);
5112
9de2f554 5113 /* The virtual tables should always be born with constructors
2aa3da06
JH
5114 and we always should assume that they are avaialble for
5115 folding. At the moment we do not stream them in all cases,
5116 but it should never happen that ctor seem unreachable. */
5117 gcc_assert (init);
5118 if (init == error_mark_node)
5119 {
5120 gcc_assert (in_lto_p);
ec77d61f
JH
5121 /* Pass down that we lost track of the target. */
5122 if (can_refer)
5123 *can_refer = false;
2aa3da06
JH
5124 return NULL_TREE;
5125 }
81fa35bd 5126 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 5127 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 5128 offset *= BITS_PER_UNIT;
81fa35bd 5129 offset += token * size;
9de2f554 5130
8c311b50
JH
5131 /* Lookup the value in the constructor that is assumed to be array.
5132 This is equivalent to
5133 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5134 offset, size, NULL);
5135 but in a constant time. We expect that frontend produced a simple
5136 array without indexed initializers. */
5137
5138 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5139 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5140 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5141 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5142
5143 access_index = offset / BITS_PER_UNIT / elt_size;
5144 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5145
5146 /* This code makes an assumption that there are no
5147 indexed fileds produced by C++ FE, so we can directly index the array. */
5148 if (access_index < CONSTRUCTOR_NELTS (init))
5149 {
5150 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5151 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5152 STRIP_NOPS (fn);
5153 }
5154 else
5155 fn = NULL;
9de2f554
JH
5156
5157 /* For type inconsistent program we may end up looking up virtual method
5158 in virtual table that does not contain TOKEN entries. We may overrun
5159 the virtual table and pick up a constant or RTTI info pointer.
5160 In any case the call is undefined. */
5161 if (!fn
5162 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5163 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5164 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5165 else
5166 {
5167 fn = TREE_OPERAND (fn, 0);
5168
5169 /* When cgraph node is missing and function is not public, we cannot
5170 devirtualize. This can happen in WHOPR when the actual method
5171 ends up in other partition, because we found devirtualization
5172 possibility too late. */
5173 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
5174 {
5175 if (can_refer)
5176 {
5177 *can_refer = false;
5178 return fn;
5179 }
5180 return NULL_TREE;
5181 }
9de2f554 5182 }
81fa35bd 5183
7501ca28
RG
5184 /* Make sure we create a cgraph node for functions we'll reference.
5185 They can be non-existent if the reference comes from an entry
5186 of an external vtable for example. */
d52f5295 5187 cgraph_node::get_create (fn);
7501ca28 5188
81fa35bd
MJ
5189 return fn;
5190}
5191
85942f45
JH
5192/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5193 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5194 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
5195 OBJ_TYPE_REF_OBJECT(REF).
5196 Set CAN_REFER if non-NULL to false if method
5197 is not referable or if the virtual table is ill-formed (such as rewriten
5198 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
5199
5200tree
ec77d61f
JH
5201gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5202 bool *can_refer)
85942f45
JH
5203{
5204 unsigned HOST_WIDE_INT offset;
5205 tree v;
5206
5207 v = BINFO_VTABLE (known_binfo);
5208 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5209 if (!v)
5210 return NULL_TREE;
5211
5212 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
5213 {
5214 if (can_refer)
5215 *can_refer = false;
5216 return NULL_TREE;
5217 }
5218 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
5219}
5220
06bc3ec7
BS
5221/* Return true iff VAL is a gimple expression that is known to be
5222 non-negative. Restricted to floating-point inputs. */
5223
5224bool
5225gimple_val_nonnegative_real_p (tree val)
5226{
5227 gimple def_stmt;
5228
5229 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
5230
5231 /* Use existing logic for non-gimple trees. */
5232 if (tree_expr_nonnegative_p (val))
5233 return true;
5234
5235 if (TREE_CODE (val) != SSA_NAME)
5236 return false;
5237
5238 /* Currently we look only at the immediately defining statement
5239 to make this determination, since recursion on defining
5240 statements of operands can lead to quadratic behavior in the
5241 worst case. This is expected to catch almost all occurrences
5242 in practice. It would be possible to implement limited-depth
5243 recursion if important cases are lost. Alternatively, passes
5244 that need this information (such as the pow/powi lowering code
5245 in the cse_sincos pass) could be revised to provide it through
5246 dataflow propagation. */
5247
5248 def_stmt = SSA_NAME_DEF_STMT (val);
5249
5250 if (is_gimple_assign (def_stmt))
5251 {
5252 tree op0, op1;
5253
5254 /* See fold-const.c:tree_expr_nonnegative_p for additional
5255 cases that could be handled with recursion. */
5256
5257 switch (gimple_assign_rhs_code (def_stmt))
5258 {
5259 case ABS_EXPR:
5260 /* Always true for floating-point operands. */
5261 return true;
5262
5263 case MULT_EXPR:
5264 /* True if the two operands are identical (since we are
5265 restricted to floating-point inputs). */
5266 op0 = gimple_assign_rhs1 (def_stmt);
5267 op1 = gimple_assign_rhs2 (def_stmt);
5268
5269 if (op0 == op1
5270 || operand_equal_p (op0, op1, 0))
5271 return true;
5272
5273 default:
5274 return false;
5275 }
5276 }
5277 else if (is_gimple_call (def_stmt))
5278 {
5279 tree fndecl = gimple_call_fndecl (def_stmt);
5280 if (fndecl
5281 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5282 {
5283 tree arg1;
5284
5285 switch (DECL_FUNCTION_CODE (fndecl))
5286 {
5287 CASE_FLT_FN (BUILT_IN_ACOS):
5288 CASE_FLT_FN (BUILT_IN_ACOSH):
5289 CASE_FLT_FN (BUILT_IN_CABS):
5290 CASE_FLT_FN (BUILT_IN_COSH):
5291 CASE_FLT_FN (BUILT_IN_ERFC):
5292 CASE_FLT_FN (BUILT_IN_EXP):
5293 CASE_FLT_FN (BUILT_IN_EXP10):
5294 CASE_FLT_FN (BUILT_IN_EXP2):
5295 CASE_FLT_FN (BUILT_IN_FABS):
5296 CASE_FLT_FN (BUILT_IN_FDIM):
5297 CASE_FLT_FN (BUILT_IN_HYPOT):
5298 CASE_FLT_FN (BUILT_IN_POW10):
5299 return true;
5300
5301 CASE_FLT_FN (BUILT_IN_SQRT):
5302 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
5303 nonnegative inputs. */
5304 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
5305 return true;
5306
5307 break;
5308
5309 CASE_FLT_FN (BUILT_IN_POWI):
5310 /* True if the second argument is an even integer. */
5311 arg1 = gimple_call_arg (def_stmt, 1);
5312
5313 if (TREE_CODE (arg1) == INTEGER_CST
5314 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
5315 return true;
5316
5317 break;
5318
5319 CASE_FLT_FN (BUILT_IN_POW):
5320 /* True if the second argument is an even integer-valued
5321 real. */
5322 arg1 = gimple_call_arg (def_stmt, 1);
5323
5324 if (TREE_CODE (arg1) == REAL_CST)
5325 {
5326 REAL_VALUE_TYPE c;
5327 HOST_WIDE_INT n;
5328
5329 c = TREE_REAL_CST (arg1);
5330 n = real_to_integer (&c);
5331
5332 if ((n & 1) == 0)
5333 {
5334 REAL_VALUE_TYPE cint;
807e902e 5335 real_from_integer (&cint, VOIDmode, n, SIGNED);
06bc3ec7
BS
5336 if (real_identical (&c, &cint))
5337 return true;
5338 }
5339 }
5340
5341 break;
5342
5343 default:
5344 return false;
5345 }
5346 }
5347 }
5348
5349 return false;
5350}
b184c8f1
AM
5351
5352/* Given a pointer value OP0, return a simplified version of an
5353 indirection through OP0, or NULL_TREE if no simplification is
5354 possible. Note that the resulting type may be different from
5355 the type pointed to in the sense that it is still compatible
5356 from the langhooks point of view. */
5357
5358tree
5359gimple_fold_indirect_ref (tree t)
5360{
5361 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5362 tree sub = t;
5363 tree subtype;
5364
5365 STRIP_NOPS (sub);
5366 subtype = TREE_TYPE (sub);
5367 if (!POINTER_TYPE_P (subtype))
5368 return NULL_TREE;
5369
5370 if (TREE_CODE (sub) == ADDR_EXPR)
5371 {
5372 tree op = TREE_OPERAND (sub, 0);
5373 tree optype = TREE_TYPE (op);
5374 /* *&p => p */
5375 if (useless_type_conversion_p (type, optype))
5376 return op;
5377
5378 /* *(foo *)&fooarray => fooarray[0] */
5379 if (TREE_CODE (optype) == ARRAY_TYPE
5380 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5381 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5382 {
5383 tree type_domain = TYPE_DOMAIN (optype);
5384 tree min_val = size_zero_node;
5385 if (type_domain && TYPE_MIN_VALUE (type_domain))
5386 min_val = TYPE_MIN_VALUE (type_domain);
5387 if (TREE_CODE (min_val) == INTEGER_CST)
5388 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5389 }
5390 /* *(foo *)&complexfoo => __real__ complexfoo */
5391 else if (TREE_CODE (optype) == COMPLEX_TYPE
5392 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5393 return fold_build1 (REALPART_EXPR, type, op);
5394 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5395 else if (TREE_CODE (optype) == VECTOR_TYPE
5396 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5397 {
5398 tree part_width = TYPE_SIZE (type);
5399 tree index = bitsize_int (0);
5400 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5401 }
5402 }
5403
5404 /* *(p + CST) -> ... */
5405 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5406 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5407 {
5408 tree addr = TREE_OPERAND (sub, 0);
5409 tree off = TREE_OPERAND (sub, 1);
5410 tree addrtype;
5411
5412 STRIP_NOPS (addr);
5413 addrtype = TREE_TYPE (addr);
5414
5415 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5416 if (TREE_CODE (addr) == ADDR_EXPR
5417 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5418 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
cc269bb6 5419 && tree_fits_uhwi_p (off))
b184c8f1 5420 {
ae7e9ddd 5421 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
b184c8f1
AM
5422 tree part_width = TYPE_SIZE (type);
5423 unsigned HOST_WIDE_INT part_widthi
9439e9a1 5424 = tree_to_shwi (part_width) / BITS_PER_UNIT;
b184c8f1
AM
5425 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5426 tree index = bitsize_int (indexi);
5427 if (offset / part_widthi
e934916c 5428 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
b184c8f1
AM
5429 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5430 part_width, index);
5431 }
5432
5433 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5434 if (TREE_CODE (addr) == ADDR_EXPR
5435 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5436 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5437 {
5438 tree size = TYPE_SIZE_UNIT (type);
5439 if (tree_int_cst_equal (size, off))
5440 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5441 }
5442
5443 /* *(p + CST) -> MEM_REF <p, CST>. */
5444 if (TREE_CODE (addr) != ADDR_EXPR
5445 || DECL_P (TREE_OPERAND (addr, 0)))
5446 return fold_build2 (MEM_REF, type,
5447 addr,
807e902e 5448 wide_int_to_tree (ptype, off));
b184c8f1
AM
5449 }
5450
5451 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5452 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5453 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5454 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5455 {
5456 tree type_domain;
5457 tree min_val = size_zero_node;
5458 tree osub = sub;
5459 sub = gimple_fold_indirect_ref (sub);
5460 if (! sub)
5461 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5462 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5463 if (type_domain && TYPE_MIN_VALUE (type_domain))
5464 min_val = TYPE_MIN_VALUE (type_domain);
5465 if (TREE_CODE (min_val) == INTEGER_CST)
5466 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
5467 }
5468
5469 return NULL_TREE;
5470}
19e51b40
JJ
5471
5472/* Return true if CODE is an operation that when operating on signed
5473 integer types involves undefined behavior on overflow and the
5474 operation can be expressed with unsigned arithmetic. */
5475
5476bool
5477arith_code_with_undefined_signed_overflow (tree_code code)
5478{
5479 switch (code)
5480 {
5481 case PLUS_EXPR:
5482 case MINUS_EXPR:
5483 case MULT_EXPR:
5484 case NEGATE_EXPR:
5485 case POINTER_PLUS_EXPR:
5486 return true;
5487 default:
5488 return false;
5489 }
5490}
5491
5492/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
5493 operation that can be transformed to unsigned arithmetic by converting
5494 its operand, carrying out the operation in the corresponding unsigned
5495 type and converting the result back to the original type.
5496
5497 Returns a sequence of statements that replace STMT and also contain
5498 a modified form of STMT itself. */
5499
5500gimple_seq
5501rewrite_to_defined_overflow (gimple stmt)
5502{
5503 if (dump_file && (dump_flags & TDF_DETAILS))
5504 {
5505 fprintf (dump_file, "rewriting stmt with undefined signed "
5506 "overflow ");
5507 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5508 }
5509
5510 tree lhs = gimple_assign_lhs (stmt);
5511 tree type = unsigned_type_for (TREE_TYPE (lhs));
5512 gimple_seq stmts = NULL;
5513 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
5514 {
5515 gimple_seq stmts2 = NULL;
5516 gimple_set_op (stmt, i,
5517 force_gimple_operand (fold_convert (type,
5518 gimple_op (stmt, i)),
5519 &stmts2, true, NULL_TREE));
5520 gimple_seq_add_seq (&stmts, stmts2);
5521 }
5522 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
5523 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
5524 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
5525 gimple_seq_add_stmt (&stmts, stmt);
5526 gimple cvt = gimple_build_assign_with_ops
5527 (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
5528 gimple_seq_add_stmt (&stmts, cvt);
5529
5530 return stmts;
5531}
d4f5cd5e 5532
3d2cf79f
RB
5533
5534/* Build the expression CODE OP0 of type TYPE with location LOC,
5535 simplifying it first if possible using VALUEIZE if not NULL.
5536 OP0 is expected to be valueized already. Returns the built
5537 expression value and appends statements possibly defining it
5538 to SEQ. */
5539
5540tree
5541gimple_build (gimple_seq *seq, location_t loc,
5542 enum tree_code code, tree type, tree op0,
5543 tree (*valueize)(tree))
5544{
5545 tree res = gimple_simplify (code, type, op0, seq, valueize);
5546 if (!res)
5547 {
5548 if (gimple_in_ssa_p (cfun))
5549 res = make_ssa_name (type, NULL);
5550 else
5551 res = create_tmp_reg (type, NULL);
5552 gimple stmt;
5553 if (code == REALPART_EXPR
5554 || code == IMAGPART_EXPR
5555 || code == VIEW_CONVERT_EXPR)
5556 stmt = gimple_build_assign_with_ops (code, res,
5557 build1 (code, type,
5558 op0), NULL_TREE);
5559 else
5560 stmt = gimple_build_assign_with_ops (code, res, op0, NULL_TREE);
5561 gimple_set_location (stmt, loc);
5562 gimple_seq_add_stmt_without_update (seq, stmt);
5563 }
5564 return res;
5565}
5566
5567/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
5568 simplifying it first if possible using VALUEIZE if not NULL.
5569 OP0 and OP1 are expected to be valueized already. Returns the built
5570 expression value and appends statements possibly defining it
5571 to SEQ. */
5572
5573tree
5574gimple_build (gimple_seq *seq, location_t loc,
5575 enum tree_code code, tree type, tree op0, tree op1,
5576 tree (*valueize)(tree))
5577{
5578 tree res = gimple_simplify (code, type, op0, op1, seq, valueize);
5579 if (!res)
5580 {
5581 if (gimple_in_ssa_p (cfun))
5582 res = make_ssa_name (type, NULL);
5583 else
5584 res = create_tmp_reg (type, NULL);
5585 gimple stmt = gimple_build_assign_with_ops (code, res, op0, op1);
5586 gimple_set_location (stmt, loc);
5587 gimple_seq_add_stmt_without_update (seq, stmt);
5588 }
5589 return res;
5590}
5591
5592/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
5593 simplifying it first if possible using VALUEIZE if not NULL.
5594 OP0, OP1 and OP2 are expected to be valueized already. Returns the built
5595 expression value and appends statements possibly defining it
5596 to SEQ. */
5597
5598tree
5599gimple_build (gimple_seq *seq, location_t loc,
5600 enum tree_code code, tree type, tree op0, tree op1, tree op2,
5601 tree (*valueize)(tree))
5602{
5603 tree res = gimple_simplify (code, type, op0, op1, op2,
5604 seq, valueize);
5605 if (!res)
5606 {
5607 if (gimple_in_ssa_p (cfun))
5608 res = make_ssa_name (type, NULL);
5609 else
5610 res = create_tmp_reg (type, NULL);
5611 gimple stmt;
5612 if (code == BIT_FIELD_REF)
5613 stmt = gimple_build_assign_with_ops (code, res,
5614 build3 (BIT_FIELD_REF, type,
5615 op0, op1, op2),
5616 NULL_TREE);
5617 else
5618 stmt = gimple_build_assign_with_ops (code, res, op0, op1, op2);
5619 gimple_set_location (stmt, loc);
5620 gimple_seq_add_stmt_without_update (seq, stmt);
5621 }
5622 return res;
5623}
5624
5625/* Build the call FN (ARG0) with a result of type TYPE
5626 (or no result if TYPE is void) with location LOC,
5627 simplifying it first if possible using VALUEIZE if not NULL.
5628 ARG0 is expected to be valueized already. Returns the built
5629 expression value (or NULL_TREE if TYPE is void) and appends
5630 statements possibly defining it to SEQ. */
5631
5632tree
5633gimple_build (gimple_seq *seq, location_t loc,
5634 enum built_in_function fn, tree type, tree arg0,
5635 tree (*valueize)(tree))
5636{
5637 tree res = gimple_simplify (fn, type, arg0, seq, valueize);
5638 if (!res)
5639 {
5640 tree decl = builtin_decl_implicit (fn);
5641 gimple stmt = gimple_build_call (decl, 1, arg0);
5642 if (!VOID_TYPE_P (type))
5643 {
5644 if (gimple_in_ssa_p (cfun))
5645 res = make_ssa_name (type, NULL);
5646 else
5647 res = create_tmp_reg (type, NULL);
5648 gimple_call_set_lhs (stmt, res);
5649 }
5650 gimple_set_location (stmt, loc);
5651 gimple_seq_add_stmt_without_update (seq, stmt);
5652 }
5653 return res;
5654}
5655
5656/* Build the call FN (ARG0, ARG1) with a result of type TYPE
5657 (or no result if TYPE is void) with location LOC,
5658 simplifying it first if possible using VALUEIZE if not NULL.
5659 ARG0 is expected to be valueized already. Returns the built
5660 expression value (or NULL_TREE if TYPE is void) and appends
5661 statements possibly defining it to SEQ. */
5662
5663tree
5664gimple_build (gimple_seq *seq, location_t loc,
5665 enum built_in_function fn, tree type, tree arg0, tree arg1,
5666 tree (*valueize)(tree))
5667{
5668 tree res = gimple_simplify (fn, type, arg0, arg1, seq, valueize);
5669 if (!res)
5670 {
5671 tree decl = builtin_decl_implicit (fn);
5672 gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
5673 if (!VOID_TYPE_P (type))
5674 {
5675 if (gimple_in_ssa_p (cfun))
5676 res = make_ssa_name (type, NULL);
5677 else
5678 res = create_tmp_reg (type, NULL);
5679 gimple_call_set_lhs (stmt, res);
5680 }
5681 gimple_set_location (stmt, loc);
5682 gimple_seq_add_stmt_without_update (seq, stmt);
5683 }
5684 return res;
5685}
5686
5687/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
5688 (or no result if TYPE is void) with location LOC,
5689 simplifying it first if possible using VALUEIZE if not NULL.
5690 ARG0 is expected to be valueized already. Returns the built
5691 expression value (or NULL_TREE if TYPE is void) and appends
5692 statements possibly defining it to SEQ. */
5693
5694tree
5695gimple_build (gimple_seq *seq, location_t loc,
5696 enum built_in_function fn, tree type,
5697 tree arg0, tree arg1, tree arg2,
5698 tree (*valueize)(tree))
5699{
5700 tree res = gimple_simplify (fn, type, arg0, arg1, arg2, seq, valueize);
5701 if (!res)
5702 {
5703 tree decl = builtin_decl_implicit (fn);
5704 gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
5705 if (!VOID_TYPE_P (type))
5706 {
5707 if (gimple_in_ssa_p (cfun))
5708 res = make_ssa_name (type, NULL);
5709 else
5710 res = create_tmp_reg (type, NULL);
5711 gimple_call_set_lhs (stmt, res);
5712 }
5713 gimple_set_location (stmt, loc);
5714 gimple_seq_add_stmt_without_update (seq, stmt);
5715 }
5716 return res;
5717}
5718
5719/* Build the conversion (TYPE) OP with a result of type TYPE
5720 with location LOC if such conversion is neccesary in GIMPLE,
5721 simplifying it first.
5722 Returns the built expression value and appends
5723 statements possibly defining it to SEQ. */
d4f5cd5e
RB
5724
5725tree
5726gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
5727{
5728 if (useless_type_conversion_p (type, TREE_TYPE (op)))
5729 return op;
3d2cf79f 5730 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 5731}