]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
2014-10-15 Paolo Carlini <paolo.carlini@oracle.com>
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
23a5b65a 2 Copyright (C) 2010-2014 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
d8a2d370
DN
26#include "stringpool.h"
27#include "expr.h"
28#include "stmt.h"
29#include "stor-layout.h"
cbdd87d4 30#include "flags.h"
cbdd87d4 31#include "function.h"
7ee2468b 32#include "dumpfile.h"
442b4905 33#include "bitmap.h"
2fb9a547
AM
34#include "basic-block.h"
35#include "tree-ssa-alias.h"
36#include "internal-fn.h"
37#include "gimple-fold.h"
38#include "gimple-expr.h"
39#include "is-a.h"
18f429e2 40#include "gimple.h"
45b0be94 41#include "gimplify.h"
5be5c238 42#include "gimple-iterator.h"
442b4905
AM
43#include "gimple-ssa.h"
44#include "tree-ssanames.h"
45#include "tree-into-ssa.h"
46#include "tree-dfa.h"
7a300452 47#include "tree-ssa.h"
cbdd87d4 48#include "tree-ssa-propagate.h"
cbdd87d4 49#include "target.h"
450ad0cd
JH
50#include "ipa-utils.h"
51#include "gimple-pretty-print.h"
4484a35a 52#include "tree-ssa-address.h"
862d0b35 53#include "langhooks.h"
19e51b40 54#include "gimplify-me.h"
2b5f0895 55#include "dbgcnt.h"
9b2b7279 56#include "builtins.h"
fef5a0d9 57#include "output.h"
cbdd87d4 58
b3b9f3d0 59/* Return true when DECL can be referenced from current unit.
c44c2088
JH
60 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
61 We can get declarations that are not possible to reference for various
62 reasons:
1389294c 63
1389294c
JH
64 1) When analyzing C++ virtual tables.
65 C++ virtual tables do have known constructors even
66 when they are keyed to other compilation unit.
67 Those tables can contain pointers to methods and vars
68 in other units. Those methods have both STATIC and EXTERNAL
69 set.
70 2) In WHOPR mode devirtualization might lead to reference
71 to method that was partitioned elsehwere.
72 In this case we have static VAR_DECL or FUNCTION_DECL
73 that has no corresponding callgraph/varpool node
b3b9f3d0
JH
74 declaring the body.
75 3) COMDAT functions referred by external vtables that
3e89949e 76 we devirtualize only during final compilation stage.
b3b9f3d0
JH
77 At this time we already decided that we will not output
78 the function body and thus we can't reference the symbol
79 directly. */
80
1389294c 81static bool
c44c2088 82can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
1389294c 83{
2c8326a5 84 varpool_node *vnode;
1389294c 85 struct cgraph_node *node;
5e20cdc9 86 symtab_node *snode;
c44c2088 87
00de328a 88 if (DECL_ABSTRACT_P (decl))
1632a686
JH
89 return false;
90
91 /* We are concerned only about static/external vars and functions. */
92 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
93 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
94 return true;
95
96 /* Static objects can be referred only if they was not optimized out yet. */
97 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
98 {
3aaf0529
JH
99 /* Before we start optimizing unreachable code we can be sure all
100 static objects are defined. */
3dafb85c 101 if (symtab->function_flags_ready)
3aaf0529 102 return true;
d52f5295 103 snode = symtab_node::get (decl);
3aaf0529 104 if (!snode || !snode->definition)
1632a686 105 return false;
7de90a6c 106 node = dyn_cast <cgraph_node *> (snode);
1632a686
JH
107 return !node || !node->global.inlined_to;
108 }
109
6da8be89 110 /* We will later output the initializer, so we can refer to it.
c44c2088 111 So we are concerned only when DECL comes from initializer of
3aaf0529 112 external var or var that has been optimized out. */
c44c2088
JH
113 if (!from_decl
114 || TREE_CODE (from_decl) != VAR_DECL
3aaf0529 115 || (!DECL_EXTERNAL (from_decl)
9041d2e6 116 && (vnode = varpool_node::get (from_decl)) != NULL
3aaf0529 117 && vnode->definition)
6da8be89 118 || (flag_ltrans
9041d2e6 119 && (vnode = varpool_node::get (from_decl)) != NULL
6adda80b 120 && vnode->in_other_partition))
c44c2088 121 return true;
c44c2088
JH
122 /* We are folding reference from external vtable. The vtable may reffer
123 to a symbol keyed to other compilation unit. The other compilation
124 unit may be in separate DSO and the symbol may be hidden. */
125 if (DECL_VISIBILITY_SPECIFIED (decl)
126 && DECL_EXTERNAL (decl)
a33a931b 127 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
d52f5295 128 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
c44c2088 129 return false;
b3b9f3d0
JH
130 /* When function is public, we always can introduce new reference.
131 Exception are the COMDAT functions where introducing a direct
132 reference imply need to include function body in the curren tunit. */
133 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
134 return true;
3aaf0529
JH
135 /* We have COMDAT. We are going to check if we still have definition
136 or if the definition is going to be output in other partition.
137 Bypass this when gimplifying; all needed functions will be produced.
c44c2088
JH
138
139 As observed in PR20991 for already optimized out comdat virtual functions
073a8998 140 it may be tempting to not necessarily give up because the copy will be
c44c2088
JH
141 output elsewhere when corresponding vtable is output.
142 This is however not possible - ABI specify that COMDATs are output in
143 units where they are used and when the other unit was compiled with LTO
144 it is possible that vtable was kept public while the function itself
145 was privatized. */
3dafb85c 146 if (!symtab->function_flags_ready)
b3b9f3d0 147 return true;
c44c2088 148
d52f5295 149 snode = symtab_node::get (decl);
3aaf0529
JH
150 if (!snode
151 || ((!snode->definition || DECL_EXTERNAL (decl))
152 && (!snode->in_other_partition
153 || (!snode->forced_by_abi && !snode->force_output))))
154 return false;
155 node = dyn_cast <cgraph_node *> (snode);
156 return !node || !node->global.inlined_to;
1389294c
JH
157}
158
0038d4e0 159/* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
c44c2088
JH
160 acceptable form for is_gimple_min_invariant.
161 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
17f39a39
JH
162
163tree
c44c2088 164canonicalize_constructor_val (tree cval, tree from_decl)
17f39a39 165{
50619002
EB
166 tree orig_cval = cval;
167 STRIP_NOPS (cval);
315f5f1b
RG
168 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
169 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
17f39a39 170 {
315f5f1b
RG
171 tree ptr = TREE_OPERAND (cval, 0);
172 if (is_gimple_min_invariant (ptr))
173 cval = build1_loc (EXPR_LOCATION (cval),
174 ADDR_EXPR, TREE_TYPE (ptr),
175 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
176 ptr,
177 fold_convert (ptr_type_node,
178 TREE_OPERAND (cval, 1))));
17f39a39
JH
179 }
180 if (TREE_CODE (cval) == ADDR_EXPR)
181 {
5a27a197
RG
182 tree base = NULL_TREE;
183 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
ca5f4331
MM
184 {
185 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
186 if (base)
187 TREE_OPERAND (cval, 0) = base;
188 }
5a27a197
RG
189 else
190 base = get_base_address (TREE_OPERAND (cval, 0));
7501ca28
RG
191 if (!base)
192 return NULL_TREE;
b3b9f3d0 193
7501ca28
RG
194 if ((TREE_CODE (base) == VAR_DECL
195 || TREE_CODE (base) == FUNCTION_DECL)
c44c2088 196 && !can_refer_decl_in_current_unit_p (base, from_decl))
1389294c 197 return NULL_TREE;
7501ca28 198 if (TREE_CODE (base) == VAR_DECL)
46eb666a 199 TREE_ADDRESSABLE (base) = 1;
7501ca28
RG
200 else if (TREE_CODE (base) == FUNCTION_DECL)
201 {
202 /* Make sure we create a cgraph node for functions we'll reference.
203 They can be non-existent if the reference comes from an entry
204 of an external vtable for example. */
d52f5295 205 cgraph_node::get_create (base);
7501ca28 206 }
0038d4e0 207 /* Fixup types in global initializers. */
73aef89e
RG
208 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
209 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
50619002
EB
210
211 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
212 cval = fold_convert (TREE_TYPE (orig_cval), cval);
213 return cval;
17f39a39 214 }
846abd0d
RB
215 if (TREE_OVERFLOW_P (cval))
216 return drop_tree_overflow (cval);
50619002 217 return orig_cval;
17f39a39 218}
cbdd87d4
RG
219
220/* If SYM is a constant variable with known value, return the value.
221 NULL_TREE is returned otherwise. */
222
223tree
224get_symbol_constant_value (tree sym)
225{
6a6dac52
JH
226 tree val = ctor_for_folding (sym);
227 if (val != error_mark_node)
cbdd87d4 228 {
cbdd87d4
RG
229 if (val)
230 {
9d60be38 231 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 232 if (val && is_gimple_min_invariant (val))
17f39a39 233 return val;
1389294c
JH
234 else
235 return NULL_TREE;
cbdd87d4
RG
236 }
237 /* Variables declared 'const' without an initializer
238 have zero as the initializer if they may not be
239 overridden at link or run time. */
240 if (!val
cbdd87d4
RG
241 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
242 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
e8160c9a 243 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
244 }
245
246 return NULL_TREE;
247}
248
249
cbdd87d4
RG
250
251/* Subroutine of fold_stmt. We perform several simplifications of the
252 memory reference tree EXPR and make sure to re-gimplify them properly
253 after propagation of constant addresses. IS_LHS is true if the
254 reference is supposed to be an lvalue. */
255
256static tree
257maybe_fold_reference (tree expr, bool is_lhs)
258{
17f39a39 259 tree result;
cbdd87d4 260
f0eddb90
RG
261 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
262 || TREE_CODE (expr) == REALPART_EXPR
263 || TREE_CODE (expr) == IMAGPART_EXPR)
264 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
265 return fold_unary_loc (EXPR_LOCATION (expr),
266 TREE_CODE (expr),
267 TREE_TYPE (expr),
268 TREE_OPERAND (expr, 0));
269 else if (TREE_CODE (expr) == BIT_FIELD_REF
270 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
271 return fold_ternary_loc (EXPR_LOCATION (expr),
272 TREE_CODE (expr),
273 TREE_TYPE (expr),
274 TREE_OPERAND (expr, 0),
275 TREE_OPERAND (expr, 1),
276 TREE_OPERAND (expr, 2));
277
f0eddb90
RG
278 if (!is_lhs
279 && (result = fold_const_aggregate_ref (expr))
280 && is_gimple_min_invariant (result))
281 return result;
cbdd87d4 282
cbdd87d4
RG
283 return NULL_TREE;
284}
285
286
287/* Attempt to fold an assignment statement pointed-to by SI. Returns a
288 replacement rhs for the statement or NULL_TREE if no simplification
289 could be made. It is assumed that the operands have been previously
290 folded. */
291
292static tree
293fold_gimple_assign (gimple_stmt_iterator *si)
294{
295 gimple stmt = gsi_stmt (*si);
296 enum tree_code subcode = gimple_assign_rhs_code (stmt);
297 location_t loc = gimple_location (stmt);
298
299 tree result = NULL_TREE;
300
301 switch (get_gimple_rhs_class (subcode))
302 {
303 case GIMPLE_SINGLE_RHS:
304 {
305 tree rhs = gimple_assign_rhs1 (stmt);
306
4e71066d 307 if (REFERENCE_CLASS_P (rhs))
cbdd87d4
RG
308 return maybe_fold_reference (rhs, false);
309
bdf37f7a
JH
310 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
311 {
312 tree val = OBJ_TYPE_REF_EXPR (rhs);
313 if (is_gimple_min_invariant (val))
314 return val;
f8a39967 315 else if (flag_devirtualize && virtual_method_call_p (rhs))
bdf37f7a
JH
316 {
317 bool final;
318 vec <cgraph_node *>targets
f8a39967 319 = possible_polymorphic_call_targets (rhs, stmt, &final);
2b5f0895 320 if (final && targets.length () <= 1 && dbg_cnt (devirt))
bdf37f7a 321 {
2b5f0895
XDL
322 if (dump_enabled_p ())
323 {
807b7d62 324 location_t loc = gimple_location_safe (stmt);
2b5f0895
XDL
325 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
326 "resolving virtual function address "
327 "reference to function %s\n",
328 targets.length () == 1
329 ? targets[0]->name ()
3ef276e4 330 : "NULL");
2b5f0895 331 }
3ef276e4
RB
332 if (targets.length () == 1)
333 {
334 val = fold_convert (TREE_TYPE (val),
335 build_fold_addr_expr_loc
336 (loc, targets[0]->decl));
337 STRIP_USELESS_TYPE_CONVERSION (val);
338 }
339 else
340 /* We can not use __builtin_unreachable here because it
341 can not have address taken. */
342 val = build_int_cst (TREE_TYPE (val), 0);
bdf37f7a
JH
343 return val;
344 }
345 }
346
347 }
cbdd87d4
RG
348 else if (TREE_CODE (rhs) == ADDR_EXPR)
349 {
70f34814
RG
350 tree ref = TREE_OPERAND (rhs, 0);
351 tree tem = maybe_fold_reference (ref, true);
352 if (tem
353 && TREE_CODE (tem) == MEM_REF
354 && integer_zerop (TREE_OPERAND (tem, 1)))
355 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
356 else if (tem)
cbdd87d4
RG
357 result = fold_convert (TREE_TYPE (rhs),
358 build_fold_addr_expr_loc (loc, tem));
70f34814
RG
359 else if (TREE_CODE (ref) == MEM_REF
360 && integer_zerop (TREE_OPERAND (ref, 1)))
361 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
cbdd87d4
RG
362 }
363
364 else if (TREE_CODE (rhs) == CONSTRUCTOR
365 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
366 && (CONSTRUCTOR_NELTS (rhs)
367 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
368 {
369 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
370 unsigned i;
371 tree val;
372
373 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
374 if (TREE_CODE (val) != INTEGER_CST
375 && TREE_CODE (val) != REAL_CST
376 && TREE_CODE (val) != FIXED_CST)
377 return NULL_TREE;
378
379 return build_vector_from_ctor (TREE_TYPE (rhs),
380 CONSTRUCTOR_ELTS (rhs));
381 }
382
383 else if (DECL_P (rhs))
9d60be38 384 return get_symbol_constant_value (rhs);
cbdd87d4
RG
385
386 /* If we couldn't fold the RHS, hand over to the generic
387 fold routines. */
388 if (result == NULL_TREE)
389 result = fold (rhs);
390
391 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
392 that may have been added by fold, and "useless" type
393 conversions that might now be apparent due to propagation. */
394 STRIP_USELESS_TYPE_CONVERSION (result);
395
396 if (result != rhs && valid_gimple_rhs_p (result))
397 return result;
398
399 return NULL_TREE;
400 }
401 break;
402
403 case GIMPLE_UNARY_RHS:
404 {
405 tree rhs = gimple_assign_rhs1 (stmt);
406
407 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
408 if (result)
409 {
410 /* If the operation was a conversion do _not_ mark a
411 resulting constant with TREE_OVERFLOW if the original
412 constant was not. These conversions have implementation
413 defined behavior and retaining the TREE_OVERFLOW flag
414 here would confuse later passes such as VRP. */
415 if (CONVERT_EXPR_CODE_P (subcode)
416 && TREE_CODE (result) == INTEGER_CST
417 && TREE_CODE (rhs) == INTEGER_CST)
418 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
419
420 STRIP_USELESS_TYPE_CONVERSION (result);
421 if (valid_gimple_rhs_p (result))
422 return result;
423 }
cbdd87d4
RG
424 }
425 break;
426
427 case GIMPLE_BINARY_RHS:
9b80d091
KT
428 /* Try to canonicalize for boolean-typed X the comparisons
429 X == 0, X == 1, X != 0, and X != 1. */
315f5f1b
RG
430 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
431 || gimple_assign_rhs_code (stmt) == NE_EXPR)
9b80d091
KT
432 {
433 tree lhs = gimple_assign_lhs (stmt);
434 tree op1 = gimple_assign_rhs1 (stmt);
435 tree op2 = gimple_assign_rhs2 (stmt);
436 tree type = TREE_TYPE (op1);
437
438 /* Check whether the comparison operands are of the same boolean
439 type as the result type is.
440 Check that second operand is an integer-constant with value
441 one or zero. */
442 if (TREE_CODE (op2) == INTEGER_CST
443 && (integer_zerop (op2) || integer_onep (op2))
444 && useless_type_conversion_p (TREE_TYPE (lhs), type))
445 {
446 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
447 bool is_logical_not = false;
448
449 /* X == 0 and X != 1 is a logical-not.of X
450 X == 1 and X != 0 is X */
451 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
452 || (cmp_code == NE_EXPR && integer_onep (op2)))
453 is_logical_not = true;
454
455 if (is_logical_not == false)
456 result = op1;
457 /* Only for one-bit precision typed X the transformation
458 !X -> ~X is valied. */
459 else if (TYPE_PRECISION (type) == 1)
460 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
461 type, op1);
462 /* Otherwise we use !X -> X ^ 1. */
463 else
464 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
465 type, op1, build_int_cst (type, 1));
466
467 }
468 }
cbdd87d4
RG
469
470 if (!result)
471 result = fold_binary_loc (loc, subcode,
5fbcc0ed
RG
472 TREE_TYPE (gimple_assign_lhs (stmt)),
473 gimple_assign_rhs1 (stmt),
474 gimple_assign_rhs2 (stmt));
cbdd87d4
RG
475
476 if (result)
477 {
478 STRIP_USELESS_TYPE_CONVERSION (result);
479 if (valid_gimple_rhs_p (result))
480 return result;
cbdd87d4
RG
481 }
482 break;
483
0354c0c7 484 case GIMPLE_TERNARY_RHS:
4e71066d
RG
485 /* Try to fold a conditional expression. */
486 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
487 {
488 tree op0 = gimple_assign_rhs1 (stmt);
489 tree tem;
490 bool set = false;
491 location_t cond_loc = gimple_location (stmt);
492
493 if (COMPARISON_CLASS_P (op0))
494 {
495 fold_defer_overflow_warnings ();
496 tem = fold_binary_loc (cond_loc,
497 TREE_CODE (op0), TREE_TYPE (op0),
498 TREE_OPERAND (op0, 0),
499 TREE_OPERAND (op0, 1));
500 /* This is actually a conditional expression, not a GIMPLE
501 conditional statement, however, the valid_gimple_rhs_p
502 test still applies. */
503 set = (tem && is_gimple_condexpr (tem)
504 && valid_gimple_rhs_p (tem));
505 fold_undefer_overflow_warnings (set, stmt, 0);
506 }
507 else if (is_gimple_min_invariant (op0))
508 {
509 tem = op0;
510 set = true;
511 }
512 else
513 return NULL_TREE;
514
515 if (set)
516 result = fold_build3_loc (cond_loc, COND_EXPR,
517 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
518 gimple_assign_rhs2 (stmt),
519 gimple_assign_rhs3 (stmt));
520 }
521
522 if (!result)
523 result = fold_ternary_loc (loc, subcode,
524 TREE_TYPE (gimple_assign_lhs (stmt)),
525 gimple_assign_rhs1 (stmt),
526 gimple_assign_rhs2 (stmt),
527 gimple_assign_rhs3 (stmt));
0354c0c7
BS
528
529 if (result)
530 {
531 STRIP_USELESS_TYPE_CONVERSION (result);
532 if (valid_gimple_rhs_p (result))
533 return result;
0354c0c7
BS
534 }
535 break;
536
cbdd87d4
RG
537 case GIMPLE_INVALID_RHS:
538 gcc_unreachable ();
539 }
540
541 return NULL_TREE;
542}
543
544/* Attempt to fold a conditional statement. Return true if any changes were
545 made. We only attempt to fold the condition expression, and do not perform
546 any transformation that would require alteration of the cfg. It is
547 assumed that the operands have been previously folded. */
548
549static bool
550fold_gimple_cond (gimple stmt)
551{
552 tree result = fold_binary_loc (gimple_location (stmt),
553 gimple_cond_code (stmt),
554 boolean_type_node,
555 gimple_cond_lhs (stmt),
556 gimple_cond_rhs (stmt));
557
558 if (result)
559 {
560 STRIP_USELESS_TYPE_CONVERSION (result);
561 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
562 {
563 gimple_cond_set_condition_from_tree (stmt, result);
564 return true;
565 }
566 }
567
568 return false;
569}
570
fef5a0d9
RB
571
572/* Replace a statement at *SI_P with a sequence of statements in STMTS,
573 adjusting the replacement stmts location and virtual operands.
574 If the statement has a lhs the last stmt in the sequence is expected
575 to assign to that lhs. */
576
577static void
578gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
579{
580 gimple stmt = gsi_stmt (*si_p);
581
582 if (gimple_has_location (stmt))
583 annotate_all_with_location (stmts, gimple_location (stmt));
584
585 /* First iterate over the replacement statements backward, assigning
586 virtual operands to their defining statements. */
587 gimple laststore = NULL;
588 for (gimple_stmt_iterator i = gsi_last (stmts);
589 !gsi_end_p (i); gsi_prev (&i))
590 {
591 gimple new_stmt = gsi_stmt (i);
592 if ((gimple_assign_single_p (new_stmt)
593 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
594 || (is_gimple_call (new_stmt)
595 && (gimple_call_flags (new_stmt)
596 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
597 {
598 tree vdef;
599 if (!laststore)
600 vdef = gimple_vdef (stmt);
601 else
602 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
603 gimple_set_vdef (new_stmt, vdef);
604 if (vdef && TREE_CODE (vdef) == SSA_NAME)
605 SSA_NAME_DEF_STMT (vdef) = new_stmt;
606 laststore = new_stmt;
607 }
608 }
609
610 /* Second iterate over the statements forward, assigning virtual
611 operands to their uses. */
612 tree reaching_vuse = gimple_vuse (stmt);
613 for (gimple_stmt_iterator i = gsi_start (stmts);
614 !gsi_end_p (i); gsi_next (&i))
615 {
616 gimple new_stmt = gsi_stmt (i);
617 /* If the new statement possibly has a VUSE, update it with exact SSA
618 name we know will reach this one. */
619 if (gimple_has_mem_ops (new_stmt))
620 gimple_set_vuse (new_stmt, reaching_vuse);
621 gimple_set_modified (new_stmt, true);
622 if (gimple_vdef (new_stmt))
623 reaching_vuse = gimple_vdef (new_stmt);
624 }
625
626 /* If the new sequence does not do a store release the virtual
627 definition of the original statement. */
628 if (reaching_vuse
629 && reaching_vuse == gimple_vuse (stmt))
630 {
631 tree vdef = gimple_vdef (stmt);
632 if (vdef
633 && TREE_CODE (vdef) == SSA_NAME)
634 {
635 unlink_stmt_vdef (stmt);
636 release_ssa_name (vdef);
637 }
638 }
639
640 /* Finally replace the original statement with the sequence. */
641 gsi_replace_with_seq (si_p, stmts, false);
642}
643
cbdd87d4
RG
644/* Convert EXPR into a GIMPLE value suitable for substitution on the
645 RHS of an assignment. Insert the necessary statements before
646 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
647 is replaced. If the call is expected to produces a result, then it
648 is replaced by an assignment of the new RHS to the result variable.
649 If the result is to be ignored, then the call is replaced by a
fe2ef088
MM
650 GIMPLE_NOP. A proper VDEF chain is retained by making the first
651 VUSE and the last VDEF of the whole sequence be the same as the replaced
652 statement and using new SSA names for stores in between. */
cbdd87d4
RG
653
654void
655gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
656{
657 tree lhs;
cbdd87d4
RG
658 gimple stmt, new_stmt;
659 gimple_stmt_iterator i;
355a7673 660 gimple_seq stmts = NULL;
cbdd87d4
RG
661
662 stmt = gsi_stmt (*si_p);
663
664 gcc_assert (is_gimple_call (stmt));
665
45852dcc 666 push_gimplify_context (gimple_in_ssa_p (cfun));
cbdd87d4 667
e256dfce 668 lhs = gimple_call_lhs (stmt);
cbdd87d4 669 if (lhs == NULL_TREE)
6e572326
RG
670 {
671 gimplify_and_add (expr, &stmts);
672 /* We can end up with folding a memcpy of an empty class assignment
673 which gets optimized away by C++ gimplification. */
674 if (gimple_seq_empty_p (stmts))
675 {
9fdc58de 676 pop_gimplify_context (NULL);
6e572326
RG
677 if (gimple_in_ssa_p (cfun))
678 {
679 unlink_stmt_vdef (stmt);
680 release_defs (stmt);
681 }
77d19c72 682 gsi_replace (si_p, gimple_build_nop (), true);
6e572326
RG
683 return;
684 }
685 }
cbdd87d4 686 else
e256dfce
RG
687 {
688 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
689 new_stmt = gimple_build_assign (lhs, tmp);
690 i = gsi_last (stmts);
691 gsi_insert_after_without_update (&i, new_stmt,
692 GSI_CONTINUE_LINKING);
693 }
cbdd87d4
RG
694
695 pop_gimplify_context (NULL);
696
fef5a0d9
RB
697 gsi_replace_with_seq_vops (si_p, stmts);
698}
cbdd87d4 699
fef5a0d9
RB
700
701/* Replace the call at *GSI with the gimple value VAL. */
702
703static void
704replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
705{
706 gimple stmt = gsi_stmt (*gsi);
707 tree lhs = gimple_call_lhs (stmt);
708 gimple repl;
709 if (lhs)
e256dfce 710 {
fef5a0d9
RB
711 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
712 val = fold_convert (TREE_TYPE (lhs), val);
713 repl = gimple_build_assign (lhs, val);
714 }
715 else
716 repl = gimple_build_nop ();
717 tree vdef = gimple_vdef (stmt);
718 if (vdef && TREE_CODE (vdef) == SSA_NAME)
719 {
720 unlink_stmt_vdef (stmt);
721 release_ssa_name (vdef);
722 }
723 gsi_replace (gsi, repl, true);
724}
725
726/* Replace the call at *GSI with the new call REPL and fold that
727 again. */
728
729static void
730replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
731{
732 gimple stmt = gsi_stmt (*gsi);
733 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
734 gimple_set_location (repl, gimple_location (stmt));
735 if (gimple_vdef (stmt)
736 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
737 {
738 gimple_set_vdef (repl, gimple_vdef (stmt));
739 gimple_set_vuse (repl, gimple_vuse (stmt));
740 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
741 }
742 gsi_replace (gsi, repl, true);
743 fold_stmt (gsi);
744}
745
746/* Return true if VAR is a VAR_DECL or a component thereof. */
747
748static bool
749var_decl_component_p (tree var)
750{
751 tree inner = var;
752 while (handled_component_p (inner))
753 inner = TREE_OPERAND (inner, 0);
754 return SSA_VAR_P (inner);
755}
756
757/* Fold function call to builtin mem{{,p}cpy,move}. Return
758 NULL_TREE if no simplification can be made.
759 If ENDP is 0, return DEST (like memcpy).
760 If ENDP is 1, return DEST+LEN (like mempcpy).
761 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
762 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
763 (memmove). */
764
765static bool
766gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
767 tree dest, tree src, int endp)
768{
769 gimple stmt = gsi_stmt (*gsi);
770 tree lhs = gimple_call_lhs (stmt);
771 tree len = gimple_call_arg (stmt, 2);
772 tree destvar, srcvar;
773 location_t loc = gimple_location (stmt);
774
775 /* If the LEN parameter is zero, return DEST. */
776 if (integer_zerop (len))
777 {
778 gimple repl;
779 if (gimple_call_lhs (stmt))
780 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
781 else
782 repl = gimple_build_nop ();
783 tree vdef = gimple_vdef (stmt);
784 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 785 {
fef5a0d9
RB
786 unlink_stmt_vdef (stmt);
787 release_ssa_name (vdef);
788 }
789 gsi_replace (gsi, repl, true);
790 return true;
791 }
792
793 /* If SRC and DEST are the same (and not volatile), return
794 DEST{,+LEN,+LEN-1}. */
795 if (operand_equal_p (src, dest, 0))
796 {
797 unlink_stmt_vdef (stmt);
798 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
799 release_ssa_name (gimple_vdef (stmt));
800 if (!lhs)
801 {
802 gsi_replace (gsi, gimple_build_nop (), true);
803 return true;
804 }
805 goto done;
806 }
807 else
808 {
809 tree srctype, desttype;
810 unsigned int src_align, dest_align;
811 tree off0;
812
813 /* Build accesses at offset zero with a ref-all character type. */
814 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
815 ptr_mode, true), 0);
816
817 /* If we can perform the copy efficiently with first doing all loads
818 and then all stores inline it that way. Currently efficiently
819 means that we can load all the memory into a single integer
820 register which is what MOVE_MAX gives us. */
821 src_align = get_pointer_alignment (src);
822 dest_align = get_pointer_alignment (dest);
823 if (tree_fits_uhwi_p (len)
824 && compare_tree_int (len, MOVE_MAX) <= 0
825 /* ??? Don't transform copies from strings with known length this
826 confuses the tree-ssa-strlen.c. This doesn't handle
827 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
828 reason. */
829 && !c_strlen (src, 2))
830 {
831 unsigned ilen = tree_to_uhwi (len);
832 if (exact_log2 (ilen) != -1)
833 {
834 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
835 if (type
836 && TYPE_MODE (type) != BLKmode
837 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
838 == ilen * 8)
839 /* If the destination pointer is not aligned we must be able
840 to emit an unaligned store. */
841 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
842 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
843 {
844 tree srctype = type;
845 tree desttype = type;
846 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
847 srctype = build_aligned_type (type, src_align);
848 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
849 tree tem = fold_const_aggregate_ref (srcmem);
850 if (tem)
851 srcmem = tem;
852 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
853 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
854 src_align))
855 srcmem = NULL_TREE;
856 if (srcmem)
857 {
858 gimple new_stmt;
859 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
860 {
861 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
862 if (gimple_in_ssa_p (cfun))
863 srcmem = make_ssa_name (TREE_TYPE (srcmem),
864 new_stmt);
865 else
866 srcmem = create_tmp_reg (TREE_TYPE (srcmem),
867 NULL);
868 gimple_assign_set_lhs (new_stmt, srcmem);
869 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
870 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
871 }
872 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
873 desttype = build_aligned_type (type, dest_align);
874 new_stmt
875 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
876 dest, off0),
877 srcmem);
878 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
879 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
880 if (gimple_vdef (new_stmt)
881 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
882 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
883 if (!lhs)
884 {
885 gsi_replace (gsi, new_stmt, true);
886 return true;
887 }
888 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
889 goto done;
890 }
891 }
892 }
893 }
894
895 if (endp == 3)
896 {
897 /* Both DEST and SRC must be pointer types.
898 ??? This is what old code did. Is the testing for pointer types
899 really mandatory?
900
901 If either SRC is readonly or length is 1, we can use memcpy. */
902 if (!dest_align || !src_align)
903 return false;
904 if (readonly_data_expr (src)
905 || (tree_fits_uhwi_p (len)
906 && (MIN (src_align, dest_align) / BITS_PER_UNIT
907 >= tree_to_uhwi (len))))
908 {
909 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
910 if (!fn)
911 return false;
912 gimple_call_set_fndecl (stmt, fn);
913 gimple_call_set_arg (stmt, 0, dest);
914 gimple_call_set_arg (stmt, 1, src);
915 fold_stmt (gsi);
916 return true;
917 }
918
919 /* If *src and *dest can't overlap, optimize into memcpy as well. */
920 if (TREE_CODE (src) == ADDR_EXPR
921 && TREE_CODE (dest) == ADDR_EXPR)
922 {
923 tree src_base, dest_base, fn;
924 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
925 HOST_WIDE_INT size = -1;
926 HOST_WIDE_INT maxsize = -1;
927
928 srcvar = TREE_OPERAND (src, 0);
929 src_base = get_ref_base_and_extent (srcvar, &src_offset,
930 &size, &maxsize);
931 destvar = TREE_OPERAND (dest, 0);
932 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
933 &size, &maxsize);
934 if (tree_fits_uhwi_p (len))
935 maxsize = tree_to_uhwi (len);
936 else
937 maxsize = -1;
938 src_offset /= BITS_PER_UNIT;
939 dest_offset /= BITS_PER_UNIT;
940 if (SSA_VAR_P (src_base)
941 && SSA_VAR_P (dest_base))
942 {
943 if (operand_equal_p (src_base, dest_base, 0)
944 && ranges_overlap_p (src_offset, maxsize,
945 dest_offset, maxsize))
946 return false;
947 }
948 else if (TREE_CODE (src_base) == MEM_REF
949 && TREE_CODE (dest_base) == MEM_REF)
950 {
951 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
952 TREE_OPERAND (dest_base, 0), 0))
953 return false;
954 offset_int off = mem_ref_offset (src_base) + src_offset;
955 if (!wi::fits_shwi_p (off))
956 return false;
957 src_offset = off.to_shwi ();
958
959 off = mem_ref_offset (dest_base) + dest_offset;
960 if (!wi::fits_shwi_p (off))
961 return false;
962 dest_offset = off.to_shwi ();
963 if (ranges_overlap_p (src_offset, maxsize,
964 dest_offset, maxsize))
965 return false;
966 }
967 else
968 return false;
969
970 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
971 if (!fn)
972 return false;
973 gimple_call_set_fndecl (stmt, fn);
974 gimple_call_set_arg (stmt, 0, dest);
975 gimple_call_set_arg (stmt, 1, src);
976 fold_stmt (gsi);
977 return true;
978 }
979
980 /* If the destination and source do not alias optimize into
981 memcpy as well. */
982 if ((is_gimple_min_invariant (dest)
983 || TREE_CODE (dest) == SSA_NAME)
984 && (is_gimple_min_invariant (src)
985 || TREE_CODE (src) == SSA_NAME))
986 {
987 ao_ref destr, srcr;
988 ao_ref_init_from_ptr_and_size (&destr, dest, len);
989 ao_ref_init_from_ptr_and_size (&srcr, src, len);
990 if (!refs_may_alias_p_1 (&destr, &srcr, false))
991 {
992 tree fn;
993 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
994 if (!fn)
995 return false;
996 gimple_call_set_fndecl (stmt, fn);
997 gimple_call_set_arg (stmt, 0, dest);
998 gimple_call_set_arg (stmt, 1, src);
999 fold_stmt (gsi);
1000 return true;
1001 }
1002 }
1003
1004 return false;
1005 }
1006
1007 if (!tree_fits_shwi_p (len))
1008 return false;
1009 /* FIXME:
1010 This logic lose for arguments like (type *)malloc (sizeof (type)),
1011 since we strip the casts of up to VOID return value from malloc.
1012 Perhaps we ought to inherit type from non-VOID argument here? */
1013 STRIP_NOPS (src);
1014 STRIP_NOPS (dest);
1015 if (!POINTER_TYPE_P (TREE_TYPE (src))
1016 || !POINTER_TYPE_P (TREE_TYPE (dest)))
1017 return false;
1018 /* In the following try to find a type that is most natural to be
1019 used for the memcpy source and destination and that allows
1020 the most optimization when memcpy is turned into a plain assignment
1021 using that type. In theory we could always use a char[len] type
1022 but that only gains us that the destination and source possibly
1023 no longer will have their address taken. */
1024 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
1025 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
1026 {
1027 tree tem = TREE_OPERAND (src, 0);
1028 STRIP_NOPS (tem);
1029 if (tem != TREE_OPERAND (src, 0))
1030 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
1031 }
1032 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
1033 {
1034 tree tem = TREE_OPERAND (dest, 0);
1035 STRIP_NOPS (tem);
1036 if (tem != TREE_OPERAND (dest, 0))
1037 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
1038 }
1039 srctype = TREE_TYPE (TREE_TYPE (src));
1040 if (TREE_CODE (srctype) == ARRAY_TYPE
1041 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1042 {
1043 srctype = TREE_TYPE (srctype);
1044 STRIP_NOPS (src);
1045 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
1046 }
1047 desttype = TREE_TYPE (TREE_TYPE (dest));
1048 if (TREE_CODE (desttype) == ARRAY_TYPE
1049 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1050 {
1051 desttype = TREE_TYPE (desttype);
1052 STRIP_NOPS (dest);
1053 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
1054 }
1055 if (TREE_ADDRESSABLE (srctype)
1056 || TREE_ADDRESSABLE (desttype))
1057 return false;
1058
1059 /* Make sure we are not copying using a floating-point mode or
1060 a type whose size possibly does not match its precision. */
1061 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1062 || TREE_CODE (desttype) == BOOLEAN_TYPE
1063 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1064 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1065 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1066 || TREE_CODE (srctype) == BOOLEAN_TYPE
1067 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1068 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1069 if (!srctype)
1070 srctype = desttype;
1071 if (!desttype)
1072 desttype = srctype;
1073 if (!srctype)
1074 return false;
1075
1076 src_align = get_pointer_alignment (src);
1077 dest_align = get_pointer_alignment (dest);
1078 if (dest_align < TYPE_ALIGN (desttype)
1079 || src_align < TYPE_ALIGN (srctype))
1080 return false;
1081
1082 destvar = dest;
1083 STRIP_NOPS (destvar);
1084 if (TREE_CODE (destvar) == ADDR_EXPR
1085 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1086 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1087 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1088 else
1089 destvar = NULL_TREE;
1090
1091 srcvar = src;
1092 STRIP_NOPS (srcvar);
1093 if (TREE_CODE (srcvar) == ADDR_EXPR
1094 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1095 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1096 {
1097 if (!destvar
1098 || src_align >= TYPE_ALIGN (desttype))
1099 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1100 srcvar, off0);
1101 else if (!STRICT_ALIGNMENT)
1102 {
1103 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1104 src_align);
1105 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1106 }
e256dfce 1107 else
fef5a0d9
RB
1108 srcvar = NULL_TREE;
1109 }
1110 else
1111 srcvar = NULL_TREE;
1112
1113 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1114 return false;
1115
1116 if (srcvar == NULL_TREE)
1117 {
1118 STRIP_NOPS (src);
1119 if (src_align >= TYPE_ALIGN (desttype))
1120 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1121 else
1122 {
1123 if (STRICT_ALIGNMENT)
1124 return false;
1125 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1126 src_align);
1127 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1128 }
1129 }
1130 else if (destvar == NULL_TREE)
1131 {
1132 STRIP_NOPS (dest);
1133 if (dest_align >= TYPE_ALIGN (srctype))
1134 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1135 else
1136 {
1137 if (STRICT_ALIGNMENT)
1138 return false;
1139 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1140 dest_align);
1141 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1142 }
1143 }
1144
1145 gimple new_stmt;
1146 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1147 {
1148 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1149 if (gimple_in_ssa_p (cfun))
1150 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1151 else
1152 srcvar = create_tmp_reg (TREE_TYPE (srcvar), NULL);
1153 gimple_assign_set_lhs (new_stmt, srcvar);
1154 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1155 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1156 }
1157 new_stmt = gimple_build_assign (destvar, srcvar);
1158 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1159 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1160 if (gimple_vdef (new_stmt)
1161 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1162 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1163 if (!lhs)
1164 {
1165 gsi_replace (gsi, new_stmt, true);
1166 return true;
1167 }
1168 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1169 }
1170
1171done:
1172 if (endp == 0 || endp == 3)
1173 len = NULL_TREE;
1174 else if (endp == 2)
1175 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1176 ssize_int (1));
1177 if (endp == 2 || endp == 1)
1178 dest = fold_build_pointer_plus_loc (loc, dest, len);
1179
1180 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1181 GSI_SAME_STMT);
1182 gimple repl = gimple_build_assign (lhs, dest);
1183 gsi_replace (gsi, repl, true);
1184 return true;
1185}
1186
1187/* Fold function call to builtin memset or bzero at *GSI setting the
1188 memory of size LEN to VAL. Return whether a simplification was made. */
1189
1190static bool
1191gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1192{
1193 gimple stmt = gsi_stmt (*gsi);
1194 tree etype;
1195 unsigned HOST_WIDE_INT length, cval;
1196
1197 /* If the LEN parameter is zero, return DEST. */
1198 if (integer_zerop (len))
1199 {
1200 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1201 return true;
1202 }
1203
1204 if (! tree_fits_uhwi_p (len))
1205 return false;
1206
1207 if (TREE_CODE (c) != INTEGER_CST)
1208 return false;
1209
1210 tree dest = gimple_call_arg (stmt, 0);
1211 tree var = dest;
1212 if (TREE_CODE (var) != ADDR_EXPR)
1213 return false;
1214
1215 var = TREE_OPERAND (var, 0);
1216 if (TREE_THIS_VOLATILE (var))
1217 return false;
1218
1219 etype = TREE_TYPE (var);
1220 if (TREE_CODE (etype) == ARRAY_TYPE)
1221 etype = TREE_TYPE (etype);
1222
1223 if (!INTEGRAL_TYPE_P (etype)
1224 && !POINTER_TYPE_P (etype))
1225 return NULL_TREE;
1226
1227 if (! var_decl_component_p (var))
1228 return NULL_TREE;
1229
1230 length = tree_to_uhwi (len);
1231 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1232 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1233 return NULL_TREE;
1234
1235 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1236 return NULL_TREE;
1237
1238 if (integer_zerop (c))
1239 cval = 0;
1240 else
1241 {
1242 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1243 return NULL_TREE;
1244
1245 cval = TREE_INT_CST_LOW (c);
1246 cval &= 0xff;
1247 cval |= cval << 8;
1248 cval |= cval << 16;
1249 cval |= (cval << 31) << 1;
1250 }
1251
1252 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1253 gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1254 gimple_set_vuse (store, gimple_vuse (stmt));
1255 tree vdef = gimple_vdef (stmt);
1256 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1257 {
1258 gimple_set_vdef (store, gimple_vdef (stmt));
1259 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1260 }
1261 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1262 if (gimple_call_lhs (stmt))
1263 {
1264 gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1265 gsi_replace (gsi, asgn, true);
1266 }
1267 else
1268 {
1269 gimple_stmt_iterator gsi2 = *gsi;
1270 gsi_prev (gsi);
1271 gsi_remove (&gsi2, true);
1272 }
1273
1274 return true;
1275}
1276
1277
1278/* Return the string length, maximum string length or maximum value of
1279 ARG in LENGTH.
1280 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1281 is not NULL and, for TYPE == 0, its value is not equal to the length
1282 we determine or if we are unable to determine the length or value,
1283 return false. VISITED is a bitmap of visited variables.
1284 TYPE is 0 if string length should be returned, 1 for maximum string
1285 length and 2 for maximum value ARG can have. */
1286
1287static bool
dcb7fae2 1288get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
fef5a0d9
RB
1289{
1290 tree var, val;
1291 gimple def_stmt;
1292
1293 if (TREE_CODE (arg) != SSA_NAME)
1294 {
1295 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1296 if (TREE_CODE (arg) == ADDR_EXPR
1297 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1298 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1299 {
1300 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1301 if (TREE_CODE (aop0) == INDIRECT_REF
1302 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1303 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1304 length, visited, type);
1305 }
1306
1307 if (type == 2)
1308 {
1309 val = arg;
1310 if (TREE_CODE (val) != INTEGER_CST
1311 || tree_int_cst_sgn (val) < 0)
1312 return false;
1313 }
1314 else
1315 val = c_strlen (arg, 1);
1316 if (!val)
1317 return false;
1318
1319 if (*length)
1320 {
1321 if (type > 0)
1322 {
1323 if (TREE_CODE (*length) != INTEGER_CST
1324 || TREE_CODE (val) != INTEGER_CST)
1325 return false;
1326
1327 if (tree_int_cst_lt (*length, val))
1328 *length = val;
1329 return true;
1330 }
1331 else if (simple_cst_equal (val, *length) != 1)
1332 return false;
1333 }
1334
1335 *length = val;
1336 return true;
1337 }
1338
1339 /* If ARG is registered for SSA update we cannot look at its defining
1340 statement. */
1341 if (name_registered_for_update_p (arg))
1342 return false;
1343
1344 /* If we were already here, break the infinite cycle. */
dcb7fae2
RB
1345 if (!*visited)
1346 *visited = BITMAP_ALLOC (NULL);
1347 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
fef5a0d9
RB
1348 return true;
1349
1350 var = arg;
1351 def_stmt = SSA_NAME_DEF_STMT (var);
1352
1353 switch (gimple_code (def_stmt))
1354 {
1355 case GIMPLE_ASSIGN:
1356 /* The RHS of the statement defining VAR must either have a
1357 constant length or come from another SSA_NAME with a constant
1358 length. */
1359 if (gimple_assign_single_p (def_stmt)
1360 || gimple_assign_unary_nop_p (def_stmt))
1361 {
1362 tree rhs = gimple_assign_rhs1 (def_stmt);
1363 return get_maxval_strlen (rhs, length, visited, type);
1364 }
1365 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1366 {
1367 tree op2 = gimple_assign_rhs2 (def_stmt);
1368 tree op3 = gimple_assign_rhs3 (def_stmt);
1369 return get_maxval_strlen (op2, length, visited, type)
1370 && get_maxval_strlen (op3, length, visited, type);
1371 }
1372 return false;
1373
1374 case GIMPLE_PHI:
1375 {
1376 /* All the arguments of the PHI node must have the same constant
1377 length. */
1378 unsigned i;
1379
1380 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1381 {
1382 tree arg = gimple_phi_arg (def_stmt, i)->def;
1383
1384 /* If this PHI has itself as an argument, we cannot
1385 determine the string length of this argument. However,
1386 if we can find a constant string length for the other
1387 PHI args then we can still be sure that this is a
1388 constant string length. So be optimistic and just
1389 continue with the next argument. */
1390 if (arg == gimple_phi_result (def_stmt))
1391 continue;
1392
1393 if (!get_maxval_strlen (arg, length, visited, type))
1394 return false;
1395 }
1396 }
1397 return true;
1398
1399 default:
1400 return false;
1401 }
1402}
1403
dcb7fae2
RB
1404tree
1405get_maxval_strlen (tree arg, int type)
1406{
1407 bitmap visited = NULL;
1408 tree len = NULL_TREE;
1409 if (!get_maxval_strlen (arg, &len, &visited, type))
1410 len = NULL_TREE;
1411 if (visited)
1412 BITMAP_FREE (visited);
1413
1414 return len;
1415}
1416
fef5a0d9
RB
1417
1418/* Fold function call to builtin strcpy with arguments DEST and SRC.
1419 If LEN is not NULL, it represents the length of the string to be
1420 copied. Return NULL_TREE if no simplification can be made. */
1421
1422static bool
1423gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 1424 tree dest, tree src)
fef5a0d9 1425{
dcb7fae2 1426 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1427 tree fn;
1428
1429 /* If SRC and DEST are the same (and not volatile), return DEST. */
1430 if (operand_equal_p (src, dest, 0))
1431 {
1432 replace_call_with_value (gsi, dest);
1433 return true;
1434 }
1435
1436 if (optimize_function_for_size_p (cfun))
1437 return false;
1438
1439 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1440 if (!fn)
1441 return false;
1442
1579e1f8 1443 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1444 if (!len)
dcb7fae2 1445 return false;
fef5a0d9
RB
1446
1447 len = fold_convert_loc (loc, size_type_node, len);
1448 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1449 len = force_gimple_operand_gsi (gsi, len, true,
1450 NULL_TREE, true, GSI_SAME_STMT);
1451 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1452 replace_call_with_call_and_fold (gsi, repl);
1453 return true;
1454}
1455
1456/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1457 If SLEN is not NULL, it represents the length of the source string.
1458 Return NULL_TREE if no simplification can be made. */
1459
1460static bool
dcb7fae2
RB
1461gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1462 tree dest, tree src, tree len)
fef5a0d9 1463{
dcb7fae2 1464 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1465 tree fn;
1466
1467 /* If the LEN parameter is zero, return DEST. */
1468 if (integer_zerop (len))
1469 {
1470 replace_call_with_value (gsi, dest);
1471 return true;
1472 }
1473
1474 /* We can't compare slen with len as constants below if len is not a
1475 constant. */
dcb7fae2 1476 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
1477 return false;
1478
fef5a0d9 1479 /* Now, we must be passed a constant src ptr parameter. */
1579e1f8 1480 tree slen = get_maxval_strlen (src, 0);
dcb7fae2 1481 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
1482 return false;
1483
1484 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1485
1486 /* We do not support simplification of this case, though we do
1487 support it when expanding trees into RTL. */
1488 /* FIXME: generate a call to __builtin_memset. */
1489 if (tree_int_cst_lt (slen, len))
1490 return false;
1491
1492 /* OK transform into builtin memcpy. */
1493 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1494 if (!fn)
1495 return false;
1496
1497 len = fold_convert_loc (loc, size_type_node, len);
1498 len = force_gimple_operand_gsi (gsi, len, true,
1499 NULL_TREE, true, GSI_SAME_STMT);
1500 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1501 replace_call_with_call_and_fold (gsi, repl);
1502 return true;
1503}
1504
1505/* Simplify a call to the strcat builtin. DST and SRC are the arguments
1506 to the call.
1507
1508 Return NULL_TREE if no simplification was possible, otherwise return the
1509 simplified form of the call as a tree.
1510
1511 The simplified form may be a constant or other expression which
1512 computes the same value, but in a more efficient manner (including
1513 calls to other builtin functions).
1514
1515 The call may contain arguments which need to be evaluated, but
1516 which are not useful to determine the result of the call. In
1517 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1518 COMPOUND_EXPR will be an argument which must be evaluated.
1519 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1520 COMPOUND_EXPR in the chain will contain the tree for the simplified
1521 form of the builtin function call. */
1522
1523static bool
dcb7fae2 1524gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
fef5a0d9
RB
1525{
1526 gimple stmt = gsi_stmt (*gsi);
dcb7fae2 1527 location_t loc = gimple_location (stmt);
fef5a0d9
RB
1528
1529 const char *p = c_getstr (src);
1530
1531 /* If the string length is zero, return the dst parameter. */
1532 if (p && *p == '\0')
1533 {
1534 replace_call_with_value (gsi, dst);
1535 return true;
1536 }
1537
1538 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1539 return false;
1540
1541 /* See if we can store by pieces into (dst + strlen(dst)). */
1542 tree newdst;
1543 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1544 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1545
1546 if (!strlen_fn || !memcpy_fn)
1547 return false;
1548
1549 /* If the length of the source string isn't computable don't
1550 split strcat into strlen and memcpy. */
dcb7fae2 1551 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1552 if (! len)
fef5a0d9
RB
1553 return false;
1554
1555 /* Create strlen (dst). */
1556 gimple_seq stmts = NULL, stmts2;
1557 gimple repl = gimple_build_call (strlen_fn, 1, dst);
1558 gimple_set_location (repl, loc);
1559 if (gimple_in_ssa_p (cfun))
1560 newdst = make_ssa_name (size_type_node, NULL);
1561 else
1562 newdst = create_tmp_reg (size_type_node, NULL);
1563 gimple_call_set_lhs (repl, newdst);
1564 gimple_seq_add_stmt_without_update (&stmts, repl);
1565
1566 /* Create (dst p+ strlen (dst)). */
1567 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1568 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1569 gimple_seq_add_seq_without_update (&stmts, stmts2);
1570
1571 len = fold_convert_loc (loc, size_type_node, len);
1572 len = size_binop_loc (loc, PLUS_EXPR, len,
1573 build_int_cst (size_type_node, 1));
1574 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1575 gimple_seq_add_seq_without_update (&stmts, stmts2);
1576
1577 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1578 gimple_seq_add_stmt_without_update (&stmts, repl);
1579 if (gimple_call_lhs (stmt))
1580 {
1581 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1582 gimple_seq_add_stmt_without_update (&stmts, repl);
1583 gsi_replace_with_seq_vops (gsi, stmts);
1584 /* gsi now points at the assignment to the lhs, get a
1585 stmt iterator to the memcpy call.
1586 ??? We can't use gsi_for_stmt as that doesn't work when the
1587 CFG isn't built yet. */
1588 gimple_stmt_iterator gsi2 = *gsi;
1589 gsi_prev (&gsi2);
1590 fold_stmt (&gsi2);
1591 }
1592 else
1593 {
1594 gsi_replace_with_seq_vops (gsi, stmts);
1595 fold_stmt (gsi);
1596 }
1597 return true;
1598}
1599
07f1cf56
RB
1600/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1601 are the arguments to the call. */
1602
1603static bool
1604gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1605{
1606 gimple stmt = gsi_stmt (*gsi);
1607 tree dest = gimple_call_arg (stmt, 0);
1608 tree src = gimple_call_arg (stmt, 1);
1609 tree size = gimple_call_arg (stmt, 2);
1610 tree fn;
1611 const char *p;
1612
1613
1614 p = c_getstr (src);
1615 /* If the SRC parameter is "", return DEST. */
1616 if (p && *p == '\0')
1617 {
1618 replace_call_with_value (gsi, dest);
1619 return true;
1620 }
1621
1622 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1623 return false;
1624
1625 /* If __builtin_strcat_chk is used, assume strcat is available. */
1626 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1627 if (!fn)
1628 return false;
1629
1630 gimple repl = gimple_build_call (fn, 2, dest, src);
1631 replace_call_with_call_and_fold (gsi, repl);
1632 return true;
1633}
1634
fef5a0d9
RB
1635/* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1636 to the call. IGNORE is true if the value returned
1637 by the builtin will be ignored. UNLOCKED is true is true if this
1638 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1639 the known length of the string. Return NULL_TREE if no simplification
1640 was possible. */
1641
1642static bool
1643gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
fef5a0d9 1644 tree arg0, tree arg1,
dcb7fae2 1645 bool unlocked)
fef5a0d9 1646{
dcb7fae2
RB
1647 gimple stmt = gsi_stmt (*gsi);
1648
fef5a0d9
RB
1649 /* If we're using an unlocked function, assume the other unlocked
1650 functions exist explicitly. */
1651 tree const fn_fputc = (unlocked
1652 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1653 : builtin_decl_implicit (BUILT_IN_FPUTC));
1654 tree const fn_fwrite = (unlocked
1655 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1656 : builtin_decl_implicit (BUILT_IN_FWRITE));
1657
1658 /* If the return value is used, don't do the transformation. */
dcb7fae2 1659 if (gimple_call_lhs (stmt))
fef5a0d9
RB
1660 return false;
1661
fef5a0d9
RB
1662 /* Get the length of the string passed to fputs. If the length
1663 can't be determined, punt. */
dcb7fae2 1664 tree len = get_maxval_strlen (arg0, 0);
fef5a0d9
RB
1665 if (!len
1666 || TREE_CODE (len) != INTEGER_CST)
1667 return false;
1668
1669 switch (compare_tree_int (len, 1))
1670 {
1671 case -1: /* length is 0, delete the call entirely . */
1672 replace_call_with_value (gsi, integer_zero_node);
1673 return true;
1674
1675 case 0: /* length is 1, call fputc. */
1676 {
1677 const char *p = c_getstr (arg0);
1678 if (p != NULL)
1679 {
1680 if (!fn_fputc)
1681 return false;
1682
1683 gimple repl = gimple_build_call (fn_fputc, 2,
1684 build_int_cst
1685 (integer_type_node, p[0]), arg1);
1686 replace_call_with_call_and_fold (gsi, repl);
1687 return true;
1688 }
1689 }
1690 /* FALLTHROUGH */
1691 case 1: /* length is greater than 1, call fwrite. */
1692 {
1693 /* If optimizing for size keep fputs. */
1694 if (optimize_function_for_size_p (cfun))
1695 return false;
1696 /* New argument list transforming fputs(string, stream) to
1697 fwrite(string, 1, len, stream). */
1698 if (!fn_fwrite)
1699 return false;
1700
1701 gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
1702 size_one_node, len, arg1);
1703 replace_call_with_call_and_fold (gsi, repl);
1704 return true;
1705 }
1706 default:
1707 gcc_unreachable ();
1708 }
1709 return false;
1710}
1711
1712/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1713 DEST, SRC, LEN, and SIZE are the arguments to the call.
1714 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1715 code of the builtin. If MAXLEN is not NULL, it is maximum length
1716 passed as third argument. */
1717
1718static bool
1719gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
fef5a0d9 1720 tree dest, tree src, tree len, tree size,
fef5a0d9
RB
1721 enum built_in_function fcode)
1722{
dcb7fae2
RB
1723 gimple stmt = gsi_stmt (*gsi);
1724 location_t loc = gimple_location (stmt);
1725 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
1726 tree fn;
1727
1728 /* If SRC and DEST are the same (and not volatile), return DEST
1729 (resp. DEST+LEN for __mempcpy_chk). */
1730 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1731 {
1732 if (fcode != BUILT_IN_MEMPCPY_CHK)
1733 {
1734 replace_call_with_value (gsi, dest);
1735 return true;
1736 }
1737 else
1738 {
1739 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
1740 temp = force_gimple_operand_gsi (gsi, temp,
1741 false, NULL_TREE, true,
1742 GSI_SAME_STMT);
1743 replace_call_with_value (gsi, temp);
1744 return true;
1745 }
1746 }
1747
1748 if (! tree_fits_uhwi_p (size))
1749 return false;
1750
dcb7fae2 1751 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9
RB
1752 if (! integer_all_onesp (size))
1753 {
1754 if (! tree_fits_uhwi_p (len))
1755 {
1756 /* If LEN is not constant, try MAXLEN too.
1757 For MAXLEN only allow optimizing into non-_ocs function
1758 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1759 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1760 {
1761 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1762 {
1763 /* (void) __mempcpy_chk () can be optimized into
1764 (void) __memcpy_chk (). */
1765 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1766 if (!fn)
1767 return false;
1768
1769 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1770 replace_call_with_call_and_fold (gsi, repl);
1771 return true;
1772 }
1773 return false;
1774 }
1775 }
1776 else
1777 maxlen = len;
1778
1779 if (tree_int_cst_lt (size, maxlen))
1780 return false;
1781 }
1782
1783 fn = NULL_TREE;
1784 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1785 mem{cpy,pcpy,move,set} is available. */
1786 switch (fcode)
1787 {
1788 case BUILT_IN_MEMCPY_CHK:
1789 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1790 break;
1791 case BUILT_IN_MEMPCPY_CHK:
1792 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1793 break;
1794 case BUILT_IN_MEMMOVE_CHK:
1795 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1796 break;
1797 case BUILT_IN_MEMSET_CHK:
1798 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1799 break;
1800 default:
1801 break;
1802 }
1803
1804 if (!fn)
1805 return false;
1806
1807 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1808 replace_call_with_call_and_fold (gsi, repl);
1809 return true;
1810}
1811
1812/* Fold a call to the __st[rp]cpy_chk builtin.
1813 DEST, SRC, and SIZE are the arguments to the call.
1814 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1815 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1816 strings passed as second argument. */
1817
1818static bool
1819gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
dcb7fae2 1820 tree dest,
fef5a0d9 1821 tree src, tree size,
fef5a0d9
RB
1822 enum built_in_function fcode)
1823{
dcb7fae2
RB
1824 gimple stmt = gsi_stmt (*gsi);
1825 location_t loc = gimple_location (stmt);
1826 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
1827 tree len, fn;
1828
1829 /* If SRC and DEST are the same (and not volatile), return DEST. */
1830 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1831 {
1832 replace_call_with_value (gsi, dest);
1833 return true;
1834 }
1835
1836 if (! tree_fits_uhwi_p (size))
1837 return false;
1838
dcb7fae2 1839 tree maxlen = get_maxval_strlen (src, 1);
fef5a0d9
RB
1840 if (! integer_all_onesp (size))
1841 {
1842 len = c_strlen (src, 1);
1843 if (! len || ! tree_fits_uhwi_p (len))
1844 {
1845 /* If LEN is not constant, try MAXLEN too.
1846 For MAXLEN only allow optimizing into non-_ocs function
1847 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1848 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1849 {
1850 if (fcode == BUILT_IN_STPCPY_CHK)
1851 {
1852 if (! ignore)
1853 return false;
1854
1855 /* If return value of __stpcpy_chk is ignored,
1856 optimize into __strcpy_chk. */
1857 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1858 if (!fn)
1859 return false;
1860
1861 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1862 replace_call_with_call_and_fold (gsi, repl);
1863 return true;
1864 }
1865
1866 if (! len || TREE_SIDE_EFFECTS (len))
1867 return false;
1868
1869 /* If c_strlen returned something, but not a constant,
1870 transform __strcpy_chk into __memcpy_chk. */
1871 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1872 if (!fn)
1873 return false;
1874
1875 len = fold_convert_loc (loc, size_type_node, len);
1876 len = size_binop_loc (loc, PLUS_EXPR, len,
1877 build_int_cst (size_type_node, 1));
1878 len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
1879 true, GSI_SAME_STMT);
1880 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1881 replace_call_with_call_and_fold (gsi, repl);
1882 return true;
1883 }
e256dfce 1884 }
fef5a0d9
RB
1885 else
1886 maxlen = len;
1887
1888 if (! tree_int_cst_lt (maxlen, size))
1889 return false;
e256dfce
RG
1890 }
1891
fef5a0d9
RB
1892 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1893 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1894 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1895 if (!fn)
1896 return false;
1897
1898 gimple repl = gimple_build_call (fn, 2, dest, src);
1899 replace_call_with_call_and_fold (gsi, repl);
1900 return true;
1901}
1902
/* Fold a call to the __st{r,p}ncpy_chk builtin at *GSI.  DEST, SRC, LEN,
   and SIZE are the arguments to the call.  FCODE is the BUILT_IN_* code of
   the builtin (BUILT_IN_STRNCPY_CHK or BUILT_IN_STPNCPY_CHK).  Returns true
   and replaces the call if it could be simplified, false otherwise.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  /* The call's return value is unused iff it has no LHS.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* SIZE must be a known constant to prove the copy stays in bounds.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  /* An all-ones SIZE means "unknown object size": checking is a no-op.  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* Fail if the destination may be smaller than the copied length.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1962
/* Fold a call to the __{,v}snprintf_chk builtin at *GSI.  FCODE is either
   BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  Returns true if the
   call was simplified into plain {,v}snprintf, false if a normal call
   should be emitted instead.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  /* SIZE must be a known constant to reason about the destination.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* An all-ones SIZE means "unknown object size": checking is a no-op.  */
  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
cbdd87d4 2043
/* Fold a call to the __{,v}sprintf_chk builtin at *GSI.  FCODE is either
   BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  Returns true if the call
   was simplified into plain {,v}sprintf, false if a normal call should be
   emitted instead.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  /* SIZE must be a known constant to reason about the destination.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* An all-ones SIZE means "unknown object size": checking is a no-op.
     Otherwise we must prove the output fits the destination.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2139
/* Simplify a call to the sprintf builtin at *GSI with arguments DEST, FMT,
   and optionally ORIG (absent in a 2-argument call).  We don't attempt to
   simplify calls with more than 3 arguments.

   Two forms are handled: 'sprintf (str, fmt)' with a %-free format becomes
   strcpy, and 'sprintf (dest, "%s", orig)' becomes strcpy when the length
   of ORIG is known.  Returns true if the call was replaced, false if no
   simplification was possible.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, which for a
	     %-free format is simply strlen (fmt).  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* The length of ORIG is only needed when the result is used.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
2264
/* Simplify a call to the snprintf builtin at *GSI with arguments DEST,
   DESTSIZE, FMT, and optionally ORIG (absent in a 3-argument call).  We
   don't attempt to simplify calls with more than 4 arguments.

   When the output provably fits in DESTSIZE, the call is converted into
   strcpy exactly as in gimple_fold_builtin_sprintf.  Returns true if the
   call was replaced, false if no simplification was possible.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a known constant.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters that would have been
	     written, here simply strlen (fmt) since it fits.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
35770bb2 2403
/* Fold a call to __builtin_strlen at *GSI.  If the maximal string length
   of the argument is known, replace the call with that constant length.
   Returns true if the call was replaced, false otherwise.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  /* LEN may be a non-trivial expression; gimplify it before GSI so it is
     a valid replacement operand.  */
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
2418
2419
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  Dispatches to the per-builtin folders above; anything not
   handled there falls through to the generic tree-level folder.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_BZERO:
      /* bzero (s, n) is memset (s, 0, n).  */
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_BCOPY:
      /* bcopy has src/dest swapped relative to memmove.  */
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
					    gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, DECL_FUNCTION_CODE (callee));
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     DECL_FUNCTION_CODE (callee));
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     DECL_FUNCTION_CODE (callee));
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      DECL_FUNCTION_CODE (callee));
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi,
					       DECL_FUNCTION_CODE (callee));
    case BUILT_IN_SNPRINTF:
      return gimple_fold_builtin_snprintf (gsi);
    case BUILT_IN_SPRINTF:
      return gimple_fold_builtin_sprintf (gsi);
    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
2532
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.

   If INPLACE is true only transformations that keep the statement in place
   are performed (argument *& folding and devirtualization of calls whose
   target is already known); otherwise builtin and internal-function calls
   may be replaced entirely.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* Diagnose devirtualizations that the type-inheritance analysis
	     considers impossible; they indicate inconsistent input.  */
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						       (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Give the SSA lhs a harmless default definition
			     so later uses stay valid.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		}
	      else
		{
		  /* No possible target: the call site is unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    gsi_replace (gsi, new_stmt, true);
		  return true;
		}
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      /* Machine-dependent builtins are folded by the target hook.  */
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* Fold internal functions: __builtin_expect lowering, UBSAN object
	 size checks that are statically satisfied, and trivial arithmetic
	 identities of the UBSAN overflow-checking ops.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  if (integer_all_onesp (gimple_call_arg (stmt, 2))
	      || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
		  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
		  && tree_int_cst_le (gimple_call_arg (stmt, 1),
				      gimple_call_arg (stmt, 2))))
	    {
	      /* The check can never fail: drop the statement entirely.  */
	      gsi_replace (gsi, gimple_build_nop (), true);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      return true;
	    }
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR
		     ? build_zero_cst (TREE_TYPE (arg0))
		     : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR
		     ? build_zero_cst (TREE_TYPE (arg0))
		     : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = build_zero_cst (TREE_TYPE (arg0));
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR)
	    {
	      if (integer_onep (arg1))
		result = arg0;
	      else if (integer_onep (arg0))
		result = arg1;
	    }
	}
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
2726
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the reference tree to canonicalize in place.  Returns true
   if *T was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through an outer ADDR_EXPR and any component references down to
     the base memory reference.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1)))
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
2808
cbdd87d4
RG
2809/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
2810 distinguishes both cases. */
2811
2812static bool
2813fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
2814{
2815 bool changed = false;
2816 gimple stmt = gsi_stmt (*gsi);
2817 unsigned i;
2818
040292e7
RB
2819 /* First do required canonicalization of [TARGET_]MEM_REF addresses
2820 after propagation.
2821 ??? This shouldn't be done in generic folding but in the
2822 propagation helpers which also know whether an address was
2823 propagated. */
2824 switch (gimple_code (stmt))
2825 {
2826 case GIMPLE_ASSIGN:
2827 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
2828 {
2829 tree *rhs = gimple_assign_rhs1_ptr (stmt);
2830 if ((REFERENCE_CLASS_P (*rhs)
2831 || TREE_CODE (*rhs) == ADDR_EXPR)
2832 && maybe_canonicalize_mem_ref_addr (rhs))
2833 changed = true;
2834 tree *lhs = gimple_assign_lhs_ptr (stmt);
2835 if (REFERENCE_CLASS_P (*lhs)
2836 && maybe_canonicalize_mem_ref_addr (lhs))
2837 changed = true;
2838 }
2839 break;
2840 case GIMPLE_CALL:
2841 {
2842 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2843 {
2844 tree *arg = gimple_call_arg_ptr (stmt, i);
2845 if (REFERENCE_CLASS_P (*arg)
2846 && maybe_canonicalize_mem_ref_addr (arg))
2847 changed = true;
2848 }
2849 tree *lhs = gimple_call_lhs_ptr (stmt);
2850 if (*lhs
2851 && REFERENCE_CLASS_P (*lhs)
2852 && maybe_canonicalize_mem_ref_addr (lhs))
2853 changed = true;
2854 break;
2855 }
2856 case GIMPLE_ASM:
2857 {
2858 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
2859 {
2860 tree link = gimple_asm_output_op (stmt, i);
2861 tree op = TREE_VALUE (link);
2862 if (REFERENCE_CLASS_P (op)
2863 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
2864 changed = true;
2865 }
2866 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
2867 {
2868 tree link = gimple_asm_input_op (stmt, i);
2869 tree op = TREE_VALUE (link);
2870 if ((REFERENCE_CLASS_P (op)
2871 || TREE_CODE (op) == ADDR_EXPR)
2872 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
2873 changed = true;
2874 }
2875 }
2876 break;
2877 case GIMPLE_DEBUG:
2878 if (gimple_debug_bind_p (stmt))
2879 {
2880 tree *val = gimple_debug_bind_get_value_ptr (stmt);
2881 if (*val
2882 && (REFERENCE_CLASS_P (*val)
2883 || TREE_CODE (*val) == ADDR_EXPR)
2884 && maybe_canonicalize_mem_ref_addr (val))
2885 changed = true;
2886 }
2887 break;
2888 default:;
2889 }
2890
cbdd87d4
RG
2891 /* Fold the main computation performed by the statement. */
2892 switch (gimple_code (stmt))
2893 {
2894 case GIMPLE_ASSIGN:
2895 {
2896 unsigned old_num_ops = gimple_num_ops (stmt);
5fbcc0ed 2897 enum tree_code subcode = gimple_assign_rhs_code (stmt);
cbdd87d4 2898 tree lhs = gimple_assign_lhs (stmt);
5fbcc0ed
RG
2899 tree new_rhs;
2900 /* First canonicalize operand order. This avoids building new
2901 trees if this is the only thing fold would later do. */
2902 if ((commutative_tree_code (subcode)
2903 || commutative_ternary_tree_code (subcode))
2904 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
2905 gimple_assign_rhs2 (stmt), false))
2906 {
2907 tree tem = gimple_assign_rhs1 (stmt);
2908 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
2909 gimple_assign_set_rhs2 (stmt, tem);
2910 changed = true;
2911 }
2912 new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
2913 if (new_rhs
2914 && !useless_type_conversion_p (TREE_TYPE (lhs),
2915 TREE_TYPE (new_rhs)))
2916 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
2917 if (new_rhs
2918 && (!inplace
2919 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
2920 {
2921 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
2922 changed = true;
2923 }
2924 break;
2925 }
2926
2927 case GIMPLE_COND:
2928 changed |= fold_gimple_cond (stmt);
2929 break;
2930
2931 case GIMPLE_CALL:
ceeffab0 2932 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
2933 break;
2934
2935 case GIMPLE_ASM:
2936 /* Fold *& in asm operands. */
38384150
JJ
2937 {
2938 size_t noutputs;
2939 const char **oconstraints;
2940 const char *constraint;
2941 bool allows_mem, allows_reg;
2942
2943 noutputs = gimple_asm_noutputs (stmt);
2944 oconstraints = XALLOCAVEC (const char *, noutputs);
2945
2946 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
2947 {
2948 tree link = gimple_asm_output_op (stmt, i);
2949 tree op = TREE_VALUE (link);
2950 oconstraints[i]
2951 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
2952 if (REFERENCE_CLASS_P (op)
2953 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
2954 {
2955 TREE_VALUE (link) = op;
2956 changed = true;
2957 }
2958 }
2959 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
2960 {
2961 tree link = gimple_asm_input_op (stmt, i);
2962 tree op = TREE_VALUE (link);
2963 constraint
2964 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
2965 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
2966 oconstraints, &allows_mem, &allows_reg);
2967 if (REFERENCE_CLASS_P (op)
2968 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
2969 != NULL_TREE)
2970 {
2971 TREE_VALUE (link) = op;
2972 changed = true;
2973 }
2974 }
2975 }
cbdd87d4
RG
2976 break;
2977
bd422c4a
RG
2978 case GIMPLE_DEBUG:
2979 if (gimple_debug_bind_p (stmt))
2980 {
2981 tree val = gimple_debug_bind_get_value (stmt);
2982 if (val
2983 && REFERENCE_CLASS_P (val))
2984 {
2985 tree tem = maybe_fold_reference (val, false);
2986 if (tem)
2987 {
2988 gimple_debug_bind_set_value (stmt, tem);
2989 changed = true;
2990 }
2991 }
3e888a5e
RG
2992 else if (val
2993 && TREE_CODE (val) == ADDR_EXPR)
2994 {
2995 tree ref = TREE_OPERAND (val, 0);
2996 tree tem = maybe_fold_reference (ref, false);
2997 if (tem)
2998 {
2999 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3000 gimple_debug_bind_set_value (stmt, tem);
3001 changed = true;
3002 }
3003 }
bd422c4a
RG
3004 }
3005 break;
3006
cbdd87d4
RG
3007 default:;
3008 }
3009
3010 stmt = gsi_stmt (*gsi);
3011
37376165
RB
3012 /* Fold *& on the lhs. */
3013 if (gimple_has_lhs (stmt))
cbdd87d4
RG
3014 {
3015 tree lhs = gimple_get_lhs (stmt);
3016 if (lhs && REFERENCE_CLASS_P (lhs))
3017 {
3018 tree new_lhs = maybe_fold_reference (lhs, true);
3019 if (new_lhs)
3020 {
3021 gimple_set_lhs (stmt, new_lhs);
3022 changed = true;
3023 }
3024 }
3025 }
3026
3027 return changed;
3028}
3029
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Not in-place: fold_stmt_1 is allowed to replace the statement.  */
  return fold_stmt_1 (gsi, false);
}
3042
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  /* In-place folding must not have replaced the statement; verify that
     the iterator still points at the statement we started with.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
3059
e89065a1
SL
3060/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3061 if EXPR is null or we don't know how.
3062 If non-null, the result always has boolean type. */
3063
3064static tree
3065canonicalize_bool (tree expr, bool invert)
3066{
3067 if (!expr)
3068 return NULL_TREE;
3069 else if (invert)
3070 {
3071 if (integer_nonzerop (expr))
3072 return boolean_false_node;
3073 else if (integer_zerop (expr))
3074 return boolean_true_node;
3075 else if (TREE_CODE (expr) == SSA_NAME)
3076 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3077 build_int_cst (TREE_TYPE (expr), 0));
3078 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3079 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3080 boolean_type_node,
3081 TREE_OPERAND (expr, 0),
3082 TREE_OPERAND (expr, 1));
3083 else
3084 return NULL_TREE;
3085 }
3086 else
3087 {
3088 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3089 return expr;
3090 if (integer_nonzerop (expr))
3091 return boolean_true_node;
3092 else if (integer_zerop (expr))
3093 return boolean_false_node;
3094 else if (TREE_CODE (expr) == SSA_NAME)
3095 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3096 build_int_cst (TREE_TYPE (expr), 0));
3097 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3098 return fold_build2 (TREE_CODE (expr),
3099 boolean_type_node,
3100 TREE_OPERAND (expr, 0),
3101 TREE_OPERAND (expr, 1));
3102 else
3103 return NULL_TREE;
3104 }
3105}
3106
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case: EXPR is literally the same comparison.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean name compared against 0/1: name == (name != 0)
	 and name == (name == 1).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise look at the statement defining EXPR.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == 1): same truth value as name.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) or (name != 1): inverted truth value.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
3164
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}
3192
/* Forward declarations for some mutually recursive functions.
   and_comparisons_1/or_comparisons_1 combine two comparisons directly;
   the *_var_with_comparison* helpers look through the SSA definition of
   one operand, recursing back into the combiners.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
3214/* Helper function for and_comparisons_1: try to simplify the AND of the
3215 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3216 If INVERT is true, invert the value of the VAR before doing the AND.
3217 Return NULL_EXPR if we can't simplify this to a single expression. */
3218
3219static tree
3220and_var_with_comparison (tree var, bool invert,
3221 enum tree_code code2, tree op2a, tree op2b)
3222{
3223 tree t;
3224 gimple stmt = SSA_NAME_DEF_STMT (var);
3225
3226 /* We can only deal with variables whose definitions are assignments. */
3227 if (!is_gimple_assign (stmt))
3228 return NULL_TREE;
3229
3230 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3231 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3232 Then we only have to consider the simpler non-inverted cases. */
3233 if (invert)
3234 t = or_var_with_comparison_1 (stmt,
3235 invert_tree_comparison (code2, false),
3236 op2a, op2b);
3237 else
3238 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
3239 return canonicalize_bool (t, invert);
3240}
3241
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) or (op2a == 1) tests op2a for truth ...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ... while (op2a == 0) or (op2a != 1) tests it for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
3404
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* If the second comparison implies the first, the AND reduces
	     to the second comparison alone.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges: (x >= c AND x <= c) => x == c.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* (NAME == 0) and (NAME != 1) test the negation of NAME.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
3658
3659/* Try to simplify the AND of two comparisons, specified by
3660 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
3661 If this can be simplified to a single expression (without requiring
3662 introducing more SSA variables to hold intermediate values),
3663 return the resulting tree. Otherwise return NULL_TREE.
3664 If the result expression is non-null, it has boolean type. */
3665
3666tree
3667maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
3668 enum tree_code code2, tree op2a, tree op2b)
3669{
3670 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
3671 if (t)
3672 return t;
3673 else
3674 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
3675}
3676
3677/* Helper function for or_comparisons_1: try to simplify the OR of the
3678 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3679 If INVERT is true, invert the value of VAR before doing the OR.
3680 Return NULL_EXPR if we can't simplify this to a single expression. */
3681
3682static tree
3683or_var_with_comparison (tree var, bool invert,
3684 enum tree_code code2, tree op2a, tree op2b)
3685{
3686 tree t;
3687 gimple stmt = SSA_NAME_DEF_STMT (var);
3688
3689 /* We can only deal with variables whose definitions are assignments. */
3690 if (!is_gimple_assign (stmt))
3691 return NULL_TREE;
3692
3693 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3694 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
3695 Then we only have to consider the simpler non-inverted cases. */
3696 if (invert)
3697 t = and_var_with_comparison_1 (stmt,
3698 invert_tree_comparison (code2, false),
3699 op2a, op2b);
3700 else
3701 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
3702 return canonicalize_bool (t, invert);
3703}
3704
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) or (op2a == 1) tests op2a for truth ...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ... while (op2a == 0) or (op2a != 1) tests it for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
3868
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  /* Type in which the combined truth result is expressed.  */
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* Sign of the comparison of the two constants OP1B vs OP2B.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise chose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	/* (x < C) OR (x > C) is simply x != C.  */
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests for falsehood
	 of NAME, i.e. (NAME == 0) or (NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4122
4123/* Try to simplify the OR of two comparisons, specified by
4124 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4125 If this can be simplified to a single expression (without requiring
4126 introducing more SSA variables to hold intermediate values),
4127 return the resulting tree. Otherwise return NULL_TREE.
4128 If the result expression is non-null, it has boolean type. */
4129
4130tree
4131maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4132 enum tree_code code2, tree op2a, tree op2b)
4133{
4134 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4135 if (t)
4136 return t;
4137 else
4138 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4139}
cfef45c8
RG
4140
4141
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      /* A vector CONSTRUCTOR with one value per element folds to
		 a VECTOR_CST if every valueized element is a constant.  */
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      /* Substitute a constant address into the MEM_REF
			 before trying the constant-aggregate fold below.  */
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr_loc
			   (loc,
			    fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      /* Fold embedded expressions in ternary codes.  */
	      if ((subcode == COND_EXPR
		   || subcode == VEC_COND_EXPR)
		  && COMPARISON_CLASS_P (op0))
		{
		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
					      TREE_TYPE (op0), op00, op01);
		  if (tem)
		    op0 = tem;
		}

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;

	if (gimple_call_internal_p (stmt))
	  {
	    /* UBSAN overflow-checking builtins fold like the plain
	       arithmetic when no overflow occurs.  */
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    /* Rebuild a CALL_EXPR from the valueized arguments and let
	       the generic builtin folder have a go at it.  */
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (stmt), retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
4414
4415/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4416 Returns NULL_TREE if folding to a constant is not possible, otherwise
4417 returns a constant according to is_gimple_min_invariant. */
4418
4419tree
4420gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
4421{
4422 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
4423 if (res && is_gimple_min_invariant (res))
4424 return res;
4425 return NULL_TREE;
4426}
4427
4428
4429/* The following set of functions are supposed to fold references using
4430 their constant initializers. */
4431
4432static tree fold_ctor_reference (tree type, tree ctor,
4433 unsigned HOST_WIDE_INT offset,
c44c2088 4434 unsigned HOST_WIDE_INT size, tree);
cfef45c8
RG
4435
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF offset into *BIT_OFFSET and strip the
	 address-taking so we can look at the underlying decl.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only exact-sized accesses can be resolved; otherwise the
	 reference is ambiguous.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
4503
cfef45c8
RG
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  FROM_DECL is passed through to
   fold_ctor_reference for the recursive fold of the matched element.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  /* Start one below LOW_BOUND so the increment for an index-less first
     element lands on LOW_BOUND.  */
  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitely set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitely mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
4603
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   FROM_DECL is passed through to fold_ctor_reference for the recursive
   fold of the matched field's value.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitely mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
4667
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  FROM_DECL is the decl the constructor
   originates from, used when canonicalizing the matched value.
   Returns NULL_TREE when the reference cannot be folded.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_NOPS (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
			      offset / BITS_PER_UNIT) > 0)
	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  /* Dispatch to the array or record/vector walker depending on the
     constructor's type.  */
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {

      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
4722
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  /* Never fold a volatile access to a constant.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the underlying operand first; part extraction only works
	   on a COMPLEX_CST result.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
4839
/* If T is a constant reference into an aggregate (e.g. an ARRAY_REF or
   COMPONENT_REF into a constant), return the constant value referenced,
   or NULL_TREE if it cannot be determined.  Convenience wrapper around
   fold_const_aggregate_ref_1 with no SSA-name valueization callback.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
06bc3ec7 4845
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  /* SIZE is the size of one vtable entry in bits; convert OFFSET to
     bits as well and advance it by TOKEN entries.  */
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      /* Return the decl anyway so the caller knows which method
		 it was that could not be referenced.  */
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
4957
85942f45
JH
4958/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
4959 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
4960 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
4961 OBJ_TYPE_REF_OBJECT(REF).
4962 Set CAN_REFER if non-NULL to false if method
4963 is not referable or if the virtual table is ill-formed (such as rewriten
4964 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
4965
4966tree
ec77d61f
JH
4967gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
4968 bool *can_refer)
85942f45
JH
4969{
4970 unsigned HOST_WIDE_INT offset;
4971 tree v;
4972
4973 v = BINFO_VTABLE (known_binfo);
4974 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
4975 if (!v)
4976 return NULL_TREE;
4977
4978 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
4979 {
4980 if (can_refer)
4981 *can_refer = false;
4982 return NULL_TREE;
4983 }
4984 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
4985}
4986
06bc3ec7
BS
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	  /* Fall through.  */
	default:
	  return false;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* These builtins are treated as always producing a
		 non-negative result.  */
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  /* Verify that N converted back to a real is identical
		     to C, i.e. the exponent really is integer-valued.  */
		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, SIGNED);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      return false;
	    }
	}
    }

  return false;
}
b184c8f1
AM
5117
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  /* Index with the array's lower bound; fall back to zero when
	     the domain does not supply one.  */
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ... */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the access stays within the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  /* The offset must equal the size of one element, i.e. the
	     access selects the imaginary part.  */
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively fold the inner indirection first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
19e51b40
JJ
5237
5238/* Return true if CODE is an operation that when operating on signed
5239 integer types involves undefined behavior on overflow and the
5240 operation can be expressed with unsigned arithmetic. */
5241
5242bool
5243arith_code_with_undefined_signed_overflow (tree_code code)
5244{
5245 switch (code)
5246 {
5247 case PLUS_EXPR:
5248 case MINUS_EXPR:
5249 case MULT_EXPR:
5250 case NEGATE_EXPR:
5251 case POINTER_PLUS_EXPR:
5252 return true;
5253 default:
5254 return false;
5255 }
5256}
5257
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each operand (ops 1..n-1; op 0 is the LHS) to the unsigned
     counterpart type, accumulating any statements the conversions need.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  /* Give STMT a fresh unsigned LHS; the original LHS is re-established
     by the conversion statement appended below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* The operands are no longer pointers after the conversion, so a
     POINTER_PLUS_EXPR must become a plain PLUS_EXPR.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original type of LHS.  */
  gimple cvt = gimple_build_assign_with_ops
      (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
d4f5cd5e
RB
5298
5299/* Return OP converted to TYPE by emitting a conversion statement on SEQ
5300 if required using location LOC. Note that OP will be returned
5301 unmodified if GIMPLE does not require an explicit conversion between
5302 its type and TYPE. */
5303
5304tree
5305gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
5306{
5307 if (useless_type_conversion_p (type, TREE_TYPE (op)))
5308 return op;
5309 op = fold_convert_loc (loc, type, op);
5310 gimple_seq stmts = NULL;
5311 op = force_gimple_operand (op, &stmts, true, NULL_TREE);
5312 gimple_seq_add_seq_without_update (seq, stmts);
5313 return op;
5314}