]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
[AArch64] Remove/merge redundant iterators
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
23a5b65a 2 Copyright (C) 2010-2014 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
d8a2d370
DN
26#include "stringpool.h"
27#include "expr.h"
28#include "stmt.h"
29#include "stor-layout.h"
cbdd87d4 30#include "flags.h"
83685514
AM
31#include "hashtab.h"
32#include "hash-set.h"
33#include "vec.h"
34#include "machmode.h"
35#include "hard-reg-set.h"
36#include "input.h"
cbdd87d4 37#include "function.h"
7ee2468b 38#include "dumpfile.h"
442b4905 39#include "bitmap.h"
60393bbc
AM
40#include "predict.h"
41#include "dominance.h"
2fb9a547
AM
42#include "basic-block.h"
43#include "tree-ssa-alias.h"
44#include "internal-fn.h"
45#include "gimple-fold.h"
46#include "gimple-expr.h"
47#include "is-a.h"
18f429e2 48#include "gimple.h"
45b0be94 49#include "gimplify.h"
5be5c238 50#include "gimple-iterator.h"
442b4905
AM
51#include "gimple-ssa.h"
52#include "tree-ssanames.h"
53#include "tree-into-ssa.h"
54#include "tree-dfa.h"
7a300452 55#include "tree-ssa.h"
cbdd87d4 56#include "tree-ssa-propagate.h"
cbdd87d4 57#include "target.h"
c582198b
AM
58#include "hash-map.h"
59#include "plugin-api.h"
60#include "ipa-ref.h"
61#include "cgraph.h"
450ad0cd
JH
62#include "ipa-utils.h"
63#include "gimple-pretty-print.h"
4484a35a 64#include "tree-ssa-address.h"
862d0b35 65#include "langhooks.h"
19e51b40 66#include "gimplify-me.h"
2b5f0895 67#include "dbgcnt.h"
9b2b7279 68#include "builtins.h"
fef5a0d9 69#include "output.h"
e0ee10ed
RB
70#include "tree-eh.h"
71#include "gimple-match.h"
45cc9f96
RB
72#include "tree-phinodes.h"
73#include "ssa-iterators.h"
cbdd87d4 74
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (origin-only) declarations never have a body to refer to.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they was not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function fully inlined into its callers has no body of its own
	 we could reference.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
174
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE when the value cannot be
   legitimately referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite PTR p+ CST into &MEM[ptr + CST] so the result is a single
     ADDR_EXPR that is_gimple_min_invariant can accept.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal by its underlying decl (mutates
	     CVAL in place).  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      /* Taking the address here makes the variable addressable.  */
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
cbdd87d4
RG
235
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  /* ctor_for_folding returns error_mark_node when the initializer must
     not be used for folding at all.  */
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  /* Unshare before canonicalizing; canonicalization may mutate
	     the tree in place.  */
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
263
264
cbdd87d4
RG
265
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  Returns the folded replacement
   or NULL_TREE if no simplification applies.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  /* Fold unary/ternary wrappers around constants directly.  */
  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  /* An lvalue cannot be replaced by a constant; only fold reads.
     The assignment inside the condition is deliberate.  */
  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
300
301
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers mark end-of-life of a variable; nothing to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		/* Devirtualize only when the complete set of targets is
		   known and has at most one element (and the debug
		   counter allows it).  */
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }

	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));

	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
540
541/* Attempt to fold a conditional statement. Return true if any changes were
542 made. We only attempt to fold the condition expression, and do not perform
543 any transformation that would require alteration of the cfg. It is
544 assumed that the operands have been previously folded. */
545
546static bool
538dd0b7 547fold_gimple_cond (gcond *stmt)
cbdd87d4
RG
548{
549 tree result = fold_binary_loc (gimple_location (stmt),
550 gimple_cond_code (stmt),
551 boolean_type_node,
552 gimple_cond_lhs (stmt),
553 gimple_cond_rhs (stmt));
554
555 if (result)
556 {
557 STRIP_USELESS_TYPE_CONVERSION (result);
558 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
559 {
560 gimple_cond_set_condition_from_tree (stmt, result);
561 return true;
562 }
563 }
564
565 return false;
566}
567
fef5a0d9
RB
568
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF if it stores to memory: either a
	 non-register single assignment, or a call that may clobber
	 memory (not NOVOPS/PURE/CONST/NORETURN).  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence (first seen when walking
	     backward) inherits the replaced statement's VDEF; earlier
	     stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
640
cbdd87d4
RG
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a temporary and assign it to the call's LHS
	 as the last statement of the sequence.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
cbdd87d4 696
fef5a0d9
RB
697
698/* Replace the call at *GSI with the gimple value VAL. */
699
700static void
701replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
702{
703 gimple stmt = gsi_stmt (*gsi);
704 tree lhs = gimple_call_lhs (stmt);
705 gimple repl;
706 if (lhs)
e256dfce 707 {
fef5a0d9
RB
708 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
709 val = fold_convert (TREE_TYPE (lhs), val);
710 repl = gimple_build_assign (lhs, val);
711 }
712 else
713 repl = gimple_build_nop ();
714 tree vdef = gimple_vdef (stmt);
715 if (vdef && TREE_CODE (vdef) == SSA_NAME)
716 {
717 unlink_stmt_vdef (stmt);
718 release_ssa_name (vdef);
719 }
720 gsi_replace (gsi, repl, true);
721}
722
723/* Replace the call at *GSI with the new call REPL and fold that
724 again. */
725
726static void
727replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
728{
729 gimple stmt = gsi_stmt (*gsi);
730 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
731 gimple_set_location (repl, gimple_location (stmt));
732 if (gimple_vdef (stmt)
733 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
734 {
735 gimple_set_vdef (repl, gimple_vdef (stmt));
736 gimple_set_vuse (repl, gimple_vuse (stmt));
737 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
738 }
739 gsi_replace (gsi, repl, true);
740 fold_stmt (gsi);
741}
742
743/* Return true if VAR is a VAR_DECL or a component thereof. */
744
745static bool
746var_decl_component_p (tree var)
747{
748 tree inner = var;
749 while (handled_component_p (inner))
750 inner = TREE_OPERAND (inner, 0);
751 return SSA_VAR_P (inner);
752}
753
754/* Fold function call to builtin mem{{,p}cpy,move}. Return
755 NULL_TREE if no simplification can be made.
756 If ENDP is 0, return DEST (like memcpy).
757 If ENDP is 1, return DEST+LEN (like mempcpy).
758 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
759 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
760 (memmove). */
761
762static bool
763gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
764 tree dest, tree src, int endp)
765{
766 gimple stmt = gsi_stmt (*gsi);
767 tree lhs = gimple_call_lhs (stmt);
768 tree len = gimple_call_arg (stmt, 2);
769 tree destvar, srcvar;
770 location_t loc = gimple_location (stmt);
771
772 /* If the LEN parameter is zero, return DEST. */
773 if (integer_zerop (len))
774 {
775 gimple repl;
776 if (gimple_call_lhs (stmt))
777 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
778 else
779 repl = gimple_build_nop ();
780 tree vdef = gimple_vdef (stmt);
781 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 782 {
fef5a0d9
RB
783 unlink_stmt_vdef (stmt);
784 release_ssa_name (vdef);
785 }
786 gsi_replace (gsi, repl, true);
787 return true;
788 }
789
790 /* If SRC and DEST are the same (and not volatile), return
791 DEST{,+LEN,+LEN-1}. */
792 if (operand_equal_p (src, dest, 0))
793 {
794 unlink_stmt_vdef (stmt);
795 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
796 release_ssa_name (gimple_vdef (stmt));
797 if (!lhs)
798 {
799 gsi_replace (gsi, gimple_build_nop (), true);
800 return true;
801 }
802 goto done;
803 }
804 else
805 {
806 tree srctype, desttype;
807 unsigned int src_align, dest_align;
808 tree off0;
809
810 /* Build accesses at offset zero with a ref-all character type. */
811 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
812 ptr_mode, true), 0);
813
814 /* If we can perform the copy efficiently with first doing all loads
815 and then all stores inline it that way. Currently efficiently
816 means that we can load all the memory into a single integer
817 register which is what MOVE_MAX gives us. */
818 src_align = get_pointer_alignment (src);
819 dest_align = get_pointer_alignment (dest);
820 if (tree_fits_uhwi_p (len)
821 && compare_tree_int (len, MOVE_MAX) <= 0
822 /* ??? Don't transform copies from strings with known length this
823 confuses the tree-ssa-strlen.c. This doesn't handle
824 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
825 reason. */
826 && !c_strlen (src, 2))
827 {
828 unsigned ilen = tree_to_uhwi (len);
829 if (exact_log2 (ilen) != -1)
830 {
831 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
832 if (type
833 && TYPE_MODE (type) != BLKmode
834 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
835 == ilen * 8)
836 /* If the destination pointer is not aligned we must be able
837 to emit an unaligned store. */
838 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
839 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
840 {
841 tree srctype = type;
842 tree desttype = type;
843 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
844 srctype = build_aligned_type (type, src_align);
845 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
846 tree tem = fold_const_aggregate_ref (srcmem);
847 if (tem)
848 srcmem = tem;
849 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
850 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
851 src_align))
852 srcmem = NULL_TREE;
853 if (srcmem)
854 {
855 gimple new_stmt;
856 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
857 {
858 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
859 if (gimple_in_ssa_p (cfun))
860 srcmem = make_ssa_name (TREE_TYPE (srcmem),
861 new_stmt);
862 else
b731b390 863 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
fef5a0d9
RB
864 gimple_assign_set_lhs (new_stmt, srcmem);
865 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
866 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
867 }
868 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
869 desttype = build_aligned_type (type, dest_align);
870 new_stmt
871 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
872 dest, off0),
873 srcmem);
874 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
875 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
876 if (gimple_vdef (new_stmt)
877 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
878 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
879 if (!lhs)
880 {
881 gsi_replace (gsi, new_stmt, true);
882 return true;
883 }
884 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
885 goto done;
886 }
887 }
888 }
889 }
890
891 if (endp == 3)
892 {
893 /* Both DEST and SRC must be pointer types.
894 ??? This is what old code did. Is the testing for pointer types
895 really mandatory?
896
897 If either SRC is readonly or length is 1, we can use memcpy. */
898 if (!dest_align || !src_align)
899 return false;
900 if (readonly_data_expr (src)
901 || (tree_fits_uhwi_p (len)
902 && (MIN (src_align, dest_align) / BITS_PER_UNIT
903 >= tree_to_uhwi (len))))
904 {
905 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
906 if (!fn)
907 return false;
908 gimple_call_set_fndecl (stmt, fn);
909 gimple_call_set_arg (stmt, 0, dest);
910 gimple_call_set_arg (stmt, 1, src);
911 fold_stmt (gsi);
912 return true;
913 }
914
915 /* If *src and *dest can't overlap, optimize into memcpy as well. */
916 if (TREE_CODE (src) == ADDR_EXPR
917 && TREE_CODE (dest) == ADDR_EXPR)
918 {
919 tree src_base, dest_base, fn;
920 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
921 HOST_WIDE_INT size = -1;
922 HOST_WIDE_INT maxsize = -1;
923
924 srcvar = TREE_OPERAND (src, 0);
925 src_base = get_ref_base_and_extent (srcvar, &src_offset,
926 &size, &maxsize);
927 destvar = TREE_OPERAND (dest, 0);
928 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
929 &size, &maxsize);
930 if (tree_fits_uhwi_p (len))
931 maxsize = tree_to_uhwi (len);
932 else
933 maxsize = -1;
934 src_offset /= BITS_PER_UNIT;
935 dest_offset /= BITS_PER_UNIT;
936 if (SSA_VAR_P (src_base)
937 && SSA_VAR_P (dest_base))
938 {
939 if (operand_equal_p (src_base, dest_base, 0)
940 && ranges_overlap_p (src_offset, maxsize,
941 dest_offset, maxsize))
942 return false;
943 }
944 else if (TREE_CODE (src_base) == MEM_REF
945 && TREE_CODE (dest_base) == MEM_REF)
946 {
947 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
948 TREE_OPERAND (dest_base, 0), 0))
949 return false;
950 offset_int off = mem_ref_offset (src_base) + src_offset;
951 if (!wi::fits_shwi_p (off))
952 return false;
953 src_offset = off.to_shwi ();
954
955 off = mem_ref_offset (dest_base) + dest_offset;
956 if (!wi::fits_shwi_p (off))
957 return false;
958 dest_offset = off.to_shwi ();
959 if (ranges_overlap_p (src_offset, maxsize,
960 dest_offset, maxsize))
961 return false;
962 }
963 else
964 return false;
965
966 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
967 if (!fn)
968 return false;
969 gimple_call_set_fndecl (stmt, fn);
970 gimple_call_set_arg (stmt, 0, dest);
971 gimple_call_set_arg (stmt, 1, src);
972 fold_stmt (gsi);
973 return true;
974 }
975
976 /* If the destination and source do not alias optimize into
977 memcpy as well. */
978 if ((is_gimple_min_invariant (dest)
979 || TREE_CODE (dest) == SSA_NAME)
980 && (is_gimple_min_invariant (src)
981 || TREE_CODE (src) == SSA_NAME))
982 {
983 ao_ref destr, srcr;
984 ao_ref_init_from_ptr_and_size (&destr, dest, len);
985 ao_ref_init_from_ptr_and_size (&srcr, src, len);
986 if (!refs_may_alias_p_1 (&destr, &srcr, false))
987 {
988 tree fn;
989 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
990 if (!fn)
991 return false;
992 gimple_call_set_fndecl (stmt, fn);
993 gimple_call_set_arg (stmt, 0, dest);
994 gimple_call_set_arg (stmt, 1, src);
995 fold_stmt (gsi);
996 return true;
997 }
998 }
999
1000 return false;
1001 }
1002
1003 if (!tree_fits_shwi_p (len))
1004 return false;
1005 /* FIXME:
1006 This logic lose for arguments like (type *)malloc (sizeof (type)),
1007 since we strip the casts of up to VOID return value from malloc.
1008 Perhaps we ought to inherit type from non-VOID argument here? */
1009 STRIP_NOPS (src);
1010 STRIP_NOPS (dest);
1011 if (!POINTER_TYPE_P (TREE_TYPE (src))
1012 || !POINTER_TYPE_P (TREE_TYPE (dest)))
1013 return false;
1014 /* In the following try to find a type that is most natural to be
1015 used for the memcpy source and destination and that allows
1016 the most optimization when memcpy is turned into a plain assignment
1017 using that type. In theory we could always use a char[len] type
1018 but that only gains us that the destination and source possibly
1019 no longer will have their address taken. */
1020 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
1021 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
1022 {
1023 tree tem = TREE_OPERAND (src, 0);
1024 STRIP_NOPS (tem);
1025 if (tem != TREE_OPERAND (src, 0))
1026 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
1027 }
1028 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
1029 {
1030 tree tem = TREE_OPERAND (dest, 0);
1031 STRIP_NOPS (tem);
1032 if (tem != TREE_OPERAND (dest, 0))
1033 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
1034 }
1035 srctype = TREE_TYPE (TREE_TYPE (src));
1036 if (TREE_CODE (srctype) == ARRAY_TYPE
1037 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1038 {
1039 srctype = TREE_TYPE (srctype);
1040 STRIP_NOPS (src);
1041 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
1042 }
1043 desttype = TREE_TYPE (TREE_TYPE (dest));
1044 if (TREE_CODE (desttype) == ARRAY_TYPE
1045 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1046 {
1047 desttype = TREE_TYPE (desttype);
1048 STRIP_NOPS (dest);
1049 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
1050 }
1051 if (TREE_ADDRESSABLE (srctype)
1052 || TREE_ADDRESSABLE (desttype))
1053 return false;
1054
1055 /* Make sure we are not copying using a floating-point mode or
1056 a type whose size possibly does not match its precision. */
1057 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1058 || TREE_CODE (desttype) == BOOLEAN_TYPE
1059 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1060 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1061 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1062 || TREE_CODE (srctype) == BOOLEAN_TYPE
1063 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1064 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1065 if (!srctype)
1066 srctype = desttype;
1067 if (!desttype)
1068 desttype = srctype;
1069 if (!srctype)
1070 return false;
1071
1072 src_align = get_pointer_alignment (src);
1073 dest_align = get_pointer_alignment (dest);
1074 if (dest_align < TYPE_ALIGN (desttype)
1075 || src_align < TYPE_ALIGN (srctype))
1076 return false;
1077
1078 destvar = dest;
1079 STRIP_NOPS (destvar);
1080 if (TREE_CODE (destvar) == ADDR_EXPR
1081 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1082 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1083 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1084 else
1085 destvar = NULL_TREE;
1086
1087 srcvar = src;
1088 STRIP_NOPS (srcvar);
1089 if (TREE_CODE (srcvar) == ADDR_EXPR
1090 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1091 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1092 {
1093 if (!destvar
1094 || src_align >= TYPE_ALIGN (desttype))
1095 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1096 srcvar, off0);
1097 else if (!STRICT_ALIGNMENT)
1098 {
1099 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1100 src_align);
1101 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1102 }
e256dfce 1103 else
fef5a0d9
RB
1104 srcvar = NULL_TREE;
1105 }
1106 else
1107 srcvar = NULL_TREE;
1108
1109 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1110 return false;
1111
1112 if (srcvar == NULL_TREE)
1113 {
1114 STRIP_NOPS (src);
1115 if (src_align >= TYPE_ALIGN (desttype))
1116 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1117 else
1118 {
1119 if (STRICT_ALIGNMENT)
1120 return false;
1121 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1122 src_align);
1123 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1124 }
1125 }
1126 else if (destvar == NULL_TREE)
1127 {
1128 STRIP_NOPS (dest);
1129 if (dest_align >= TYPE_ALIGN (srctype))
1130 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1131 else
1132 {
1133 if (STRICT_ALIGNMENT)
1134 return false;
1135 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1136 dest_align);
1137 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1138 }
1139 }
1140
1141 gimple new_stmt;
1142 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1143 {
1144 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1145 if (gimple_in_ssa_p (cfun))
1146 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1147 else
b731b390 1148 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
fef5a0d9
RB
1149 gimple_assign_set_lhs (new_stmt, srcvar);
1150 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1151 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1152 }
1153 new_stmt = gimple_build_assign (destvar, srcvar);
1154 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1155 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1156 if (gimple_vdef (new_stmt)
1157 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1158 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1159 if (!lhs)
1160 {
1161 gsi_replace (gsi, new_stmt, true);
1162 return true;
1163 }
1164 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1165 }
1166
1167done:
1168 if (endp == 0 || endp == 3)
1169 len = NULL_TREE;
1170 else if (endp == 2)
1171 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1172 ssize_int (1));
1173 if (endp == 2 || endp == 1)
1174 dest = fold_build_pointer_plus_loc (loc, dest, len);
1175
1176 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1177 GSI_SAME_STMT);
1178 gimple repl = gimple_build_assign (lhs, dest);
1179 gsi_replace (gsi, repl, true);
1180 return true;
1181}
1182
1183/* Fold function call to builtin memset or bzero at *GSI setting the
1184 memory of size LEN to VAL. Return whether a simplification was made. */
1185
1186static bool
1187gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1188{
1189 gimple stmt = gsi_stmt (*gsi);
1190 tree etype;
1191 unsigned HOST_WIDE_INT length, cval;
1192
1193 /* If the LEN parameter is zero, return DEST. */
1194 if (integer_zerop (len))
1195 {
1196 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1197 return true;
1198 }
1199
1200 if (! tree_fits_uhwi_p (len))
1201 return false;
1202
1203 if (TREE_CODE (c) != INTEGER_CST)
1204 return false;
1205
1206 tree dest = gimple_call_arg (stmt, 0);
1207 tree var = dest;
1208 if (TREE_CODE (var) != ADDR_EXPR)
1209 return false;
1210
1211 var = TREE_OPERAND (var, 0);
1212 if (TREE_THIS_VOLATILE (var))
1213 return false;
1214
1215 etype = TREE_TYPE (var);
1216 if (TREE_CODE (etype) == ARRAY_TYPE)
1217 etype = TREE_TYPE (etype);
1218
1219 if (!INTEGRAL_TYPE_P (etype)
1220 && !POINTER_TYPE_P (etype))
1221 return NULL_TREE;
1222
1223 if (! var_decl_component_p (var))
1224 return NULL_TREE;
1225
1226 length = tree_to_uhwi (len);
1227 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1228 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1229 return NULL_TREE;
1230
1231 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1232 return NULL_TREE;
1233
1234 if (integer_zerop (c))
1235 cval = 0;
1236 else
1237 {
1238 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1239 return NULL_TREE;
1240
1241 cval = TREE_INT_CST_LOW (c);
1242 cval &= 0xff;
1243 cval |= cval << 8;
1244 cval |= cval << 16;
1245 cval |= (cval << 31) << 1;
1246 }
1247
1248 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1249 gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1250 gimple_set_vuse (store, gimple_vuse (stmt));
1251 tree vdef = gimple_vdef (stmt);
1252 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1253 {
1254 gimple_set_vdef (store, gimple_vdef (stmt));
1255 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1256 }
1257 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1258 if (gimple_call_lhs (stmt))
1259 {
1260 gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1261 gsi_replace (gsi, asgn, true);
1262 }
1263 else
1264 {
1265 gimple_stmt_iterator gsi2 = *gsi;
1266 gsi_prev (gsi);
1267 gsi_remove (&gsi2, true);
1268 }
1269
1270 return true;
1271}
1272
1273
/* Return the string length, maximum string length or maximum value of
   ARG in *LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If *LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited SSA versions, allocated
   lazily on first use; the caller owns and frees it.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    /* Recurse on the pointer that the dereference reads from.  */
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* TYPE == 2: ARG itself is the value; it must be a
	     non-negative integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* For maximum queries keep the larger of the previously
		 recorded value and VAL; both must be constants.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* TYPE == 0: an exact length was already recorded; it must
	     agree with VAL.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* A conditional: both possible values must satisfy the
	     query.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_maxval_strlen (op2, length, visited, type)
		 && get_maxval_strlen (op3, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
1399
dcb7fae2
RB
1400tree
1401get_maxval_strlen (tree arg, int type)
1402{
1403 bitmap visited = NULL;
1404 tree len = NULL_TREE;
1405 if (!get_maxval_strlen (arg, &len, &visited, type))
1406 len = NULL_TREE;
1407 if (visited)
1408 BITMAP_FREE (visited);
1409
1410 return len;
1411}
1412
fef5a0d9
RB
1413
1414/* Fold function call to builtin strcpy with arguments DEST and SRC.
1415 If LEN is not NULL, it represents the length of the string to be
1416 copied. Return NULL_TREE if no simplification can be made. */
1417
1418static bool
1419gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 1420 tree dest, tree src)
fef5a0d9 1421{
dcb7fae2 1422 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1423 tree fn;
1424
1425 /* If SRC and DEST are the same (and not volatile), return DEST. */
1426 if (operand_equal_p (src, dest, 0))
1427 {
1428 replace_call_with_value (gsi, dest);
1429 return true;
1430 }
1431
1432 if (optimize_function_for_size_p (cfun))
1433 return false;
1434
1435 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1436 if (!fn)
1437 return false;
1438
1579e1f8 1439 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1440 if (!len)
dcb7fae2 1441 return false;
fef5a0d9
RB
1442
1443 len = fold_convert_loc (loc, size_type_node, len);
1444 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1445 len = force_gimple_operand_gsi (gsi, len, true,
1446 NULL_TREE, true, GSI_SAME_STMT);
1447 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1448 replace_call_with_call_and_fold (gsi, repl);
1449 return true;
1450}
1451
1452/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1453 If SLEN is not NULL, it represents the length of the source string.
1454 Return NULL_TREE if no simplification can be made. */
1455
1456static bool
dcb7fae2
RB
1457gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1458 tree dest, tree src, tree len)
fef5a0d9 1459{
dcb7fae2 1460 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1461 tree fn;
1462
1463 /* If the LEN parameter is zero, return DEST. */
1464 if (integer_zerop (len))
1465 {
1466 replace_call_with_value (gsi, dest);
1467 return true;
1468 }
1469
1470 /* We can't compare slen with len as constants below if len is not a
1471 constant. */
dcb7fae2 1472 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
1473 return false;
1474
fef5a0d9 1475 /* Now, we must be passed a constant src ptr parameter. */
1579e1f8 1476 tree slen = get_maxval_strlen (src, 0);
dcb7fae2 1477 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
1478 return false;
1479
1480 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1481
1482 /* We do not support simplification of this case, though we do
1483 support it when expanding trees into RTL. */
1484 /* FIXME: generate a call to __builtin_memset. */
1485 if (tree_int_cst_lt (slen, len))
1486 return false;
1487
1488 /* OK transform into builtin memcpy. */
1489 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1490 if (!fn)
1491 return false;
1492
1493 len = fold_convert_loc (loc, size_type_node, len);
1494 len = force_gimple_operand_gsi (gsi, len, true,
1495 NULL_TREE, true, GSI_SAME_STMT);
1496 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1497 replace_call_with_call_and_fold (gsi, repl);
1498 return true;
1499}
1500
/* Simplify a call to the strcat builtin at *GSI.  DST and SRC are the
   arguments to the call.

   If SRC is the empty string the call is replaced by DST; otherwise,
   when optimizing for speed, the call is split into
     strlen (DST) followed by memcpy (DST + len, SRC, strlen (SRC) + 1).

   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The split below adds code; only worth it on hot paths.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (SRC) + 1 bytes so the terminating NUL comes along.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; materialize that as the replacement LHS.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1595
07f1cf56
RB
1596/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1597 are the arguments to the call. */
1598
1599static bool
1600gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1601{
1602 gimple stmt = gsi_stmt (*gsi);
1603 tree dest = gimple_call_arg (stmt, 0);
1604 tree src = gimple_call_arg (stmt, 1);
1605 tree size = gimple_call_arg (stmt, 2);
1606 tree fn;
1607 const char *p;
1608
1609
1610 p = c_getstr (src);
1611 /* If the SRC parameter is "", return DEST. */
1612 if (p && *p == '\0')
1613 {
1614 replace_call_with_value (gsi, dest);
1615 return true;
1616 }
1617
1618 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1619 return false;
1620
1621 /* If __builtin_strcat_chk is used, assume strcat is available. */
1622 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1623 if (!fn)
1624 return false;
1625
1626 gimple repl = gimple_build_call (fn, 2, dest, src);
1627 replace_call_with_call_and_fold (gsi, repl);
1628 return true;
1629}
1630
745583f9
RB
1631/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1632 LEN, and SIZE. */
1633
1634static bool
1635gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1636{
1637 gimple stmt = gsi_stmt (*gsi);
1638 tree dest = gimple_call_arg (stmt, 0);
1639 tree src = gimple_call_arg (stmt, 1);
1640 tree len = gimple_call_arg (stmt, 2);
1641 tree size = gimple_call_arg (stmt, 3);
1642 tree fn;
1643 const char *p;
1644
1645 p = c_getstr (src);
1646 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1647 if ((p && *p == '\0')
1648 || integer_zerop (len))
1649 {
1650 replace_call_with_value (gsi, dest);
1651 return true;
1652 }
1653
1654 if (! tree_fits_uhwi_p (size))
1655 return false;
1656
1657 if (! integer_all_onesp (size))
1658 {
1659 tree src_len = c_strlen (src, 1);
1660 if (src_len
1661 && tree_fits_uhwi_p (src_len)
1662 && tree_fits_uhwi_p (len)
1663 && ! tree_int_cst_lt (len, src_len))
1664 {
1665 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1666 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1667 if (!fn)
1668 return false;
1669
1670 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1671 replace_call_with_call_and_fold (gsi, repl);
1672 return true;
1673 }
1674 return false;
1675 }
1676
1677 /* If __builtin_strncat_chk is used, assume strncat is available. */
1678 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1679 if (!fn)
1680 return false;
1681
1682 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1683 replace_call_with_call_and_fold (gsi, repl);
1684 return true;
1685}
1686
fef5a0d9
RB
/* Fold a call to the fputs builtin at *GSI.  ARG0 is the string and
   ARG1 the stream.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  The call is dropped for an empty string, turned
   into fputc for a one-character string, or into fwrite otherwise.
   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					     (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
1763
/* Fold a call to a __mem{cpy,pcpy,move,set}_chk builtin at *GSI.
   DEST, SRC, LEN, and SIZE are the arguments to the call; FCODE is the
   BUILT_IN_* code of the builtin being folded.  When the check can be
   proven redundant (SIZE is unlimited, or a known bound on LEN fits in
   SIZE) the call is rewritten into the unchecked variant.  Return true
   if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* TYPE == 2: a bound on the value of LEN itself.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* The copy may still overflow the object: keep the check.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1863
/* Fold a call to a __st[rp]cpy_chk builtin at *GSI.
   DEST, SRC, and SIZE are the arguments to the call; FCODE is the
   BUILT_IN_* code of the builtin being folded.  When the check can be
   proven redundant the call is rewritten into the unchecked st[rp]cpy,
   or into __memcpy_chk / __strcpy_chk fallbacks as appropriate.
   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* TYPE == 1: a bound on the length of the SRC string.  */
  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy strlen (SRC) + 1 bytes including the NUL.  */
	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
					      true, GSI_SAME_STMT);
	      gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* The copy may still overflow the object: keep the check.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1954
/* Fold a call to a __st{r,p}ncpy_chk builtin at *GSI.  DEST, SRC, LEN,
   and SIZE are the arguments to the call; FCODE is the BUILT_IN_* code
   of the builtin being folded.  When the check can be proven redundant
   the call is rewritten into the unchecked st{r,p}ncpy.  Return true
   if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* TYPE == 2: a bound on the value of LEN itself.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* The copy may still overflow the object: keep the check.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2014
fef5a0d9
RB
2015/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2016 NULL_TREE if a normal call should be emitted rather than expanding
2017 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2018 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2019 passed as second argument. */
cbdd87d4
RG
2020
2021static bool
fef5a0d9 2022gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
dcb7fae2 2023 enum built_in_function fcode)
cbdd87d4 2024{
538dd0b7 2025 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2026 tree dest, size, len, fn, fmt, flag;
2027 const char *fmt_str;
cbdd87d4 2028
fef5a0d9
RB
2029 /* Verify the required arguments in the original call. */
2030 if (gimple_call_num_args (stmt) < 5)
2031 return false;
cbdd87d4 2032
fef5a0d9
RB
2033 dest = gimple_call_arg (stmt, 0);
2034 len = gimple_call_arg (stmt, 1);
2035 flag = gimple_call_arg (stmt, 2);
2036 size = gimple_call_arg (stmt, 3);
2037 fmt = gimple_call_arg (stmt, 4);
2038
2039 if (! tree_fits_uhwi_p (size))
2040 return false;
2041
2042 if (! integer_all_onesp (size))
2043 {
dcb7fae2 2044 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9 2045 if (! tree_fits_uhwi_p (len))
cbdd87d4 2046 {
fef5a0d9
RB
2047 /* If LEN is not constant, try MAXLEN too.
2048 For MAXLEN only allow optimizing into non-_ocs function
2049 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2050 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
cbdd87d4
RG
2051 return false;
2052 }
2053 else
fef5a0d9 2054 maxlen = len;
cbdd87d4 2055
fef5a0d9
RB
2056 if (tree_int_cst_lt (size, maxlen))
2057 return false;
2058 }
cbdd87d4 2059
fef5a0d9
RB
2060 if (!init_target_chars ())
2061 return false;
cbdd87d4 2062
fef5a0d9
RB
2063 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2064 or if format doesn't contain % chars or is "%s". */
2065 if (! integer_zerop (flag))
2066 {
2067 fmt_str = c_getstr (fmt);
2068 if (fmt_str == NULL)
2069 return false;
2070 if (strchr (fmt_str, target_percent) != NULL
2071 && strcmp (fmt_str, target_percent_s))
2072 return false;
cbdd87d4
RG
2073 }
2074
fef5a0d9
RB
2075 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2076 available. */
2077 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2078 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2079 if (!fn)
491e0b9b
RG
2080 return false;
2081
fef5a0d9
RB
2082 /* Replace the called function and the first 5 argument by 3 retaining
2083 trailing varargs. */
2084 gimple_call_set_fndecl (stmt, fn);
2085 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2086 gimple_call_set_arg (stmt, 0, dest);
2087 gimple_call_set_arg (stmt, 1, len);
2088 gimple_call_set_arg (stmt, 2, fmt);
2089 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2090 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2091 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2092 fold_stmt (gsi);
2093 return true;
2094}
cbdd87d4 2095
fef5a0d9
RB
2096/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2097 Return NULL_TREE if a normal call should be emitted rather than
2098 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2099 or BUILT_IN_VSPRINTF_CHK. */
cbdd87d4 2100
fef5a0d9
RB
2101static bool
2102gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2103 enum built_in_function fcode)
2104{
538dd0b7 2105 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2106 tree dest, size, len, fn, fmt, flag;
2107 const char *fmt_str;
2108 unsigned nargs = gimple_call_num_args (stmt);
cbdd87d4 2109
fef5a0d9
RB
2110 /* Verify the required arguments in the original call. */
2111 if (nargs < 4)
2112 return false;
2113 dest = gimple_call_arg (stmt, 0);
2114 flag = gimple_call_arg (stmt, 1);
2115 size = gimple_call_arg (stmt, 2);
2116 fmt = gimple_call_arg (stmt, 3);
2117
2118 if (! tree_fits_uhwi_p (size))
2119 return false;
2120
2121 len = NULL_TREE;
2122
2123 if (!init_target_chars ())
2124 return false;
2125
2126 /* Check whether the format is a literal string constant. */
2127 fmt_str = c_getstr (fmt);
2128 if (fmt_str != NULL)
2129 {
2130 /* If the format doesn't contain % args or %%, we know the size. */
2131 if (strchr (fmt_str, target_percent) == 0)
cbdd87d4 2132 {
fef5a0d9
RB
2133 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2134 len = build_int_cstu (size_type_node, strlen (fmt_str));
2135 }
2136 /* If the format is "%s" and first ... argument is a string literal,
2137 we know the size too. */
2138 else if (fcode == BUILT_IN_SPRINTF_CHK
2139 && strcmp (fmt_str, target_percent_s) == 0)
2140 {
2141 tree arg;
cbdd87d4 2142
fef5a0d9
RB
2143 if (nargs == 5)
2144 {
2145 arg = gimple_call_arg (stmt, 4);
2146 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2147 {
2148 len = c_strlen (arg, 1);
2149 if (! len || ! tree_fits_uhwi_p (len))
2150 len = NULL_TREE;
2151 }
2152 }
2153 }
2154 }
cbdd87d4 2155
fef5a0d9
RB
2156 if (! integer_all_onesp (size))
2157 {
2158 if (! len || ! tree_int_cst_lt (len, size))
2159 return false;
2160 }
cbdd87d4 2161
fef5a0d9
RB
2162 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2163 or if format doesn't contain % chars or is "%s". */
2164 if (! integer_zerop (flag))
2165 {
2166 if (fmt_str == NULL)
2167 return false;
2168 if (strchr (fmt_str, target_percent) != NULL
2169 && strcmp (fmt_str, target_percent_s))
2170 return false;
2171 }
cbdd87d4 2172
fef5a0d9
RB
2173 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2174 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2175 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2176 if (!fn)
2177 return false;
2178
2179 /* Replace the called function and the first 4 argument by 2 retaining
2180 trailing varargs. */
2181 gimple_call_set_fndecl (stmt, fn);
2182 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2183 gimple_call_set_arg (stmt, 0, dest);
2184 gimple_call_set_arg (stmt, 1, fmt);
2185 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2186 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2187 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2188 fold_stmt (gsi);
2189 return true;
2190}
2191
35770bb2
RB
2192/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2193 ORIG may be null if this is a 2-argument call. We don't attempt to
2194 simplify calls with more than 3 arguments.
2195
2196 Return NULL_TREE if no simplification was possible, otherwise return the
2197 simplified form of the call as a tree. If IGNORED is true, it means that
2198 the caller does not use the returned value of the function. */
2199
2200static bool
dcb7fae2 2201gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
35770bb2
RB
2202{
2203 gimple stmt = gsi_stmt (*gsi);
2204 tree dest = gimple_call_arg (stmt, 0);
2205 tree fmt = gimple_call_arg (stmt, 1);
2206 tree orig = NULL_TREE;
2207 const char *fmt_str = NULL;
2208
2209 /* Verify the required arguments in the original call. We deal with two
2210 types of sprintf() calls: 'sprintf (str, fmt)' and
2211 'sprintf (dest, "%s", orig)'. */
2212 if (gimple_call_num_args (stmt) > 3)
2213 return false;
2214
2215 if (gimple_call_num_args (stmt) == 3)
2216 orig = gimple_call_arg (stmt, 2);
2217
2218 /* Check whether the format is a literal string constant. */
2219 fmt_str = c_getstr (fmt);
2220 if (fmt_str == NULL)
2221 return false;
2222
2223 if (!init_target_chars ())
2224 return false;
2225
2226 /* If the format doesn't contain % args or %%, use strcpy. */
2227 if (strchr (fmt_str, target_percent) == NULL)
2228 {
2229 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2230
2231 if (!fn)
2232 return false;
2233
2234 /* Don't optimize sprintf (buf, "abc", ptr++). */
2235 if (orig)
2236 return false;
2237
2238 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2239 'format' is known to contain no % formats. */
2240 gimple_seq stmts = NULL;
2241 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2242 gimple_seq_add_stmt_without_update (&stmts, repl);
2243 if (gimple_call_lhs (stmt))
2244 {
2245 repl = gimple_build_assign (gimple_call_lhs (stmt),
2246 build_int_cst (integer_type_node,
2247 strlen (fmt_str)));
2248 gimple_seq_add_stmt_without_update (&stmts, repl);
2249 gsi_replace_with_seq_vops (gsi, stmts);
2250 /* gsi now points at the assignment to the lhs, get a
2251 stmt iterator to the memcpy call.
2252 ??? We can't use gsi_for_stmt as that doesn't work when the
2253 CFG isn't built yet. */
2254 gimple_stmt_iterator gsi2 = *gsi;
2255 gsi_prev (&gsi2);
2256 fold_stmt (&gsi2);
2257 }
2258 else
2259 {
2260 gsi_replace_with_seq_vops (gsi, stmts);
2261 fold_stmt (gsi);
2262 }
2263 return true;
2264 }
2265
2266 /* If the format is "%s", use strcpy if the result isn't used. */
2267 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2268 {
2269 tree fn;
2270 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2271
2272 if (!fn)
2273 return false;
2274
2275 /* Don't crash on sprintf (str1, "%s"). */
2276 if (!orig)
2277 return false;
2278
dcb7fae2
RB
2279 tree orig_len = NULL_TREE;
2280 if (gimple_call_lhs (stmt))
35770bb2 2281 {
dcb7fae2 2282 orig_len = get_maxval_strlen (orig, 0);
d7e78447 2283 if (!orig_len)
35770bb2
RB
2284 return false;
2285 }
2286
2287 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2288 gimple_seq stmts = NULL;
2289 gimple repl = gimple_build_call (fn, 2, dest, orig);
2290 gimple_seq_add_stmt_without_update (&stmts, repl);
2291 if (gimple_call_lhs (stmt))
2292 {
d7e78447
RB
2293 if (!useless_type_conversion_p (integer_type_node,
2294 TREE_TYPE (orig_len)))
2295 orig_len = fold_convert (integer_type_node, orig_len);
2296 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
35770bb2
RB
2297 gimple_seq_add_stmt_without_update (&stmts, repl);
2298 gsi_replace_with_seq_vops (gsi, stmts);
2299 /* gsi now points at the assignment to the lhs, get a
2300 stmt iterator to the memcpy call.
2301 ??? We can't use gsi_for_stmt as that doesn't work when the
2302 CFG isn't built yet. */
2303 gimple_stmt_iterator gsi2 = *gsi;
2304 gsi_prev (&gsi2);
2305 fold_stmt (&gsi2);
2306 }
2307 else
2308 {
2309 gsi_replace_with_seq_vops (gsi, stmts);
2310 fold_stmt (gsi);
2311 }
2312 return true;
2313 }
2314 return false;
2315}
2316
d7e78447
RB
2317/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2318 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2319 attempt to simplify calls with more than 4 arguments.
35770bb2 2320
d7e78447
RB
2321 Return NULL_TREE if no simplification was possible, otherwise return the
2322 simplified form of the call as a tree. If IGNORED is true, it means that
2323 the caller does not use the returned value of the function. */
2324
2325static bool
dcb7fae2 2326gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
d7e78447 2327{
538dd0b7 2328 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
d7e78447
RB
2329 tree dest = gimple_call_arg (stmt, 0);
2330 tree destsize = gimple_call_arg (stmt, 1);
2331 tree fmt = gimple_call_arg (stmt, 2);
2332 tree orig = NULL_TREE;
2333 const char *fmt_str = NULL;
2334
2335 if (gimple_call_num_args (stmt) > 4)
2336 return false;
2337
2338 if (gimple_call_num_args (stmt) == 4)
2339 orig = gimple_call_arg (stmt, 3);
2340
2341 if (!tree_fits_uhwi_p (destsize))
2342 return false;
2343 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2344
2345 /* Check whether the format is a literal string constant. */
2346 fmt_str = c_getstr (fmt);
2347 if (fmt_str == NULL)
2348 return false;
2349
2350 if (!init_target_chars ())
2351 return false;
2352
2353 /* If the format doesn't contain % args or %%, use strcpy. */
2354 if (strchr (fmt_str, target_percent) == NULL)
2355 {
2356 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2357 if (!fn)
2358 return false;
2359
2360 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2361 if (orig)
2362 return false;
2363
2364 /* We could expand this as
2365 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2366 or to
2367 memcpy (str, fmt_with_nul_at_cstm1, cst);
2368 but in the former case that might increase code size
2369 and in the latter case grow .rodata section too much.
2370 So punt for now. */
2371 size_t len = strlen (fmt_str);
2372 if (len >= destlen)
2373 return false;
2374
2375 gimple_seq stmts = NULL;
2376 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2377 gimple_seq_add_stmt_without_update (&stmts, repl);
2378 if (gimple_call_lhs (stmt))
2379 {
2380 repl = gimple_build_assign (gimple_call_lhs (stmt),
2381 build_int_cst (integer_type_node, len));
2382 gimple_seq_add_stmt_without_update (&stmts, repl);
2383 gsi_replace_with_seq_vops (gsi, stmts);
2384 /* gsi now points at the assignment to the lhs, get a
2385 stmt iterator to the memcpy call.
2386 ??? We can't use gsi_for_stmt as that doesn't work when the
2387 CFG isn't built yet. */
2388 gimple_stmt_iterator gsi2 = *gsi;
2389 gsi_prev (&gsi2);
2390 fold_stmt (&gsi2);
2391 }
2392 else
2393 {
2394 gsi_replace_with_seq_vops (gsi, stmts);
2395 fold_stmt (gsi);
2396 }
2397 return true;
2398 }
2399
2400 /* If the format is "%s", use strcpy if the result isn't used. */
2401 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2402 {
2403 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2404 if (!fn)
2405 return false;
2406
2407 /* Don't crash on snprintf (str1, cst, "%s"). */
2408 if (!orig)
2409 return false;
2410
dcb7fae2 2411 tree orig_len = get_maxval_strlen (orig, 0);
d7e78447 2412 if (!orig_len)
dcb7fae2 2413 return false;
d7e78447
RB
2414
2415 /* We could expand this as
2416 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2417 or to
2418 memcpy (str1, str2_with_nul_at_cstm1, cst);
2419 but in the former case that might increase code size
2420 and in the latter case grow .rodata section too much.
2421 So punt for now. */
2422 if (compare_tree_int (orig_len, destlen) >= 0)
2423 return false;
2424
2425 /* Convert snprintf (str1, cst, "%s", str2) into
2426 strcpy (str1, str2) if strlen (str2) < cst. */
2427 gimple_seq stmts = NULL;
2428 gimple repl = gimple_build_call (fn, 2, dest, orig);
2429 gimple_seq_add_stmt_without_update (&stmts, repl);
2430 if (gimple_call_lhs (stmt))
2431 {
2432 if (!useless_type_conversion_p (integer_type_node,
2433 TREE_TYPE (orig_len)))
2434 orig_len = fold_convert (integer_type_node, orig_len);
2435 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2436 gimple_seq_add_stmt_without_update (&stmts, repl);
2437 gsi_replace_with_seq_vops (gsi, stmts);
2438 /* gsi now points at the assignment to the lhs, get a
2439 stmt iterator to the memcpy call.
2440 ??? We can't use gsi_for_stmt as that doesn't work when the
2441 CFG isn't built yet. */
2442 gimple_stmt_iterator gsi2 = *gsi;
2443 gsi_prev (&gsi2);
2444 fold_stmt (&gsi2);
2445 }
2446 else
2447 {
2448 gsi_replace_with_seq_vops (gsi, stmts);
2449 fold_stmt (gsi);
2450 }
2451 return true;
2452 }
2453 return false;
2454}
35770bb2 2455
fef5a0d9
RB
2456
2457/* Fold a call to __builtin_strlen with known length LEN. */
2458
2459static bool
dcb7fae2 2460gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 2461{
dcb7fae2
RB
2462 gimple stmt = gsi_stmt (*gsi);
2463 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
fef5a0d9
RB
2464 if (!len)
2465 return false;
2813904b 2466 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
fef5a0d9
RB
2467 replace_call_with_value (gsi, len);
2468 return true;
cbdd87d4
RG
2469}
2470
2471
dcb7fae2
RB
2472/* Fold the non-target builtin at *GSI and return whether any simplification
2473 was made. */
cbdd87d4 2474
fef5a0d9 2475static bool
dcb7fae2 2476gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 2477{
538dd0b7 2478 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 2479 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 2480
dcb7fae2
RB
2481 /* Give up for always_inline inline builtins until they are
2482 inlined. */
2483 if (avoid_folding_inline_builtin (callee))
2484 return false;
cbdd87d4 2485
cbdd87d4
RG
2486 switch (DECL_FUNCTION_CODE (callee))
2487 {
dcb7fae2
RB
2488 case BUILT_IN_BZERO:
2489 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2490 gimple_call_arg (stmt, 1));
2491 case BUILT_IN_MEMSET:
2492 return gimple_fold_builtin_memset (gsi,
2493 gimple_call_arg (stmt, 1),
2494 gimple_call_arg (stmt, 2));
2495 case BUILT_IN_BCOPY:
2496 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2497 gimple_call_arg (stmt, 0), 3);
2498 case BUILT_IN_MEMCPY:
2499 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2500 gimple_call_arg (stmt, 1), 0);
2501 case BUILT_IN_MEMPCPY:
2502 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2503 gimple_call_arg (stmt, 1), 1);
2504 case BUILT_IN_MEMMOVE:
2505 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2506 gimple_call_arg (stmt, 1), 3);
2507 case BUILT_IN_SPRINTF_CHK:
2508 case BUILT_IN_VSPRINTF_CHK:
2509 return gimple_fold_builtin_sprintf_chk (gsi, DECL_FUNCTION_CODE (callee));
2510 case BUILT_IN_STRCAT_CHK:
2511 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
2512 case BUILT_IN_STRNCAT_CHK:
2513 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 2514 case BUILT_IN_STRLEN:
dcb7fae2 2515 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 2516 case BUILT_IN_STRCPY:
dcb7fae2 2517 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 2518 gimple_call_arg (stmt, 0),
dcb7fae2 2519 gimple_call_arg (stmt, 1));
cbdd87d4 2520 case BUILT_IN_STRNCPY:
dcb7fae2 2521 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
2522 gimple_call_arg (stmt, 0),
2523 gimple_call_arg (stmt, 1),
dcb7fae2 2524 gimple_call_arg (stmt, 2));
9a7eefec 2525 case BUILT_IN_STRCAT:
dcb7fae2
RB
2526 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2527 gimple_call_arg (stmt, 1));
cbdd87d4 2528 case BUILT_IN_FPUTS:
dcb7fae2
RB
2529 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2530 gimple_call_arg (stmt, 1), false);
cbdd87d4 2531 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
2532 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2533 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
2534 case BUILT_IN_MEMCPY_CHK:
2535 case BUILT_IN_MEMPCPY_CHK:
2536 case BUILT_IN_MEMMOVE_CHK:
2537 case BUILT_IN_MEMSET_CHK:
dcb7fae2 2538 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
2539 gimple_call_arg (stmt, 0),
2540 gimple_call_arg (stmt, 1),
2541 gimple_call_arg (stmt, 2),
2542 gimple_call_arg (stmt, 3),
fef5a0d9 2543 DECL_FUNCTION_CODE (callee));
cbdd87d4
RG
2544 case BUILT_IN_STRCPY_CHK:
2545 case BUILT_IN_STPCPY_CHK:
dcb7fae2 2546 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
2547 gimple_call_arg (stmt, 0),
2548 gimple_call_arg (stmt, 1),
2549 gimple_call_arg (stmt, 2),
fef5a0d9 2550 DECL_FUNCTION_CODE (callee));
cbdd87d4 2551 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 2552 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
2553 return gimple_fold_builtin_stxncpy_chk (gsi,
2554 gimple_call_arg (stmt, 0),
2555 gimple_call_arg (stmt, 1),
2556 gimple_call_arg (stmt, 2),
2557 gimple_call_arg (stmt, 3),
fef5a0d9 2558 DECL_FUNCTION_CODE (callee));
cbdd87d4
RG
2559 case BUILT_IN_SNPRINTF_CHK:
2560 case BUILT_IN_VSNPRINTF_CHK:
dcb7fae2 2561 return gimple_fold_builtin_snprintf_chk (gsi,
fef5a0d9 2562 DECL_FUNCTION_CODE (callee));
d7e78447 2563 case BUILT_IN_SNPRINTF:
dcb7fae2 2564 return gimple_fold_builtin_snprintf (gsi);
d7e78447 2565 case BUILT_IN_SPRINTF:
dcb7fae2 2566 return gimple_fold_builtin_sprintf (gsi);
fef5a0d9
RB
2567 default:;
2568 }
2569
2570 /* Try the generic builtin folder. */
2571 bool ignore = (gimple_call_lhs (stmt) == NULL);
2572 tree result = fold_call_stmt (stmt, ignore);
2573 if (result)
2574 {
2575 if (ignore)
2576 STRIP_NOPS (result);
2577 else
2578 result = fold_convert (gimple_call_return_type (stmt), result);
2579 if (!update_call_from_tree (gsi, result))
2580 gimplify_and_update_call_from_tree (gsi, result);
2581 return true;
2582 }
2583
2584 return false;
2585}
2586
1304953e
JJ
2587/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2588 doesn't fit into TYPE. The test for overflow should be regardless of
2589 -fwrapv, and even for unsigned types. */
2590
2591bool
2592arith_overflowed_p (enum tree_code code, const_tree type,
2593 const_tree arg0, const_tree arg1)
2594{
2595 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2596 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2597 widest2_int_cst;
2598 widest2_int warg0 = widest2_int_cst (arg0);
2599 widest2_int warg1 = widest2_int_cst (arg1);
2600 widest2_int wres;
2601 switch (code)
2602 {
2603 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2604 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2605 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2606 default: gcc_unreachable ();
2607 }
2608 signop sign = TYPE_SIGN (type);
2609 if (sign == UNSIGNED && wi::neg_p (wres))
2610 return true;
2611 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2612}
2613
cbdd87d4
RG
2614/* Attempt to fold a call statement referenced by the statement iterator GSI.
2615 The statement may be replaced by another statement, e.g., if the call
2616 simplifies to a constant value. Return true if any changes were made.
2617 It is assumed that the operands have been previously folded. */
2618
e021c122 2619static bool
ceeffab0 2620gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 2621{
538dd0b7 2622 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 2623 tree callee;
e021c122
RG
2624 bool changed = false;
2625 unsigned i;
cbdd87d4 2626
e021c122
RG
2627 /* Fold *& in call arguments. */
2628 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2629 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2630 {
2631 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2632 if (tmp)
2633 {
2634 gimple_call_set_arg (stmt, i, tmp);
2635 changed = true;
2636 }
2637 }
3b45a007
RG
2638
2639 /* Check for virtual calls that became direct calls. */
2640 callee = gimple_call_fn (stmt);
25583c4f 2641 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 2642 {
49c471e3
MJ
2643 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
2644 {
450ad0cd
JH
2645 if (dump_file && virtual_method_call_p (callee)
2646 && !possible_polymorphic_call_target_p
6f8091fc
JH
2647 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
2648 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
2649 {
2650 fprintf (dump_file,
a70e9985 2651 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
2652 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2653 fprintf (dump_file, " to ");
2654 print_generic_expr (dump_file, callee, TDF_SLIM);
2655 fprintf (dump_file, "\n");
2656 }
2657
49c471e3 2658 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
2659 changed = true;
2660 }
a70e9985 2661 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 2662 {
61dd6a2e
JH
2663 bool final;
2664 vec <cgraph_node *>targets
058d0a90 2665 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 2666 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 2667 {
a70e9985 2668 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
2669 if (dump_enabled_p ())
2670 {
807b7d62 2671 location_t loc = gimple_location_safe (stmt);
2b5f0895
XDL
2672 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2673 "folding virtual function call to %s\n",
2674 targets.length () == 1
2675 ? targets[0]->name ()
2676 : "__builtin_unreachable");
2677 }
61dd6a2e 2678 if (targets.length () == 1)
cf3e5a89
JJ
2679 {
2680 gimple_call_set_fndecl (stmt, targets[0]->decl);
2681 changed = true;
a70e9985
JJ
2682 /* If the call becomes noreturn, remove the lhs. */
2683 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
2684 {
2685 if (TREE_CODE (lhs) == SSA_NAME)
2686 {
b731b390 2687 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985
JJ
2688 tree def = get_or_create_ssa_default_def (cfun, var);
2689 gimple new_stmt = gimple_build_assign (lhs, def);
2690 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
2691 }
2692 gimple_call_set_lhs (stmt, NULL_TREE);
2693 }
cf3e5a89 2694 }
a70e9985 2695 else
cf3e5a89
JJ
2696 {
2697 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2698 gimple new_stmt = gimple_build_call (fndecl, 0);
2699 gimple_set_location (new_stmt, gimple_location (stmt));
a70e9985
JJ
2700 if (lhs && TREE_CODE (lhs) == SSA_NAME)
2701 {
b731b390 2702 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 2703 tree def = get_or_create_ssa_default_def (cfun, var);
eb14a79f
ML
2704
2705 /* To satisfy condition for
2706 cgraph_update_edges_for_call_stmt_node,
2707 we need to preserve GIMPLE_CALL statement
2708 at position of GSI iterator. */
a70e9985 2709 update_call_from_tree (gsi, def);
eb14a79f 2710 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
a70e9985
JJ
2711 }
2712 else
42e52a51
RB
2713 {
2714 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2715 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2716 gsi_replace (gsi, new_stmt, false);
2717 }
cf3e5a89
JJ
2718 return true;
2719 }
e021c122 2720 }
49c471e3 2721 }
e021c122 2722 }
49c471e3 2723
f2d3d07e
RH
2724 /* Check for indirect calls that became direct calls, and then
2725 no longer require a static chain. */
2726 if (gimple_call_chain (stmt))
2727 {
2728 tree fn = gimple_call_fndecl (stmt);
2729 if (fn && !DECL_STATIC_CHAIN (fn))
2730 {
2731 gimple_call_set_chain (stmt, NULL);
2732 changed = true;
2733 }
2734 else
2735 {
2736 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
2737 if (tmp)
2738 {
2739 gimple_call_set_chain (stmt, tmp);
2740 changed = true;
2741 }
2742 }
2743 }
2744
e021c122
RG
2745 if (inplace)
2746 return changed;
2747
2748 /* Check for builtins that CCP can handle using information not
2749 available in the generic fold routines. */
fef5a0d9
RB
2750 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2751 {
2752 if (gimple_fold_builtin (gsi))
2753 changed = true;
2754 }
2755 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 2756 {
ea679d55 2757 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 2758 }
368b454d 2759 else if (gimple_call_internal_p (stmt))
ed9c79e1 2760 {
368b454d
JJ
2761 enum tree_code subcode = ERROR_MARK;
2762 tree result = NULL_TREE;
1304953e
JJ
2763 bool cplx_result = false;
2764 tree overflow = NULL_TREE;
368b454d
JJ
2765 switch (gimple_call_internal_fn (stmt))
2766 {
2767 case IFN_BUILTIN_EXPECT:
2768 result = fold_builtin_expect (gimple_location (stmt),
2769 gimple_call_arg (stmt, 0),
2770 gimple_call_arg (stmt, 1),
2771 gimple_call_arg (stmt, 2));
2772 break;
0e82f089
MP
2773 case IFN_UBSAN_OBJECT_SIZE:
2774 if (integer_all_onesp (gimple_call_arg (stmt, 2))
2775 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
2776 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
2777 && tree_int_cst_le (gimple_call_arg (stmt, 1),
2778 gimple_call_arg (stmt, 2))))
2779 {
2780 gsi_replace (gsi, gimple_build_nop (), true);
2781 unlink_stmt_vdef (stmt);
2782 release_defs (stmt);
2783 return true;
2784 }
2785 break;
368b454d
JJ
2786 case IFN_UBSAN_CHECK_ADD:
2787 subcode = PLUS_EXPR;
2788 break;
2789 case IFN_UBSAN_CHECK_SUB:
2790 subcode = MINUS_EXPR;
2791 break;
2792 case IFN_UBSAN_CHECK_MUL:
2793 subcode = MULT_EXPR;
2794 break;
1304953e
JJ
2795 case IFN_ADD_OVERFLOW:
2796 subcode = PLUS_EXPR;
2797 cplx_result = true;
2798 break;
2799 case IFN_SUB_OVERFLOW:
2800 subcode = MINUS_EXPR;
2801 cplx_result = true;
2802 break;
2803 case IFN_MUL_OVERFLOW:
2804 subcode = MULT_EXPR;
2805 cplx_result = true;
2806 break;
368b454d
JJ
2807 default:
2808 break;
2809 }
2810 if (subcode != ERROR_MARK)
2811 {
2812 tree arg0 = gimple_call_arg (stmt, 0);
2813 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
2814 tree type = TREE_TYPE (arg0);
2815 if (cplx_result)
2816 {
2817 tree lhs = gimple_call_lhs (stmt);
2818 if (lhs == NULL_TREE)
2819 type = NULL_TREE;
2820 else
2821 type = TREE_TYPE (TREE_TYPE (lhs));
2822 }
2823 if (type == NULL_TREE)
2824 ;
368b454d 2825 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
2826 else if (integer_zerop (arg1))
2827 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
2828 /* x = 0 + y; x = 0 * y; */
2829 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 2830 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
2831 /* x = y - y; */
2832 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 2833 result = integer_zero_node;
368b454d 2834 /* x = y * 1; x = 1 * y; */
1304953e
JJ
2835 else if (subcode == MULT_EXPR && integer_onep (arg1))
2836 result = arg0;
2837 else if (subcode == MULT_EXPR && integer_onep (arg0))
2838 result = arg1;
2839 else if (TREE_CODE (arg0) == INTEGER_CST
2840 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 2841 {
1304953e
JJ
2842 if (cplx_result)
2843 result = int_const_binop (subcode, fold_convert (type, arg0),
2844 fold_convert (type, arg1));
2845 else
2846 result = int_const_binop (subcode, arg0, arg1);
2847 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
2848 {
2849 if (cplx_result)
2850 overflow = build_one_cst (type);
2851 else
2852 result = NULL_TREE;
2853 }
2854 }
2855 if (result)
2856 {
2857 if (result == integer_zero_node)
2858 result = build_zero_cst (type);
2859 else if (cplx_result && TREE_TYPE (result) != type)
2860 {
2861 if (TREE_CODE (result) == INTEGER_CST)
2862 {
2863 if (arith_overflowed_p (PLUS_EXPR, type, result,
2864 integer_zero_node))
2865 overflow = build_one_cst (type);
2866 }
2867 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
2868 && TYPE_UNSIGNED (type))
2869 || (TYPE_PRECISION (type)
2870 < (TYPE_PRECISION (TREE_TYPE (result))
2871 + (TYPE_UNSIGNED (TREE_TYPE (result))
2872 && !TYPE_UNSIGNED (type)))))
2873 result = NULL_TREE;
2874 if (result)
2875 result = fold_convert (type, result);
2876 }
368b454d
JJ
2877 }
2878 }
1304953e 2879
ed9c79e1
JJ
2880 if (result)
2881 {
1304953e
JJ
2882 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
2883 result = drop_tree_overflow (result);
2884 if (cplx_result)
2885 {
2886 if (overflow == NULL_TREE)
2887 overflow = build_zero_cst (TREE_TYPE (result));
2888 tree ctype = build_complex_type (TREE_TYPE (result));
2889 if (TREE_CODE (result) == INTEGER_CST
2890 && TREE_CODE (overflow) == INTEGER_CST)
2891 result = build_complex (ctype, result, overflow);
2892 else
2893 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
2894 ctype, result, overflow);
2895 }
ed9c79e1
JJ
2896 if (!update_call_from_tree (gsi, result))
2897 gimplify_and_update_call_from_tree (gsi, result);
2898 changed = true;
2899 }
2900 }
3b45a007 2901
e021c122 2902 return changed;
cbdd87d4
RG
2903}
2904
e0ee10ed
RB
2905
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.
   On failure *GSI is left untouched (the caller discards *SEQ).  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* A GIMPLE_COND can only hold a comparison, so the simplified
	 result must be rewritten into comparison form.  */
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	/* Simplified to a bare SSA name: test it against zero.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  /* Simplified to a constant: the condition is decided.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Otherwise materialize the result as a separate statement
	     sequence and compare the produced value against zero.  */
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      /* For assignments we can replace the RHS in place if the new
	 RHS does not need more operand slots than the old one.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  &ops[0], ops[1], ops[2]);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (!inplace)
    {
      /* Any other statement with an LHS: emit the simplification as a
	 new sequence computing the LHS and splice it in, preserving
	 virtual operands.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				 ops, seq, lhs);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
3018
040292e7
RB
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the reference tree to canonicalize; it is rewritten in
   place.  Returns true if any change was made.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Strip an outer ADDR_EXPR and any handled components so *t is the
     innermost base reference.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  /* The address is invariant so the offset must be computable.  */
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
3101
cbdd87d4
RG
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE is a callback used to look up
   SSA names while simplifying (see gimple_simplify); it may decline by
   returning NULL_TREE.  Returns true if the statement was changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize every memory argument and the call LHS.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize addresses in asm output and input operands.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      /* When folding in-place no new statements may be emitted, so
	 pass a NULL sequence to gimple_simplify.  */
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* The statement may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding may only shrink the operand count.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (as_a <gcond *> (stmt));
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* An input may only be folded to a memory reference if its
	       constraint allows memory but not a register.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference below the ADDR_EXPR and rebuild the
		 address with the original pointer type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
3343
e0ee10ed
RB
/* Valueization callback that ends up not following SSA edges: it
   always declines, so gimple_simplify only looks at the statement's
   own operands.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}
3351
45cc9f96
RB
3352/* Valueization callback that ends up following single-use SSA edges only. */
3353
3354tree
3355follow_single_use_edges (tree val)
3356{
3357 if (TREE_CODE (val) == SSA_NAME
3358 && !has_single_use (val))
3359 return NULL_TREE;
3360 return val;
3361}
3362
cbdd87d4
RG
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.
   This overload does not follow SSA use-def edges while simplifying.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
3375
/* Like the fold_stmt overload above, but consult the VALUEIZE
   callback when looking up SSA name definitions during
   simplification.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
3381
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  /* In-place folding must not have replaced the statement.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
3398
e89065a1
SL
3399/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3400 if EXPR is null or we don't know how.
3401 If non-null, the result always has boolean type. */
3402
3403static tree
3404canonicalize_bool (tree expr, bool invert)
3405{
3406 if (!expr)
3407 return NULL_TREE;
3408 else if (invert)
3409 {
3410 if (integer_nonzerop (expr))
3411 return boolean_false_node;
3412 else if (integer_zerop (expr))
3413 return boolean_true_node;
3414 else if (TREE_CODE (expr) == SSA_NAME)
3415 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3416 build_int_cst (TREE_TYPE (expr), 0));
3417 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3418 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3419 boolean_type_node,
3420 TREE_OPERAND (expr, 0),
3421 TREE_OPERAND (expr, 1));
3422 else
3423 return NULL_TREE;
3424 }
3425 else
3426 {
3427 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3428 return expr;
3429 if (integer_nonzerop (expr))
3430 return boolean_true_node;
3431 else if (integer_zerop (expr))
3432 return boolean_false_node;
3433 else if (TREE_CODE (expr) == SSA_NAME)
3434 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3435 build_int_cst (TREE_TYPE (expr), 0));
3436 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3437 return fold_build2 (TREE_CODE (expr),
3438 boolean_type_node,
3439 TREE_OPERAND (expr, 0),
3440 TREE_OPERAND (expr, 1));
3441 else
3442 return NULL_TREE;
3443 }
3444}
3445
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean name equals (name != 0) and !(name == nonzero).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise look at the defining statement of EXPR.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) / (name == nonzero) preserve the comparison;
	     (name == 0) / (name != nonzero) invert it.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
3503
3504/* Check to see if two boolean expressions OP1 and OP2 are logically
3505 equivalent. */
3506
3507static bool
3508same_bool_result_p (const_tree op1, const_tree op2)
3509{
3510 /* Simple cases first. */
3511 if (operand_equal_p (op1, op2, 0))
3512 return true;
3513
3514 /* Check the cases where at least one of the operands is a comparison.
3515 These are a bit smarter than operand_equal_p in that they apply some
3516 identifies on SSA_NAMEs. */
3517 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
3518 && same_bool_comparison_p (op1, TREE_CODE (op2),
3519 TREE_OPERAND (op2, 0),
3520 TREE_OPERAND (op2, 1)))
3521 return true;
3522 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
3523 && same_bool_comparison_p (op2, TREE_CODE (op1),
3524 TREE_OPERAND (op1, 0),
3525 TREE_OPERAND (op1, 1)))
3526 return true;
3527
3528 /* Default case. */
3529 return false;
3530}
3531
3532/* Forward declarations for some mutually recursive functions. */
3533
3534static tree
3535and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3536 enum tree_code code2, tree op2a, tree op2b);
3537static tree
3538and_var_with_comparison (tree var, bool invert,
3539 enum tree_code code2, tree op2a, tree op2b);
3540static tree
3541and_var_with_comparison_1 (gimple stmt,
3542 enum tree_code code2, tree op2a, tree op2b);
3543static tree
3544or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3545 enum tree_code code2, tree op2a, tree op2b);
3546static tree
3547or_var_with_comparison (tree var, bool invert,
3548 enum tree_code code2, tree op2a, tree op2b);
3549static tree
3550or_var_with_comparison_1 (gimple stmt,
3551 enum tree_code code2, tree op2a, tree op2b);
3552
3553/* Helper function for and_comparisons_1: try to simplify the AND of the
3554 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3555 If INVERT is true, invert the value of the VAR before doing the AND.
3556 Return NULL_EXPR if we can't simplify this to a single expression. */
3557
3558static tree
3559and_var_with_comparison (tree var, bool invert,
3560 enum tree_code code2, tree op2a, tree op2b)
3561{
3562 tree t;
3563 gimple stmt = SSA_NAME_DEF_STMT (var);
3564
3565 /* We can only deal with variables whose definitions are assignments. */
3566 if (!is_gimple_assign (stmt))
3567 return NULL_TREE;
3568
3569 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3570 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3571 Then we only have to consider the simpler non-inverted cases. */
3572 if (invert)
3573 t = or_var_with_comparison_1 (stmt,
3574 invert_tree_comparison (code2, false),
3575 op2a, op2b);
3576 else
3577 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
3578 return canonicalize_bool (t, invert);
3579}
3580
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* Classify the comparison as a truth or falsehood test of op2a;
	 the *_test_var variables are also reused further below.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b))) */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t) */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial) */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
3743
3744/* Try to simplify the AND of two comparisons defined by
3745 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
3746 If this can be done without constructing an intermediate value,
3747 return the resulting tree; otherwise NULL_TREE is returned.
3748 This function is deliberately asymmetric as it recurses on SSA_DEFs
3749 in the first comparison but not the second. */
3750
3751static tree
3752and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3753 enum tree_code code2, tree op2a, tree op2b)
3754{
ae22ac3c 3755 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 3756
e89065a1
SL
3757 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
3758 if (operand_equal_p (op1a, op2a, 0)
3759 && operand_equal_p (op1b, op2b, 0))
3760 {
eb9820c0 3761 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
3762 tree t = combine_comparisons (UNKNOWN_LOCATION,
3763 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 3764 truth_type, op1a, op1b);
e89065a1
SL
3765 if (t)
3766 return t;
3767 }
3768
3769 /* Likewise the swapped case of the above. */
3770 if (operand_equal_p (op1a, op2b, 0)
3771 && operand_equal_p (op1b, op2a, 0))
3772 {
eb9820c0 3773 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
3774 tree t = combine_comparisons (UNKNOWN_LOCATION,
3775 TRUTH_ANDIF_EXPR, code1,
3776 swap_tree_comparison (code2),
31ed6226 3777 truth_type, op1a, op1b);
e89065a1
SL
3778 if (t)
3779 return t;
3780 }
3781
3782 /* If both comparisons are of the same value against constants, we might
3783 be able to merge them. */
3784 if (operand_equal_p (op1a, op2a, 0)
3785 && TREE_CODE (op1b) == INTEGER_CST
3786 && TREE_CODE (op2b) == INTEGER_CST)
3787 {
3788 int cmp = tree_int_cst_compare (op1b, op2b);
3789
3790 /* If we have (op1a == op1b), we should either be able to
3791 return that or FALSE, depending on whether the constant op1b
3792 also satisfies the other comparison against op2b. */
3793 if (code1 == EQ_EXPR)
3794 {
3795 bool done = true;
3796 bool val;
3797 switch (code2)
3798 {
3799 case EQ_EXPR: val = (cmp == 0); break;
3800 case NE_EXPR: val = (cmp != 0); break;
3801 case LT_EXPR: val = (cmp < 0); break;
3802 case GT_EXPR: val = (cmp > 0); break;
3803 case LE_EXPR: val = (cmp <= 0); break;
3804 case GE_EXPR: val = (cmp >= 0); break;
3805 default: done = false;
3806 }
3807 if (done)
3808 {
3809 if (val)
3810 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3811 else
3812 return boolean_false_node;
3813 }
3814 }
3815 /* Likewise if the second comparison is an == comparison. */
3816 else if (code2 == EQ_EXPR)
3817 {
3818 bool done = true;
3819 bool val;
3820 switch (code1)
3821 {
3822 case EQ_EXPR: val = (cmp == 0); break;
3823 case NE_EXPR: val = (cmp != 0); break;
3824 case LT_EXPR: val = (cmp > 0); break;
3825 case GT_EXPR: val = (cmp < 0); break;
3826 case LE_EXPR: val = (cmp >= 0); break;
3827 case GE_EXPR: val = (cmp <= 0); break;
3828 default: done = false;
3829 }
3830 if (done)
3831 {
3832 if (val)
3833 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3834 else
3835 return boolean_false_node;
3836 }
3837 }
3838
3839 /* Same business with inequality tests. */
3840 else if (code1 == NE_EXPR)
3841 {
3842 bool val;
3843 switch (code2)
3844 {
3845 case EQ_EXPR: val = (cmp != 0); break;
3846 case NE_EXPR: val = (cmp == 0); break;
3847 case LT_EXPR: val = (cmp >= 0); break;
3848 case GT_EXPR: val = (cmp <= 0); break;
3849 case LE_EXPR: val = (cmp > 0); break;
3850 case GE_EXPR: val = (cmp < 0); break;
3851 default:
3852 val = false;
3853 }
3854 if (val)
3855 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3856 }
3857 else if (code2 == NE_EXPR)
3858 {
3859 bool val;
3860 switch (code1)
3861 {
3862 case EQ_EXPR: val = (cmp == 0); break;
3863 case NE_EXPR: val = (cmp != 0); break;
3864 case LT_EXPR: val = (cmp <= 0); break;
3865 case GT_EXPR: val = (cmp >= 0); break;
3866 case LE_EXPR: val = (cmp < 0); break;
3867 case GE_EXPR: val = (cmp > 0); break;
3868 default:
3869 val = false;
3870 }
3871 if (val)
3872 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3873 }
3874
3875 /* Chose the more restrictive of two < or <= comparisons. */
3876 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
3877 && (code2 == LT_EXPR || code2 == LE_EXPR))
3878 {
3879 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
3880 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3881 else
3882 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3883 }
3884
3885 /* Likewise chose the more restrictive of two > or >= comparisons. */
3886 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
3887 && (code2 == GT_EXPR || code2 == GE_EXPR))
3888 {
3889 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
3890 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3891 else
3892 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3893 }
3894
3895 /* Check for singleton ranges. */
3896 else if (cmp == 0
3897 && ((code1 == LE_EXPR && code2 == GE_EXPR)
3898 || (code1 == GE_EXPR && code2 == LE_EXPR)))
3899 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
3900
3901 /* Check for disjoint ranges. */
3902 else if (cmp <= 0
3903 && (code1 == LT_EXPR || code1 == LE_EXPR)
3904 && (code2 == GT_EXPR || code2 == GE_EXPR))
3905 return boolean_false_node;
3906 else if (cmp >= 0
3907 && (code1 == GT_EXPR || code1 == GE_EXPR)
3908 && (code2 == LT_EXPR || code2 == LE_EXPR))
3909 return boolean_false_node;
3910 }
3911
3912 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
3913 NAME's definition is a truth value. See if there are any simplifications
3914 that can be done against the NAME's definition. */
3915 if (TREE_CODE (op1a) == SSA_NAME
3916 && (code1 == NE_EXPR || code1 == EQ_EXPR)
3917 && (integer_zerop (op1b) || integer_onep (op1b)))
3918 {
3919 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
3920 || (code1 == NE_EXPR && integer_onep (op1b)));
3921 gimple stmt = SSA_NAME_DEF_STMT (op1a);
3922 switch (gimple_code (stmt))
3923 {
3924 case GIMPLE_ASSIGN:
3925 /* Try to simplify by copy-propagating the definition. */
3926 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
3927
3928 case GIMPLE_PHI:
3929 /* If every argument to the PHI produces the same result when
3930 ANDed with the second comparison, we win.
3931 Do not do this unless the type is bool since we need a bool
3932 result here anyway. */
3933 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
3934 {
3935 tree result = NULL_TREE;
3936 unsigned i;
3937 for (i = 0; i < gimple_phi_num_args (stmt); i++)
3938 {
3939 tree arg = gimple_phi_arg_def (stmt, i);
3940
3941 /* If this PHI has itself as an argument, ignore it.
3942 If all the other args produce the same result,
3943 we're still OK. */
3944 if (arg == gimple_phi_result (stmt))
3945 continue;
3946 else if (TREE_CODE (arg) == INTEGER_CST)
3947 {
3948 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
3949 {
3950 if (!result)
3951 result = boolean_false_node;
3952 else if (!integer_zerop (result))
3953 return NULL_TREE;
3954 }
3955 else if (!result)
3956 result = fold_build2 (code2, boolean_type_node,
3957 op2a, op2b);
3958 else if (!same_bool_comparison_p (result,
3959 code2, op2a, op2b))
3960 return NULL_TREE;
3961 }
0e8b84ec
JJ
3962 else if (TREE_CODE (arg) == SSA_NAME
3963 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 3964 {
6c66f733
JJ
3965 tree temp;
3966 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
3967 /* In simple cases we can look through PHI nodes,
3968 but we have to be careful with loops.
3969 See PR49073. */
3970 if (! dom_info_available_p (CDI_DOMINATORS)
3971 || gimple_bb (def_stmt) == gimple_bb (stmt)
3972 || dominated_by_p (CDI_DOMINATORS,
3973 gimple_bb (def_stmt),
3974 gimple_bb (stmt)))
3975 return NULL_TREE;
3976 temp = and_var_with_comparison (arg, invert, code2,
3977 op2a, op2b);
e89065a1
SL
3978 if (!temp)
3979 return NULL_TREE;
3980 else if (!result)
3981 result = temp;
3982 else if (!same_bool_result_p (result, temp))
3983 return NULL_TREE;
3984 }
3985 else
3986 return NULL_TREE;
3987 }
3988 return result;
3989 }
3990
3991 default:
3992 break;
3993 }
3994 }
3995 return NULL_TREE;
3996}
3997
3998/* Try to simplify the AND of two comparisons, specified by
3999 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4000 If this can be simplified to a single expression (without requiring
4001 introducing more SSA variables to hold intermediate values),
4002 return the resulting tree. Otherwise return NULL_TREE.
4003 If the result expression is non-null, it has boolean type. */
4004
4005tree
4006maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4007 enum tree_code code2, tree op2a, tree op2b)
4008{
4009 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4010 if (t)
4011 return t;
4012 else
4013 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4014}
4015
4016/* Helper function for or_comparisons_1: try to simplify the OR of the
4017 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4018 If INVERT is true, invert the value of VAR before doing the OR.
4019 Return NULL_EXPR if we can't simplify this to a single expression. */
4020
4021static tree
4022or_var_with_comparison (tree var, bool invert,
4023 enum tree_code code2, tree op2a, tree op2b)
4024{
4025 tree t;
4026 gimple stmt = SSA_NAME_DEF_STMT (var);
4027
4028 /* We can only deal with variables whose definitions are assignments. */
4029 if (!is_gimple_assign (stmt))
4030 return NULL_TREE;
4031
4032 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4033 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4034 Then we only have to consider the simpler non-inverted cases. */
4035 if (invert)
4036 t = and_var_with_comparison_1 (stmt,
4037 invert_tree_comparison (code2, false),
4038 op2a, op2b);
4039 else
4040 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4041 return canonicalize_bool (t, invert);
4042}
4043
4044/* Try to simplify the OR of the ssa variable defined by the assignment
4045 STMT with the comparison specified by (OP2A CODE2 OP2B).
4046 Return NULL_EXPR if we can't simplify this to a single expression. */
4047
4048static tree
4049or_var_with_comparison_1 (gimple stmt,
4050 enum tree_code code2, tree op2a, tree op2b)
4051{
4052 tree var = gimple_assign_lhs (stmt);
4053 tree true_test_var = NULL_TREE;
4054 tree false_test_var = NULL_TREE;
4055 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4056
4057 /* Check for identities like (var OR (var != 0)) => true . */
4058 if (TREE_CODE (op2a) == SSA_NAME
4059 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4060 {
4061 if ((code2 == NE_EXPR && integer_zerop (op2b))
4062 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4063 {
4064 true_test_var = op2a;
4065 if (var == true_test_var)
4066 return var;
4067 }
4068 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4069 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4070 {
4071 false_test_var = op2a;
4072 if (var == false_test_var)
4073 return boolean_true_node;
4074 }
4075 }
4076
4077 /* If the definition is a comparison, recurse on it. */
4078 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4079 {
4080 tree t = or_comparisons_1 (innercode,
4081 gimple_assign_rhs1 (stmt),
4082 gimple_assign_rhs2 (stmt),
4083 code2,
4084 op2a,
4085 op2b);
4086 if (t)
4087 return t;
4088 }
4089
4090 /* If the definition is an AND or OR expression, we may be able to
4091 simplify by reassociating. */
eb9820c0
KT
4092 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4093 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
4094 {
4095 tree inner1 = gimple_assign_rhs1 (stmt);
4096 tree inner2 = gimple_assign_rhs2 (stmt);
4097 gimple s;
4098 tree t;
4099 tree partial = NULL_TREE;
eb9820c0 4100 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
4101
4102 /* Check for boolean identities that don't require recursive examination
4103 of inner1/inner2:
4104 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4105 inner1 OR (inner1 AND inner2) => inner1
4106 !inner1 OR (inner1 OR inner2) => true
4107 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4108 */
4109 if (inner1 == true_test_var)
4110 return (is_or ? var : inner1);
4111 else if (inner2 == true_test_var)
4112 return (is_or ? var : inner2);
4113 else if (inner1 == false_test_var)
4114 return (is_or
4115 ? boolean_true_node
4116 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4117 else if (inner2 == false_test_var)
4118 return (is_or
4119 ? boolean_true_node
4120 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4121
4122 /* Next, redistribute/reassociate the OR across the inner tests.
4123 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4124 if (TREE_CODE (inner1) == SSA_NAME
4125 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4126 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4127 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4128 gimple_assign_rhs1 (s),
4129 gimple_assign_rhs2 (s),
4130 code2, op2a, op2b)))
4131 {
4132 /* Handle the OR case, where we are reassociating:
4133 (inner1 OR inner2) OR (op2a code2 op2b)
4134 => (t OR inner2)
4135 If the partial result t is a constant, we win. Otherwise
4136 continue on to try reassociating with the other inner test. */
8236c8eb 4137 if (is_or)
e89065a1
SL
4138 {
4139 if (integer_onep (t))
4140 return boolean_true_node;
4141 else if (integer_zerop (t))
4142 return inner2;
4143 }
4144
4145 /* Handle the AND case, where we are redistributing:
4146 (inner1 AND inner2) OR (op2a code2 op2b)
4147 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
4148 else if (integer_zerop (t))
4149 return boolean_false_node;
4150
4151 /* Save partial result for later. */
4152 partial = t;
e89065a1
SL
4153 }
4154
4155 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4156 if (TREE_CODE (inner2) == SSA_NAME
4157 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4158 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4159 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4160 gimple_assign_rhs1 (s),
4161 gimple_assign_rhs2 (s),
4162 code2, op2a, op2b)))
4163 {
4164 /* Handle the OR case, where we are reassociating:
4165 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
4166 => (inner1 OR t)
4167 => (t OR partial) */
4168 if (is_or)
e89065a1
SL
4169 {
4170 if (integer_zerop (t))
4171 return inner1;
4172 else if (integer_onep (t))
4173 return boolean_true_node;
8236c8eb
JJ
4174 /* If both are the same, we can apply the identity
4175 (x OR x) == x. */
4176 else if (partial && same_bool_result_p (t, partial))
4177 return t;
e89065a1
SL
4178 }
4179
4180 /* Handle the AND case, where we are redistributing:
4181 (inner1 AND inner2) OR (op2a code2 op2b)
4182 => (t AND (inner1 OR (op2a code2 op2b)))
4183 => (t AND partial) */
4184 else
4185 {
4186 if (integer_zerop (t))
4187 return boolean_false_node;
4188 else if (partial)
4189 {
4190 /* We already got a simplification for the other
4191 operand to the redistributed AND expression. The
4192 interesting case is when at least one is true.
4193 Or, if both are the same, we can apply the identity
8236c8eb 4194 (x AND x) == x. */
e89065a1
SL
4195 if (integer_onep (partial))
4196 return t;
4197 else if (integer_onep (t))
4198 return partial;
4199 else if (same_bool_result_p (t, partial))
8236c8eb 4200 return t;
e89065a1
SL
4201 }
4202 }
4203 }
4204 }
4205 return NULL_TREE;
4206}
4207
4208/* Try to simplify the OR of two comparisons defined by
4209 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4210 If this can be done without constructing an intermediate value,
4211 return the resulting tree; otherwise NULL_TREE is returned.
4212 This function is deliberately asymmetric as it recurses on SSA_DEFs
4213 in the first comparison but not the second. */
4214
4215static tree
4216or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4217 enum tree_code code2, tree op2a, tree op2b)
4218{
ae22ac3c 4219 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 4220
e89065a1
SL
4221 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4222 if (operand_equal_p (op1a, op2a, 0)
4223 && operand_equal_p (op1b, op2b, 0))
4224 {
eb9820c0 4225 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4226 tree t = combine_comparisons (UNKNOWN_LOCATION,
4227 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 4228 truth_type, op1a, op1b);
e89065a1
SL
4229 if (t)
4230 return t;
4231 }
4232
4233 /* Likewise the swapped case of the above. */
4234 if (operand_equal_p (op1a, op2b, 0)
4235 && operand_equal_p (op1b, op2a, 0))
4236 {
eb9820c0 4237 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4238 tree t = combine_comparisons (UNKNOWN_LOCATION,
4239 TRUTH_ORIF_EXPR, code1,
4240 swap_tree_comparison (code2),
31ed6226 4241 truth_type, op1a, op1b);
e89065a1
SL
4242 if (t)
4243 return t;
4244 }
4245
4246 /* If both comparisons are of the same value against constants, we might
4247 be able to merge them. */
4248 if (operand_equal_p (op1a, op2a, 0)
4249 && TREE_CODE (op1b) == INTEGER_CST
4250 && TREE_CODE (op2b) == INTEGER_CST)
4251 {
4252 int cmp = tree_int_cst_compare (op1b, op2b);
4253
4254 /* If we have (op1a != op1b), we should either be able to
4255 return that or TRUE, depending on whether the constant op1b
4256 also satisfies the other comparison against op2b. */
4257 if (code1 == NE_EXPR)
4258 {
4259 bool done = true;
4260 bool val;
4261 switch (code2)
4262 {
4263 case EQ_EXPR: val = (cmp == 0); break;
4264 case NE_EXPR: val = (cmp != 0); break;
4265 case LT_EXPR: val = (cmp < 0); break;
4266 case GT_EXPR: val = (cmp > 0); break;
4267 case LE_EXPR: val = (cmp <= 0); break;
4268 case GE_EXPR: val = (cmp >= 0); break;
4269 default: done = false;
4270 }
4271 if (done)
4272 {
4273 if (val)
4274 return boolean_true_node;
4275 else
4276 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4277 }
4278 }
4279 /* Likewise if the second comparison is a != comparison. */
4280 else if (code2 == NE_EXPR)
4281 {
4282 bool done = true;
4283 bool val;
4284 switch (code1)
4285 {
4286 case EQ_EXPR: val = (cmp == 0); break;
4287 case NE_EXPR: val = (cmp != 0); break;
4288 case LT_EXPR: val = (cmp > 0); break;
4289 case GT_EXPR: val = (cmp < 0); break;
4290 case LE_EXPR: val = (cmp >= 0); break;
4291 case GE_EXPR: val = (cmp <= 0); break;
4292 default: done = false;
4293 }
4294 if (done)
4295 {
4296 if (val)
4297 return boolean_true_node;
4298 else
4299 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4300 }
4301 }
4302
4303 /* See if an equality test is redundant with the other comparison. */
4304 else if (code1 == EQ_EXPR)
4305 {
4306 bool val;
4307 switch (code2)
4308 {
4309 case EQ_EXPR: val = (cmp == 0); break;
4310 case NE_EXPR: val = (cmp != 0); break;
4311 case LT_EXPR: val = (cmp < 0); break;
4312 case GT_EXPR: val = (cmp > 0); break;
4313 case LE_EXPR: val = (cmp <= 0); break;
4314 case GE_EXPR: val = (cmp >= 0); break;
4315 default:
4316 val = false;
4317 }
4318 if (val)
4319 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4320 }
4321 else if (code2 == EQ_EXPR)
4322 {
4323 bool val;
4324 switch (code1)
4325 {
4326 case EQ_EXPR: val = (cmp == 0); break;
4327 case NE_EXPR: val = (cmp != 0); break;
4328 case LT_EXPR: val = (cmp > 0); break;
4329 case GT_EXPR: val = (cmp < 0); break;
4330 case LE_EXPR: val = (cmp >= 0); break;
4331 case GE_EXPR: val = (cmp <= 0); break;
4332 default:
4333 val = false;
4334 }
4335 if (val)
4336 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4337 }
4338
4339 /* Chose the less restrictive of two < or <= comparisons. */
4340 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4341 && (code2 == LT_EXPR || code2 == LE_EXPR))
4342 {
4343 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4344 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4345 else
4346 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4347 }
4348
4349 /* Likewise chose the less restrictive of two > or >= comparisons. */
4350 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4351 && (code2 == GT_EXPR || code2 == GE_EXPR))
4352 {
4353 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4354 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4355 else
4356 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4357 }
4358
4359 /* Check for singleton ranges. */
4360 else if (cmp == 0
4361 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4362 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4363 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4364
4365 /* Check for less/greater pairs that don't restrict the range at all. */
4366 else if (cmp >= 0
4367 && (code1 == LT_EXPR || code1 == LE_EXPR)
4368 && (code2 == GT_EXPR || code2 == GE_EXPR))
4369 return boolean_true_node;
4370 else if (cmp <= 0
4371 && (code1 == GT_EXPR || code1 == GE_EXPR)
4372 && (code2 == LT_EXPR || code2 == LE_EXPR))
4373 return boolean_true_node;
4374 }
4375
4376 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4377 NAME's definition is a truth value. See if there are any simplifications
4378 that can be done against the NAME's definition. */
4379 if (TREE_CODE (op1a) == SSA_NAME
4380 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4381 && (integer_zerop (op1b) || integer_onep (op1b)))
4382 {
4383 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4384 || (code1 == NE_EXPR && integer_onep (op1b)));
4385 gimple stmt = SSA_NAME_DEF_STMT (op1a);
4386 switch (gimple_code (stmt))
4387 {
4388 case GIMPLE_ASSIGN:
4389 /* Try to simplify by copy-propagating the definition. */
4390 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4391
4392 case GIMPLE_PHI:
4393 /* If every argument to the PHI produces the same result when
4394 ORed with the second comparison, we win.
4395 Do not do this unless the type is bool since we need a bool
4396 result here anyway. */
4397 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4398 {
4399 tree result = NULL_TREE;
4400 unsigned i;
4401 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4402 {
4403 tree arg = gimple_phi_arg_def (stmt, i);
4404
4405 /* If this PHI has itself as an argument, ignore it.
4406 If all the other args produce the same result,
4407 we're still OK. */
4408 if (arg == gimple_phi_result (stmt))
4409 continue;
4410 else if (TREE_CODE (arg) == INTEGER_CST)
4411 {
4412 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4413 {
4414 if (!result)
4415 result = boolean_true_node;
4416 else if (!integer_onep (result))
4417 return NULL_TREE;
4418 }
4419 else if (!result)
4420 result = fold_build2 (code2, boolean_type_node,
4421 op2a, op2b);
4422 else if (!same_bool_comparison_p (result,
4423 code2, op2a, op2b))
4424 return NULL_TREE;
4425 }
0e8b84ec
JJ
4426 else if (TREE_CODE (arg) == SSA_NAME
4427 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 4428 {
6c66f733
JJ
4429 tree temp;
4430 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
4431 /* In simple cases we can look through PHI nodes,
4432 but we have to be careful with loops.
4433 See PR49073. */
4434 if (! dom_info_available_p (CDI_DOMINATORS)
4435 || gimple_bb (def_stmt) == gimple_bb (stmt)
4436 || dominated_by_p (CDI_DOMINATORS,
4437 gimple_bb (def_stmt),
4438 gimple_bb (stmt)))
4439 return NULL_TREE;
4440 temp = or_var_with_comparison (arg, invert, code2,
4441 op2a, op2b);
e89065a1
SL
4442 if (!temp)
4443 return NULL_TREE;
4444 else if (!result)
4445 result = temp;
4446 else if (!same_bool_result_p (result, temp))
4447 return NULL_TREE;
4448 }
4449 else
4450 return NULL_TREE;
4451 }
4452 return result;
4453 }
4454
4455 default:
4456 break;
4457 }
4458 }
4459 return NULL_TREE;
4460}
4461
4462/* Try to simplify the OR of two comparisons, specified by
4463 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4464 If this can be simplified to a single expression (without requiring
4465 introducing more SSA variables to hold intermediate values),
4466 return the resulting tree. Otherwise return NULL_TREE.
4467 If the result expression is non-null, it has boolean type. */
4468
4469tree
4470maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4471 enum tree_code code2, tree op2a, tree op2b)
4472{
4473 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4474 if (t)
4475 return t;
4476 else
4477 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4478}
cfef45c8
RG
4479
4480
4481/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4482
4483 Either NULL_TREE, a simplified but non-constant or a constant
4484 is returned.
4485
4486 ??? This should go into a gimple-fold-inline.h file to be eventually
4487 privatized with the single valueize function used in the various TUs
4488 to avoid the indirect function call overhead. */
4489
4490tree
d2a85801
RB
4491gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
4492 tree (*gvalueize) (tree))
cfef45c8 4493{
45cc9f96
RB
4494 code_helper rcode;
4495 tree ops[3] = {};
4496 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4497 edges if there are intermediate VARYING defs. For this reason
4498 do not follow SSA edges here even though SCCVN can technically
4499 just deal fine with that. */
d2a85801 4500 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize)
45cc9f96
RB
4501 && rcode.is_tree_code ()
4502 && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
4503 || ((tree_code) rcode) == ADDR_EXPR)
4504 && is_gimple_val (ops[0]))
4505 {
4506 tree res = ops[0];
4507 if (dump_file && dump_flags & TDF_DETAILS)
4508 {
4509 fprintf (dump_file, "Match-and-simplified ");
4510 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4511 fprintf (dump_file, " to ");
4512 print_generic_expr (dump_file, res, 0);
4513 fprintf (dump_file, "\n");
4514 }
4515 return res;
4516 }
4517
cfef45c8
RG
4518 location_t loc = gimple_location (stmt);
4519 switch (gimple_code (stmt))
4520 {
4521 case GIMPLE_ASSIGN:
4522 {
4523 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4524
4525 switch (get_gimple_rhs_class (subcode))
4526 {
4527 case GIMPLE_SINGLE_RHS:
4528 {
4529 tree rhs = gimple_assign_rhs1 (stmt);
4530 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4531
4532 if (TREE_CODE (rhs) == SSA_NAME)
4533 {
4534 /* If the RHS is an SSA_NAME, return its known constant value,
4535 if any. */
4536 return (*valueize) (rhs);
4537 }
4538 /* Handle propagating invariant addresses into address
4539 operations. */
4540 else if (TREE_CODE (rhs) == ADDR_EXPR
4541 && !is_gimple_min_invariant (rhs))
4542 {
d25c4172 4543 HOST_WIDE_INT offset = 0;
cfef45c8
RG
4544 tree base;
4545 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
4546 &offset,
4547 valueize);
4548 if (base
4549 && (CONSTANT_CLASS_P (base)
4550 || decl_address_invariant_p (base)))
4551 return build_invariant_address (TREE_TYPE (rhs),
4552 base, offset);
4553 }
4554 else if (TREE_CODE (rhs) == CONSTRUCTOR
4555 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
4556 && (CONSTRUCTOR_NELTS (rhs)
4557 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
4558 {
4559 unsigned i;
d2a12ae7 4560 tree val, *vec;
cfef45c8 4561
d2a12ae7
RG
4562 vec = XALLOCAVEC (tree,
4563 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
cfef45c8
RG
4564 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
4565 {
4566 val = (*valueize) (val);
4567 if (TREE_CODE (val) == INTEGER_CST
4568 || TREE_CODE (val) == REAL_CST
4569 || TREE_CODE (val) == FIXED_CST)
d2a12ae7 4570 vec[i] = val;
cfef45c8
RG
4571 else
4572 return NULL_TREE;
4573 }
4574
d2a12ae7 4575 return build_vector (TREE_TYPE (rhs), vec);
cfef45c8 4576 }
bdf37f7a
JH
4577 if (subcode == OBJ_TYPE_REF)
4578 {
4579 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
4580 /* If callee is constant, we can fold away the wrapper. */
4581 if (is_gimple_min_invariant (val))
4582 return val;
4583 }
cfef45c8
RG
4584
4585 if (kind == tcc_reference)
4586 {
4587 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
4588 || TREE_CODE (rhs) == REALPART_EXPR
4589 || TREE_CODE (rhs) == IMAGPART_EXPR)
4590 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4591 {
4592 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4593 return fold_unary_loc (EXPR_LOCATION (rhs),
4594 TREE_CODE (rhs),
4595 TREE_TYPE (rhs), val);
4596 }
4597 else if (TREE_CODE (rhs) == BIT_FIELD_REF
4598 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4599 {
4600 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4601 return fold_ternary_loc (EXPR_LOCATION (rhs),
4602 TREE_CODE (rhs),
4603 TREE_TYPE (rhs), val,
4604 TREE_OPERAND (rhs, 1),
4605 TREE_OPERAND (rhs, 2));
4606 }
4607 else if (TREE_CODE (rhs) == MEM_REF
4608 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4609 {
4610 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4611 if (TREE_CODE (val) == ADDR_EXPR
4612 && is_gimple_min_invariant (val))
4613 {
4614 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
4615 unshare_expr (val),
4616 TREE_OPERAND (rhs, 1));
4617 if (tem)
4618 rhs = tem;
4619 }
4620 }
4621 return fold_const_aggregate_ref_1 (rhs, valueize);
4622 }
4623 else if (kind == tcc_declaration)
4624 return get_symbol_constant_value (rhs);
4625 return rhs;
4626 }
4627
4628 case GIMPLE_UNARY_RHS:
f3582e54 4629 return NULL_TREE;
cfef45c8
RG
4630
4631 case GIMPLE_BINARY_RHS:
4632 {
4633 /* Handle binary operators that can appear in GIMPLE form. */
4634 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4635 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4636
4637 /* Translate &x + CST into an invariant form suitable for
4638 further propagation. */
4639 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
4640 && TREE_CODE (op0) == ADDR_EXPR
4641 && TREE_CODE (op1) == INTEGER_CST)
4642 {
4643 tree off = fold_convert (ptr_type_node, op1);
4d59a001
RG
4644 return build_fold_addr_expr_loc
4645 (loc,
4646 fold_build2 (MEM_REF,
cfef45c8
RG
4647 TREE_TYPE (TREE_TYPE (op0)),
4648 unshare_expr (op0), off));
4649 }
4650
4651 return fold_binary_loc (loc, subcode,
4652 gimple_expr_type (stmt), op0, op1);
4653 }
4654
4655 case GIMPLE_TERNARY_RHS:
4656 {
4657 /* Handle ternary operators that can appear in GIMPLE form. */
4658 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4659 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4660 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
4661
d3878abf
RG
4662 /* Fold embedded expressions in ternary codes. */
4663 if ((subcode == COND_EXPR
4664 || subcode == VEC_COND_EXPR)
4665 && COMPARISON_CLASS_P (op0))
4666 {
4667 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
4668 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
4669 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
4670 TREE_TYPE (op0), op00, op01);
4671 if (tem)
4672 op0 = tem;
4673 }
4674
cfef45c8
RG
4675 return fold_ternary_loc (loc, subcode,
4676 gimple_expr_type (stmt), op0, op1, op2);
4677 }
4678
4679 default:
4680 gcc_unreachable ();
4681 }
4682 }
4683
4684 case GIMPLE_CALL:
4685 {
25583c4f 4686 tree fn;
538dd0b7 4687 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
4688
4689 if (gimple_call_internal_p (stmt))
31e071ae
MP
4690 {
4691 enum tree_code subcode = ERROR_MARK;
4692 switch (gimple_call_internal_fn (stmt))
4693 {
4694 case IFN_UBSAN_CHECK_ADD:
4695 subcode = PLUS_EXPR;
4696 break;
4697 case IFN_UBSAN_CHECK_SUB:
4698 subcode = MINUS_EXPR;
4699 break;
4700 case IFN_UBSAN_CHECK_MUL:
4701 subcode = MULT_EXPR;
4702 break;
4703 default:
4704 return NULL_TREE;
4705 }
368b454d
JJ
4706 tree arg0 = gimple_call_arg (stmt, 0);
4707 tree arg1 = gimple_call_arg (stmt, 1);
4708 tree op0 = (*valueize) (arg0);
4709 tree op1 = (*valueize) (arg1);
31e071ae
MP
4710
4711 if (TREE_CODE (op0) != INTEGER_CST
4712 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
4713 {
4714 switch (subcode)
4715 {
4716 case MULT_EXPR:
4717 /* x * 0 = 0 * x = 0 without overflow. */
4718 if (integer_zerop (op0) || integer_zerop (op1))
4719 return build_zero_cst (TREE_TYPE (arg0));
4720 break;
4721 case MINUS_EXPR:
4722 /* y - y = 0 without overflow. */
4723 if (operand_equal_p (op0, op1, 0))
4724 return build_zero_cst (TREE_TYPE (arg0));
4725 break;
4726 default:
4727 break;
4728 }
4729 }
4730 tree res
4731 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
4732 if (res
4733 && TREE_CODE (res) == INTEGER_CST
4734 && !TREE_OVERFLOW (res))
4735 return res;
4736 return NULL_TREE;
4737 }
25583c4f
RS
4738
4739 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8
RG
4740 if (TREE_CODE (fn) == ADDR_EXPR
4741 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5c944c6c
RB
4742 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
4743 && gimple_builtin_call_types_compatible_p (stmt,
4744 TREE_OPERAND (fn, 0)))
cfef45c8
RG
4745 {
4746 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
4747 tree call, retval;
4748 unsigned i;
4749 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4750 args[i] = (*valueize) (gimple_call_arg (stmt, i));
4751 call = build_call_array_loc (loc,
538dd0b7 4752 gimple_call_return_type (call_stmt),
cfef45c8
RG
4753 fn, gimple_call_num_args (stmt), args);
4754 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
4755 if (retval)
5c944c6c
RB
4756 {
4757 /* fold_call_expr wraps the result inside a NOP_EXPR. */
4758 STRIP_NOPS (retval);
538dd0b7
DM
4759 retval = fold_convert (gimple_call_return_type (call_stmt),
4760 retval);
5c944c6c 4761 }
cfef45c8
RG
4762 return retval;
4763 }
4764 return NULL_TREE;
4765 }
4766
4767 default:
4768 return NULL_TREE;
4769 }
4770}
4771
4772/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4773 Returns NULL_TREE if folding to a constant is not possible, otherwise
4774 returns a constant according to is_gimple_min_invariant. */
4775
4776tree
4777gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
4778{
4779 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
4780 if (res && is_gimple_min_invariant (res))
4781 return res;
4782 return NULL_TREE;
4783}
4784
4785
4786/* The following set of functions are supposed to fold references using
4787 their constant initializers. */
4788
cfef45c8
RG
/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the constant offset of the MEM_REF into BIT_OFFSET and
	 strip the dereference so we can look at the base object.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      /* Only handle references with a known, exact extent.  */
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
4856
cfef45c8
RG
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  FROM_DECL is passed through to
   fold_ctor_reference for canonicalization of the result.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  /* Wrap the index into the precision/sign of the domain's index type,
     matching how the front end encoded constructor indices.  */
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  /* Start one before the low bound so the "no explicit index" case below
     advances to the correct first index.  */
  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify a range,
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have a match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
4956
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   FROM_DECL is passed through to fold_ctor_reference.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  A zero end marks a
	 flexible array member extending to the end of the object.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if the field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
5020
/* CTOR is value initializing memory.  Fold a reference of type TYPE and
   size SIZE to the memory at bit OFFSET.  Returns the folded constant
   or NULL_TREE on failure.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_NOPS (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
			      offset / BITS_PER_UNIT) > 0)
	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  /* Otherwise descend into a CONSTRUCTOR, dispatching on whether it
     initializes an array/vector (indexed) or a record (field list).  */
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
5075
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  /* Volatile accesses must not be replaced by their constant value.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the operand; if it is a complex constant, extract the
	   requested part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
5192
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, without SSA
   valueization.  Convenience wrapper around fold_const_aggregate_ref_1.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
06bc3ec7 5198
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++-produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* This can only legitimately happen in LTO where the vtable
	 symbol got rewritten.  */
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the
     array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
5310
85942f45
JH
5311/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5312 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5313 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
5314 OBJ_TYPE_REF_OBJECT(REF).
5315 Set CAN_REFER if non-NULL to false if method
5316 is not referable or if the virtual table is ill-formed (such as rewriten
5317 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
5318
5319tree
ec77d61f
JH
5320gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5321 bool *can_refer)
85942f45
JH
5322{
5323 unsigned HOST_WIDE_INT offset;
5324 tree v;
5325
5326 v = BINFO_VTABLE (known_binfo);
5327 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5328 if (!v)
5329 return NULL_TREE;
5330
5331 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
5332 {
5333 if (can_refer)
5334 *can_refer = false;
5335 return NULL_TREE;
5336 }
5337 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
5338}
5339
06bc3ec7
BS
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	  /* Intentional fall through to the default "return false".  */
	default:
	  return false;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* These builtins have mathematically non-negative ranges.  */
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  if ((n & 1) == 0)
		    {
		      /* Verify the exponent really is an integer so the
			 even test above was meaningful.  */
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, SIGNED);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      return false;
	    }
	}
    }

  return false;
}
b184c8f1
AM
5470
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the access stays inside the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively fold the inner dereference first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
19e51b40
JJ
5590
5591/* Return true if CODE is an operation that when operating on signed
5592 integer types involves undefined behavior on overflow and the
5593 operation can be expressed with unsigned arithmetic. */
5594
5595bool
5596arith_code_with_undefined_signed_overflow (tree_code code)
5597{
5598 switch (code)
5599 {
5600 case PLUS_EXPR:
5601 case MINUS_EXPR:
5602 case MULT_EXPR:
5603 case NEGATE_EXPR:
5604 case POINTER_PLUS_EXPR:
5605 return true;
5606 default:
5607 return false;
5608 }
5609}
5610
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each operand of STMT to the unsigned type, collecting any
     statements needed for the conversions.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  /* Give STMT a fresh unsigned LHS; the original LHS is defined by the
     conversion statement appended below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR has no meaning on the unsigned type; use plain
     addition instead.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original type.  */
  gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
d4f5cd5e 5650
3d2cf79f
RB
5651
5652/* Build the expression CODE OP0 of type TYPE with location LOC,
5653 simplifying it first if possible using VALUEIZE if not NULL.
5654 OP0 is expected to be valueized already. Returns the built
5655 expression value and appends statements possibly defining it
5656 to SEQ. */
5657
5658tree
5659gimple_build (gimple_seq *seq, location_t loc,
5660 enum tree_code code, tree type, tree op0,
5661 tree (*valueize)(tree))
5662{
5663 tree res = gimple_simplify (code, type, op0, seq, valueize);
5664 if (!res)
5665 {
5666 if (gimple_in_ssa_p (cfun))
b731b390 5667 res = make_ssa_name (type);
3d2cf79f 5668 else
b731b390 5669 res = create_tmp_reg (type);
3d2cf79f
RB
5670 gimple stmt;
5671 if (code == REALPART_EXPR
5672 || code == IMAGPART_EXPR
5673 || code == VIEW_CONVERT_EXPR)
0d0e4a03 5674 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 5675 else
0d0e4a03 5676 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
5677 gimple_set_location (stmt, loc);
5678 gimple_seq_add_stmt_without_update (seq, stmt);
5679 }
5680 return res;
5681}
5682
5683/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
5684 simplifying it first if possible using VALUEIZE if not NULL.
5685 OP0 and OP1 are expected to be valueized already. Returns the built
5686 expression value and appends statements possibly defining it
5687 to SEQ. */
5688
5689tree
5690gimple_build (gimple_seq *seq, location_t loc,
5691 enum tree_code code, tree type, tree op0, tree op1,
5692 tree (*valueize)(tree))
5693{
5694 tree res = gimple_simplify (code, type, op0, op1, seq, valueize);
5695 if (!res)
5696 {
5697 if (gimple_in_ssa_p (cfun))
b731b390 5698 res = make_ssa_name (type);
3d2cf79f 5699 else
b731b390 5700 res = create_tmp_reg (type);
0d0e4a03 5701 gimple stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
5702 gimple_set_location (stmt, loc);
5703 gimple_seq_add_stmt_without_update (seq, stmt);
5704 }
5705 return res;
5706}
5707
5708/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
5709 simplifying it first if possible using VALUEIZE if not NULL.
5710 OP0, OP1 and OP2 are expected to be valueized already. Returns the built
5711 expression value and appends statements possibly defining it
5712 to SEQ. */
5713
5714tree
5715gimple_build (gimple_seq *seq, location_t loc,
5716 enum tree_code code, tree type, tree op0, tree op1, tree op2,
5717 tree (*valueize)(tree))
5718{
5719 tree res = gimple_simplify (code, type, op0, op1, op2,
5720 seq, valueize);
5721 if (!res)
5722 {
5723 if (gimple_in_ssa_p (cfun))
b731b390 5724 res = make_ssa_name (type);
3d2cf79f 5725 else
b731b390 5726 res = create_tmp_reg (type);
3d2cf79f
RB
5727 gimple stmt;
5728 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
5729 stmt = gimple_build_assign (res, code,
5730 build3 (code, type, op0, op1, op2));
3d2cf79f 5731 else
0d0e4a03 5732 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
5733 gimple_set_location (stmt, loc);
5734 gimple_seq_add_stmt_without_update (seq, stmt);
5735 }
5736 return res;
5737}
5738
5739/* Build the call FN (ARG0) with a result of type TYPE
5740 (or no result if TYPE is void) with location LOC,
5741 simplifying it first if possible using VALUEIZE if not NULL.
5742 ARG0 is expected to be valueized already. Returns the built
5743 expression value (or NULL_TREE if TYPE is void) and appends
5744 statements possibly defining it to SEQ. */
5745
5746tree
5747gimple_build (gimple_seq *seq, location_t loc,
5748 enum built_in_function fn, tree type, tree arg0,
5749 tree (*valueize)(tree))
5750{
5751 tree res = gimple_simplify (fn, type, arg0, seq, valueize);
5752 if (!res)
5753 {
5754 tree decl = builtin_decl_implicit (fn);
5755 gimple stmt = gimple_build_call (decl, 1, arg0);
5756 if (!VOID_TYPE_P (type))
5757 {
5758 if (gimple_in_ssa_p (cfun))
b731b390 5759 res = make_ssa_name (type);
3d2cf79f 5760 else
b731b390 5761 res = create_tmp_reg (type);
3d2cf79f
RB
5762 gimple_call_set_lhs (stmt, res);
5763 }
5764 gimple_set_location (stmt, loc);
5765 gimple_seq_add_stmt_without_update (seq, stmt);
5766 }
5767 return res;
5768}
5769
5770/* Build the call FN (ARG0, ARG1) with a result of type TYPE
5771 (or no result if TYPE is void) with location LOC,
5772 simplifying it first if possible using VALUEIZE if not NULL.
5773 ARG0 is expected to be valueized already. Returns the built
5774 expression value (or NULL_TREE if TYPE is void) and appends
5775 statements possibly defining it to SEQ. */
5776
5777tree
5778gimple_build (gimple_seq *seq, location_t loc,
5779 enum built_in_function fn, tree type, tree arg0, tree arg1,
5780 tree (*valueize)(tree))
5781{
5782 tree res = gimple_simplify (fn, type, arg0, arg1, seq, valueize);
5783 if (!res)
5784 {
5785 tree decl = builtin_decl_implicit (fn);
5786 gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
5787 if (!VOID_TYPE_P (type))
5788 {
5789 if (gimple_in_ssa_p (cfun))
b731b390 5790 res = make_ssa_name (type);
3d2cf79f 5791 else
b731b390 5792 res = create_tmp_reg (type);
3d2cf79f
RB
5793 gimple_call_set_lhs (stmt, res);
5794 }
5795 gimple_set_location (stmt, loc);
5796 gimple_seq_add_stmt_without_update (seq, stmt);
5797 }
5798 return res;
5799}
5800
5801/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
5802 (or no result if TYPE is void) with location LOC,
5803 simplifying it first if possible using VALUEIZE if not NULL.
5804 ARG0 is expected to be valueized already. Returns the built
5805 expression value (or NULL_TREE if TYPE is void) and appends
5806 statements possibly defining it to SEQ. */
5807
5808tree
5809gimple_build (gimple_seq *seq, location_t loc,
5810 enum built_in_function fn, tree type,
5811 tree arg0, tree arg1, tree arg2,
5812 tree (*valueize)(tree))
5813{
5814 tree res = gimple_simplify (fn, type, arg0, arg1, arg2, seq, valueize);
5815 if (!res)
5816 {
5817 tree decl = builtin_decl_implicit (fn);
5818 gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
5819 if (!VOID_TYPE_P (type))
5820 {
5821 if (gimple_in_ssa_p (cfun))
b731b390 5822 res = make_ssa_name (type);
3d2cf79f 5823 else
b731b390 5824 res = create_tmp_reg (type);
3d2cf79f
RB
5825 gimple_call_set_lhs (stmt, res);
5826 }
5827 gimple_set_location (stmt, loc);
5828 gimple_seq_add_stmt_without_update (seq, stmt);
5829 }
5830 return res;
5831}
5832
5833/* Build the conversion (TYPE) OP with a result of type TYPE
5834 with location LOC if such conversion is neccesary in GIMPLE,
5835 simplifying it first.
5836 Returns the built expression value and appends
5837 statements possibly defining it to SEQ. */
d4f5cd5e
RB
5838
5839tree
5840gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
5841{
5842 if (useless_type_conversion_p (type, TREE_TYPE (op)))
5843 return op;
3d2cf79f 5844 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 5845}