/* gcc/gimple-fold.c -- recovered from a git history (blame) export;
   associated commit: "don't ICE when section attribute is used on things
   in comdats".  */
/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2014 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
d8a2d370
DN
26#include "stringpool.h"
27#include "expr.h"
28#include "stmt.h"
29#include "stor-layout.h"
cbdd87d4 30#include "flags.h"
83685514
AM
31#include "hashtab.h"
32#include "hash-set.h"
33#include "vec.h"
34#include "machmode.h"
35#include "hard-reg-set.h"
36#include "input.h"
cbdd87d4 37#include "function.h"
7ee2468b 38#include "dumpfile.h"
442b4905 39#include "bitmap.h"
60393bbc
AM
40#include "predict.h"
41#include "dominance.h"
2fb9a547
AM
42#include "basic-block.h"
43#include "tree-ssa-alias.h"
44#include "internal-fn.h"
45#include "gimple-fold.h"
46#include "gimple-expr.h"
47#include "is-a.h"
18f429e2 48#include "gimple.h"
45b0be94 49#include "gimplify.h"
5be5c238 50#include "gimple-iterator.h"
442b4905
AM
51#include "gimple-ssa.h"
52#include "tree-ssanames.h"
53#include "tree-into-ssa.h"
54#include "tree-dfa.h"
7a300452 55#include "tree-ssa.h"
cbdd87d4 56#include "tree-ssa-propagate.h"
cbdd87d4 57#include "target.h"
c582198b
AM
58#include "hash-map.h"
59#include "plugin-api.h"
60#include "ipa-ref.h"
61#include "cgraph.h"
450ad0cd
JH
62#include "ipa-utils.h"
63#include "gimple-pretty-print.h"
4484a35a 64#include "tree-ssa-address.h"
862d0b35 65#include "langhooks.h"
19e51b40 66#include "gimplify-me.h"
2b5f0895 67#include "dbgcnt.h"
9b2b7279 68#include "builtins.h"
fef5a0d9 69#include "output.h"
e0ee10ed
RB
70#include "tree-eh.h"
71#include "gimple-match.h"
45cc9f96
RB
72#include "tree-phinodes.h"
73#include "ssa-iterators.h"
cbdd87d4 74
b3b9f3d0 75/* Return true when DECL can be referenced from current unit.
c44c2088
JH
76 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
77 We can get declarations that are not possible to reference for various
78 reasons:
1389294c 79
1389294c
JH
80 1) When analyzing C++ virtual tables.
81 C++ virtual tables do have known constructors even
82 when they are keyed to other compilation unit.
83 Those tables can contain pointers to methods and vars
84 in other units. Those methods have both STATIC and EXTERNAL
85 set.
86 2) In WHOPR mode devirtualization might lead to reference
87 to method that was partitioned elsehwere.
88 In this case we have static VAR_DECL or FUNCTION_DECL
89 that has no corresponding callgraph/varpool node
b3b9f3d0
JH
90 declaring the body.
91 3) COMDAT functions referred by external vtables that
3e89949e 92 we devirtualize only during final compilation stage.
b3b9f3d0
JH
93 At this time we already decided that we will not output
94 the function body and thus we can't reference the symbol
95 directly. */
96
1389294c 97static bool
c44c2088 98can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
1389294c 99{
2c8326a5 100 varpool_node *vnode;
1389294c 101 struct cgraph_node *node;
5e20cdc9 102 symtab_node *snode;
c44c2088 103
00de328a 104 if (DECL_ABSTRACT_P (decl))
1632a686
JH
105 return false;
106
107 /* We are concerned only about static/external vars and functions. */
108 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
109 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
110 return true;
111
112 /* Static objects can be referred only if they was not optimized out yet. */
113 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
114 {
3aaf0529
JH
115 /* Before we start optimizing unreachable code we can be sure all
116 static objects are defined. */
3dafb85c 117 if (symtab->function_flags_ready)
3aaf0529 118 return true;
d52f5295 119 snode = symtab_node::get (decl);
3aaf0529 120 if (!snode || !snode->definition)
1632a686 121 return false;
7de90a6c 122 node = dyn_cast <cgraph_node *> (snode);
1632a686
JH
123 return !node || !node->global.inlined_to;
124 }
125
6da8be89 126 /* We will later output the initializer, so we can refer to it.
c44c2088 127 So we are concerned only when DECL comes from initializer of
3aaf0529 128 external var or var that has been optimized out. */
c44c2088
JH
129 if (!from_decl
130 || TREE_CODE (from_decl) != VAR_DECL
3aaf0529 131 || (!DECL_EXTERNAL (from_decl)
9041d2e6 132 && (vnode = varpool_node::get (from_decl)) != NULL
3aaf0529 133 && vnode->definition)
6da8be89 134 || (flag_ltrans
9041d2e6 135 && (vnode = varpool_node::get (from_decl)) != NULL
6adda80b 136 && vnode->in_other_partition))
c44c2088 137 return true;
c44c2088
JH
138 /* We are folding reference from external vtable. The vtable may reffer
139 to a symbol keyed to other compilation unit. The other compilation
140 unit may be in separate DSO and the symbol may be hidden. */
141 if (DECL_VISIBILITY_SPECIFIED (decl)
142 && DECL_EXTERNAL (decl)
a33a931b 143 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
d52f5295 144 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
c44c2088 145 return false;
b3b9f3d0
JH
146 /* When function is public, we always can introduce new reference.
147 Exception are the COMDAT functions where introducing a direct
148 reference imply need to include function body in the curren tunit. */
149 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
150 return true;
3aaf0529
JH
151 /* We have COMDAT. We are going to check if we still have definition
152 or if the definition is going to be output in other partition.
153 Bypass this when gimplifying; all needed functions will be produced.
c44c2088
JH
154
155 As observed in PR20991 for already optimized out comdat virtual functions
073a8998 156 it may be tempting to not necessarily give up because the copy will be
c44c2088
JH
157 output elsewhere when corresponding vtable is output.
158 This is however not possible - ABI specify that COMDATs are output in
159 units where they are used and when the other unit was compiled with LTO
160 it is possible that vtable was kept public while the function itself
161 was privatized. */
3dafb85c 162 if (!symtab->function_flags_ready)
b3b9f3d0 163 return true;
c44c2088 164
d52f5295 165 snode = symtab_node::get (decl);
3aaf0529
JH
166 if (!snode
167 || ((!snode->definition || DECL_EXTERNAL (decl))
168 && (!snode->in_other_partition
169 || (!snode->forced_by_abi && !snode->force_output))))
170 return false;
171 node = dyn_cast <cgraph_node *> (snode);
172 return !node || !node->global.inlined_to;
1389294c
JH
173}
174
0038d4e0 175/* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
c44c2088
JH
176 acceptable form for is_gimple_min_invariant.
177 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
17f39a39
JH
178
179tree
c44c2088 180canonicalize_constructor_val (tree cval, tree from_decl)
17f39a39 181{
50619002
EB
182 tree orig_cval = cval;
183 STRIP_NOPS (cval);
315f5f1b
RG
184 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
185 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
17f39a39 186 {
315f5f1b
RG
187 tree ptr = TREE_OPERAND (cval, 0);
188 if (is_gimple_min_invariant (ptr))
189 cval = build1_loc (EXPR_LOCATION (cval),
190 ADDR_EXPR, TREE_TYPE (ptr),
191 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
192 ptr,
193 fold_convert (ptr_type_node,
194 TREE_OPERAND (cval, 1))));
17f39a39
JH
195 }
196 if (TREE_CODE (cval) == ADDR_EXPR)
197 {
5a27a197
RG
198 tree base = NULL_TREE;
199 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
ca5f4331
MM
200 {
201 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
202 if (base)
203 TREE_OPERAND (cval, 0) = base;
204 }
5a27a197
RG
205 else
206 base = get_base_address (TREE_OPERAND (cval, 0));
7501ca28
RG
207 if (!base)
208 return NULL_TREE;
b3b9f3d0 209
7501ca28
RG
210 if ((TREE_CODE (base) == VAR_DECL
211 || TREE_CODE (base) == FUNCTION_DECL)
c44c2088 212 && !can_refer_decl_in_current_unit_p (base, from_decl))
1389294c 213 return NULL_TREE;
7501ca28 214 if (TREE_CODE (base) == VAR_DECL)
46eb666a 215 TREE_ADDRESSABLE (base) = 1;
7501ca28
RG
216 else if (TREE_CODE (base) == FUNCTION_DECL)
217 {
218 /* Make sure we create a cgraph node for functions we'll reference.
219 They can be non-existent if the reference comes from an entry
220 of an external vtable for example. */
d52f5295 221 cgraph_node::get_create (base);
7501ca28 222 }
0038d4e0 223 /* Fixup types in global initializers. */
73aef89e
RG
224 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
225 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
50619002
EB
226
227 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
228 cval = fold_convert (TREE_TYPE (orig_cval), cval);
229 return cval;
17f39a39 230 }
846abd0d
RB
231 if (TREE_OVERFLOW_P (cval))
232 return drop_tree_overflow (cval);
50619002 233 return orig_cval;
17f39a39 234}
cbdd87d4
RG
235
236/* If SYM is a constant variable with known value, return the value.
237 NULL_TREE is returned otherwise. */
238
239tree
240get_symbol_constant_value (tree sym)
241{
6a6dac52
JH
242 tree val = ctor_for_folding (sym);
243 if (val != error_mark_node)
cbdd87d4 244 {
cbdd87d4
RG
245 if (val)
246 {
9d60be38 247 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 248 if (val && is_gimple_min_invariant (val))
17f39a39 249 return val;
1389294c
JH
250 else
251 return NULL_TREE;
cbdd87d4
RG
252 }
253 /* Variables declared 'const' without an initializer
254 have zero as the initializer if they may not be
255 overridden at link or run time. */
256 if (!val
b8a8c472 257 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 258 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
259 }
260
261 return NULL_TREE;
262}
263
264
cbdd87d4
RG
265
266/* Subroutine of fold_stmt. We perform several simplifications of the
267 memory reference tree EXPR and make sure to re-gimplify them properly
268 after propagation of constant addresses. IS_LHS is true if the
269 reference is supposed to be an lvalue. */
270
271static tree
272maybe_fold_reference (tree expr, bool is_lhs)
273{
17f39a39 274 tree result;
cbdd87d4 275
f0eddb90
RG
276 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
277 || TREE_CODE (expr) == REALPART_EXPR
278 || TREE_CODE (expr) == IMAGPART_EXPR)
279 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
280 return fold_unary_loc (EXPR_LOCATION (expr),
281 TREE_CODE (expr),
282 TREE_TYPE (expr),
283 TREE_OPERAND (expr, 0));
284 else if (TREE_CODE (expr) == BIT_FIELD_REF
285 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
286 return fold_ternary_loc (EXPR_LOCATION (expr),
287 TREE_CODE (expr),
288 TREE_TYPE (expr),
289 TREE_OPERAND (expr, 0),
290 TREE_OPERAND (expr, 1),
291 TREE_OPERAND (expr, 2));
292
f0eddb90
RG
293 if (!is_lhs
294 && (result = fold_const_aggregate_ref (expr))
295 && is_gimple_min_invariant (result))
296 return result;
cbdd87d4 297
cbdd87d4
RG
298 return NULL_TREE;
299}
300
301
302/* Attempt to fold an assignment statement pointed-to by SI. Returns a
303 replacement rhs for the statement or NULL_TREE if no simplification
304 could be made. It is assumed that the operands have been previously
305 folded. */
306
307static tree
308fold_gimple_assign (gimple_stmt_iterator *si)
309{
310 gimple stmt = gsi_stmt (*si);
311 enum tree_code subcode = gimple_assign_rhs_code (stmt);
312 location_t loc = gimple_location (stmt);
313
314 tree result = NULL_TREE;
315
316 switch (get_gimple_rhs_class (subcode))
317 {
318 case GIMPLE_SINGLE_RHS:
319 {
320 tree rhs = gimple_assign_rhs1 (stmt);
321
8c00ba08
JW
322 if (TREE_CLOBBER_P (rhs))
323 return NULL_TREE;
324
4e71066d 325 if (REFERENCE_CLASS_P (rhs))
cbdd87d4
RG
326 return maybe_fold_reference (rhs, false);
327
bdf37f7a
JH
328 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
329 {
330 tree val = OBJ_TYPE_REF_EXPR (rhs);
331 if (is_gimple_min_invariant (val))
332 return val;
f8a39967 333 else if (flag_devirtualize && virtual_method_call_p (rhs))
bdf37f7a
JH
334 {
335 bool final;
336 vec <cgraph_node *>targets
f8a39967 337 = possible_polymorphic_call_targets (rhs, stmt, &final);
2b5f0895 338 if (final && targets.length () <= 1 && dbg_cnt (devirt))
bdf37f7a 339 {
2b5f0895
XDL
340 if (dump_enabled_p ())
341 {
807b7d62 342 location_t loc = gimple_location_safe (stmt);
2b5f0895
XDL
343 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
344 "resolving virtual function address "
345 "reference to function %s\n",
346 targets.length () == 1
347 ? targets[0]->name ()
3ef276e4 348 : "NULL");
2b5f0895 349 }
3ef276e4
RB
350 if (targets.length () == 1)
351 {
352 val = fold_convert (TREE_TYPE (val),
353 build_fold_addr_expr_loc
354 (loc, targets[0]->decl));
355 STRIP_USELESS_TYPE_CONVERSION (val);
356 }
357 else
358 /* We can not use __builtin_unreachable here because it
359 can not have address taken. */
360 val = build_int_cst (TREE_TYPE (val), 0);
bdf37f7a
JH
361 return val;
362 }
363 }
364
365 }
cbdd87d4
RG
366 else if (TREE_CODE (rhs) == ADDR_EXPR)
367 {
70f34814
RG
368 tree ref = TREE_OPERAND (rhs, 0);
369 tree tem = maybe_fold_reference (ref, true);
370 if (tem
371 && TREE_CODE (tem) == MEM_REF
372 && integer_zerop (TREE_OPERAND (tem, 1)))
373 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
374 else if (tem)
cbdd87d4
RG
375 result = fold_convert (TREE_TYPE (rhs),
376 build_fold_addr_expr_loc (loc, tem));
70f34814
RG
377 else if (TREE_CODE (ref) == MEM_REF
378 && integer_zerop (TREE_OPERAND (ref, 1)))
379 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
cbdd87d4
RG
380 }
381
382 else if (TREE_CODE (rhs) == CONSTRUCTOR
383 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
384 && (CONSTRUCTOR_NELTS (rhs)
385 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
386 {
387 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
388 unsigned i;
389 tree val;
390
391 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
392 if (TREE_CODE (val) != INTEGER_CST
393 && TREE_CODE (val) != REAL_CST
394 && TREE_CODE (val) != FIXED_CST)
395 return NULL_TREE;
396
397 return build_vector_from_ctor (TREE_TYPE (rhs),
398 CONSTRUCTOR_ELTS (rhs));
399 }
400
401 else if (DECL_P (rhs))
9d60be38 402 return get_symbol_constant_value (rhs);
cbdd87d4
RG
403
404 /* If we couldn't fold the RHS, hand over to the generic
405 fold routines. */
406 if (result == NULL_TREE)
407 result = fold (rhs);
408
409 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
410 that may have been added by fold, and "useless" type
411 conversions that might now be apparent due to propagation. */
412 STRIP_USELESS_TYPE_CONVERSION (result);
413
414 if (result != rhs && valid_gimple_rhs_p (result))
415 return result;
416
417 return NULL_TREE;
418 }
419 break;
420
421 case GIMPLE_UNARY_RHS:
cbdd87d4
RG
422 break;
423
424 case GIMPLE_BINARY_RHS:
9b80d091
KT
425 /* Try to canonicalize for boolean-typed X the comparisons
426 X == 0, X == 1, X != 0, and X != 1. */
315f5f1b
RG
427 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
428 || gimple_assign_rhs_code (stmt) == NE_EXPR)
9b80d091
KT
429 {
430 tree lhs = gimple_assign_lhs (stmt);
431 tree op1 = gimple_assign_rhs1 (stmt);
432 tree op2 = gimple_assign_rhs2 (stmt);
433 tree type = TREE_TYPE (op1);
434
435 /* Check whether the comparison operands are of the same boolean
436 type as the result type is.
437 Check that second operand is an integer-constant with value
438 one or zero. */
439 if (TREE_CODE (op2) == INTEGER_CST
440 && (integer_zerop (op2) || integer_onep (op2))
441 && useless_type_conversion_p (TREE_TYPE (lhs), type))
442 {
443 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
444 bool is_logical_not = false;
445
446 /* X == 0 and X != 1 is a logical-not.of X
447 X == 1 and X != 0 is X */
448 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
449 || (cmp_code == NE_EXPR && integer_onep (op2)))
450 is_logical_not = true;
451
452 if (is_logical_not == false)
453 result = op1;
454 /* Only for one-bit precision typed X the transformation
455 !X -> ~X is valied. */
456 else if (TYPE_PRECISION (type) == 1)
457 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
458 type, op1);
459 /* Otherwise we use !X -> X ^ 1. */
460 else
461 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
462 type, op1, build_int_cst (type, 1));
463
464 }
465 }
cbdd87d4
RG
466
467 if (!result)
468 result = fold_binary_loc (loc, subcode,
5fbcc0ed
RG
469 TREE_TYPE (gimple_assign_lhs (stmt)),
470 gimple_assign_rhs1 (stmt),
471 gimple_assign_rhs2 (stmt));
cbdd87d4
RG
472
473 if (result)
474 {
475 STRIP_USELESS_TYPE_CONVERSION (result);
476 if (valid_gimple_rhs_p (result))
477 return result;
cbdd87d4
RG
478 }
479 break;
480
0354c0c7 481 case GIMPLE_TERNARY_RHS:
4e71066d
RG
482 /* Try to fold a conditional expression. */
483 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
484 {
485 tree op0 = gimple_assign_rhs1 (stmt);
486 tree tem;
487 bool set = false;
488 location_t cond_loc = gimple_location (stmt);
489
490 if (COMPARISON_CLASS_P (op0))
491 {
492 fold_defer_overflow_warnings ();
493 tem = fold_binary_loc (cond_loc,
494 TREE_CODE (op0), TREE_TYPE (op0),
495 TREE_OPERAND (op0, 0),
496 TREE_OPERAND (op0, 1));
497 /* This is actually a conditional expression, not a GIMPLE
498 conditional statement, however, the valid_gimple_rhs_p
499 test still applies. */
500 set = (tem && is_gimple_condexpr (tem)
501 && valid_gimple_rhs_p (tem));
502 fold_undefer_overflow_warnings (set, stmt, 0);
503 }
504 else if (is_gimple_min_invariant (op0))
505 {
506 tem = op0;
507 set = true;
508 }
509 else
510 return NULL_TREE;
511
512 if (set)
513 result = fold_build3_loc (cond_loc, COND_EXPR,
514 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
515 gimple_assign_rhs2 (stmt),
516 gimple_assign_rhs3 (stmt));
517 }
518
519 if (!result)
520 result = fold_ternary_loc (loc, subcode,
521 TREE_TYPE (gimple_assign_lhs (stmt)),
522 gimple_assign_rhs1 (stmt),
523 gimple_assign_rhs2 (stmt),
524 gimple_assign_rhs3 (stmt));
0354c0c7
BS
525
526 if (result)
527 {
528 STRIP_USELESS_TYPE_CONVERSION (result);
529 if (valid_gimple_rhs_p (result))
530 return result;
0354c0c7
BS
531 }
532 break;
533
cbdd87d4
RG
534 case GIMPLE_INVALID_RHS:
535 gcc_unreachable ();
536 }
537
538 return NULL_TREE;
539}
540
541/* Attempt to fold a conditional statement. Return true if any changes were
542 made. We only attempt to fold the condition expression, and do not perform
543 any transformation that would require alteration of the cfg. It is
544 assumed that the operands have been previously folded. */
545
546static bool
538dd0b7 547fold_gimple_cond (gcond *stmt)
cbdd87d4
RG
548{
549 tree result = fold_binary_loc (gimple_location (stmt),
550 gimple_cond_code (stmt),
551 boolean_type_node,
552 gimple_cond_lhs (stmt),
553 gimple_cond_rhs (stmt));
554
555 if (result)
556 {
557 STRIP_USELESS_TYPE_CONVERSION (result);
558 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
559 {
560 gimple_cond_set_condition_from_tree (stmt, result);
561 return true;
562 }
563 }
564
565 return false;
566}
567
fef5a0d9
RB
568
569/* Replace a statement at *SI_P with a sequence of statements in STMTS,
570 adjusting the replacement stmts location and virtual operands.
571 If the statement has a lhs the last stmt in the sequence is expected
572 to assign to that lhs. */
573
574static void
575gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
576{
577 gimple stmt = gsi_stmt (*si_p);
578
579 if (gimple_has_location (stmt))
580 annotate_all_with_location (stmts, gimple_location (stmt));
581
582 /* First iterate over the replacement statements backward, assigning
583 virtual operands to their defining statements. */
584 gimple laststore = NULL;
585 for (gimple_stmt_iterator i = gsi_last (stmts);
586 !gsi_end_p (i); gsi_prev (&i))
587 {
588 gimple new_stmt = gsi_stmt (i);
589 if ((gimple_assign_single_p (new_stmt)
590 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
591 || (is_gimple_call (new_stmt)
592 && (gimple_call_flags (new_stmt)
593 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
594 {
595 tree vdef;
596 if (!laststore)
597 vdef = gimple_vdef (stmt);
598 else
599 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
600 gimple_set_vdef (new_stmt, vdef);
601 if (vdef && TREE_CODE (vdef) == SSA_NAME)
602 SSA_NAME_DEF_STMT (vdef) = new_stmt;
603 laststore = new_stmt;
604 }
605 }
606
607 /* Second iterate over the statements forward, assigning virtual
608 operands to their uses. */
609 tree reaching_vuse = gimple_vuse (stmt);
610 for (gimple_stmt_iterator i = gsi_start (stmts);
611 !gsi_end_p (i); gsi_next (&i))
612 {
613 gimple new_stmt = gsi_stmt (i);
614 /* If the new statement possibly has a VUSE, update it with exact SSA
615 name we know will reach this one. */
616 if (gimple_has_mem_ops (new_stmt))
617 gimple_set_vuse (new_stmt, reaching_vuse);
618 gimple_set_modified (new_stmt, true);
619 if (gimple_vdef (new_stmt))
620 reaching_vuse = gimple_vdef (new_stmt);
621 }
622
623 /* If the new sequence does not do a store release the virtual
624 definition of the original statement. */
625 if (reaching_vuse
626 && reaching_vuse == gimple_vuse (stmt))
627 {
628 tree vdef = gimple_vdef (stmt);
629 if (vdef
630 && TREE_CODE (vdef) == SSA_NAME)
631 {
632 unlink_stmt_vdef (stmt);
633 release_ssa_name (vdef);
634 }
635 }
636
637 /* Finally replace the original statement with the sequence. */
638 gsi_replace_with_seq (si_p, stmts, false);
639}
640
cbdd87d4
RG
641/* Convert EXPR into a GIMPLE value suitable for substitution on the
642 RHS of an assignment. Insert the necessary statements before
643 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
644 is replaced. If the call is expected to produces a result, then it
645 is replaced by an assignment of the new RHS to the result variable.
646 If the result is to be ignored, then the call is replaced by a
fe2ef088
MM
647 GIMPLE_NOP. A proper VDEF chain is retained by making the first
648 VUSE and the last VDEF of the whole sequence be the same as the replaced
649 statement and using new SSA names for stores in between. */
cbdd87d4
RG
650
651void
652gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
653{
654 tree lhs;
cbdd87d4
RG
655 gimple stmt, new_stmt;
656 gimple_stmt_iterator i;
355a7673 657 gimple_seq stmts = NULL;
cbdd87d4
RG
658
659 stmt = gsi_stmt (*si_p);
660
661 gcc_assert (is_gimple_call (stmt));
662
45852dcc 663 push_gimplify_context (gimple_in_ssa_p (cfun));
cbdd87d4 664
e256dfce 665 lhs = gimple_call_lhs (stmt);
cbdd87d4 666 if (lhs == NULL_TREE)
6e572326
RG
667 {
668 gimplify_and_add (expr, &stmts);
669 /* We can end up with folding a memcpy of an empty class assignment
670 which gets optimized away by C++ gimplification. */
671 if (gimple_seq_empty_p (stmts))
672 {
9fdc58de 673 pop_gimplify_context (NULL);
6e572326
RG
674 if (gimple_in_ssa_p (cfun))
675 {
676 unlink_stmt_vdef (stmt);
677 release_defs (stmt);
678 }
77d19c72 679 gsi_replace (si_p, gimple_build_nop (), true);
6e572326
RG
680 return;
681 }
682 }
cbdd87d4 683 else
e256dfce
RG
684 {
685 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
686 new_stmt = gimple_build_assign (lhs, tmp);
687 i = gsi_last (stmts);
688 gsi_insert_after_without_update (&i, new_stmt,
689 GSI_CONTINUE_LINKING);
690 }
cbdd87d4
RG
691
692 pop_gimplify_context (NULL);
693
fef5a0d9
RB
694 gsi_replace_with_seq_vops (si_p, stmts);
695}
cbdd87d4 696
fef5a0d9
RB
697
698/* Replace the call at *GSI with the gimple value VAL. */
699
700static void
701replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
702{
703 gimple stmt = gsi_stmt (*gsi);
704 tree lhs = gimple_call_lhs (stmt);
705 gimple repl;
706 if (lhs)
e256dfce 707 {
fef5a0d9
RB
708 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
709 val = fold_convert (TREE_TYPE (lhs), val);
710 repl = gimple_build_assign (lhs, val);
711 }
712 else
713 repl = gimple_build_nop ();
714 tree vdef = gimple_vdef (stmt);
715 if (vdef && TREE_CODE (vdef) == SSA_NAME)
716 {
717 unlink_stmt_vdef (stmt);
718 release_ssa_name (vdef);
719 }
720 gsi_replace (gsi, repl, true);
721}
722
723/* Replace the call at *GSI with the new call REPL and fold that
724 again. */
725
726static void
727replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
728{
729 gimple stmt = gsi_stmt (*gsi);
730 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
731 gimple_set_location (repl, gimple_location (stmt));
732 if (gimple_vdef (stmt)
733 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
734 {
735 gimple_set_vdef (repl, gimple_vdef (stmt));
736 gimple_set_vuse (repl, gimple_vuse (stmt));
737 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
738 }
739 gsi_replace (gsi, repl, true);
740 fold_stmt (gsi);
741}
742
743/* Return true if VAR is a VAR_DECL or a component thereof. */
744
745static bool
746var_decl_component_p (tree var)
747{
748 tree inner = var;
749 while (handled_component_p (inner))
750 inner = TREE_OPERAND (inner, 0);
751 return SSA_VAR_P (inner);
752}
753
754/* Fold function call to builtin mem{{,p}cpy,move}. Return
755 NULL_TREE if no simplification can be made.
756 If ENDP is 0, return DEST (like memcpy).
757 If ENDP is 1, return DEST+LEN (like mempcpy).
758 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
759 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
760 (memmove). */
761
762static bool
763gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
764 tree dest, tree src, int endp)
765{
766 gimple stmt = gsi_stmt (*gsi);
767 tree lhs = gimple_call_lhs (stmt);
768 tree len = gimple_call_arg (stmt, 2);
769 tree destvar, srcvar;
770 location_t loc = gimple_location (stmt);
771
772 /* If the LEN parameter is zero, return DEST. */
773 if (integer_zerop (len))
774 {
775 gimple repl;
776 if (gimple_call_lhs (stmt))
777 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
778 else
779 repl = gimple_build_nop ();
780 tree vdef = gimple_vdef (stmt);
781 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 782 {
fef5a0d9
RB
783 unlink_stmt_vdef (stmt);
784 release_ssa_name (vdef);
785 }
786 gsi_replace (gsi, repl, true);
787 return true;
788 }
789
790 /* If SRC and DEST are the same (and not volatile), return
791 DEST{,+LEN,+LEN-1}. */
792 if (operand_equal_p (src, dest, 0))
793 {
794 unlink_stmt_vdef (stmt);
795 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
796 release_ssa_name (gimple_vdef (stmt));
797 if (!lhs)
798 {
799 gsi_replace (gsi, gimple_build_nop (), true);
800 return true;
801 }
802 goto done;
803 }
804 else
805 {
806 tree srctype, desttype;
807 unsigned int src_align, dest_align;
808 tree off0;
809
810 /* Build accesses at offset zero with a ref-all character type. */
811 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
812 ptr_mode, true), 0);
813
814 /* If we can perform the copy efficiently with first doing all loads
815 and then all stores inline it that way. Currently efficiently
816 means that we can load all the memory into a single integer
817 register which is what MOVE_MAX gives us. */
818 src_align = get_pointer_alignment (src);
819 dest_align = get_pointer_alignment (dest);
820 if (tree_fits_uhwi_p (len)
821 && compare_tree_int (len, MOVE_MAX) <= 0
822 /* ??? Don't transform copies from strings with known length this
823 confuses the tree-ssa-strlen.c. This doesn't handle
824 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
825 reason. */
826 && !c_strlen (src, 2))
827 {
828 unsigned ilen = tree_to_uhwi (len);
829 if (exact_log2 (ilen) != -1)
830 {
831 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
832 if (type
833 && TYPE_MODE (type) != BLKmode
834 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
835 == ilen * 8)
836 /* If the destination pointer is not aligned we must be able
837 to emit an unaligned store. */
838 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
839 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
840 {
841 tree srctype = type;
842 tree desttype = type;
843 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
844 srctype = build_aligned_type (type, src_align);
845 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
846 tree tem = fold_const_aggregate_ref (srcmem);
847 if (tem)
848 srcmem = tem;
849 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
850 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
851 src_align))
852 srcmem = NULL_TREE;
853 if (srcmem)
854 {
855 gimple new_stmt;
856 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
857 {
858 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
859 if (gimple_in_ssa_p (cfun))
860 srcmem = make_ssa_name (TREE_TYPE (srcmem),
861 new_stmt);
862 else
b731b390 863 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
fef5a0d9
RB
864 gimple_assign_set_lhs (new_stmt, srcmem);
865 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
866 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
867 }
868 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
869 desttype = build_aligned_type (type, dest_align);
870 new_stmt
871 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
872 dest, off0),
873 srcmem);
874 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
875 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
876 if (gimple_vdef (new_stmt)
877 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
878 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
879 if (!lhs)
880 {
881 gsi_replace (gsi, new_stmt, true);
882 return true;
883 }
884 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
885 goto done;
886 }
887 }
888 }
889 }
890
891 if (endp == 3)
892 {
893 /* Both DEST and SRC must be pointer types.
894 ??? This is what old code did. Is the testing for pointer types
895 really mandatory?
896
897 If either SRC is readonly or length is 1, we can use memcpy. */
898 if (!dest_align || !src_align)
899 return false;
900 if (readonly_data_expr (src)
901 || (tree_fits_uhwi_p (len)
902 && (MIN (src_align, dest_align) / BITS_PER_UNIT
903 >= tree_to_uhwi (len))))
904 {
905 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
906 if (!fn)
907 return false;
908 gimple_call_set_fndecl (stmt, fn);
909 gimple_call_set_arg (stmt, 0, dest);
910 gimple_call_set_arg (stmt, 1, src);
911 fold_stmt (gsi);
912 return true;
913 }
914
915 /* If *src and *dest can't overlap, optimize into memcpy as well. */
916 if (TREE_CODE (src) == ADDR_EXPR
917 && TREE_CODE (dest) == ADDR_EXPR)
918 {
919 tree src_base, dest_base, fn;
920 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
921 HOST_WIDE_INT size = -1;
922 HOST_WIDE_INT maxsize = -1;
923
924 srcvar = TREE_OPERAND (src, 0);
925 src_base = get_ref_base_and_extent (srcvar, &src_offset,
926 &size, &maxsize);
927 destvar = TREE_OPERAND (dest, 0);
928 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
929 &size, &maxsize);
930 if (tree_fits_uhwi_p (len))
931 maxsize = tree_to_uhwi (len);
932 else
933 maxsize = -1;
934 src_offset /= BITS_PER_UNIT;
935 dest_offset /= BITS_PER_UNIT;
936 if (SSA_VAR_P (src_base)
937 && SSA_VAR_P (dest_base))
938 {
939 if (operand_equal_p (src_base, dest_base, 0)
940 && ranges_overlap_p (src_offset, maxsize,
941 dest_offset, maxsize))
942 return false;
943 }
944 else if (TREE_CODE (src_base) == MEM_REF
945 && TREE_CODE (dest_base) == MEM_REF)
946 {
947 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
948 TREE_OPERAND (dest_base, 0), 0))
949 return false;
950 offset_int off = mem_ref_offset (src_base) + src_offset;
951 if (!wi::fits_shwi_p (off))
952 return false;
953 src_offset = off.to_shwi ();
954
955 off = mem_ref_offset (dest_base) + dest_offset;
956 if (!wi::fits_shwi_p (off))
957 return false;
958 dest_offset = off.to_shwi ();
959 if (ranges_overlap_p (src_offset, maxsize,
960 dest_offset, maxsize))
961 return false;
962 }
963 else
964 return false;
965
966 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
967 if (!fn)
968 return false;
969 gimple_call_set_fndecl (stmt, fn);
970 gimple_call_set_arg (stmt, 0, dest);
971 gimple_call_set_arg (stmt, 1, src);
972 fold_stmt (gsi);
973 return true;
974 }
975
976 /* If the destination and source do not alias optimize into
977 memcpy as well. */
978 if ((is_gimple_min_invariant (dest)
979 || TREE_CODE (dest) == SSA_NAME)
980 && (is_gimple_min_invariant (src)
981 || TREE_CODE (src) == SSA_NAME))
982 {
983 ao_ref destr, srcr;
984 ao_ref_init_from_ptr_and_size (&destr, dest, len);
985 ao_ref_init_from_ptr_and_size (&srcr, src, len);
986 if (!refs_may_alias_p_1 (&destr, &srcr, false))
987 {
988 tree fn;
989 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
990 if (!fn)
991 return false;
992 gimple_call_set_fndecl (stmt, fn);
993 gimple_call_set_arg (stmt, 0, dest);
994 gimple_call_set_arg (stmt, 1, src);
995 fold_stmt (gsi);
996 return true;
997 }
998 }
999
1000 return false;
1001 }
1002
1003 if (!tree_fits_shwi_p (len))
1004 return false;
1005 /* FIXME:
1006 This logic lose for arguments like (type *)malloc (sizeof (type)),
1007 since we strip the casts of up to VOID return value from malloc.
1008 Perhaps we ought to inherit type from non-VOID argument here? */
1009 STRIP_NOPS (src);
1010 STRIP_NOPS (dest);
1011 if (!POINTER_TYPE_P (TREE_TYPE (src))
1012 || !POINTER_TYPE_P (TREE_TYPE (dest)))
1013 return false;
1014 /* In the following try to find a type that is most natural to be
1015 used for the memcpy source and destination and that allows
1016 the most optimization when memcpy is turned into a plain assignment
1017 using that type. In theory we could always use a char[len] type
1018 but that only gains us that the destination and source possibly
1019 no longer will have their address taken. */
1020 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
1021 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
1022 {
1023 tree tem = TREE_OPERAND (src, 0);
1024 STRIP_NOPS (tem);
1025 if (tem != TREE_OPERAND (src, 0))
1026 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
1027 }
1028 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
1029 {
1030 tree tem = TREE_OPERAND (dest, 0);
1031 STRIP_NOPS (tem);
1032 if (tem != TREE_OPERAND (dest, 0))
1033 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
1034 }
1035 srctype = TREE_TYPE (TREE_TYPE (src));
1036 if (TREE_CODE (srctype) == ARRAY_TYPE
1037 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1038 {
1039 srctype = TREE_TYPE (srctype);
1040 STRIP_NOPS (src);
1041 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
1042 }
1043 desttype = TREE_TYPE (TREE_TYPE (dest));
1044 if (TREE_CODE (desttype) == ARRAY_TYPE
1045 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1046 {
1047 desttype = TREE_TYPE (desttype);
1048 STRIP_NOPS (dest);
1049 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
1050 }
1051 if (TREE_ADDRESSABLE (srctype)
1052 || TREE_ADDRESSABLE (desttype))
1053 return false;
1054
1055 /* Make sure we are not copying using a floating-point mode or
1056 a type whose size possibly does not match its precision. */
1057 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1058 || TREE_CODE (desttype) == BOOLEAN_TYPE
1059 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1060 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1061 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1062 || TREE_CODE (srctype) == BOOLEAN_TYPE
1063 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1064 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1065 if (!srctype)
1066 srctype = desttype;
1067 if (!desttype)
1068 desttype = srctype;
1069 if (!srctype)
1070 return false;
1071
1072 src_align = get_pointer_alignment (src);
1073 dest_align = get_pointer_alignment (dest);
1074 if (dest_align < TYPE_ALIGN (desttype)
1075 || src_align < TYPE_ALIGN (srctype))
1076 return false;
1077
1078 destvar = dest;
1079 STRIP_NOPS (destvar);
1080 if (TREE_CODE (destvar) == ADDR_EXPR
1081 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1082 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1083 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1084 else
1085 destvar = NULL_TREE;
1086
1087 srcvar = src;
1088 STRIP_NOPS (srcvar);
1089 if (TREE_CODE (srcvar) == ADDR_EXPR
1090 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1091 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1092 {
1093 if (!destvar
1094 || src_align >= TYPE_ALIGN (desttype))
1095 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1096 srcvar, off0);
1097 else if (!STRICT_ALIGNMENT)
1098 {
1099 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1100 src_align);
1101 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1102 }
e256dfce 1103 else
fef5a0d9
RB
1104 srcvar = NULL_TREE;
1105 }
1106 else
1107 srcvar = NULL_TREE;
1108
1109 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1110 return false;
1111
1112 if (srcvar == NULL_TREE)
1113 {
1114 STRIP_NOPS (src);
1115 if (src_align >= TYPE_ALIGN (desttype))
1116 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1117 else
1118 {
1119 if (STRICT_ALIGNMENT)
1120 return false;
1121 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1122 src_align);
1123 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1124 }
1125 }
1126 else if (destvar == NULL_TREE)
1127 {
1128 STRIP_NOPS (dest);
1129 if (dest_align >= TYPE_ALIGN (srctype))
1130 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1131 else
1132 {
1133 if (STRICT_ALIGNMENT)
1134 return false;
1135 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1136 dest_align);
1137 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1138 }
1139 }
1140
1141 gimple new_stmt;
1142 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1143 {
1144 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1145 if (gimple_in_ssa_p (cfun))
1146 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1147 else
b731b390 1148 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
fef5a0d9
RB
1149 gimple_assign_set_lhs (new_stmt, srcvar);
1150 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1151 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1152 }
1153 new_stmt = gimple_build_assign (destvar, srcvar);
1154 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1155 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1156 if (gimple_vdef (new_stmt)
1157 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1158 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1159 if (!lhs)
1160 {
1161 gsi_replace (gsi, new_stmt, true);
1162 return true;
1163 }
1164 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1165 }
1166
1167done:
1168 if (endp == 0 || endp == 3)
1169 len = NULL_TREE;
1170 else if (endp == 2)
1171 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1172 ssize_int (1));
1173 if (endp == 2 || endp == 1)
1174 dest = fold_build_pointer_plus_loc (loc, dest, len);
1175
1176 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1177 GSI_SAME_STMT);
1178 gimple repl = gimple_build_assign (lhs, dest);
1179 gsi_replace (gsi, repl, true);
1180 return true;
1181}
1182
1183/* Fold function call to builtin memset or bzero at *GSI setting the
1184 memory of size LEN to VAL. Return whether a simplification was made. */
1185
1186static bool
1187gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1188{
1189 gimple stmt = gsi_stmt (*gsi);
1190 tree etype;
1191 unsigned HOST_WIDE_INT length, cval;
1192
1193 /* If the LEN parameter is zero, return DEST. */
1194 if (integer_zerop (len))
1195 {
1196 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1197 return true;
1198 }
1199
1200 if (! tree_fits_uhwi_p (len))
1201 return false;
1202
1203 if (TREE_CODE (c) != INTEGER_CST)
1204 return false;
1205
1206 tree dest = gimple_call_arg (stmt, 0);
1207 tree var = dest;
1208 if (TREE_CODE (var) != ADDR_EXPR)
1209 return false;
1210
1211 var = TREE_OPERAND (var, 0);
1212 if (TREE_THIS_VOLATILE (var))
1213 return false;
1214
1215 etype = TREE_TYPE (var);
1216 if (TREE_CODE (etype) == ARRAY_TYPE)
1217 etype = TREE_TYPE (etype);
1218
1219 if (!INTEGRAL_TYPE_P (etype)
1220 && !POINTER_TYPE_P (etype))
1221 return NULL_TREE;
1222
1223 if (! var_decl_component_p (var))
1224 return NULL_TREE;
1225
1226 length = tree_to_uhwi (len);
1227 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1228 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1229 return NULL_TREE;
1230
1231 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1232 return NULL_TREE;
1233
1234 if (integer_zerop (c))
1235 cval = 0;
1236 else
1237 {
1238 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1239 return NULL_TREE;
1240
1241 cval = TREE_INT_CST_LOW (c);
1242 cval &= 0xff;
1243 cval |= cval << 8;
1244 cval |= cval << 16;
1245 cval |= (cval << 31) << 1;
1246 }
1247
1248 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1249 gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1250 gimple_set_vuse (store, gimple_vuse (stmt));
1251 tree vdef = gimple_vdef (stmt);
1252 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1253 {
1254 gimple_set_vdef (store, gimple_vdef (stmt));
1255 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1256 }
1257 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1258 if (gimple_call_lhs (stmt))
1259 {
1260 gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1261 gsi_replace (gsi, asgn, true);
1262 }
1263 else
1264 {
1265 gimple_stmt_iterator gsi2 = *gsi;
1266 gsi_prev (gsi);
1267 gsi_remove (&gsi2, true);
1268 }
1269
1270 return true;
1271}
1272
1273
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    /* Recurse on the SSA name behind the dereference.  */
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* TYPE == 2: ARG itself is the value; require a non-negative
	     integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* For maximum queries keep the larger of the previously
		 recorded value and VAL; both must be constants.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* For exact-length queries (TYPE == 0) a differing value
	     means failure.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* A COND_EXPR succeeds only if both of its arms do.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_maxval_strlen (op2, length, visited, type)
		 && get_maxval_strlen (op3, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
1399
dcb7fae2
RB
1400tree
1401get_maxval_strlen (tree arg, int type)
1402{
1403 bitmap visited = NULL;
1404 tree len = NULL_TREE;
1405 if (!get_maxval_strlen (arg, &len, &visited, type))
1406 len = NULL_TREE;
1407 if (visited)
1408 BITMAP_FREE (visited);
1409
1410 return len;
1411}
1412
fef5a0d9
RB
1413
1414/* Fold function call to builtin strcpy with arguments DEST and SRC.
1415 If LEN is not NULL, it represents the length of the string to be
1416 copied. Return NULL_TREE if no simplification can be made. */
1417
1418static bool
1419gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 1420 tree dest, tree src)
fef5a0d9 1421{
dcb7fae2 1422 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1423 tree fn;
1424
1425 /* If SRC and DEST are the same (and not volatile), return DEST. */
1426 if (operand_equal_p (src, dest, 0))
1427 {
1428 replace_call_with_value (gsi, dest);
1429 return true;
1430 }
1431
1432 if (optimize_function_for_size_p (cfun))
1433 return false;
1434
1435 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1436 if (!fn)
1437 return false;
1438
1579e1f8 1439 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1440 if (!len)
dcb7fae2 1441 return false;
fef5a0d9
RB
1442
1443 len = fold_convert_loc (loc, size_type_node, len);
1444 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1445 len = force_gimple_operand_gsi (gsi, len, true,
1446 NULL_TREE, true, GSI_SAME_STMT);
1447 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1448 replace_call_with_call_and_fold (gsi, repl);
1449 return true;
1450}
1451
1452/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1453 If SLEN is not NULL, it represents the length of the source string.
1454 Return NULL_TREE if no simplification can be made. */
1455
1456static bool
dcb7fae2
RB
1457gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1458 tree dest, tree src, tree len)
fef5a0d9 1459{
dcb7fae2 1460 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1461 tree fn;
1462
1463 /* If the LEN parameter is zero, return DEST. */
1464 if (integer_zerop (len))
1465 {
1466 replace_call_with_value (gsi, dest);
1467 return true;
1468 }
1469
1470 /* We can't compare slen with len as constants below if len is not a
1471 constant. */
dcb7fae2 1472 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
1473 return false;
1474
fef5a0d9 1475 /* Now, we must be passed a constant src ptr parameter. */
1579e1f8 1476 tree slen = get_maxval_strlen (src, 0);
dcb7fae2 1477 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
1478 return false;
1479
1480 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1481
1482 /* We do not support simplification of this case, though we do
1483 support it when expanding trees into RTL. */
1484 /* FIXME: generate a call to __builtin_memset. */
1485 if (tree_int_cst_lt (slen, len))
1486 return false;
1487
1488 /* OK transform into builtin memcpy. */
1489 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1490 if (!fn)
1491 return false;
1492
1493 len = fold_convert_loc (loc, size_type_node, len);
1494 len = force_gimple_operand_gsi (gsi, len, true,
1495 NULL_TREE, true, GSI_SAME_STMT);
1496 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1497 replace_call_with_call_and_fold (gsi, repl);
1498 return true;
1499}
1500
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   When the source string is empty the call collapses to DST.  Otherwise,
   when the length of SRC is known and we are optimizing for speed, the
   call is split into strlen (DST) followed by
   memcpy (DST + strlen (DST), SRC, strlen (SRC) + 1).

   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen+memcpy expansion is larger; only worth it for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (SRC) + 1 bytes so the terminating nul is included.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* The call result is used: append LHS = DST and fold the
	 memcpy that now sits just before the iterator.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1595
07f1cf56
RB
1596/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1597 are the arguments to the call. */
1598
1599static bool
1600gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1601{
1602 gimple stmt = gsi_stmt (*gsi);
1603 tree dest = gimple_call_arg (stmt, 0);
1604 tree src = gimple_call_arg (stmt, 1);
1605 tree size = gimple_call_arg (stmt, 2);
1606 tree fn;
1607 const char *p;
1608
1609
1610 p = c_getstr (src);
1611 /* If the SRC parameter is "", return DEST. */
1612 if (p && *p == '\0')
1613 {
1614 replace_call_with_value (gsi, dest);
1615 return true;
1616 }
1617
1618 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1619 return false;
1620
1621 /* If __builtin_strcat_chk is used, assume strcat is available. */
1622 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1623 if (!fn)
1624 return false;
1625
1626 gimple repl = gimple_build_call (fn, 2, dest, src);
1627 replace_call_with_call_and_fold (gsi, repl);
1628 return true;
1629}
1630
ad03a744
RB
1631/* Simplify a call to the strncat builtin. */
1632
1633static bool
1634gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1635{
1636 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1637 tree dst = gimple_call_arg (stmt, 0);
1638 tree src = gimple_call_arg (stmt, 1);
1639 tree len = gimple_call_arg (stmt, 2);
1640
1641 const char *p = c_getstr (src);
1642
1643 /* If the requested length is zero, or the src parameter string
1644 length is zero, return the dst parameter. */
1645 if (integer_zerop (len) || (p && *p == '\0'))
1646 {
1647 replace_call_with_value (gsi, dst);
1648 return true;
1649 }
1650
1651 /* If the requested len is greater than or equal to the string
1652 length, call strcat. */
1653 if (TREE_CODE (len) == INTEGER_CST && p
1654 && compare_tree_int (len, strlen (p)) >= 0)
1655 {
1656 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1657
1658 /* If the replacement _DECL isn't initialized, don't do the
1659 transformation. */
1660 if (!fn)
1661 return false;
1662
1663 gcall *repl = gimple_build_call (fn, 2, dst, src);
1664 replace_call_with_call_and_fold (gsi, repl);
1665 return true;
1666 }
1667
1668 return false;
1669}
1670
745583f9
RB
1671/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1672 LEN, and SIZE. */
1673
1674static bool
1675gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1676{
1677 gimple stmt = gsi_stmt (*gsi);
1678 tree dest = gimple_call_arg (stmt, 0);
1679 tree src = gimple_call_arg (stmt, 1);
1680 tree len = gimple_call_arg (stmt, 2);
1681 tree size = gimple_call_arg (stmt, 3);
1682 tree fn;
1683 const char *p;
1684
1685 p = c_getstr (src);
1686 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1687 if ((p && *p == '\0')
1688 || integer_zerop (len))
1689 {
1690 replace_call_with_value (gsi, dest);
1691 return true;
1692 }
1693
1694 if (! tree_fits_uhwi_p (size))
1695 return false;
1696
1697 if (! integer_all_onesp (size))
1698 {
1699 tree src_len = c_strlen (src, 1);
1700 if (src_len
1701 && tree_fits_uhwi_p (src_len)
1702 && tree_fits_uhwi_p (len)
1703 && ! tree_int_cst_lt (len, src_len))
1704 {
1705 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1706 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1707 if (!fn)
1708 return false;
1709
1710 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1711 replace_call_with_call_and_fold (gsi, repl);
1712 return true;
1713 }
1714 return false;
1715 }
1716
1717 /* If __builtin_strncat_chk is used, assume strncat is available. */
1718 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1719 if (!fn)
1720 return false;
1721
1722 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1723 replace_call_with_call_and_fold (gsi, repl);
1724 return true;
1725}
1726
fef5a0d9
RB
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Depending on the known length of the string, the
   call is deleted (length 0), turned into fputc (length 1) or turned
   into fwrite (longer strings).  Return true if a simplification was
   made, false otherwise.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	/* We need the actual character; if the string contents are
	   not available fall through to the fwrite case.  */
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					     (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
1803
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  Whether the result is
   ignored is derived from the call having no LHS.  The maximum length
   of LEN is computed via get_maxval_strlen.  Return true if a
   simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* Maximum value LEN can take, used when LEN itself isn't constant.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* The checked object is too small for the copy: keep the check.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1903
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  Whether the result is
   ignored is derived from the call having no LHS; the maximum source
   length is computed via get_maxval_strlen.  Return true if a
   simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* Maximum string length of SRC, used when c_strlen fails below.  */
  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy LEN + 1 bytes to include the terminating nul.  */
	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
					      true, GSI_SAME_STMT);
	      gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* The checked object may be too small for the copy: keep the
	 check.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1994
1995/* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1996 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1997 length passed as third argument. IGNORE is true if return value can be
1998 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1999
2000static bool
2001gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2002 tree dest, tree src,
dcb7fae2 2003 tree len, tree size,
fef5a0d9
RB
2004 enum built_in_function fcode)
2005{
dcb7fae2
RB
2006 gimple stmt = gsi_stmt (*gsi);
2007 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2008 tree fn;
2009
2010 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
cbdd87d4 2011 {
fef5a0d9
RB
2012 /* If return value of __stpncpy_chk is ignored,
2013 optimize into __strncpy_chk. */
2014 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2015 if (fn)
2016 {
2017 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
2018 replace_call_with_call_and_fold (gsi, repl);
2019 return true;
2020 }
cbdd87d4
RG
2021 }
2022
fef5a0d9
RB
2023 if (! tree_fits_uhwi_p (size))
2024 return false;
2025
dcb7fae2 2026 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9 2027 if (! integer_all_onesp (size))
cbdd87d4 2028 {
fef5a0d9 2029 if (! tree_fits_uhwi_p (len))
fe2ef088 2030 {
fef5a0d9
RB
2031 /* If LEN is not constant, try MAXLEN too.
2032 For MAXLEN only allow optimizing into non-_ocs function
2033 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2034 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2035 return false;
8a1561bc 2036 }
fef5a0d9
RB
2037 else
2038 maxlen = len;
2039
2040 if (tree_int_cst_lt (size, maxlen))
2041 return false;
cbdd87d4
RG
2042 }
2043
fef5a0d9
RB
2044 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2045 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2046 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2047 if (!fn)
2048 return false;
2049
2050 gimple repl = gimple_build_call (fn, 3, dest, src, len);
2051 replace_call_with_call_and_fold (gsi, repl);
2052 return true;
cbdd87d4
RG
2053}
2054
fef5a0d9
RB
2055/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2056 NULL_TREE if a normal call should be emitted rather than expanding
2057 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2058 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2059 passed as second argument. */
cbdd87d4
RG
2060
2061static bool
fef5a0d9 2062gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
dcb7fae2 2063 enum built_in_function fcode)
cbdd87d4 2064{
538dd0b7 2065 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2066 tree dest, size, len, fn, fmt, flag;
2067 const char *fmt_str;
cbdd87d4 2068
fef5a0d9
RB
2069 /* Verify the required arguments in the original call. */
2070 if (gimple_call_num_args (stmt) < 5)
2071 return false;
cbdd87d4 2072
fef5a0d9
RB
2073 dest = gimple_call_arg (stmt, 0);
2074 len = gimple_call_arg (stmt, 1);
2075 flag = gimple_call_arg (stmt, 2);
2076 size = gimple_call_arg (stmt, 3);
2077 fmt = gimple_call_arg (stmt, 4);
2078
2079 if (! tree_fits_uhwi_p (size))
2080 return false;
2081
2082 if (! integer_all_onesp (size))
2083 {
dcb7fae2 2084 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9 2085 if (! tree_fits_uhwi_p (len))
cbdd87d4 2086 {
fef5a0d9
RB
2087 /* If LEN is not constant, try MAXLEN too.
2088 For MAXLEN only allow optimizing into non-_ocs function
2089 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2090 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
cbdd87d4
RG
2091 return false;
2092 }
2093 else
fef5a0d9 2094 maxlen = len;
cbdd87d4 2095
fef5a0d9
RB
2096 if (tree_int_cst_lt (size, maxlen))
2097 return false;
2098 }
cbdd87d4 2099
fef5a0d9
RB
2100 if (!init_target_chars ())
2101 return false;
cbdd87d4 2102
fef5a0d9
RB
2103 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2104 or if format doesn't contain % chars or is "%s". */
2105 if (! integer_zerop (flag))
2106 {
2107 fmt_str = c_getstr (fmt);
2108 if (fmt_str == NULL)
2109 return false;
2110 if (strchr (fmt_str, target_percent) != NULL
2111 && strcmp (fmt_str, target_percent_s))
2112 return false;
cbdd87d4
RG
2113 }
2114
fef5a0d9
RB
2115 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2116 available. */
2117 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2118 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2119 if (!fn)
491e0b9b
RG
2120 return false;
2121
fef5a0d9
RB
2122 /* Replace the called function and the first 5 argument by 3 retaining
2123 trailing varargs. */
2124 gimple_call_set_fndecl (stmt, fn);
2125 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2126 gimple_call_set_arg (stmt, 0, dest);
2127 gimple_call_set_arg (stmt, 1, len);
2128 gimple_call_set_arg (stmt, 2, fmt);
2129 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2130 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2131 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2132 fold_stmt (gsi);
2133 return true;
2134}
cbdd87d4 2135
fef5a0d9
RB
2136/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2137 Return NULL_TREE if a normal call should be emitted rather than
2138 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2139 or BUILT_IN_VSPRINTF_CHK. */
cbdd87d4 2140
fef5a0d9
RB
2141static bool
2142gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2143 enum built_in_function fcode)
2144{
538dd0b7 2145 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2146 tree dest, size, len, fn, fmt, flag;
2147 const char *fmt_str;
2148 unsigned nargs = gimple_call_num_args (stmt);
cbdd87d4 2149
fef5a0d9
RB
2150 /* Verify the required arguments in the original call. */
2151 if (nargs < 4)
2152 return false;
2153 dest = gimple_call_arg (stmt, 0);
2154 flag = gimple_call_arg (stmt, 1);
2155 size = gimple_call_arg (stmt, 2);
2156 fmt = gimple_call_arg (stmt, 3);
2157
2158 if (! tree_fits_uhwi_p (size))
2159 return false;
2160
2161 len = NULL_TREE;
2162
2163 if (!init_target_chars ())
2164 return false;
2165
2166 /* Check whether the format is a literal string constant. */
2167 fmt_str = c_getstr (fmt);
2168 if (fmt_str != NULL)
2169 {
2170 /* If the format doesn't contain % args or %%, we know the size. */
2171 if (strchr (fmt_str, target_percent) == 0)
cbdd87d4 2172 {
fef5a0d9
RB
2173 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2174 len = build_int_cstu (size_type_node, strlen (fmt_str));
2175 }
2176 /* If the format is "%s" and first ... argument is a string literal,
2177 we know the size too. */
2178 else if (fcode == BUILT_IN_SPRINTF_CHK
2179 && strcmp (fmt_str, target_percent_s) == 0)
2180 {
2181 tree arg;
cbdd87d4 2182
fef5a0d9
RB
2183 if (nargs == 5)
2184 {
2185 arg = gimple_call_arg (stmt, 4);
2186 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2187 {
2188 len = c_strlen (arg, 1);
2189 if (! len || ! tree_fits_uhwi_p (len))
2190 len = NULL_TREE;
2191 }
2192 }
2193 }
2194 }
cbdd87d4 2195
fef5a0d9
RB
2196 if (! integer_all_onesp (size))
2197 {
2198 if (! len || ! tree_int_cst_lt (len, size))
2199 return false;
2200 }
cbdd87d4 2201
fef5a0d9
RB
2202 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2203 or if format doesn't contain % chars or is "%s". */
2204 if (! integer_zerop (flag))
2205 {
2206 if (fmt_str == NULL)
2207 return false;
2208 if (strchr (fmt_str, target_percent) != NULL
2209 && strcmp (fmt_str, target_percent_s))
2210 return false;
2211 }
cbdd87d4 2212
fef5a0d9
RB
2213 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2214 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2215 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2216 if (!fn)
2217 return false;
2218
2219 /* Replace the called function and the first 4 argument by 2 retaining
2220 trailing varargs. */
2221 gimple_call_set_fndecl (stmt, fn);
2222 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2223 gimple_call_set_arg (stmt, 0, dest);
2224 gimple_call_set_arg (stmt, 1, fmt);
2225 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2226 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2227 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2228 fold_stmt (gsi);
2229 return true;
2230}
2231
35770bb2
RB
2232/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2233 ORIG may be null if this is a 2-argument call. We don't attempt to
2234 simplify calls with more than 3 arguments.
2235
2236 Return NULL_TREE if no simplification was possible, otherwise return the
2237 simplified form of the call as a tree. If IGNORED is true, it means that
2238 the caller does not use the returned value of the function. */
2239
2240static bool
dcb7fae2 2241gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
35770bb2
RB
2242{
2243 gimple stmt = gsi_stmt (*gsi);
2244 tree dest = gimple_call_arg (stmt, 0);
2245 tree fmt = gimple_call_arg (stmt, 1);
2246 tree orig = NULL_TREE;
2247 const char *fmt_str = NULL;
2248
2249 /* Verify the required arguments in the original call. We deal with two
2250 types of sprintf() calls: 'sprintf (str, fmt)' and
2251 'sprintf (dest, "%s", orig)'. */
2252 if (gimple_call_num_args (stmt) > 3)
2253 return false;
2254
2255 if (gimple_call_num_args (stmt) == 3)
2256 orig = gimple_call_arg (stmt, 2);
2257
2258 /* Check whether the format is a literal string constant. */
2259 fmt_str = c_getstr (fmt);
2260 if (fmt_str == NULL)
2261 return false;
2262
2263 if (!init_target_chars ())
2264 return false;
2265
2266 /* If the format doesn't contain % args or %%, use strcpy. */
2267 if (strchr (fmt_str, target_percent) == NULL)
2268 {
2269 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2270
2271 if (!fn)
2272 return false;
2273
2274 /* Don't optimize sprintf (buf, "abc", ptr++). */
2275 if (orig)
2276 return false;
2277
2278 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2279 'format' is known to contain no % formats. */
2280 gimple_seq stmts = NULL;
2281 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2282 gimple_seq_add_stmt_without_update (&stmts, repl);
2283 if (gimple_call_lhs (stmt))
2284 {
2285 repl = gimple_build_assign (gimple_call_lhs (stmt),
2286 build_int_cst (integer_type_node,
2287 strlen (fmt_str)));
2288 gimple_seq_add_stmt_without_update (&stmts, repl);
2289 gsi_replace_with_seq_vops (gsi, stmts);
2290 /* gsi now points at the assignment to the lhs, get a
2291 stmt iterator to the memcpy call.
2292 ??? We can't use gsi_for_stmt as that doesn't work when the
2293 CFG isn't built yet. */
2294 gimple_stmt_iterator gsi2 = *gsi;
2295 gsi_prev (&gsi2);
2296 fold_stmt (&gsi2);
2297 }
2298 else
2299 {
2300 gsi_replace_with_seq_vops (gsi, stmts);
2301 fold_stmt (gsi);
2302 }
2303 return true;
2304 }
2305
2306 /* If the format is "%s", use strcpy if the result isn't used. */
2307 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2308 {
2309 tree fn;
2310 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2311
2312 if (!fn)
2313 return false;
2314
2315 /* Don't crash on sprintf (str1, "%s"). */
2316 if (!orig)
2317 return false;
2318
dcb7fae2
RB
2319 tree orig_len = NULL_TREE;
2320 if (gimple_call_lhs (stmt))
35770bb2 2321 {
dcb7fae2 2322 orig_len = get_maxval_strlen (orig, 0);
d7e78447 2323 if (!orig_len)
35770bb2
RB
2324 return false;
2325 }
2326
2327 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2328 gimple_seq stmts = NULL;
2329 gimple repl = gimple_build_call (fn, 2, dest, orig);
2330 gimple_seq_add_stmt_without_update (&stmts, repl);
2331 if (gimple_call_lhs (stmt))
2332 {
d7e78447
RB
2333 if (!useless_type_conversion_p (integer_type_node,
2334 TREE_TYPE (orig_len)))
2335 orig_len = fold_convert (integer_type_node, orig_len);
2336 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
35770bb2
RB
2337 gimple_seq_add_stmt_without_update (&stmts, repl);
2338 gsi_replace_with_seq_vops (gsi, stmts);
2339 /* gsi now points at the assignment to the lhs, get a
2340 stmt iterator to the memcpy call.
2341 ??? We can't use gsi_for_stmt as that doesn't work when the
2342 CFG isn't built yet. */
2343 gimple_stmt_iterator gsi2 = *gsi;
2344 gsi_prev (&gsi2);
2345 fold_stmt (&gsi2);
2346 }
2347 else
2348 {
2349 gsi_replace_with_seq_vops (gsi, stmts);
2350 fold_stmt (gsi);
2351 }
2352 return true;
2353 }
2354 return false;
2355}
2356
d7e78447
RB
2357/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2358 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2359 attempt to simplify calls with more than 4 arguments.
35770bb2 2360
d7e78447
RB
2361 Return NULL_TREE if no simplification was possible, otherwise return the
2362 simplified form of the call as a tree. If IGNORED is true, it means that
2363 the caller does not use the returned value of the function. */
2364
2365static bool
dcb7fae2 2366gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
d7e78447 2367{
538dd0b7 2368 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
d7e78447
RB
2369 tree dest = gimple_call_arg (stmt, 0);
2370 tree destsize = gimple_call_arg (stmt, 1);
2371 tree fmt = gimple_call_arg (stmt, 2);
2372 tree orig = NULL_TREE;
2373 const char *fmt_str = NULL;
2374
2375 if (gimple_call_num_args (stmt) > 4)
2376 return false;
2377
2378 if (gimple_call_num_args (stmt) == 4)
2379 orig = gimple_call_arg (stmt, 3);
2380
2381 if (!tree_fits_uhwi_p (destsize))
2382 return false;
2383 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2384
2385 /* Check whether the format is a literal string constant. */
2386 fmt_str = c_getstr (fmt);
2387 if (fmt_str == NULL)
2388 return false;
2389
2390 if (!init_target_chars ())
2391 return false;
2392
2393 /* If the format doesn't contain % args or %%, use strcpy. */
2394 if (strchr (fmt_str, target_percent) == NULL)
2395 {
2396 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2397 if (!fn)
2398 return false;
2399
2400 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2401 if (orig)
2402 return false;
2403
2404 /* We could expand this as
2405 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2406 or to
2407 memcpy (str, fmt_with_nul_at_cstm1, cst);
2408 but in the former case that might increase code size
2409 and in the latter case grow .rodata section too much.
2410 So punt for now. */
2411 size_t len = strlen (fmt_str);
2412 if (len >= destlen)
2413 return false;
2414
2415 gimple_seq stmts = NULL;
2416 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2417 gimple_seq_add_stmt_without_update (&stmts, repl);
2418 if (gimple_call_lhs (stmt))
2419 {
2420 repl = gimple_build_assign (gimple_call_lhs (stmt),
2421 build_int_cst (integer_type_node, len));
2422 gimple_seq_add_stmt_without_update (&stmts, repl);
2423 gsi_replace_with_seq_vops (gsi, stmts);
2424 /* gsi now points at the assignment to the lhs, get a
2425 stmt iterator to the memcpy call.
2426 ??? We can't use gsi_for_stmt as that doesn't work when the
2427 CFG isn't built yet. */
2428 gimple_stmt_iterator gsi2 = *gsi;
2429 gsi_prev (&gsi2);
2430 fold_stmt (&gsi2);
2431 }
2432 else
2433 {
2434 gsi_replace_with_seq_vops (gsi, stmts);
2435 fold_stmt (gsi);
2436 }
2437 return true;
2438 }
2439
2440 /* If the format is "%s", use strcpy if the result isn't used. */
2441 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2442 {
2443 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2444 if (!fn)
2445 return false;
2446
2447 /* Don't crash on snprintf (str1, cst, "%s"). */
2448 if (!orig)
2449 return false;
2450
dcb7fae2 2451 tree orig_len = get_maxval_strlen (orig, 0);
d7e78447 2452 if (!orig_len)
dcb7fae2 2453 return false;
d7e78447
RB
2454
2455 /* We could expand this as
2456 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2457 or to
2458 memcpy (str1, str2_with_nul_at_cstm1, cst);
2459 but in the former case that might increase code size
2460 and in the latter case grow .rodata section too much.
2461 So punt for now. */
2462 if (compare_tree_int (orig_len, destlen) >= 0)
2463 return false;
2464
2465 /* Convert snprintf (str1, cst, "%s", str2) into
2466 strcpy (str1, str2) if strlen (str2) < cst. */
2467 gimple_seq stmts = NULL;
2468 gimple repl = gimple_build_call (fn, 2, dest, orig);
2469 gimple_seq_add_stmt_without_update (&stmts, repl);
2470 if (gimple_call_lhs (stmt))
2471 {
2472 if (!useless_type_conversion_p (integer_type_node,
2473 TREE_TYPE (orig_len)))
2474 orig_len = fold_convert (integer_type_node, orig_len);
2475 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2476 gimple_seq_add_stmt_without_update (&stmts, repl);
2477 gsi_replace_with_seq_vops (gsi, stmts);
2478 /* gsi now points at the assignment to the lhs, get a
2479 stmt iterator to the memcpy call.
2480 ??? We can't use gsi_for_stmt as that doesn't work when the
2481 CFG isn't built yet. */
2482 gimple_stmt_iterator gsi2 = *gsi;
2483 gsi_prev (&gsi2);
2484 fold_stmt (&gsi2);
2485 }
2486 else
2487 {
2488 gsi_replace_with_seq_vops (gsi, stmts);
2489 fold_stmt (gsi);
2490 }
2491 return true;
2492 }
2493 return false;
2494}
35770bb2 2495
edd7ae68
RB
2496/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2497 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2498 more than 3 arguments, and ARG may be null in the 2-argument case.
2499
2500 Return NULL_TREE if no simplification was possible, otherwise return the
2501 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2502 code of the function to be simplified. */
2503
2504static bool
2505gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2506 tree fp, tree fmt, tree arg,
2507 enum built_in_function fcode)
2508{
2509 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2510 tree fn_fputc, fn_fputs;
2511 const char *fmt_str = NULL;
2512
2513 /* If the return value is used, don't do the transformation. */
2514 if (gimple_call_lhs (stmt) != NULL_TREE)
2515 return false;
2516
2517 /* Check whether the format is a literal string constant. */
2518 fmt_str = c_getstr (fmt);
2519 if (fmt_str == NULL)
2520 return false;
2521
2522 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2523 {
2524 /* If we're using an unlocked function, assume the other
2525 unlocked functions exist explicitly. */
2526 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2527 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2528 }
2529 else
2530 {
2531 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2532 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2533 }
2534
2535 if (!init_target_chars ())
2536 return false;
2537
2538 /* If the format doesn't contain % args or %%, use strcpy. */
2539 if (strchr (fmt_str, target_percent) == NULL)
2540 {
2541 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2542 && arg)
2543 return false;
2544
2545 /* If the format specifier was "", fprintf does nothing. */
2546 if (fmt_str[0] == '\0')
2547 {
2548 replace_call_with_value (gsi, NULL_TREE);
2549 return true;
2550 }
2551
2552 /* When "string" doesn't contain %, replace all cases of
2553 fprintf (fp, string) with fputs (string, fp). The fputs
2554 builtin will take care of special cases like length == 1. */
2555 if (fn_fputs)
2556 {
2557 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2558 replace_call_with_call_and_fold (gsi, repl);
2559 return true;
2560 }
2561 }
2562
2563 /* The other optimizations can be done only on the non-va_list variants. */
2564 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2565 return false;
2566
2567 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2568 else if (strcmp (fmt_str, target_percent_s) == 0)
2569 {
2570 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2571 return false;
2572 if (fn_fputs)
2573 {
2574 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2575 replace_call_with_call_and_fold (gsi, repl);
2576 return true;
2577 }
2578 }
2579
2580 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2581 else if (strcmp (fmt_str, target_percent_c) == 0)
2582 {
2583 if (!arg
2584 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2585 return false;
2586 if (fn_fputc)
2587 {
2588 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2589 replace_call_with_call_and_fold (gsi, repl);
2590 return true;
2591 }
2592 }
2593
2594 return false;
2595}
2596
ad03a744
RB
2597/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2598 FMT and ARG are the arguments to the call; we don't fold cases with
2599 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2600
2601 Return NULL_TREE if no simplification was possible, otherwise return the
2602 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2603 code of the function to be simplified. */
2604
2605static bool
2606gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2607 tree arg, enum built_in_function fcode)
2608{
2609 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2610 tree fn_putchar, fn_puts, newarg;
2611 const char *fmt_str = NULL;
2612
2613 /* If the return value is used, don't do the transformation. */
2614 if (gimple_call_lhs (stmt) != NULL_TREE)
2615 return false;
2616
2617 /* Check whether the format is a literal string constant. */
2618 fmt_str = c_getstr (fmt);
2619 if (fmt_str == NULL)
2620 return false;
2621
2622 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2623 {
2624 /* If we're using an unlocked function, assume the other
2625 unlocked functions exist explicitly. */
2626 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2627 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2628 }
2629 else
2630 {
2631 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2632 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2633 }
2634
2635 if (!init_target_chars ())
2636 return false;
2637
2638 if (strcmp (fmt_str, target_percent_s) == 0
2639 || strchr (fmt_str, target_percent) == NULL)
2640 {
2641 const char *str;
2642
2643 if (strcmp (fmt_str, target_percent_s) == 0)
2644 {
2645 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2646 return false;
2647
2648 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2649 return false;
2650
2651 str = c_getstr (arg);
2652 if (str == NULL)
2653 return false;
2654 }
2655 else
2656 {
2657 /* The format specifier doesn't contain any '%' characters. */
2658 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2659 && arg)
2660 return false;
2661 str = fmt_str;
2662 }
2663
2664 /* If the string was "", printf does nothing. */
2665 if (str[0] == '\0')
2666 {
2667 replace_call_with_value (gsi, NULL_TREE);
2668 return true;
2669 }
2670
2671 /* If the string has length of 1, call putchar. */
2672 if (str[1] == '\0')
2673 {
2674 /* Given printf("c"), (where c is any one character,)
2675 convert "c"[0] to an int and pass that to the replacement
2676 function. */
2677 newarg = build_int_cst (integer_type_node, str[0]);
2678 if (fn_putchar)
2679 {
2680 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2681 replace_call_with_call_and_fold (gsi, repl);
2682 return true;
2683 }
2684 }
2685 else
2686 {
2687 /* If the string was "string\n", call puts("string"). */
2688 size_t len = strlen (str);
2689 if ((unsigned char)str[len - 1] == target_newline
2690 && (size_t) (int) len == len
2691 && (int) len > 0)
2692 {
2693 char *newstr;
2694 tree offset_node, string_cst;
2695
2696 /* Create a NUL-terminated string that's one char shorter
2697 than the original, stripping off the trailing '\n'. */
2698 newarg = build_string_literal (len, str);
2699 string_cst = string_constant (newarg, &offset_node);
2700 gcc_checking_assert (string_cst
2701 && (TREE_STRING_LENGTH (string_cst)
2702 == (int) len)
2703 && integer_zerop (offset_node)
2704 && (unsigned char)
2705 TREE_STRING_POINTER (string_cst)[len - 1]
2706 == target_newline);
2707 /* build_string_literal creates a new STRING_CST,
2708 modify it in place to avoid double copying. */
2709 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2710 newstr[len - 1] = '\0';
2711 if (fn_puts)
2712 {
2713 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2714 replace_call_with_call_and_fold (gsi, repl);
2715 return true;
2716 }
2717 }
2718 else
2719 /* We'd like to arrange to call fputs(string,stdout) here,
2720 but we need stdout and don't have a way to get it yet. */
2721 return false;
2722 }
2723 }
2724
2725 /* The other optimizations can be done only on the non-va_list variants. */
2726 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2727 return false;
2728
2729 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2730 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2731 {
2732 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2733 return false;
2734 if (fn_puts)
2735 {
2736 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2737 replace_call_with_call_and_fold (gsi, repl);
2738 return true;
2739 }
2740 }
2741
2742 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2743 else if (strcmp (fmt_str, target_percent_c) == 0)
2744 {
2745 if (!arg || ! useless_type_conversion_p (integer_type_node,
2746 TREE_TYPE (arg)))
2747 return false;
2748 if (fn_putchar)
2749 {
2750 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2751 replace_call_with_call_and_fold (gsi, repl);
2752 return true;
2753 }
2754 }
2755
2756 return false;
2757}
2758
edd7ae68 2759
fef5a0d9
RB
2760
2761/* Fold a call to __builtin_strlen with known length LEN. */
2762
2763static bool
dcb7fae2 2764gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 2765{
dcb7fae2
RB
2766 gimple stmt = gsi_stmt (*gsi);
2767 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
fef5a0d9
RB
2768 if (!len)
2769 return false;
2813904b 2770 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
fef5a0d9
RB
2771 replace_call_with_value (gsi, len);
2772 return true;
cbdd87d4
RG
2773}
2774
2775
dcb7fae2
RB
2776/* Fold the non-target builtin at *GSI and return whether any simplification
2777 was made. */
cbdd87d4 2778
fef5a0d9 2779static bool
dcb7fae2 2780gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 2781{
538dd0b7 2782 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 2783 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 2784
dcb7fae2
RB
2785 /* Give up for always_inline inline builtins until they are
2786 inlined. */
2787 if (avoid_folding_inline_builtin (callee))
2788 return false;
cbdd87d4 2789
edd7ae68
RB
2790 unsigned n = gimple_call_num_args (stmt);
2791 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2792 switch (fcode)
cbdd87d4 2793 {
dcb7fae2
RB
2794 case BUILT_IN_BZERO:
2795 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2796 gimple_call_arg (stmt, 1));
2797 case BUILT_IN_MEMSET:
2798 return gimple_fold_builtin_memset (gsi,
2799 gimple_call_arg (stmt, 1),
2800 gimple_call_arg (stmt, 2));
2801 case BUILT_IN_BCOPY:
2802 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2803 gimple_call_arg (stmt, 0), 3);
2804 case BUILT_IN_MEMCPY:
2805 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2806 gimple_call_arg (stmt, 1), 0);
2807 case BUILT_IN_MEMPCPY:
2808 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2809 gimple_call_arg (stmt, 1), 1);
2810 case BUILT_IN_MEMMOVE:
2811 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2812 gimple_call_arg (stmt, 1), 3);
2813 case BUILT_IN_SPRINTF_CHK:
2814 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 2815 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
2816 case BUILT_IN_STRCAT_CHK:
2817 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
2818 case BUILT_IN_STRNCAT_CHK:
2819 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 2820 case BUILT_IN_STRLEN:
dcb7fae2 2821 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 2822 case BUILT_IN_STRCPY:
dcb7fae2 2823 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 2824 gimple_call_arg (stmt, 0),
dcb7fae2 2825 gimple_call_arg (stmt, 1));
cbdd87d4 2826 case BUILT_IN_STRNCPY:
dcb7fae2 2827 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
2828 gimple_call_arg (stmt, 0),
2829 gimple_call_arg (stmt, 1),
dcb7fae2 2830 gimple_call_arg (stmt, 2));
9a7eefec 2831 case BUILT_IN_STRCAT:
dcb7fae2
RB
2832 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2833 gimple_call_arg (stmt, 1));
ad03a744
RB
2834 case BUILT_IN_STRNCAT:
2835 return gimple_fold_builtin_strncat (gsi);
cbdd87d4 2836 case BUILT_IN_FPUTS:
dcb7fae2
RB
2837 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2838 gimple_call_arg (stmt, 1), false);
cbdd87d4 2839 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
2840 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2841 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
2842 case BUILT_IN_MEMCPY_CHK:
2843 case BUILT_IN_MEMPCPY_CHK:
2844 case BUILT_IN_MEMMOVE_CHK:
2845 case BUILT_IN_MEMSET_CHK:
dcb7fae2 2846 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
2847 gimple_call_arg (stmt, 0),
2848 gimple_call_arg (stmt, 1),
2849 gimple_call_arg (stmt, 2),
2850 gimple_call_arg (stmt, 3),
edd7ae68 2851 fcode);
cbdd87d4
RG
2852 case BUILT_IN_STRCPY_CHK:
2853 case BUILT_IN_STPCPY_CHK:
dcb7fae2 2854 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
2855 gimple_call_arg (stmt, 0),
2856 gimple_call_arg (stmt, 1),
2857 gimple_call_arg (stmt, 2),
edd7ae68 2858 fcode);
cbdd87d4 2859 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 2860 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
2861 return gimple_fold_builtin_stxncpy_chk (gsi,
2862 gimple_call_arg (stmt, 0),
2863 gimple_call_arg (stmt, 1),
2864 gimple_call_arg (stmt, 2),
2865 gimple_call_arg (stmt, 3),
edd7ae68 2866 fcode);
cbdd87d4
RG
2867 case BUILT_IN_SNPRINTF_CHK:
2868 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 2869 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
d7e78447 2870 case BUILT_IN_SNPRINTF:
dcb7fae2 2871 return gimple_fold_builtin_snprintf (gsi);
d7e78447 2872 case BUILT_IN_SPRINTF:
dcb7fae2 2873 return gimple_fold_builtin_sprintf (gsi);
edd7ae68
RB
2874 case BUILT_IN_FPRINTF:
2875 case BUILT_IN_FPRINTF_UNLOCKED:
2876 case BUILT_IN_VFPRINTF:
2877 if (n == 2 || n == 3)
2878 return gimple_fold_builtin_fprintf (gsi,
2879 gimple_call_arg (stmt, 0),
2880 gimple_call_arg (stmt, 1),
2881 n == 3
2882 ? gimple_call_arg (stmt, 2)
2883 : NULL_TREE,
2884 fcode);
2885 break;
2886 case BUILT_IN_FPRINTF_CHK:
2887 case BUILT_IN_VFPRINTF_CHK:
2888 if (n == 3 || n == 4)
2889 return gimple_fold_builtin_fprintf (gsi,
2890 gimple_call_arg (stmt, 0),
2891 gimple_call_arg (stmt, 2),
2892 n == 4
2893 ? gimple_call_arg (stmt, 3)
2894 : NULL_TREE,
2895 fcode);
2896 break;
ad03a744
RB
2897 case BUILT_IN_PRINTF:
2898 case BUILT_IN_PRINTF_UNLOCKED:
2899 case BUILT_IN_VPRINTF:
2900 if (n == 1 || n == 2)
2901 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2902 n == 2
2903 ? gimple_call_arg (stmt, 1)
2904 : NULL_TREE, fcode);
2905 break;
2906 case BUILT_IN_PRINTF_CHK:
2907 case BUILT_IN_VPRINTF_CHK:
2908 if (n == 2 || n == 3)
2909 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2910 n == 3
2911 ? gimple_call_arg (stmt, 2)
2912 : NULL_TREE, fcode);
fef5a0d9
RB
2913 default:;
2914 }
2915
2916 /* Try the generic builtin folder. */
2917 bool ignore = (gimple_call_lhs (stmt) == NULL);
2918 tree result = fold_call_stmt (stmt, ignore);
2919 if (result)
2920 {
2921 if (ignore)
2922 STRIP_NOPS (result);
2923 else
2924 result = fold_convert (gimple_call_return_type (stmt), result);
2925 if (!update_call_from_tree (gsi, result))
2926 gimplify_and_update_call_from_tree (gsi, result);
2927 return true;
2928 }
2929
2930 return false;
2931}
2932
1304953e
JJ
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  /* Compute in twice the maximal supported precision so that even a
     widest-int multiply cannot itself overflow.  */
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  /* A negative infinite-precision result can never be represented in an
     unsigned type.  */
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  /* Otherwise the result overflows iff it needs more bits than TYPE has.  */
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
2959
cbdd87d4
RG
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   If INPLACE is true, no new statements may be created (the statement
   is only simplified in place).  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already wraps a concrete function address;
	     strip the wrapper.  Warn in the dump file if the devirtualized
	     target contradicts the type-inheritance analysis.  */
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Try speculation-free devirtualization based on the set of
	     possible polymorphic call targets.  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Give the SSA lhs a harmless default
			     definition so uses stay valid.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		}
	      else
		{
		  /* No possible target: the call can never execute;
		     replace it with __builtin_unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    {
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      gsi_replace (gsi, new_stmt, false);
		    }
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise at least fold *& in the static chain operand.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      /* Machine-dependent builtins are folded by the target hook.  */
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* Fold internal functions: __builtin_expect, UBSan checks and the
	 arithmetic-with-overflow functions.  SUBCODE is the underlying
	 arithmetic operation; CPLX_RESULT is true for IFN_*_OVERFLOW
	 whose result is a complex pair (value, overflow flag).  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  /* Drop the check when the object size is unknown (all-ones)
	     or provably large enough.  */
	  if (integer_all_onesp (gimple_call_arg (stmt, 2))
	      || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
		  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
		  && tree_int_cst_le (gimple_call_arg (stmt, 1),
				      gimple_call_arg (stmt, 2))))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      return true;
	    }
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      /* For IFN_*_OVERFLOW the value type is the element type of
		 the complex lhs, if any.  */
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    /* For UBSan checks an overflowing constant fold must
		       be kept so the runtime check still fires.  */
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    /* The conversion would not be value-preserving;
		       give up.  */
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Package value and overflow flag into a complex pair.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
3250
e0ee10ed
RB
3251
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	/* Simplified to a bare SSA name: compare it against zero.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  /* Simplified to a constant: make the condition trivially
	     true or false.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Materialize the simplification as a sequence computing a
	     boolean result and compare that against zero.  */
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      /* In-place replacement is only possible when the new RHS does not
	 need more operand slots than the statement currently has.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  &ops[0], ops[1], ops[2]);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
3365
040292e7
RB
/* Canonicalize MEM_REFs invariant address operand after propagation.
   *T is the reference (or address) to canonicalize; it is rewritten
   in place.  Return true if anything changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through an outer ADDR_EXPR and any handled components to the
     base memory reference.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
3448
cbdd87d4
RG
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE maps SSA names to known values and
   controls how far gimple_simplify may follow SSA def chains.  Return
   true if the statement was changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    /* Inputs may additionally be plain addresses.  */
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      /* When folding in place no new statements may be emitted, so do
	 not pass a sequence to collect them in.  */
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In place we may only shrink the statement, never grow it.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (as_a <gcond *> (stmt));
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* Inputs count as memory only when their constraint allows
	       memory but not a register.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the referenced object and re-take its address.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
3690
e0ee10ed
RB
3691/* Valueziation callback that ends up not following SSA edges. */
3692
3693tree
3694no_follow_ssa_edges (tree)
3695{
3696 return NULL_TREE;
3697}
3698
45cc9f96
RB
3699/* Valueization callback that ends up following single-use SSA edges only. */
3700
3701tree
3702follow_single_use_edges (tree val)
3703{
3704 if (TREE_CODE (val) == SSA_NAME
3705 && !has_single_use (val))
3706 return NULL_TREE;
3707 return val;
3708}
3709
cbdd87d4
RG
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Without a valueization callback we do not follow SSA edges.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
3722
/* Fold the statement pointed to by GSI, valueizing SSA operands and
   following SSA edges through the callback VALUEIZE.  Returns true iff
   folding makes any changes.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
3728
59401b92 3729/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
3730 *&x created by constant propagation are handled. The statement cannot
3731 be replaced with a new one. Return true if the statement was
3732 changed, false otherwise.
59401b92 3733 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
3734 be in unfolded state as resulting from for example constant propagation
3735 which can produce *&x = 0. */
3736
3737bool
59401b92 3738fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 3739{
59401b92 3740 gimple stmt = gsi_stmt (*gsi);
e0ee10ed 3741 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 3742 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
3743 return changed;
3744}
3745
e89065a1
SL
3746/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3747 if EXPR is null or we don't know how.
3748 If non-null, the result always has boolean type. */
3749
3750static tree
3751canonicalize_bool (tree expr, bool invert)
3752{
3753 if (!expr)
3754 return NULL_TREE;
3755 else if (invert)
3756 {
3757 if (integer_nonzerop (expr))
3758 return boolean_false_node;
3759 else if (integer_zerop (expr))
3760 return boolean_true_node;
3761 else if (TREE_CODE (expr) == SSA_NAME)
3762 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3763 build_int_cst (TREE_TYPE (expr), 0));
3764 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3765 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3766 boolean_type_node,
3767 TREE_OPERAND (expr, 0),
3768 TREE_OPERAND (expr, 1));
3769 else
3770 return NULL_TREE;
3771 }
3772 else
3773 {
3774 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3775 return expr;
3776 if (integer_nonzerop (expr))
3777 return boolean_true_node;
3778 else if (integer_zerop (expr))
3779 return boolean_false_node;
3780 else if (TREE_CODE (expr) == SSA_NAME)
3781 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3782 build_int_cst (TREE_TYPE (expr), 0));
3783 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3784 return fold_build2 (TREE_CODE (expr),
3785 boolean_type_node,
3786 TREE_OPERAND (expr, 0),
3787 TREE_OPERAND (expr, 1));
3788 else
3789 return NULL_TREE;
3790 }
3791}
3792
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	/* A boolean NAME is equivalent to NAME != 0 and to NAME == 1.  */
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == 1): compare against the comparison
	     defining NAME directly.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) or (name != 1): compare against the inverted
	     comparison.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
3850
3851/* Check to see if two boolean expressions OP1 and OP2 are logically
3852 equivalent. */
3853
3854static bool
3855same_bool_result_p (const_tree op1, const_tree op2)
3856{
3857 /* Simple cases first. */
3858 if (operand_equal_p (op1, op2, 0))
3859 return true;
3860
3861 /* Check the cases where at least one of the operands is a comparison.
3862 These are a bit smarter than operand_equal_p in that they apply some
3863 identifies on SSA_NAMEs. */
3864 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
3865 && same_bool_comparison_p (op1, TREE_CODE (op2),
3866 TREE_OPERAND (op2, 0),
3867 TREE_OPERAND (op2, 1)))
3868 return true;
3869 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
3870 && same_bool_comparison_p (op2, TREE_CODE (op1),
3871 TREE_OPERAND (op1, 0),
3872 TREE_OPERAND (op1, 1)))
3873 return true;
3874
3875 /* Default case. */
3876 return false;
3877}
3878
/* Forward declarations for some mutually recursive functions.
   The and_* and or_* families call into each other via DeMorgan
   rewrites, so each side needs to see the other's prototypes.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
3899
3900/* Helper function for and_comparisons_1: try to simplify the AND of the
3901 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3902 If INVERT is true, invert the value of the VAR before doing the AND.
3903 Return NULL_EXPR if we can't simplify this to a single expression. */
3904
3905static tree
3906and_var_with_comparison (tree var, bool invert,
3907 enum tree_code code2, tree op2a, tree op2b)
3908{
3909 tree t;
3910 gimple stmt = SSA_NAME_DEF_STMT (var);
3911
3912 /* We can only deal with variables whose definitions are assignments. */
3913 if (!is_gimple_assign (stmt))
3914 return NULL_TREE;
3915
3916 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3917 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3918 Then we only have to consider the simpler non-inverted cases. */
3919 if (invert)
3920 t = or_var_with_comparison_1 (stmt,
3921 invert_tree_comparison (code2, false),
3922 op2a, op2b);
3923 else
3924 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
3925 return canonicalize_bool (t, invert);
3926}
3927
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a code2 op2b) is a truth test of op2a ...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ... or a test of op2a being false.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	   inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	   inner1 AND (inner1 OR inner2) => inner1
	   !inner1 AND (inner1 AND inner2) => false
	   !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b))) */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t) */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial) */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4090
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp orders the two constants: <0, 0 or >0 for op1b <, == or > op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* The second comparison implies the !=, so it subsumes both.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges:  (x >= c AND x <= c) => x == c.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges. */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* (NAME == 0) and (NAME != 1) test the inverse of NAME.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* Non-boolean PHI: fall through to give up.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4344
4345/* Try to simplify the AND of two comparisons, specified by
4346 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4347 If this can be simplified to a single expression (without requiring
4348 introducing more SSA variables to hold intermediate values),
4349 return the resulting tree. Otherwise return NULL_TREE.
4350 If the result expression is non-null, it has boolean type. */
4351
4352tree
4353maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4354 enum tree_code code2, tree op2a, tree op2b)
4355{
4356 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4357 if (t)
4358 return t;
4359 else
4360 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4361}
4362
4363/* Helper function for or_comparisons_1: try to simplify the OR of the
4364 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4365 If INVERT is true, invert the value of VAR before doing the OR.
4366 Return NULL_EXPR if we can't simplify this to a single expression. */
4367
4368static tree
4369or_var_with_comparison (tree var, bool invert,
4370 enum tree_code code2, tree op2a, tree op2b)
4371{
4372 tree t;
4373 gimple stmt = SSA_NAME_DEF_STMT (var);
4374
4375 /* We can only deal with variables whose definitions are assignments. */
4376 if (!is_gimple_assign (stmt))
4377 return NULL_TREE;
4378
4379 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4380 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4381 Then we only have to consider the simpler non-inverted cases. */
4382 if (invert)
4383 t = and_var_with_comparison_1 (stmt,
4384 invert_tree_comparison (code2, false),
4385 op2a, op2b);
4386 else
4387 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4388 return canonicalize_bool (t, invert);
4389}
4390
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a code2 op2b) is a truth test of op2a ...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ... or a test of op2a being false.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	   inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	   inner1 OR (inner1 AND inner2) => inner1
	   !inner1 OR (inner1 OR inner2) => true
	   !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b))) */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial) */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial) */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4554
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp orders the two constants: <0, 0 or >0 for op1b <, == or > op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  /* The second comparison already covers the ==, so it subsumes
	     the whole OR.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise chose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges:  (x < c OR x > c) => x != c.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* (NAME == 0) and (NAME != 1) test the inverse of NAME.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* Non-boolean PHI: fall through to give up.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4808
4809/* Try to simplify the OR of two comparisons, specified by
4810 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4811 If this can be simplified to a single expression (without requiring
4812 introducing more SSA variables to hold intermediate values),
4813 return the resulting tree. Otherwise return NULL_TREE.
4814 If the result expression is non-null, it has boolean type. */
4815
4816tree
4817maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4818 enum tree_code code2, tree op2a, tree op2b)
4819{
4820 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4821 if (t)
4822 return t;
4823 else
4824 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4825}
cfef45c8
RG
4826
4827
4828/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4829
4830 Either NULL_TREE, a simplified but non-constant or a constant
4831 is returned.
4832
4833 ??? This should go into a gimple-fold-inline.h file to be eventually
4834 privatized with the single valueize function used in the various TUs
4835 to avoid the indirect function call overhead. */
4836
4837tree
d2a85801
RB
4838gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
4839 tree (*gvalueize) (tree))
cfef45c8 4840{
45cc9f96
RB
4841 code_helper rcode;
4842 tree ops[3] = {};
4843 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4844 edges if there are intermediate VARYING defs. For this reason
4845 do not follow SSA edges here even though SCCVN can technically
4846 just deal fine with that. */
d2a85801 4847 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize)
45cc9f96
RB
4848 && rcode.is_tree_code ()
4849 && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
4850 || ((tree_code) rcode) == ADDR_EXPR)
4851 && is_gimple_val (ops[0]))
4852 {
4853 tree res = ops[0];
4854 if (dump_file && dump_flags & TDF_DETAILS)
4855 {
4856 fprintf (dump_file, "Match-and-simplified ");
4857 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4858 fprintf (dump_file, " to ");
4859 print_generic_expr (dump_file, res, 0);
4860 fprintf (dump_file, "\n");
4861 }
4862 return res;
4863 }
4864
cfef45c8
RG
4865 location_t loc = gimple_location (stmt);
4866 switch (gimple_code (stmt))
4867 {
4868 case GIMPLE_ASSIGN:
4869 {
4870 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4871
4872 switch (get_gimple_rhs_class (subcode))
4873 {
4874 case GIMPLE_SINGLE_RHS:
4875 {
4876 tree rhs = gimple_assign_rhs1 (stmt);
4877 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4878
4879 if (TREE_CODE (rhs) == SSA_NAME)
4880 {
4881 /* If the RHS is an SSA_NAME, return its known constant value,
4882 if any. */
4883 return (*valueize) (rhs);
4884 }
4885 /* Handle propagating invariant addresses into address
4886 operations. */
4887 else if (TREE_CODE (rhs) == ADDR_EXPR
4888 && !is_gimple_min_invariant (rhs))
4889 {
d25c4172 4890 HOST_WIDE_INT offset = 0;
cfef45c8
RG
4891 tree base;
4892 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
4893 &offset,
4894 valueize);
4895 if (base
4896 && (CONSTANT_CLASS_P (base)
4897 || decl_address_invariant_p (base)))
4898 return build_invariant_address (TREE_TYPE (rhs),
4899 base, offset);
4900 }
4901 else if (TREE_CODE (rhs) == CONSTRUCTOR
4902 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
4903 && (CONSTRUCTOR_NELTS (rhs)
4904 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
4905 {
4906 unsigned i;
d2a12ae7 4907 tree val, *vec;
cfef45c8 4908
d2a12ae7
RG
4909 vec = XALLOCAVEC (tree,
4910 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
cfef45c8
RG
4911 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
4912 {
4913 val = (*valueize) (val);
4914 if (TREE_CODE (val) == INTEGER_CST
4915 || TREE_CODE (val) == REAL_CST
4916 || TREE_CODE (val) == FIXED_CST)
d2a12ae7 4917 vec[i] = val;
cfef45c8
RG
4918 else
4919 return NULL_TREE;
4920 }
4921
d2a12ae7 4922 return build_vector (TREE_TYPE (rhs), vec);
cfef45c8 4923 }
bdf37f7a
JH
4924 if (subcode == OBJ_TYPE_REF)
4925 {
4926 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
4927 /* If callee is constant, we can fold away the wrapper. */
4928 if (is_gimple_min_invariant (val))
4929 return val;
4930 }
cfef45c8
RG
4931
4932 if (kind == tcc_reference)
4933 {
4934 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
4935 || TREE_CODE (rhs) == REALPART_EXPR
4936 || TREE_CODE (rhs) == IMAGPART_EXPR)
4937 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4938 {
4939 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4940 return fold_unary_loc (EXPR_LOCATION (rhs),
4941 TREE_CODE (rhs),
4942 TREE_TYPE (rhs), val);
4943 }
4944 else if (TREE_CODE (rhs) == BIT_FIELD_REF
4945 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4946 {
4947 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4948 return fold_ternary_loc (EXPR_LOCATION (rhs),
4949 TREE_CODE (rhs),
4950 TREE_TYPE (rhs), val,
4951 TREE_OPERAND (rhs, 1),
4952 TREE_OPERAND (rhs, 2));
4953 }
4954 else if (TREE_CODE (rhs) == MEM_REF
4955 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4956 {
4957 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4958 if (TREE_CODE (val) == ADDR_EXPR
4959 && is_gimple_min_invariant (val))
4960 {
4961 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
4962 unshare_expr (val),
4963 TREE_OPERAND (rhs, 1));
4964 if (tem)
4965 rhs = tem;
4966 }
4967 }
4968 return fold_const_aggregate_ref_1 (rhs, valueize);
4969 }
4970 else if (kind == tcc_declaration)
4971 return get_symbol_constant_value (rhs);
4972 return rhs;
4973 }
4974
4975 case GIMPLE_UNARY_RHS:
f3582e54 4976 return NULL_TREE;
cfef45c8
RG
4977
4978 case GIMPLE_BINARY_RHS:
4979 {
4980 /* Handle binary operators that can appear in GIMPLE form. */
4981 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4982 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4983
4984 /* Translate &x + CST into an invariant form suitable for
4985 further propagation. */
4986 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
4987 && TREE_CODE (op0) == ADDR_EXPR
4988 && TREE_CODE (op1) == INTEGER_CST)
4989 {
4990 tree off = fold_convert (ptr_type_node, op1);
4d59a001
RG
4991 return build_fold_addr_expr_loc
4992 (loc,
4993 fold_build2 (MEM_REF,
cfef45c8
RG
4994 TREE_TYPE (TREE_TYPE (op0)),
4995 unshare_expr (op0), off));
4996 }
4997
4998 return fold_binary_loc (loc, subcode,
4999 gimple_expr_type (stmt), op0, op1);
5000 }
5001
5002 case GIMPLE_TERNARY_RHS:
5003 {
5004 /* Handle ternary operators that can appear in GIMPLE form. */
5005 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5006 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5007 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5008
d3878abf
RG
5009 /* Fold embedded expressions in ternary codes. */
5010 if ((subcode == COND_EXPR
5011 || subcode == VEC_COND_EXPR)
5012 && COMPARISON_CLASS_P (op0))
5013 {
5014 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
5015 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
5016 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
5017 TREE_TYPE (op0), op00, op01);
5018 if (tem)
5019 op0 = tem;
5020 }
5021
cfef45c8
RG
5022 return fold_ternary_loc (loc, subcode,
5023 gimple_expr_type (stmt), op0, op1, op2);
5024 }
5025
5026 default:
5027 gcc_unreachable ();
5028 }
5029 }
5030
5031 case GIMPLE_CALL:
5032 {
25583c4f 5033 tree fn;
538dd0b7 5034 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
5035
5036 if (gimple_call_internal_p (stmt))
31e071ae
MP
5037 {
5038 enum tree_code subcode = ERROR_MARK;
5039 switch (gimple_call_internal_fn (stmt))
5040 {
5041 case IFN_UBSAN_CHECK_ADD:
5042 subcode = PLUS_EXPR;
5043 break;
5044 case IFN_UBSAN_CHECK_SUB:
5045 subcode = MINUS_EXPR;
5046 break;
5047 case IFN_UBSAN_CHECK_MUL:
5048 subcode = MULT_EXPR;
5049 break;
5050 default:
5051 return NULL_TREE;
5052 }
368b454d
JJ
5053 tree arg0 = gimple_call_arg (stmt, 0);
5054 tree arg1 = gimple_call_arg (stmt, 1);
5055 tree op0 = (*valueize) (arg0);
5056 tree op1 = (*valueize) (arg1);
31e071ae
MP
5057
5058 if (TREE_CODE (op0) != INTEGER_CST
5059 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
5060 {
5061 switch (subcode)
5062 {
5063 case MULT_EXPR:
5064 /* x * 0 = 0 * x = 0 without overflow. */
5065 if (integer_zerop (op0) || integer_zerop (op1))
5066 return build_zero_cst (TREE_TYPE (arg0));
5067 break;
5068 case MINUS_EXPR:
5069 /* y - y = 0 without overflow. */
5070 if (operand_equal_p (op0, op1, 0))
5071 return build_zero_cst (TREE_TYPE (arg0));
5072 break;
5073 default:
5074 break;
5075 }
5076 }
5077 tree res
5078 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
5079 if (res
5080 && TREE_CODE (res) == INTEGER_CST
5081 && !TREE_OVERFLOW (res))
5082 return res;
5083 return NULL_TREE;
5084 }
25583c4f
RS
5085
5086 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8
RG
5087 if (TREE_CODE (fn) == ADDR_EXPR
5088 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5c944c6c
RB
5089 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5090 && gimple_builtin_call_types_compatible_p (stmt,
5091 TREE_OPERAND (fn, 0)))
cfef45c8
RG
5092 {
5093 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 5094 tree retval;
cfef45c8
RG
5095 unsigned i;
5096 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5097 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 5098 retval = fold_builtin_call_array (loc,
538dd0b7 5099 gimple_call_return_type (call_stmt),
cfef45c8 5100 fn, gimple_call_num_args (stmt), args);
cfef45c8 5101 if (retval)
5c944c6c
RB
5102 {
5103 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5104 STRIP_NOPS (retval);
538dd0b7
DM
5105 retval = fold_convert (gimple_call_return_type (call_stmt),
5106 retval);
5c944c6c 5107 }
cfef45c8
RG
5108 return retval;
5109 }
5110 return NULL_TREE;
5111 }
5112
5113 default:
5114 return NULL_TREE;
5115 }
5116}
5117
5118/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5119 Returns NULL_TREE if folding to a constant is not possible, otherwise
5120 returns a constant according to is_gimple_min_invariant. */
5121
5122tree
5123gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
5124{
5125 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5126 if (res && is_gimple_min_invariant (res))
5127 return res;
5128 return NULL_TREE;
5129}
5130
5131
5132/* The following set of functions are supposed to fold references using
5133 their constant initializers. */
5134
cfef45c8
RG
5135/* See if we can find constructor defining value of BASE.
5136 When we know the consructor with constant offset (such as
5137 base is array[40] and we do know constructor of array), then
5138 BIT_OFFSET is adjusted accordingly.
5139
5140 As a special case, return error_mark_node when constructor
5141 is not explicitly available, but it is known to be zero
5142 such as 'static const int a;'. */
5143static tree
5144get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5145 tree (*valueize)(tree))
5146{
5147 HOST_WIDE_INT bit_offset2, size, max_size;
5148 if (TREE_CODE (base) == MEM_REF)
5149 {
5150 if (!integer_zerop (TREE_OPERAND (base, 1)))
5151 {
9541ffee 5152 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
cfef45c8 5153 return NULL_TREE;
807e902e 5154 *bit_offset += (mem_ref_offset (base).to_short_addr ()
cfef45c8
RG
5155 * BITS_PER_UNIT);
5156 }
5157
5158 if (valueize
5159 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5160 base = valueize (TREE_OPERAND (base, 0));
5161 if (!base || TREE_CODE (base) != ADDR_EXPR)
5162 return NULL_TREE;
5163 base = TREE_OPERAND (base, 0);
5164 }
5165
5166 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5167 DECL_INITIAL. If BASE is a nested reference into another
5168 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5169 the inner reference. */
5170 switch (TREE_CODE (base))
5171 {
5172 case VAR_DECL:
cfef45c8 5173 case CONST_DECL:
6a6dac52
JH
5174 {
5175 tree init = ctor_for_folding (base);
5176
688010ba 5177 /* Our semantic is exact opposite of ctor_for_folding;
6a6dac52
JH
5178 NULL means unknown, while error_mark_node is 0. */
5179 if (init == error_mark_node)
5180 return NULL_TREE;
5181 if (!init)
5182 return error_mark_node;
5183 return init;
5184 }
cfef45c8
RG
5185
5186 case ARRAY_REF:
5187 case COMPONENT_REF:
5188 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
5189 if (max_size == -1 || size != max_size)
5190 return NULL_TREE;
5191 *bit_offset += bit_offset2;
5192 return get_base_constructor (base, bit_offset, valueize);
5193
5194 case STRING_CST:
5195 case CONSTRUCTOR:
5196 return base;
5197
5198 default:
5199 return NULL_TREE;
5200 }
5201}
5202
cfef45c8
RG
5203/* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5204 SIZE to the memory at bit OFFSET. */
5205
5206static tree
5207fold_array_ctor_reference (tree type, tree ctor,
5208 unsigned HOST_WIDE_INT offset,
c44c2088
JH
5209 unsigned HOST_WIDE_INT size,
5210 tree from_decl)
cfef45c8
RG
5211{
5212 unsigned HOST_WIDE_INT cnt;
5213 tree cfield, cval;
807e902e
KZ
5214 offset_int low_bound;
5215 offset_int elt_size;
5216 offset_int index, max_index;
5217 offset_int access_index;
b48e22b2 5218 tree domain_type = NULL_TREE, index_type = NULL_TREE;
cfef45c8
RG
5219 HOST_WIDE_INT inner_offset;
5220
5221 /* Compute low bound and elt size. */
eb8f1123
RG
5222 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5223 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
5224 if (domain_type && TYPE_MIN_VALUE (domain_type))
5225 {
5226 /* Static constructors for variably sized objects makes no sense. */
5227 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
b48e22b2 5228 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
807e902e 5229 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
5230 }
5231 else
807e902e 5232 low_bound = 0;
cfef45c8 5233 /* Static constructors for variably sized objects makes no sense. */
c3284718 5234 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
cfef45c8 5235 == INTEGER_CST);
807e902e 5236 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8
RG
5237
5238 /* We can handle only constantly sized accesses that are known to not
5239 be larger than size of array element. */
5240 if (!TYPE_SIZE_UNIT (type)
5241 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
807e902e
KZ
5242 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
5243 || elt_size == 0)
cfef45c8
RG
5244 return NULL_TREE;
5245
5246 /* Compute the array index we look for. */
807e902e
KZ
5247 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5248 elt_size);
27bcd47c 5249 access_index += low_bound;
b48e22b2 5250 if (index_type)
807e902e
KZ
5251 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
5252 TYPE_SIGN (index_type));
cfef45c8
RG
5253
5254 /* And offset within the access. */
27bcd47c 5255 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8
RG
5256
5257 /* See if the array field is large enough to span whole access. We do not
5258 care to fold accesses spanning multiple array indexes. */
27bcd47c 5259 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
cfef45c8
RG
5260 return NULL_TREE;
5261
807e902e 5262 index = low_bound - 1;
b48e22b2 5263 if (index_type)
807e902e
KZ
5264 index = wi::ext (index, TYPE_PRECISION (index_type),
5265 TYPE_SIGN (index_type));
b48e22b2 5266
cfef45c8
RG
5267 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
5268 {
5269 /* Array constructor might explicitely set index, or specify range
5270 or leave index NULL meaning that it is next index after previous
5271 one. */
5272 if (cfield)
5273 {
5274 if (TREE_CODE (cfield) == INTEGER_CST)
807e902e 5275 max_index = index = wi::to_offset (cfield);
cfef45c8
RG
5276 else
5277 {
5278 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
807e902e
KZ
5279 index = wi::to_offset (TREE_OPERAND (cfield, 0));
5280 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
cfef45c8
RG
5281 }
5282 }
5283 else
b48e22b2 5284 {
807e902e 5285 index += 1;
b48e22b2 5286 if (index_type)
807e902e
KZ
5287 index = wi::ext (index, TYPE_PRECISION (index_type),
5288 TYPE_SIGN (index_type));
b48e22b2
EB
5289 max_index = index;
5290 }
cfef45c8
RG
5291
5292 /* Do we have match? */
807e902e
KZ
5293 if (wi::cmpu (access_index, index) >= 0
5294 && wi::cmpu (access_index, max_index) <= 0)
c44c2088
JH
5295 return fold_ctor_reference (type, cval, inner_offset, size,
5296 from_decl);
cfef45c8
RG
5297 }
5298 /* When memory is not explicitely mentioned in constructor,
5299 it is 0 (or out of range). */
5300 return build_zero_cst (type);
5301}
5302
5303/* CTOR is CONSTRUCTOR of an aggregate or vector.
5304 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5305
5306static tree
5307fold_nonarray_ctor_reference (tree type, tree ctor,
5308 unsigned HOST_WIDE_INT offset,
c44c2088
JH
5309 unsigned HOST_WIDE_INT size,
5310 tree from_decl)
cfef45c8
RG
5311{
5312 unsigned HOST_WIDE_INT cnt;
5313 tree cfield, cval;
5314
5315 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5316 cval)
5317 {
5318 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5319 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5320 tree field_size = DECL_SIZE (cfield);
807e902e
KZ
5321 offset_int bitoffset;
5322 offset_int bitoffset_end, access_end;
cfef45c8
RG
5323
5324 /* Variable sized objects in static constructors makes no sense,
5325 but field_size can be NULL for flexible array members. */
5326 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5327 && TREE_CODE (byte_offset) == INTEGER_CST
5328 && (field_size != NULL_TREE
5329 ? TREE_CODE (field_size) == INTEGER_CST
5330 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5331
5332 /* Compute bit offset of the field. */
807e902e
KZ
5333 bitoffset = (wi::to_offset (field_offset)
5334 + wi::lshift (wi::to_offset (byte_offset),
5335 LOG2_BITS_PER_UNIT));
cfef45c8
RG
5336 /* Compute bit offset where the field ends. */
5337 if (field_size != NULL_TREE)
807e902e 5338 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 5339 else
807e902e 5340 bitoffset_end = 0;
cfef45c8 5341
807e902e 5342 access_end = offset_int (offset) + size;
b8b2b009
JJ
5343
5344 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5345 [BITOFFSET, BITOFFSET_END)? */
807e902e 5346 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 5347 && (field_size == NULL_TREE
807e902e 5348 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 5349 {
807e902e 5350 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8
RG
5351 /* We do have overlap. Now see if field is large enough to
5352 cover the access. Give up for accesses spanning multiple
5353 fields. */
807e902e 5354 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 5355 return NULL_TREE;
807e902e 5356 if (wi::lts_p (offset, bitoffset))
b8b2b009 5357 return NULL_TREE;
cfef45c8 5358 return fold_ctor_reference (type, cval,
27bcd47c 5359 inner_offset.to_uhwi (), size,
c44c2088 5360 from_decl);
cfef45c8
RG
5361 }
5362 }
5363 /* When memory is not explicitely mentioned in constructor, it is 0. */
5364 return build_zero_cst (type);
5365}
5366
5367/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5368 to the memory at bit OFFSET. */
5369
8403c2cf 5370tree
cfef45c8 5371fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
c44c2088 5372 unsigned HOST_WIDE_INT size, tree from_decl)
cfef45c8
RG
5373{
5374 tree ret;
5375
5376 /* We found the field with exact match. */
5377 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5378 && !offset)
9d60be38 5379 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8
RG
5380
5381 /* We are at the end of walk, see if we can view convert the
5382 result. */
5383 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5384 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3d8208ce
TP
5385 && !compare_tree_int (TYPE_SIZE (type), size)
5386 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
cfef45c8 5387 {
9d60be38 5388 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8
RG
5389 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5390 if (ret)
5391 STRIP_NOPS (ret);
5392 return ret;
5393 }
b2505143
RB
5394 /* For constants and byte-aligned/sized reads try to go through
5395 native_encode/interpret. */
5396 if (CONSTANT_CLASS_P (ctor)
5397 && BITS_PER_UNIT == 8
5398 && offset % BITS_PER_UNIT == 0
5399 && size % BITS_PER_UNIT == 0
5400 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5401 {
5402 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5403 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5404 offset / BITS_PER_UNIT) > 0)
5405 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
5406 }
cfef45c8
RG
5407 if (TREE_CODE (ctor) == CONSTRUCTOR)
5408 {
5409
eb8f1123
RG
5410 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5411 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
c44c2088
JH
5412 return fold_array_ctor_reference (type, ctor, offset, size,
5413 from_decl);
cfef45c8 5414 else
c44c2088
JH
5415 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5416 from_decl);
cfef45c8
RG
5417 }
5418
5419 return NULL_TREE;
5420}
5421
5422/* Return the tree representing the element referenced by T if T is an
5423 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5424 names using VALUEIZE. Return NULL_TREE otherwise. */
5425
5426tree
5427fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5428{
5429 tree ctor, idx, base;
5430 HOST_WIDE_INT offset, size, max_size;
5431 tree tem;
5432
f8a7df45
RG
5433 if (TREE_THIS_VOLATILE (t))
5434 return NULL_TREE;
5435
cfef45c8
RG
5436 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
5437 return get_symbol_constant_value (t);
5438
5439 tem = fold_read_from_constant_string (t);
5440 if (tem)
5441 return tem;
5442
5443 switch (TREE_CODE (t))
5444 {
5445 case ARRAY_REF:
5446 case ARRAY_RANGE_REF:
5447 /* Constant indexes are handled well by get_base_constructor.
5448 Only special case variable offsets.
5449 FIXME: This code can't handle nested references with variable indexes
5450 (they will be handled only by iteration of ccp). Perhaps we can bring
5451 get_ref_base_and_extent here and make it use a valueize callback. */
5452 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5453 && valueize
5454 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
b48e22b2 5455 && TREE_CODE (idx) == INTEGER_CST)
cfef45c8
RG
5456 {
5457 tree low_bound, unit_size;
5458
5459 /* If the resulting bit-offset is constant, track it. */
5460 if ((low_bound = array_ref_low_bound (t),
b48e22b2 5461 TREE_CODE (low_bound) == INTEGER_CST)
cfef45c8 5462 && (unit_size = array_ref_element_size (t),
807e902e 5463 tree_fits_uhwi_p (unit_size)))
cfef45c8 5464 {
807e902e
KZ
5465 offset_int woffset
5466 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5467 TYPE_PRECISION (TREE_TYPE (idx)));
5468
5469 if (wi::fits_shwi_p (woffset))
5470 {
5471 offset = woffset.to_shwi ();
5472 /* TODO: This code seems wrong, multiply then check
5473 to see if it fits. */
5474 offset *= tree_to_uhwi (unit_size);
5475 offset *= BITS_PER_UNIT;
5476
5477 base = TREE_OPERAND (t, 0);
5478 ctor = get_base_constructor (base, &offset, valueize);
5479 /* Empty constructor. Always fold to 0. */
5480 if (ctor == error_mark_node)
5481 return build_zero_cst (TREE_TYPE (t));
5482 /* Out of bound array access. Value is undefined,
5483 but don't fold. */
5484 if (offset < 0)
5485 return NULL_TREE;
5486 /* We can not determine ctor. */
5487 if (!ctor)
5488 return NULL_TREE;
5489 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5490 tree_to_uhwi (unit_size)
5491 * BITS_PER_UNIT,
5492 base);
5493 }
cfef45c8
RG
5494 }
5495 }
5496 /* Fallthru. */
5497
5498 case COMPONENT_REF:
5499 case BIT_FIELD_REF:
5500 case TARGET_MEM_REF:
5501 case MEM_REF:
5502 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
5503 ctor = get_base_constructor (base, &offset, valueize);
5504
5505 /* Empty constructor. Always fold to 0. */
5506 if (ctor == error_mark_node)
5507 return build_zero_cst (TREE_TYPE (t));
5508 /* We do not know precise address. */
5509 if (max_size == -1 || max_size != size)
5510 return NULL_TREE;
5511 /* We can not determine ctor. */
5512 if (!ctor)
5513 return NULL_TREE;
5514
5515 /* Out of bound array access. Value is undefined, but don't fold. */
5516 if (offset < 0)
5517 return NULL_TREE;
5518
c44c2088
JH
5519 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5520 base);
cfef45c8
RG
5521
5522 case REALPART_EXPR:
5523 case IMAGPART_EXPR:
5524 {
5525 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5526 if (c && TREE_CODE (c) == COMPLEX_CST)
5527 return fold_build1_loc (EXPR_LOCATION (t),
5528 TREE_CODE (t), TREE_TYPE (t), c);
5529 break;
5530 }
5531
5532 default:
5533 break;
5534 }
5535
5536 return NULL_TREE;
5537}
5538
5539tree
5540fold_const_aggregate_ref (tree t)
5541{
5542 return fold_const_aggregate_ref_1 (t, NULL);
5543}
06bc3ec7 5544
85942f45 5545/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
5546 at OFFSET.
5547 Set CAN_REFER if non-NULL to false if method
5548 is not referable or if the virtual table is ill-formed (such as rewriten
5549 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
81fa35bd
MJ
5550
5551tree
85942f45
JH
5552gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5553 tree v,
ec77d61f
JH
5554 unsigned HOST_WIDE_INT offset,
5555 bool *can_refer)
81fa35bd 5556{
85942f45
JH
5557 tree vtable = v, init, fn;
5558 unsigned HOST_WIDE_INT size;
8c311b50
JH
5559 unsigned HOST_WIDE_INT elt_size, access_index;
5560 tree domain_type;
81fa35bd 5561
ec77d61f
JH
5562 if (can_refer)
5563 *can_refer = true;
5564
9de2f554 5565 /* First of all double check we have virtual table. */
81fa35bd 5566 if (TREE_CODE (v) != VAR_DECL
2aa3da06 5567 || !DECL_VIRTUAL_P (v))
ec77d61f
JH
5568 {
5569 gcc_assert (in_lto_p);
5570 /* Pass down that we lost track of the target. */
5571 if (can_refer)
5572 *can_refer = false;
5573 return NULL_TREE;
5574 }
9de2f554 5575
2aa3da06
JH
5576 init = ctor_for_folding (v);
5577
9de2f554 5578 /* The virtual tables should always be born with constructors
2aa3da06
JH
5579 and we always should assume that they are avaialble for
5580 folding. At the moment we do not stream them in all cases,
5581 but it should never happen that ctor seem unreachable. */
5582 gcc_assert (init);
5583 if (init == error_mark_node)
5584 {
5585 gcc_assert (in_lto_p);
ec77d61f
JH
5586 /* Pass down that we lost track of the target. */
5587 if (can_refer)
5588 *can_refer = false;
2aa3da06
JH
5589 return NULL_TREE;
5590 }
81fa35bd 5591 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 5592 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 5593 offset *= BITS_PER_UNIT;
81fa35bd 5594 offset += token * size;
9de2f554 5595
8c311b50
JH
5596 /* Lookup the value in the constructor that is assumed to be array.
5597 This is equivalent to
5598 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5599 offset, size, NULL);
5600 but in a constant time. We expect that frontend produced a simple
5601 array without indexed initializers. */
5602
5603 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5604 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5605 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5606 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5607
5608 access_index = offset / BITS_PER_UNIT / elt_size;
5609 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5610
5611 /* This code makes an assumption that there are no
5612 indexed fileds produced by C++ FE, so we can directly index the array. */
5613 if (access_index < CONSTRUCTOR_NELTS (init))
5614 {
5615 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5616 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5617 STRIP_NOPS (fn);
5618 }
5619 else
5620 fn = NULL;
9de2f554
JH
5621
5622 /* For type inconsistent program we may end up looking up virtual method
5623 in virtual table that does not contain TOKEN entries. We may overrun
5624 the virtual table and pick up a constant or RTTI info pointer.
5625 In any case the call is undefined. */
5626 if (!fn
5627 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5628 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5629 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5630 else
5631 {
5632 fn = TREE_OPERAND (fn, 0);
5633
5634 /* When cgraph node is missing and function is not public, we cannot
5635 devirtualize. This can happen in WHOPR when the actual method
5636 ends up in other partition, because we found devirtualization
5637 possibility too late. */
5638 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
5639 {
5640 if (can_refer)
5641 {
5642 *can_refer = false;
5643 return fn;
5644 }
5645 return NULL_TREE;
5646 }
9de2f554 5647 }
81fa35bd 5648
7501ca28
RG
5649 /* Make sure we create a cgraph node for functions we'll reference.
5650 They can be non-existent if the reference comes from an entry
5651 of an external vtable for example. */
d52f5295 5652 cgraph_node::get_create (fn);
7501ca28 5653
81fa35bd
MJ
5654 return fn;
5655}
5656
85942f45
JH
5657/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5658 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5659 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
5660 OBJ_TYPE_REF_OBJECT(REF).
5661 Set CAN_REFER if non-NULL to false if method
5662 is not referable or if the virtual table is ill-formed (such as rewriten
5663 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
5664
5665tree
ec77d61f
JH
5666gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5667 bool *can_refer)
85942f45
JH
5668{
5669 unsigned HOST_WIDE_INT offset;
5670 tree v;
5671
5672 v = BINFO_VTABLE (known_binfo);
5673 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5674 if (!v)
5675 return NULL_TREE;
5676
5677 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
5678 {
5679 if (can_refer)
5680 *can_refer = false;
5681 return NULL_TREE;
5682 }
5683 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
5684}
5685
06bc3ec7
BS
5686/* Return true iff VAL is a gimple expression that is known to be
5687 non-negative. Restricted to floating-point inputs. */
5688
5689bool
5690gimple_val_nonnegative_real_p (tree val)
5691{
5692 gimple def_stmt;
5693
5694 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
5695
5696 /* Use existing logic for non-gimple trees. */
5697 if (tree_expr_nonnegative_p (val))
5698 return true;
5699
5700 if (TREE_CODE (val) != SSA_NAME)
5701 return false;
5702
5703 /* Currently we look only at the immediately defining statement
5704 to make this determination, since recursion on defining
5705 statements of operands can lead to quadratic behavior in the
5706 worst case. This is expected to catch almost all occurrences
5707 in practice. It would be possible to implement limited-depth
5708 recursion if important cases are lost. Alternatively, passes
5709 that need this information (such as the pow/powi lowering code
5710 in the cse_sincos pass) could be revised to provide it through
5711 dataflow propagation. */
5712
5713 def_stmt = SSA_NAME_DEF_STMT (val);
5714
5715 if (is_gimple_assign (def_stmt))
5716 {
5717 tree op0, op1;
5718
5719 /* See fold-const.c:tree_expr_nonnegative_p for additional
5720 cases that could be handled with recursion. */
5721
5722 switch (gimple_assign_rhs_code (def_stmt))
5723 {
5724 case ABS_EXPR:
5725 /* Always true for floating-point operands. */
5726 return true;
5727
5728 case MULT_EXPR:
5729 /* True if the two operands are identical (since we are
5730 restricted to floating-point inputs). */
5731 op0 = gimple_assign_rhs1 (def_stmt);
5732 op1 = gimple_assign_rhs2 (def_stmt);
5733
5734 if (op0 == op1
5735 || operand_equal_p (op0, op1, 0))
5736 return true;
5737
5738 default:
5739 return false;
5740 }
5741 }
5742 else if (is_gimple_call (def_stmt))
5743 {
5744 tree fndecl = gimple_call_fndecl (def_stmt);
5745 if (fndecl
5746 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5747 {
5748 tree arg1;
5749
5750 switch (DECL_FUNCTION_CODE (fndecl))
5751 {
5752 CASE_FLT_FN (BUILT_IN_ACOS):
5753 CASE_FLT_FN (BUILT_IN_ACOSH):
5754 CASE_FLT_FN (BUILT_IN_CABS):
5755 CASE_FLT_FN (BUILT_IN_COSH):
5756 CASE_FLT_FN (BUILT_IN_ERFC):
5757 CASE_FLT_FN (BUILT_IN_EXP):
5758 CASE_FLT_FN (BUILT_IN_EXP10):
5759 CASE_FLT_FN (BUILT_IN_EXP2):
5760 CASE_FLT_FN (BUILT_IN_FABS):
5761 CASE_FLT_FN (BUILT_IN_FDIM):
5762 CASE_FLT_FN (BUILT_IN_HYPOT):
5763 CASE_FLT_FN (BUILT_IN_POW10):
5764 return true;
5765
5766 CASE_FLT_FN (BUILT_IN_SQRT):
5767 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
5768 nonnegative inputs. */
5769 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
5770 return true;
5771
5772 break;
5773
5774 CASE_FLT_FN (BUILT_IN_POWI):
5775 /* True if the second argument is an even integer. */
5776 arg1 = gimple_call_arg (def_stmt, 1);
5777
5778 if (TREE_CODE (arg1) == INTEGER_CST
5779 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
5780 return true;
5781
5782 break;
5783
5784 CASE_FLT_FN (BUILT_IN_POW):
5785 /* True if the second argument is an even integer-valued
5786 real. */
5787 arg1 = gimple_call_arg (def_stmt, 1);
5788
5789 if (TREE_CODE (arg1) == REAL_CST)
5790 {
5791 REAL_VALUE_TYPE c;
5792 HOST_WIDE_INT n;
5793
5794 c = TREE_REAL_CST (arg1);
5795 n = real_to_integer (&c);
5796
5797 if ((n & 1) == 0)
5798 {
5799 REAL_VALUE_TYPE cint;
807e902e 5800 real_from_integer (&cint, VOIDmode, n, SIGNED);
06bc3ec7
BS
5801 if (real_identical (&c, &cint))
5802 return true;
5803 }
5804 }
5805
5806 break;
5807
5808 default:
5809 return false;
5810 }
5811 }
5812 }
5813
5814 return false;
5815}
b184c8f1
AM
5816
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.

   Each case below matches one syntactic shape of the (stripped)
   pointer expression and rebuilds the dereference as a direct
   reference into the pointed-to object.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  /* Look through no-op conversions to get at the real pointer.  */
  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  /* Case 1: dereferencing the address of an object.  */
  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
       {
         /* Index with the array's lower bound, which need not be zero
            (e.g. Fortran or Ada arrays).  */
         tree type_domain = TYPE_DOMAIN (optype);
         tree min_val = size_zero_node;
         if (type_domain && TYPE_MIN_VALUE (type_domain))
           min_val = TYPE_MIN_VALUE (type_domain);
	 if (TREE_CODE (min_val) == INTEGER_CST)
	   return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
       }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          /* Extract element 0 as a bit-field of the element's width.  */
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* Case 2: *(p + CST) -> ... */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
          /* Only fold when the byte offset lands inside the vector.  */
          if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          /* The imaginary part sits exactly one element size in.  */
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.
	 Only when the base is not the address of a non-decl (those need
	 the cases above to stay foldable).  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* Case 3: *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively fold the inner dereference first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
19e51b40
JJ
5936
5937/* Return true if CODE is an operation that when operating on signed
5938 integer types involves undefined behavior on overflow and the
5939 operation can be expressed with unsigned arithmetic. */
5940
5941bool
5942arith_code_with_undefined_signed_overflow (tree_code code)
5943{
5944 switch (code)
5945 {
5946 case PLUS_EXPR:
5947 case MINUS_EXPR:
5948 case MULT_EXPR:
5949 case NEGATE_EXPR:
5950 case POINTER_PLUS_EXPR:
5951 return true;
5952 default:
5953 return false;
5954 }
5955}
5956
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  /* The unsigned counterpart of the LHS type; arithmetic in it wraps
     and therefore has no undefined overflow.  */
  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each RHS operand (ops 1..n-1; op 0 is the LHS) to the
     unsigned type, collecting any statements the conversion needs.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  /* Give STMT a fresh unsigned SSA name as LHS so the operation is
     carried out in the unsigned type.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR is meaningless once the operands are unsigned
     integers; use plain addition instead.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Finally convert the unsigned result back to the original LHS.  */
  gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
d4f5cd5e 5996
3d2cf79f
RB
5997
5998/* Build the expression CODE OP0 of type TYPE with location LOC,
5999 simplifying it first if possible using VALUEIZE if not NULL.
6000 OP0 is expected to be valueized already. Returns the built
6001 expression value and appends statements possibly defining it
6002 to SEQ. */
6003
6004tree
6005gimple_build (gimple_seq *seq, location_t loc,
6006 enum tree_code code, tree type, tree op0,
6007 tree (*valueize)(tree))
6008{
6009 tree res = gimple_simplify (code, type, op0, seq, valueize);
6010 if (!res)
6011 {
6012 if (gimple_in_ssa_p (cfun))
b731b390 6013 res = make_ssa_name (type);
3d2cf79f 6014 else
b731b390 6015 res = create_tmp_reg (type);
3d2cf79f
RB
6016 gimple stmt;
6017 if (code == REALPART_EXPR
6018 || code == IMAGPART_EXPR
6019 || code == VIEW_CONVERT_EXPR)
0d0e4a03 6020 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 6021 else
0d0e4a03 6022 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
6023 gimple_set_location (stmt, loc);
6024 gimple_seq_add_stmt_without_update (seq, stmt);
6025 }
6026 return res;
6027}
6028
6029/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6030 simplifying it first if possible using VALUEIZE if not NULL.
6031 OP0 and OP1 are expected to be valueized already. Returns the built
6032 expression value and appends statements possibly defining it
6033 to SEQ. */
6034
6035tree
6036gimple_build (gimple_seq *seq, location_t loc,
6037 enum tree_code code, tree type, tree op0, tree op1,
6038 tree (*valueize)(tree))
6039{
6040 tree res = gimple_simplify (code, type, op0, op1, seq, valueize);
6041 if (!res)
6042 {
6043 if (gimple_in_ssa_p (cfun))
b731b390 6044 res = make_ssa_name (type);
3d2cf79f 6045 else
b731b390 6046 res = create_tmp_reg (type);
0d0e4a03 6047 gimple stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
6048 gimple_set_location (stmt, loc);
6049 gimple_seq_add_stmt_without_update (seq, stmt);
6050 }
6051 return res;
6052}
6053
6054/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6055 simplifying it first if possible using VALUEIZE if not NULL.
6056 OP0, OP1 and OP2 are expected to be valueized already. Returns the built
6057 expression value and appends statements possibly defining it
6058 to SEQ. */
6059
6060tree
6061gimple_build (gimple_seq *seq, location_t loc,
6062 enum tree_code code, tree type, tree op0, tree op1, tree op2,
6063 tree (*valueize)(tree))
6064{
6065 tree res = gimple_simplify (code, type, op0, op1, op2,
6066 seq, valueize);
6067 if (!res)
6068 {
6069 if (gimple_in_ssa_p (cfun))
b731b390 6070 res = make_ssa_name (type);
3d2cf79f 6071 else
b731b390 6072 res = create_tmp_reg (type);
3d2cf79f
RB
6073 gimple stmt;
6074 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
6075 stmt = gimple_build_assign (res, code,
6076 build3 (code, type, op0, op1, op2));
3d2cf79f 6077 else
0d0e4a03 6078 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
6079 gimple_set_location (stmt, loc);
6080 gimple_seq_add_stmt_without_update (seq, stmt);
6081 }
6082 return res;
6083}
6084
6085/* Build the call FN (ARG0) with a result of type TYPE
6086 (or no result if TYPE is void) with location LOC,
6087 simplifying it first if possible using VALUEIZE if not NULL.
6088 ARG0 is expected to be valueized already. Returns the built
6089 expression value (or NULL_TREE if TYPE is void) and appends
6090 statements possibly defining it to SEQ. */
6091
6092tree
6093gimple_build (gimple_seq *seq, location_t loc,
6094 enum built_in_function fn, tree type, tree arg0,
6095 tree (*valueize)(tree))
6096{
6097 tree res = gimple_simplify (fn, type, arg0, seq, valueize);
6098 if (!res)
6099 {
6100 tree decl = builtin_decl_implicit (fn);
6101 gimple stmt = gimple_build_call (decl, 1, arg0);
6102 if (!VOID_TYPE_P (type))
6103 {
6104 if (gimple_in_ssa_p (cfun))
b731b390 6105 res = make_ssa_name (type);
3d2cf79f 6106 else
b731b390 6107 res = create_tmp_reg (type);
3d2cf79f
RB
6108 gimple_call_set_lhs (stmt, res);
6109 }
6110 gimple_set_location (stmt, loc);
6111 gimple_seq_add_stmt_without_update (seq, stmt);
6112 }
6113 return res;
6114}
6115
6116/* Build the call FN (ARG0, ARG1) with a result of type TYPE
6117 (or no result if TYPE is void) with location LOC,
6118 simplifying it first if possible using VALUEIZE if not NULL.
6119 ARG0 is expected to be valueized already. Returns the built
6120 expression value (or NULL_TREE if TYPE is void) and appends
6121 statements possibly defining it to SEQ. */
6122
6123tree
6124gimple_build (gimple_seq *seq, location_t loc,
6125 enum built_in_function fn, tree type, tree arg0, tree arg1,
6126 tree (*valueize)(tree))
6127{
6128 tree res = gimple_simplify (fn, type, arg0, arg1, seq, valueize);
6129 if (!res)
6130 {
6131 tree decl = builtin_decl_implicit (fn);
6132 gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
6133 if (!VOID_TYPE_P (type))
6134 {
6135 if (gimple_in_ssa_p (cfun))
b731b390 6136 res = make_ssa_name (type);
3d2cf79f 6137 else
b731b390 6138 res = create_tmp_reg (type);
3d2cf79f
RB
6139 gimple_call_set_lhs (stmt, res);
6140 }
6141 gimple_set_location (stmt, loc);
6142 gimple_seq_add_stmt_without_update (seq, stmt);
6143 }
6144 return res;
6145}
6146
6147/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6148 (or no result if TYPE is void) with location LOC,
6149 simplifying it first if possible using VALUEIZE if not NULL.
6150 ARG0 is expected to be valueized already. Returns the built
6151 expression value (or NULL_TREE if TYPE is void) and appends
6152 statements possibly defining it to SEQ. */
6153
6154tree
6155gimple_build (gimple_seq *seq, location_t loc,
6156 enum built_in_function fn, tree type,
6157 tree arg0, tree arg1, tree arg2,
6158 tree (*valueize)(tree))
6159{
6160 tree res = gimple_simplify (fn, type, arg0, arg1, arg2, seq, valueize);
6161 if (!res)
6162 {
6163 tree decl = builtin_decl_implicit (fn);
6164 gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6165 if (!VOID_TYPE_P (type))
6166 {
6167 if (gimple_in_ssa_p (cfun))
b731b390 6168 res = make_ssa_name (type);
3d2cf79f 6169 else
b731b390 6170 res = create_tmp_reg (type);
3d2cf79f
RB
6171 gimple_call_set_lhs (stmt, res);
6172 }
6173 gimple_set_location (stmt, loc);
6174 gimple_seq_add_stmt_without_update (seq, stmt);
6175 }
6176 return res;
6177}
6178
6179/* Build the conversion (TYPE) OP with a result of type TYPE
6180 with location LOC if such conversion is neccesary in GIMPLE,
6181 simplifying it first.
6182 Returns the built expression value and appends
6183 statements possibly defining it to SEQ. */
d4f5cd5e
RB
6184
6185tree
6186gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6187{
6188 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6189 return op;
3d2cf79f 6190 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 6191}